Merge pull request #51 from Loudr/supress-reabort-log-50
Suppresses log: bad state for purpose "abort": "aborted"
diff --git a/python/build.sh b/python/build.sh
index 29e802b..bc97f36 100755
--- a/python/build.sh
+++ b/python/build.sh
@@ -42,6 +42,7 @@
echo "FAILED"
((exit_status++))
fi
+ sleep 2
done
echo "----------------------------------------------------------------------"
@@ -54,6 +55,16 @@
exit $exit_status
}
+build_demo () {
+ fetch_dependencies
+ [ ! -d "$demo_dir/demo/pipeline" ] && ln -s "$demo_dir/src/" "$demo_dir/demo/pipeline"
+}
+
+run_demo () {
+ build_demo
+ dev_appserver.py "$dir/demo"
+}
+
fetch_dependencies() {
if [ ! `which pip` ]
then
@@ -64,6 +75,7 @@
for dep in `cat $dir/src/todelete.txt`
do
rm -r $dir/src/$dep $dir/src/$dep*-info 2>/dev/null
+ rm -r $dir/demo/$dep $dir/demo/$dep*-info 2>/dev/null
done
pip install --exists-action=s -r $dir/src/requirements.txt -t $dir/src/ --upgrade || exit 1
@@ -77,7 +89,13 @@
deps)
fetch_dependencies
;;
+ build_demo)
+ build_demo
+ ;;
+ run_demo)
+ run_demo
+ ;;
*)
- echo $"Usage: $0 {test|deps}"
+ echo $"Usage: $0 {test|deps|build_demo|run_demo}"
exit 1
esac
diff --git a/python/demo/app.yaml b/python/demo/app.yaml
index 510bfc4..90f9b7a 100644
--- a/python/demo/app.yaml
+++ b/python/demo/app.yaml
@@ -1,6 +1,6 @@
application: pipeline-test
version: 1
-runtime: python
+runtime: python27
api_version: 1
handlers:
diff --git a/python/demo/pipeline b/python/demo/pipeline
new file mode 120000
index 0000000..6d34329
--- /dev/null
+++ b/python/demo/pipeline
@@ -0,0 +1 @@
+/usr/local/google/home/tkaitchuck/temp/appengine-pipelines/python/src/
\ No newline at end of file
diff --git a/python/demo/pipeline/__init__.py b/python/demo/pipeline/__init__.py
deleted file mode 100755
index ca9f8bf..0000000
--- a/python/demo/pipeline/__init__.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# pylint: disable=g-import-not-at-top
-# pylint: disable=g-bad-name
-
-
-def _fix_path():
- """Finds the google_appengine directory and fixes Python imports to use it."""
- import os
- import sys
- all_paths = os.environ.get('PYTHONPATH').split(os.pathsep)
- for path_dir in all_paths:
- dev_appserver_path = os.path.join(path_dir, 'dev_appserver.py')
- if os.path.exists(dev_appserver_path):
- logging.debug('Found appengine SDK on path!')
- google_appengine = os.path.dirname(os.path.realpath(dev_appserver_path))
- sys.path.append(google_appengine)
- # Use the next import will fix up sys.path even further to bring in
- # any dependent lib directories that the SDK needs.
- dev_appserver = __import__('dev_appserver')
- sys.path.extend(dev_appserver.EXTRA_PATHS)
- return
-
-
-try:
- from pipeline import *
-except ImportError, e:
- import logging
- logging.warning(
- 'Could not load Pipeline API. Will fix path for testing. %s: %s',
- e.__class__.__name__, str(e))
- _fix_path()
- del logging
- from pipeline import *
diff --git a/python/demo/pipeline/common.py b/python/demo/pipeline/common.py
deleted file mode 100755
index f7dc7e1..0000000
--- a/python/demo/pipeline/common.py
+++ /dev/null
@@ -1,436 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Common Pipelines for easy reuse."""
-
-import cgi
-import logging
-import random
-
-from google.appengine.api import mail
-from google.appengine.api import taskqueue
-
-import pipeline
-
-
-class Return(pipeline.Pipeline):
- """Causes calling generator to have the supplied default output value.
-
- Only works when yielded last!
- """
-
- def run(self, return_value=None):
- return return_value
-
-
-class Ignore(pipeline.Pipeline):
- """Mark the supplied parameters as unused outputs of sibling pipelines."""
-
- def run(self, *args):
- pass
-
-
-class Dict(pipeline.Pipeline):
- """Returns a dictionary with the supplied keyword arguments."""
-
- def run(self, **kwargs):
- return dict(**kwargs)
-
-
-class List(pipeline.Pipeline):
- """Returns a list with the supplied positional arguments."""
-
- def run(self, *args):
- return list(args)
-
-
-class AbortIfTrue(pipeline.Pipeline):
- """Aborts the entire pipeline if the supplied argument is True."""
-
- def run(self, value, message=''):
- if value:
- raise pipeline.Abort(message)
-
-
-class All(pipeline.Pipeline):
- """Returns True if all of the values are True.
-
- Returns False if there are no values present.
- """
-
- def run(self, *args):
- if len(args) == 0:
- return False
- for value in args:
- if not value:
- return False
- return True
-
-
-class Any(pipeline.Pipeline):
- """Returns True if any of the values are True."""
-
- def run(self, *args):
- for value in args:
- if value:
- return True
- return False
-
-
-class Complement(pipeline.Pipeline):
- """Returns the boolean complement of the values."""
-
- def run(self, *args):
- if len(args) == 1:
- return not args[0]
- else:
- return [not value for value in args]
-
-
-class Max(pipeline.Pipeline):
- """Returns the max value."""
-
- def __init__(self, *args):
- if len(args) == 0:
- raise TypeError('max expected at least 1 argument, got 0')
- pipeline.Pipeline.__init__(self, *args)
-
- def run(self, *args):
- return max(args)
-
-
-class Min(pipeline.Pipeline):
- """Returns the min value."""
-
- def __init__(self, *args):
- if len(args) == 0:
- raise TypeError('min expected at least 1 argument, got 0')
- pipeline.Pipeline.__init__(self, *args)
-
- def run(self, *args):
- return min(args)
-
-
-class Sum(pipeline.Pipeline):
- """Returns the sum of all values."""
-
- def __init__(self, *args):
- if len(args) == 0:
- raise TypeError('sum expected at least 1 argument, got 0')
- pipeline.Pipeline.__init__(self, *args)
-
- def run(self, *args):
- return sum(args)
-
-
-class Multiply(pipeline.Pipeline):
- """Returns all values multiplied together."""
-
- def __init__(self, *args):
- if len(args) == 0:
- raise TypeError('multiply expected at least 1 argument, got 0')
- pipeline.Pipeline.__init__(self, *args)
-
- def run(self, *args):
- total = 1
- for value in args:
- total *= value
- return total
-
-
-class Negate(pipeline.Pipeline):
- """Returns each value supplied multiplied by -1."""
-
- def __init__(self, *args):
- if len(args) == 0:
- raise TypeError('negate expected at least 1 argument, got 0')
- pipeline.Pipeline.__init__(self, *args)
-
- def run(self, *args):
- if len(args) == 1:
- return -1 * args[0]
- else:
- return [-1 * x for x in args]
-
-
-class Extend(pipeline.Pipeline):
- """Combine together lists and tuples into a single list.
-
- Args:
- *args: One or more lists or tuples.
-
- Returns:
- A single list of all supplied lists merged together in order. Length of
- the output list is the sum of the lengths of all input lists.
- """
-
- def run(self, *args):
- combined = []
- for value in args:
- combined.extend(value)
- return combined
-
-
-class Append(pipeline.Pipeline):
- """Combine together values into a list.
-
- Args:
- *args: One or more values.
-
- Returns:
- A single list of all values appended to the same list. Length of the
- output list matches the length of the input list.
- """
-
- def run(self, *args):
- combined = []
- for value in args:
- combined.append(value)
- return combined
-
-
-class Concat(pipeline.Pipeline):
- """Concatenates strings together using a join character.
-
- Args:
- *args: One or more strings.
- separator: Keyword argument only; the string to use to join the args.
-
- Returns:
- The joined string.
- """
-
- def run(self, *args, **kwargs):
- separator = kwargs.get('separator', '')
- return separator.join(args)
-
-
-class Union(pipeline.Pipeline):
- """Like Extend, but the resulting list has all unique elements."""
-
- def run(self, *args):
- combined = set()
- for value in args:
- combined.update(value)
- return list(combined)
-
-
-class Intersection(pipeline.Pipeline):
- """Returns only those items belonging to all of the supplied lists.
-
- Each argument must be a list. No individual items are permitted.
- """
-
- def run(self, *args):
- if not args:
- return []
- result = set(args[0])
- for value in args[1:]:
- result.intersection_update(set(value))
- return list(result)
-
-
-class Uniquify(pipeline.Pipeline):
- """Returns a list of unique items from the list of items supplied."""
-
- def run(self, *args):
- return list(set(args))
-
-
-class Format(pipeline.Pipeline):
- """Formats a string with formatting arguments."""
-
- @classmethod
- def dict(cls, message, **format_dict):
- """Formats a dictionary.
-
- Args:
- message: The format string.
- **format_dict: Keyword arguments of format parameters to use for
- formatting the string.
-
- Returns:
- The formatted string.
- """
- return cls('dict', message, format_dict)
-
- @classmethod
- def tuple(cls, message, *params):
- """Formats a tuple.
-
- Args:
- message: The format string.
- *params: The formatting positional parameters.
-
- Returns:
- The formatted string.
- """
- return cls('tuple', message, *params)
-
- def run(self, format_type, message, *params):
- if format_type == 'dict':
- return message % params[0]
- elif format_type == 'tuple':
- return message % params
- else:
- raise pipeline.Abort('Invalid format type: %s' % format_type)
-
-
-class Log(pipeline.Pipeline):
- """Logs a message, just like the Python logging module."""
-
- # TODO: Hack the call stack of the logging message to use the file and line
- # context from when it was first scheduled, not when it actually ran.
-
- _log_method = logging.log
-
- @classmethod
- def log(cls, *args, **kwargs):
- return Log(*args, **kwargs)
-
- @classmethod
- def debug(cls, *args, **kwargs):
- return Log(logging.DEBUG, *args, **kwargs)
-
- @classmethod
- def info(cls, *args, **kwargs):
- return Log(logging.INFO, *args, **kwargs)
-
- @classmethod
- def warning(cls, *args, **kwargs):
- return Log(logging.WARNING, *args, **kwargs)
-
- @classmethod
- def error(cls, *args, **kwargs):
- return Log(logging.ERROR, *args, **kwargs)
-
- @classmethod
- def critical(cls, *args, **kwargs):
- return Log(logging.CRITICAL, *args, **kwargs)
-
- def run(self, level, message, *args):
- Log._log_method.im_func(level, message, *args)
-
-
-class Delay(pipeline.Pipeline):
- """Waits N seconds before completion.
-
- Args:
- seconds: Keyword argument only. The number of seconds to wait. Will be
- rounded to the nearest whole second.
-
- Returns:
- How long this delay waited.
- """
-
- async = True
-
- def __init__(self, *args, **kwargs):
- if len(args) != 0 or len(kwargs) != 1 or kwargs.keys()[0] != 'seconds':
- raise TypeError('Delay takes one keyword parameter, "seconds".')
- pipeline.Pipeline.__init__(self, *args, **kwargs)
-
- def run(self, seconds=None):
- task = self.get_callback_task(
- countdown=seconds,
- name='ae-pipeline-delay-' + self.pipeline_id)
- try:
- task.add(self.queue_name)
- except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError):
- pass
-
- def run_test(self, seconds=None):
- logging.debug('Delay pipeline pretending to sleep %0.2f seconds', seconds)
- self.complete(seconds)
-
- def callback(self):
- self.complete(self.kwargs['seconds'])
-
-
-class EmailToContinue(pipeline.Pipeline):
- """Emails someone asking if the pipeline should continue.
-
- When the user clicks "Approve", the pipeline will return True. When the
- user clicks "Disapprove", the pipeline will return False.
-
- Supply normal mail.EmailMessage parameters, plus two additional parameters:
-
- approve_html: HTML to show to the user after clicking approve.
- disapprove_html: HTML to show to the user after clicking disapprove.
-
- Additionally, the 'body' and 'html' keyword arguments are treated as Python
- dictionary templates with the keywords 'approval_url' and 'disapprove_url',
- which let you place those links in your email however you want (as long
- as clicking the links results in a GET request). The approve/disapprove URLs
- are relative paths (e.g., '/relative/foo/bar'), so you must connect them to
- whatever hostname you actually want users to access the callback on with an
- absolute URL.
-
- A random token is used to secure the asynchronous action.
- """
-
- async = True
- public_callbacks = True
-
- _email_message = mail.EmailMessage
-
- def __init__(self, **kwargs):
- if 'random_token' not in kwargs:
- kwargs['random_token'] = '%x' % random.randint(0, 2**64)
- if 'approve_html' not in kwargs:
- kwargs['approve_html'] = '<h1>Approved!</h1>'
- if 'disapprove_html' not in kwargs:
- kwargs['disapprove_html'] = '<h1>Not Approved!</h1>'
- pipeline.Pipeline.__init__(self, **kwargs)
-
- def run(self, **kwargs):
- random_token = kwargs.pop('random_token')
- kwargs.pop('approve_html', '')
- kwargs.pop('disapprove_html', '')
-
- approve_url = self.get_callback_url(
- random_token=random_token, choice='approve')
- disapprove_url = self.get_callback_url(
- random_token=random_token, choice='disapprove')
-
- mail_args = kwargs.copy()
- mail_args['body'] = mail_args['body'] % {
- 'approve_url': approve_url,
- 'disapprove_url': disapprove_url,
- }
- if 'html' in mail_args:
- mail_args['html'] = mail_args['html'] % {
- 'approve_url': cgi.escape(approve_url),
- 'disapprove_url': cgi.escape(disapprove_url),
- }
- EmailToContinue._email_message.im_func(**mail_args).send()
-
- def run_test(self, **kwargs):
- self.run(**kwargs)
- self.complete(True)
-
- def callback(self, random_token=None, choice=None):
- if random_token != self.kwargs['random_token']:
- return (403, 'text/html', '<h1>Invalid security token.</h1>')
-
- if choice == 'approve':
- self.complete(True)
- return (200, 'text/html', self.kwargs['approve_html'])
- elif choice == 'disapprove':
- self.complete(False)
- return (200, 'text/html', self.kwargs['disapprove_html'])
- else:
- return (400, 'text/html', '<h1>Invalid "choice" value.</h1>')
diff --git a/python/demo/pipeline/handlers.py b/python/demo/pipeline/handlers.py
deleted file mode 100755
index dfa248c..0000000
--- a/python/demo/pipeline/handlers.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Web request dispatcher for the Google App Engine Pipeline API.
-
-In a separate file from the core pipeline module to break circular dependencies.
-"""
-
-from google.appengine.ext import webapp
-from google.appengine.ext.webapp import util as webapp_util
-
-import pipeline
-
-
-_APP = webapp.WSGIApplication(pipeline.create_handlers_map(), debug=True)
-
-
-def _main():
- webapp_util.run_wsgi_app(_APP)
-
-
-if __name__ == '__main__':
- _main()
diff --git a/python/demo/pipeline/models.py b/python/demo/pipeline/models.py
deleted file mode 100755
index f7c64f7..0000000
--- a/python/demo/pipeline/models.py
+++ /dev/null
@@ -1,294 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Datastore models used by the Google App Engine Pipeline API."""
-
-from google.appengine.ext import db
-from google.appengine.ext import blobstore
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-# Relative imports
-import util
-
-
-class _PipelineRecord(db.Model):
- """Represents a Pipeline.
-
- Key name is a randomly assigned UUID. No parent entity.
-
- Properties:
- class_path: Path of the Python class to use for this pipeline.
- root_pipeline: The root of the whole workflow; set to itself this pipeline
- is its own root.
- fanned_out: List of child _PipelineRecords that were started when this
- generator pipeline moved from WAITING to RUN.
- start_time: For pipelines with no start _BarrierRecord, when this pipeline
- was enqueued to run immediately.
- finalized_time: When this pipeline moved from WAITING or RUN to DONE.
- params: Serialized parameter dictionary.
- status: The current status of the pipeline.
- current_attempt: The current attempt (starting at 0) to run.
- max_attempts: Maximum number of attempts (starting at 0) to run.
- next_retry_time: ETA of the next retry attempt.
- retry_message: Why the last attempt failed; None or empty if no message.
-
- Root pipeline properties:
- is_root_pipeline: This is a root pipeline.
- abort_message: Why the whole pipeline was aborted; only saved on
- root pipelines.
- abort_requested: If an abort signal has been requested for this root
- pipeline; only saved on root pipelines
- """
-
- WAITING = 'waiting'
- RUN = 'run'
- DONE = 'done'
- ABORTED = 'aborted'
-
- class_path = db.StringProperty()
- root_pipeline = db.SelfReferenceProperty(
- collection_name='child_pipelines_set')
- fanned_out = db.ListProperty(db.Key, indexed=False)
- start_time = db.DateTimeProperty(indexed=True)
- finalized_time = db.DateTimeProperty(indexed=False)
-
- # One of these two will be set, depending on the size of the params.
- params_text = db.TextProperty(name='params')
- params_blob = blobstore.BlobReferenceProperty(
- name='params_blob', indexed=False)
-
- status = db.StringProperty(choices=(WAITING, RUN, DONE, ABORTED),
- default=WAITING)
-
- # Retry behavior
- current_attempt = db.IntegerProperty(default=0, indexed=False)
- max_attempts = db.IntegerProperty(default=1, indexed=False)
- next_retry_time = db.DateTimeProperty(indexed=False)
- retry_message = db.TextProperty()
-
- # Root pipeline properties
- is_root_pipeline = db.BooleanProperty()
- abort_message = db.TextProperty()
- abort_requested = db.BooleanProperty(indexed=False)
-
- @classmethod
- def kind(cls):
- return '_AE_Pipeline_Record'
-
- @property
- def params(self):
- """Returns the dictionary of parameters for this Pipeline."""
- if hasattr(self, '_params_decoded'):
- return self._params_decoded
-
- if self.params_blob is not None:
- value_encoded = self.params_blob.open().read()
- else:
- value_encoded = self.params_text
-
- value = json.loads(value_encoded, cls=util.JsonDecoder)
- if isinstance(value, dict):
- kwargs = value.get('kwargs')
- if kwargs:
- adjusted_kwargs = {}
- for arg_key, arg_value in kwargs.iteritems():
- # Python only allows non-unicode strings as keyword arguments.
- adjusted_kwargs[str(arg_key)] = arg_value
- value['kwargs'] = adjusted_kwargs
-
- self._params_decoded = value
- return self._params_decoded
-
-
-class _SlotRecord(db.Model):
- """Represents an output slot.
-
- Key name is a randomly assigned UUID. No parent for slots of child pipelines.
- For the outputs of root pipelines, the parent entity is the root
- _PipelineRecord (see Pipeline.start()).
-
- Properties:
- root_pipeline: The root of the workflow.
- filler: The pipeline that filled this slot.
- value: Serialized value for this slot.
- status: The current status of the slot.
- fill_time: When the slot was filled by the filler.
- """
-
- FILLED = 'filled'
- WAITING = 'waiting'
-
- root_pipeline = db.ReferenceProperty(_PipelineRecord)
- filler = db.ReferenceProperty(_PipelineRecord,
- collection_name='filled_slots_set')
-
- # One of these two will be set, depending on the size of the value.
- value_text = db.TextProperty(name='value')
- value_blob = blobstore.BlobReferenceProperty(
- name='value_blob', indexed=False)
-
- status = db.StringProperty(choices=(FILLED, WAITING), default=WAITING,
- indexed=False)
- fill_time = db.DateTimeProperty(indexed=False)
-
- @classmethod
- def kind(cls):
- return '_AE_Pipeline_Slot'
-
- @property
- def value(self):
- """Returns the value of this Slot."""
- if hasattr(self, '_value_decoded'):
- return self._value_decoded
-
- if self.value_blob is not None:
- encoded_value = self.value_blob.open().read()
- else:
- encoded_value = self.value_text
-
- self._value_decoded = json.loads(encoded_value, cls=util.JsonDecoder)
- return self._value_decoded
-
-
-class _BarrierRecord(db.Model):
- """Represents a barrier.
-
- Key name is the purpose of the barrier (START or FINALIZE). Parent entity
- is the _PipelineRecord the barrier should trigger when all of its
- blocking_slots are filled.
-
- Properties:
- root_pipeline: The root of the workflow.
- target: The pipeline to run when the barrier fires.
- blocking_slots: The slots that must be filled before this barrier fires.
- trigger_time: When this barrier fired.
- status: The current status of the barrier.
- """
-
- # Barrier statuses
- FIRED = 'fired'
- WAITING = 'waiting'
-
- # Barrier trigger reasons (used as key names)
- START = 'start'
- FINALIZE = 'finalize'
- ABORT = 'abort'
-
- root_pipeline = db.ReferenceProperty(_PipelineRecord)
- target = db.ReferenceProperty(_PipelineRecord,
- collection_name='called_barrier_set')
- blocking_slots = db.ListProperty(db.Key)
- trigger_time = db.DateTimeProperty(indexed=False)
- status = db.StringProperty(choices=(FIRED, WAITING), default=WAITING,
- indexed=False)
-
- @classmethod
- def kind(cls):
- return '_AE_Pipeline_Barrier'
-
-
-class _BarrierIndex(db.Model):
- """Indicates a _BarrierRecord that is dependent on a slot.
-
- Previously, when a _SlotRecord was filled, notify_barriers() would query for
- all _BarrierRecords where the 'blocking_slots' property equals the
- _SlotRecord's key. The problem with that approach is the 'blocking_slots'
- index is eventually consistent, meaning _BarrierRecords that were just written
- will not match the query. When pipelines are created and barriers are notified
- in rapid succession, the inconsistent queries can cause certain barriers never
- to fire. The outcome is a pipeline is WAITING and never RUN, even though all
- of its dependent slots have been filled.
-
- This entity is used to make it so barrier fan-out is fully consistent
- with the High Replication Datastore. It's used by notify_barriers() to
- do fully consistent ancestor queries every time a slot is filled. This
- ensures that even all _BarrierRecords dependent on a _SlotRecord will
- be found regardless of eventual consistency.
-
- The key path for _BarrierIndexes is this for root entities:
-
- _PipelineRecord<owns_slot_id>/_SlotRecord<slot_id>/
- _PipelineRecord<dependent_pipeline_id>/_BarrierIndex<purpose>
-
- And this for child pipelines:
-
- _SlotRecord<slot_id>/_PipelineRecord<dependent_pipeline_id>/
- _BarrierIndex<purpose>
-
- That path is translated to the _BarrierRecord it should fire:
-
- _PipelineRecord<dependent_pipeline_id>/_BarrierRecord<purpose>
-
- All queries for _BarrierIndexes are key-only and thus the model requires
- no properties or helper methods.
- """
-
- # Enable this entity to be cleaned up.
- root_pipeline = db.ReferenceProperty(_PipelineRecord)
-
- @classmethod
- def kind(cls):
- return '_AE_Barrier_Index'
-
- @classmethod
- def to_barrier_key(cls, barrier_index_key):
- """Converts a _BarrierIndex key to a _BarrierRecord key.
-
- Args:
- barrier_index_key: db.Key for a _BarrierIndex entity.
-
- Returns:
- db.Key for the corresponding _BarrierRecord entity.
- """
- barrier_index_path = barrier_index_key.to_path()
-
- # Pick out the items from the _BarrierIndex key path that we need to
- # construct the _BarrierRecord key path.
- (pipeline_kind, dependent_pipeline_id,
- unused_kind, purpose) = barrier_index_path[-4:]
-
- barrier_record_path = (
- pipeline_kind, dependent_pipeline_id,
- _BarrierRecord.kind(), purpose)
-
- return db.Key.from_path(*barrier_record_path)
-
-
-class _StatusRecord(db.Model):
- """Represents the current status of a pipeline.
-
- Properties:
- message: The textual message to show.
- console_url: URL to iframe as the primary console for this pipeline.
- link_names: Human display names for status links.
- link_urls: URLs corresponding to human names for status links.
- status_time: When the status was written.
- """
-
- root_pipeline = db.ReferenceProperty(_PipelineRecord)
- message = db.TextProperty()
- console_url = db.TextProperty()
- link_names = db.ListProperty(db.Text, indexed=False)
- link_urls = db.ListProperty(db.Text, indexed=False)
- status_time = db.DateTimeProperty(indexed=False)
-
- @classmethod
- def kind(cls):
- return '_AE_Pipeline_Status'
diff --git a/python/demo/pipeline/pipeline.py b/python/demo/pipeline/pipeline.py
deleted file mode 100755
index 0940617..0000000
--- a/python/demo/pipeline/pipeline.py
+++ /dev/null
@@ -1,3291 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Google App Engine Pipeline API for complex, asynchronous workflows."""
-
-__all__ = [
- # Public API.
- 'Error', 'PipelineSetupError', 'PipelineExistsError',
- 'PipelineRuntimeError', 'SlotNotFilledError', 'SlotNotDeclaredError',
- 'UnexpectedPipelineError', 'PipelineStatusError', 'Slot', 'Pipeline',
- 'PipelineFuture', 'After', 'InOrder', 'Retry', 'Abort', 'get_status_tree',
- 'get_pipeline_names', 'get_root_list', 'create_handlers_map',
- 'set_enforce_auth',
-]
-
-import datetime
-import hashlib
-import itertools
-import logging
-import os
-import pprint
-import re
-import sys
-import threading
-import time
-import urllib
-import uuid
-
-from google.appengine.api import mail
-from google.appengine.api import files
-from google.appengine.api import users
-from google.appengine.api import taskqueue
-from google.appengine.ext import db
-from google.appengine.ext import webapp
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-# Relative imports
-import models
-import status_ui
-import util as mr_util
-
-# pylint: disable=g-bad-name
-# pylint: disable=protected-access
-
-# For convenience
-_BarrierIndex = models._BarrierIndex
-_BarrierRecord = models._BarrierRecord
-_PipelineRecord = models._PipelineRecord
-_SlotRecord = models._SlotRecord
-_StatusRecord = models._StatusRecord
-
-
-# Overall TODOs:
-# - Add a human readable name for start()
-
-# Potential TODOs:
-# - Add support for ANY N barriers.
-# - Allow Pipelines to declare they are "short" and optimize the evaluate()
-# function to run as many of them in quick succession.
-# - Add support in all Pipelines for hold/release where up-stream
-# barriers will fire but do nothing because the Pipeline is not ready.
-
-################################################################################
-
-
-class Error(Exception):
- """Base class for exceptions in this module."""
-
-
-class PipelineSetupError(Error):
- """Base class for exceptions that happen before Pipeline execution."""
-
-
-class PipelineExistsError(PipelineSetupError):
- """A new Pipeline with an assigned idempotence_key cannot be overwritten."""
-
-
-class PipelineRuntimeError(Error):
- """Base class for exceptions that happen during Pipeline execution."""
-
-
-class SlotNotFilledError(PipelineRuntimeError):
- """A slot that should have been filled already was not yet filled."""
-
-
-class SlotNotDeclaredError(PipelineRuntimeError):
- """A slot that was filled or passed along was not previously declared."""
-
-
-class UnexpectedPipelineError(PipelineRuntimeError):
- """An assertion failed, potentially leaving the pipeline unable to proceed."""
-
-
-class PipelineUserError(Error):
- """Exceptions raised indirectly by developers to cause certain behaviors."""
-
-
-class Retry(PipelineUserError):
- """The currently running pipeline should be retried at a later time."""
-
-
-class Abort(PipelineUserError):
- """The currently running pipeline should be aborted up to the root."""
-
-
-class PipelineStatusError(Error):
- """Exceptions raised when trying to collect pipeline status."""
-
-
-class _CallbackTaskError(Error):
- """A callback task was unable to execute properly for some reason."""
-
-
-################################################################################
-
-_MAX_BARRIERS_TO_NOTIFY = 10
-
-_MAX_ABORTS_TO_BEGIN = 10
-
-_TEST_MODE = False
-
-_TEST_ROOT_PIPELINE_KEY = None
-
-_DEFAULT_BACKOFF_SECONDS = 15
-
-_DEFAULT_BACKOFF_FACTOR = 2
-
-_DEFAULT_MAX_ATTEMPTS = 3
-
-_RETRY_WIGGLE_TIMEDELTA = datetime.timedelta(seconds=20)
-
-_DEBUG = False
-
-_MAX_JSON_SIZE = 900000
-
-_ENFORCE_AUTH = True
-
-_MAX_CALLBACK_TASK_RETRIES = 5
-
-################################################################################
-
-
-class Slot(object):
- """An output that is filled by a Pipeline as it executes."""
-
- def __init__(self, name=None, slot_key=None, strict=False):
- """Initializer.
-
- Args:
- name: The name of this slot.
- slot_key: The db.Key for this slot's _SlotRecord if it's already been
- allocated by an up-stream pipeline.
- strict: If this Slot was created as an output of a strictly defined
- pipeline.
- """
- if name is None:
- raise UnexpectedPipelineError('Slot with key "%s" missing a name.' %
- slot_key)
- if slot_key is None:
- slot_key = db.Key.from_path(_SlotRecord.kind(), uuid.uuid4().hex)
- self._exists = _TEST_MODE
- else:
- self._exists = True
- self._touched = False
- self._strict = strict
- self.name = name
- self.key = slot_key
- self.filled = False
- self._filler_pipeline_key = None
- self._fill_datetime = None
- self._value = None
-
- @property
- def value(self):
- """Returns the current value of this slot.
-
- Returns:
- The value of the slot (a serializable Python type).
-
- Raises:
- SlotNotFilledError if the value hasn't been filled yet.
- """
- if not self.filled:
- raise SlotNotFilledError('Slot with name "%s", key "%s" not yet filled.'
- % (self.name, self.key))
- return self._value
-
- @property
- def filler(self):
- """Returns the pipeline ID that filled this slot's value.
-
- Returns:
- A string that is the pipeline ID.
-
- Raises:
- SlotNotFilledError if the value hasn't been filled yet.
- """
- if not self.filled:
- raise SlotNotFilledError('Slot with name "%s", key "%s" not yet filled.'
- % (self.name, self.key))
- return self._filler_pipeline_key.name()
-
- @property
- def fill_datetime(self):
- """Returns when the slot was filled.
-
- Returns:
- A datetime.datetime.
-
- Raises:
- SlotNotFilledError if the value hasn't been filled yet.
- """
- if not self.filled:
- raise SlotNotFilledError('Slot with name "%s", key "%s" not yet filled.'
- % (self.name, self.key))
- return self._fill_datetime
-
- def _set_value(self, slot_record):
- """Sets the value of this slot based on its corresponding _SlotRecord.
-
- Does nothing if the slot has not yet been filled.
-
- Args:
- slot_record: The _SlotRecord containing this Slot's value.
- """
- if slot_record.status == _SlotRecord.FILLED:
- self.filled = True
- self._filler_pipeline_key = _SlotRecord.filler.get_value_for_datastore(
- slot_record)
- self._fill_datetime = slot_record.fill_time
- self._value = slot_record.value
-
- def _set_value_test(self, filler_pipeline_key, value):
- """Sets the value of this slot for use in testing.
-
- Args:
- filler_pipeline_key: The db.Key of the _PipelineRecord that filled
- this slot.
- value: The serializable value set for this slot.
- """
- self.filled = True
- self._filler_pipeline_key = filler_pipeline_key
- self._fill_datetime = datetime.datetime.utcnow()
- # Convert to JSON and back again, to simulate the behavior of production.
- self._value = json.loads(json.dumps(
- value, cls=mr_util.JsonEncoder), cls=mr_util.JsonDecoder)
-
- def __repr__(self):
- """Returns a string representation of this slot."""
- if self.filled:
- return repr(self._value)
- else:
- return 'Slot(name="%s", slot_key="%s")' % (self.name, self.key)
-
-
-class PipelineFuture(object):
- """A future for accessing the outputs of a Pipeline."""
-
- # NOTE: Do not, ever, add a names() method to this class. Callers cannot do
- # introspection on their context of being called. Even though the runtime
- # environment of the Pipeline can allow for that to happen, such behavior
- # would prevent synchronous simulation and verification, whic is an
- # unacceptable tradeoff.
-
- def __init__(self, output_names, force_strict=False):
- """Initializer.
-
- Args:
- output_names: The list of require output names that will be strictly
- enforced by this class.
- force_strict: If True, force this future to be in strict mode.
- """
- self._after_all_pipelines = set()
- self._output_dict = {
- 'default': Slot(name='default'),
- }
-
- self._strict = len(output_names) > 0 or force_strict
- if self._strict:
- for name in output_names:
- if name in self._output_dict:
- raise UnexpectedPipelineError('Output name reserved: "%s"' % name)
- self._output_dict[name] = Slot(name=name, strict=True)
-
- def _inherit_outputs(self,
- pipeline_name,
- already_defined,
- resolve_outputs=False):
- """Inherits outputs from a calling Pipeline.
-
- Args:
- pipeline_name: The Pipeline class name (used for debugging).
- already_defined: Maps output name to stringified db.Key (of _SlotRecords)
- of any exiting output slots to be inherited by this future.
- resolve_outputs: When True, this method will dereference all output slots
- before returning back to the caller, making those output slots' values
- available.
-
- Raises:
- UnexpectedPipelineError when resolve_outputs is True and any of the output
- slots could not be retrived from the Datastore.
- """
- for name, slot_key in already_defined.iteritems():
- if not isinstance(slot_key, db.Key):
- slot_key = db.Key(slot_key)
-
- slot = self._output_dict.get(name)
- if slot is None:
- if self._strict:
- raise UnexpectedPipelineError(
- 'Inherited output named "%s" must be filled but '
- 'not declared for pipeline class "%s"' % (name, pipeline_name))
- else:
- self._output_dict[name] = Slot(name=name, slot_key=slot_key)
- else:
- slot.key = slot_key
- slot._exists = True
-
- if resolve_outputs:
- slot_key_dict = dict((s.key, s) for s in self._output_dict.itervalues())
- all_slots = db.get(slot_key_dict.keys())
- for slot, slot_record in zip(slot_key_dict.itervalues(), all_slots):
- if slot_record is None:
- raise UnexpectedPipelineError(
- 'Inherited output named "%s" for pipeline class "%s" is '
- 'missing its Slot in the datastore: "%s"' %
- (slot.name, pipeline_name, slot.key))
- slot = slot_key_dict[slot_record.key()]
- slot._set_value(slot_record)
-
- def __getattr__(self, name):
- """Provides an output Slot instance with the given name if allowed."""
- if name not in self._output_dict:
- if self._strict:
- raise SlotNotDeclaredError('Undeclared output with name "%s"' % name)
- self._output_dict[name] = Slot(name=name)
- slot = self._output_dict[name]
- return slot
-
-
-class _PipelineMeta(type):
- """Meta-class for recording all Pipelines that have been defined."""
-
- # List of all Pipeline classes that have been seen.
- _all_classes = []
-
- def __new__(meta, name, bases, cls_dict):
- """Initializes the class path of a Pipeline and saves it."""
- cls = type.__new__(meta, name, bases, cls_dict)
- meta._all_classes.append(cls)
- return cls
-
-
-class ClassProperty(object):
- """Descriptor that lets us have read-only class properties."""
-
- def __init__(self, method):
- self.method = method
-
- def __get__(self, cls, obj):
- return self.method(obj)
-
-
-class Pipeline(object):
- """A Pipeline function-object that performs operations and has a life cycle.
-
- Class properties (to be overridden by sub-classes):
- async: When True, this Pipeline will execute asynchronously and fill the
- default output slot itself using the complete() method.
- output_names: List of named outputs (in addition to the default slot) that
- this Pipeline must output to (no more, no less).
- public_callbacks: If the callback URLs generated for this class should be
- accessible by all external requests regardless of login or task queue.
- admin_callbacks: If the callback URLs generated for this class should be
- accessible by the task queue ane externally by users logged in as admins.
- class_path: String identifier for this Pipeline, which is derived from
- its path in the global system modules dictionary.
-
- Modifiable instance properties:
- backoff_seconds: How many seconds to use as the constant factor in
- exponential backoff; may be changed by the user.
- backoff_factor: Base factor to use for exponential backoff. The formula
- followed is (backoff_seconds * backoff_factor^current_attempt).
- max_attempts: Maximum number of retry attempts to make before failing
- completely and aborting the entire pipeline up to the root.
- target: The application version to use for processing this Pipeline. This
- can be set to the name of a backend to direct Pipelines to run there.
-
- Instance properties:
- pipeline_id: The ID of this pipeline.
- root_pipeline_id: The ID of the root of this pipeline.
- queue_name: The queue this pipeline runs on or None if unknown.
- current_attempt: The current attempt being tried for this pipeline.
- """
-
- __metaclass__ = _PipelineMeta
-
- # To be set by sub-classes
- async = False
- output_names = []
- public_callbacks = False
- admin_callbacks = False
-
- # Internal only.
- _class_path = None # Set for each class
- _send_mail = mail.send_mail_to_admins # For testing
-
- # callback_xg_transaction: Determines whether callbacks are processed within
- # a single entity-group transaction (False), a cross-entity-group
- # transaction (True), or no transaction (None, default). It is generally
- # unsafe for a callback to modify pipeline state outside of a transaction, in
- # particular any pre-initialized state from the pipeline record, such as the
- # outputs. If a transaction is used, the callback method must operate within
- # the datastore's transaction time limits.
- # TODO(user): Make non-internal once other API calls are considered for
- # transaction support.
- _callback_xg_transaction = None
-
- def __init__(self, *args, **kwargs):
- """Initializer.
-
- Args:
- *args: The positional arguments for this function-object.
- **kwargs: The keyword arguments for this function-object.
- """
- self.args = args
- self.kwargs = kwargs
- self.outputs = None
- self.backoff_seconds = _DEFAULT_BACKOFF_SECONDS
- self.backoff_factor = _DEFAULT_BACKOFF_FACTOR
- self.max_attempts = _DEFAULT_MAX_ATTEMPTS
- self.target = None
- self.task_retry = False
- self._current_attempt = 0
- self._root_pipeline_key = None
- self._pipeline_key = None
- self._context = None
- self._result_status = None
- self._set_class_path()
- # Introspectively set the target so pipelines stick to the version it
- # started.
- self.target = mr_util._get_task_target()
-
- if _TEST_MODE:
- self._context = _PipelineContext('', 'default', '')
- self._root_pipeline_key = _TEST_ROOT_PIPELINE_KEY
- self._pipeline_key = db.Key.from_path(
- _PipelineRecord.kind(), uuid.uuid4().hex)
- self.outputs = PipelineFuture(self.output_names)
- self._context.evaluate_test(self)
-
- @property
- def pipeline_id(self):
- """Returns the ID of this Pipeline as a string or None if unknown."""
- if self._pipeline_key is None:
- return None
- return self._pipeline_key.name()
-
- @property
- def root_pipeline_id(self):
- """Returns root pipeline ID as a websafe string or None if unknown."""
- if self._root_pipeline_key is None:
- return None
- return self._root_pipeline_key.name()
-
- @property
- def is_root(self):
- """Returns True if this pipeline is a root pipeline, False otherwise."""
- return self._root_pipeline_key == self._pipeline_key
-
- @property
- def queue_name(self):
- """Returns the queue name this Pipeline runs on or None if unknown."""
- if self._context:
- return self._context.queue_name
- return None
-
- @property
- def base_path(self):
- """Returns the base path for Pipeline URL handlers or None if unknown."""
- if self._context:
- return self._context.base_path
- return None
-
- @property
- def has_finalized(self):
- """Returns True if this pipeline has completed and finalized."""
- return self._result_status == _PipelineRecord.DONE
-
- @property
- def was_aborted(self):
- """Returns True if this pipeline was aborted."""
- return self._result_status == _PipelineRecord.ABORTED
-
- @property
- def current_attempt(self):
- """Returns the current attempt at running this pipeline, starting at 1."""
- return self._current_attempt + 1
-
- @property
- def test_mode(self):
- """Returns True if the pipeline is running in test mode."""
- return _TEST_MODE
-
- @ClassProperty
- def class_path(cls):
- """Returns the unique string identifier for this Pipeline class.
-
- Refers to how to find the Pipeline in the global modules dictionary.
- """
- cls._set_class_path()
- return cls._class_path
-
- @classmethod
- def from_id(cls, pipeline_id, resolve_outputs=True, _pipeline_record=None):
- """Returns an instance corresponding to an existing Pipeline.
-
- The returned object will have the same properties a Pipeline does while
- it's running synchronously (e.g., like what it's first allocated), allowing
- callers to inspect caller arguments, outputs, fill slots, complete the
- pipeline, abort, retry, etc.
-
- Args:
- pipeline_id: The ID of this pipeline (a string).
- resolve_outputs: When True, dereference the outputs of this Pipeline
- so their values can be accessed by the caller.
- _pipeline_record: Internal-only. The _PipelineRecord instance to use
- to instantiate this instance instead of fetching it from
- the datastore.
-
- Returns:
- Pipeline sub-class instances or None if it could not be found.
- """
- pipeline_record = _pipeline_record
-
- # Support pipeline IDs and idempotence_keys that are not unicode.
- if not isinstance(pipeline_id, unicode):
- try:
- pipeline_id = pipeline_id.encode('utf-8')
- except UnicodeDecodeError:
- pipeline_id = hashlib.sha1(pipeline_id).hexdigest()
-
- pipeline_key = db.Key.from_path(_PipelineRecord.kind(), pipeline_id)
-
- if pipeline_record is None:
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- return None
-
- try:
- pipeline_func_class = mr_util.for_name(pipeline_record.class_path)
- except ImportError, e:
- logging.warning('Tried to find Pipeline %s#%s, but class could '
- 'not be found. Using default Pipeline class instead.',
- pipeline_record.class_path, pipeline_id)
- pipeline_func_class = cls
-
- params = pipeline_record.params
- arg_list, kwarg_dict = _dereference_args(
- pipeline_record.class_path, params['args'], params['kwargs'])
- outputs = PipelineFuture(pipeline_func_class.output_names)
- outputs._inherit_outputs(
- pipeline_record.class_path,
- params['output_slots'],
- resolve_outputs=resolve_outputs)
-
- stage = pipeline_func_class(*arg_list, **kwarg_dict)
- stage.backoff_seconds = params['backoff_seconds']
- stage.backoff_factor = params['backoff_factor']
- stage.max_attempts = params['max_attempts']
- stage.task_retry = params['task_retry']
- stage.target = params.get('target') # May not be defined for old Pipelines
- stage._current_attempt = pipeline_record.current_attempt
- stage._set_values_internal(
- _PipelineContext('', params['queue_name'], params['base_path']),
- pipeline_key,
- _PipelineRecord.root_pipeline.get_value_for_datastore(pipeline_record),
- outputs,
- pipeline_record.status)
- return stage
-
- # Methods that can be invoked on a Pipeline instance by anyone with a
- # valid object (e.g., directly instantiated, retrieve via from_id).
- def start(self,
- idempotence_key='',
- queue_name='default',
- base_path='/_ah/pipeline',
- return_task=False,
- countdown=None,
- eta=None):
- """Starts a new instance of this pipeline.
-
- Args:
- idempotence_key: The ID to use for this Pipeline and throughout its
- asynchronous workflow to ensure the operations are idempotent. If
- empty a starting key will be automatically assigned.
- queue_name: What queue this Pipeline's workflow should execute on.
- base_path: The relative URL path to where the Pipeline API is
- mounted for access by the taskqueue API or external requests.
- return_task: When True, a task to start this pipeline will be returned
- instead of submitted, allowing the caller to start off this pipeline
- as part of a separate transaction (potentially leaving this newly
- allocated pipeline's datastore entities in place if that separate
- transaction fails for any reason).
- countdown: Time in seconds into the future that this Task should execute.
- Defaults to zero.
- eta: A datetime.datetime specifying the absolute time at which the task
- should be executed. Must not be specified if 'countdown' is specified.
- This may be timezone-aware or timezone-naive. If None, defaults to now.
- For pull tasks, no worker will be able to lease this task before the
- time indicated by eta.
-
- Returns:
- A taskqueue.Task instance if return_task was True. This task will *not*
- have a name, thus to ensure reliable execution of your pipeline you
- should add() this task as part of a separate Datastore transaction.
-
- Raises:
- PipelineExistsError if the pipeline with the given idempotence key exists.
- PipelineSetupError if the pipeline could not start for any other reason.
- """
- if not idempotence_key:
- idempotence_key = uuid.uuid4().hex
- elif not isinstance(idempotence_key, unicode):
- try:
- idempotence_key.encode('utf-8')
- except UnicodeDecodeError:
- idempotence_key = hashlib.sha1(idempotence_key).hexdigest()
-
- pipeline_key = db.Key.from_path(_PipelineRecord.kind(), idempotence_key)
- context = _PipelineContext('', queue_name, base_path)
- future = PipelineFuture(self.output_names, force_strict=True)
- try:
- self._set_values_internal(
- context, pipeline_key, pipeline_key, future, _PipelineRecord.WAITING)
- return context.start(
- self, return_task=return_task, countdown=countdown, eta=eta)
- except Error:
- # Pass through exceptions that originate in this module.
- raise
- except Exception, e:
- # Re-type any exceptions that were raised in dependent methods.
- raise PipelineSetupError('Error starting %s#%s: %s' % (
- self, idempotence_key, str(e)))
-
- def start_test(self, idempotence_key=None, base_path='', **kwargs):
- """Starts this pipeline in test fashion.
-
- Args:
- idempotence_key: Dummy idempotence_key to use for this root pipeline.
- base_path: Dummy base URL path to use for this root pipeline.
- kwargs: Ignored keyword arguments usually passed to start().
- """
- if not idempotence_key:
- idempotence_key = uuid.uuid4().hex
- pipeline_key = db.Key.from_path(_PipelineRecord.kind(), idempotence_key)
- context = _PipelineContext('', 'default', base_path)
- future = PipelineFuture(self.output_names, force_strict=True)
- self._set_values_internal(
- context, pipeline_key, pipeline_key, future, _PipelineRecord.WAITING)
- context.start_test(self)
-
- # Pipeline control methods.
- def retry(self, retry_message=''):
- """Forces a currently running asynchronous pipeline to retry.
-
- Note this may not be called by synchronous or generator pipelines. Those
- must instead raise the 'Retry' exception during execution.
-
- Args:
- retry_message: Optional message explaining why the retry happened.
-
- Returns:
- True if the Pipeline should be retried, False if it cannot be cancelled
- mid-flight for some reason.
- """
- if not self.async:
- raise UnexpectedPipelineError(
- 'May only call retry() method for asynchronous pipelines.')
- if self.try_cancel():
- self._context.transition_retry(self._pipeline_key, retry_message)
- return True
- else:
- return False
-
- def abort(self, abort_message=''):
- """Mark the entire pipeline up to the root as aborted.
-
- Note this should only be called from *outside* the context of a running
- pipeline. Synchronous and generator pipelines should raise the 'Abort'
- exception to cause this behavior during execution.
-
- Args:
- abort_message: Optional message explaining why the abort happened.
-
- Returns:
- True if the abort signal was sent successfully; False if the pipeline
- could not be aborted for any reason.
- """
- # TODO: Use thread-local variable to enforce that this is not called
- # while a pipeline is executing in the current thread.
- if (self.async and self._root_pipeline_key == self._pipeline_key and
- not self.try_cancel()):
- # Handle the special case where the root pipeline is async and thus
- # cannot be aborted outright.
- return False
- else:
- return self._context.begin_abort(
- self._root_pipeline_key, abort_message=abort_message)
-
- # Methods used by the Pipeline as it runs.
- def fill(self, name_or_slot, value):
- """Fills an output slot required by this Pipeline.
-
- Args:
- name_or_slot: The name of the slot (a string) or Slot record to fill.
- value: The serializable value to assign to this slot.
-
- Raises:
- UnexpectedPipelineError if the Slot no longer exists. SlotNotDeclaredError
- if trying to output to a slot that was not declared ahead of time.
- """
- if isinstance(name_or_slot, basestring):
- slot = getattr(self.outputs, name_or_slot)
- elif isinstance(name_or_slot, Slot):
- slot = name_or_slot
- else:
- raise UnexpectedPipelineError(
- 'Could not fill invalid output name: %r' % name_or_slot)
-
- if not slot._exists:
- raise SlotNotDeclaredError(
- 'Cannot fill output with name "%s" that was just '
- 'declared within the Pipeline context.' % slot.name)
-
- self._context.fill_slot(self._pipeline_key, slot, value)
-
- def set_status(self, message=None, console_url=None, status_links=None):
- """Sets the current status of this pipeline.
-
- This method is purposefully non-transactional. Updates are written to the
- datastore immediately and overwrite all existing statuses.
-
- Args:
- message: (optional) Overall status message.
- console_url: (optional) Relative URL to use for the "console" of this
- pipeline that displays current progress. When None, no console will
- be displayed.
- status_links: (optional) Dictionary of readable link names to relative
- URLs that should be associated with this pipeline as it runs. These links
- provide convenient access to other dashboards, consoles, etc associated
- with the pipeline.
-
- Raises:
- PipelineRuntimeError if the status could not be set for any reason.
- """
- if _TEST_MODE:
- logging.info(
- 'New status for %s#%s: message=%r, console_url=%r, status_links=%r',
- self, self.pipeline_id, message, console_url, status_links)
- return
-
- status_key = db.Key.from_path(_StatusRecord.kind(), self.pipeline_id)
- root_pipeline_key = db.Key.from_path(
- _PipelineRecord.kind(), self.root_pipeline_id)
- status_record = _StatusRecord(
- key=status_key, root_pipeline=root_pipeline_key)
-
- try:
- if message:
- status_record.message = message
- if console_url:
- status_record.console_url = console_url
- if status_links:
- # Alphabeticalize the list.
- status_record.link_names = sorted(
- db.Text(s) for s in status_links.iterkeys())
- status_record.link_urls = [
- db.Text(status_links[name]) for name in status_record.link_names]
-
- status_record.status_time = datetime.datetime.utcnow()
-
- status_record.put()
- except Exception, e:
- raise PipelineRuntimeError('Could not set status for %s#%s: %s' %
- (self, self.pipeline_id, str(e)))
-
- def complete(self, default_output=None):
- """Marks this asynchronous Pipeline as complete.
-
- Args:
- default_output: What value the 'default' output slot should be assigned.
-
- Raises:
- UnexpectedPipelineError if the slot no longer exists or this method was
- called for a pipeline that is not async.
- """
- # TODO: Enforce that all outputs expected by this async pipeline were
- # filled before this complete() function was called. May required all
- # async functions to declare their outputs upfront.
- if not self.async:
- raise UnexpectedPipelineError(
- 'May only call complete() method for asynchronous pipelines.')
- self._context.fill_slot(
- self._pipeline_key, self.outputs.default, default_output)
-
- def get_callback_url(self, **kwargs):
- """Returns a relative URL for invoking this Pipeline's callback method.
-
- Args:
- kwargs: Dictionary mapping keyword argument names to single values that
- should be passed to the callback when it is invoked.
-
- Raises:
- UnexpectedPipelineError if this is invoked on pipeline that is not async.
- """
- # TODO: Support positional parameters.
- if not self.async:
- raise UnexpectedPipelineError(
- 'May only call get_callback_url() method for asynchronous pipelines.')
- kwargs['pipeline_id'] = self._pipeline_key.name()
- params = urllib.urlencode(sorted(kwargs.items()))
- return '%s/callback?%s' % (self.base_path, params)
-
- def get_callback_task(self, *args, **kwargs):
- """Returns a task for calling back this Pipeline.
-
- Args:
- params: Keyword argument containing a dictionary of key/value pairs
- that will be passed to the callback when it is executed.
- args, kwargs: Passed to the taskqueue.Task constructor. Use these
- arguments to set the task name (for idempotence), etc.
-
- Returns:
- A taskqueue.Task instance that must be enqueued by the caller.
- """
- if not self.async:
- raise UnexpectedPipelineError(
- 'May only call get_callback_task() method for asynchronous pipelines.')
-
- params = kwargs.get('params', {})
- kwargs['params'] = params
- params['pipeline_id'] = self._pipeline_key.name()
- kwargs['url'] = self.base_path + '/callback'
- kwargs['method'] = 'POST'
- return taskqueue.Task(*args, **kwargs)
-
- def send_result_email(self, sender=None):
- """Sends an email to admins indicating this Pipeline has completed.
-
- For developer convenience. Automatically called from finalized for root
- Pipelines that do not override the default action.
-
- Args:
- sender: (optional) Override the sender's email address.
- """
- status = 'successful'
- if self.was_aborted:
- status = 'aborted'
-
- app_id = os.environ['APPLICATION_ID']
- shard_index = app_id.find('~')
- if shard_index != -1:
- app_id = app_id[shard_index+1:]
-
- param_dict = {
- 'status': status,
- 'app_id': app_id,
- 'class_path': self._class_path,
- 'pipeline_id': self.root_pipeline_id,
- 'base_path': '%s.appspot.com%s' % (app_id, self.base_path),
- }
- subject = (
- 'Pipeline %(status)s: App "%(app_id)s", %(class_path)s'
- '#%(pipeline_id)s' % param_dict)
- body = """View the pipeline results here:
-
-http://%(base_path)s/status?root=%(pipeline_id)s
-
-Thanks,
-
-The Pipeline API
-""" % param_dict
-
- html = """<html><body>
-<p>View the pipeline results here:</p>
-
-<p><a href="http://%(base_path)s/status?root=%(pipeline_id)s"
->http://%(base_path)s/status?root=%(pipeline_id)s</a></p>
-
-<p>
-Thanks,
-<br>
-The Pipeline API
-</p>
-</body></html>
-""" % param_dict
-
- if sender is None:
- sender = '%s@%s.appspotmail.com' % (app_id, app_id)
- try:
- self._send_mail(sender, subject, body, html=html)
- except (mail.InvalidSenderError, mail.InvalidEmailError):
- logging.warning('Could not send result email for '
- 'root pipeline ID "%s" from sender "%s"',
- self.root_pipeline_id, sender)
-
- def cleanup(self):
- """Clean up this Pipeline and all Datastore records used for coordination.
-
- Only works when called on a root pipeline. Child pipelines will ignore
- calls to this method.
-
- After this method is called, Pipeline.from_id() and related status
- methods will return inconsistent or missing results. This method is
- fire-and-forget and asynchronous.
- """
- if self._root_pipeline_key is None:
- raise UnexpectedPipelineError(
- 'Could not cleanup Pipeline with unknown root pipeline ID.')
- if not self.is_root:
- return
- task = taskqueue.Task(
- params=dict(root_pipeline_key=self._root_pipeline_key),
- url=self.base_path + '/cleanup',
- headers={'X-Ae-Pipeline-Key': self._root_pipeline_key})
- taskqueue.Queue(self.queue_name).add(task)
-
- def with_params(self, **kwargs):
- """Modify various execution parameters of a Pipeline before it runs.
-
- This method has no effect in test mode.
-
- Args:
- kwargs: Attributes to modify on this Pipeline instance before it has
- been executed.
-
- Returns:
- This Pipeline instance, for easy chaining.
- """
- if _TEST_MODE:
- logging.info(
- 'Setting runtime parameters for %s#%s: %r',
- self, self.pipeline_id, kwargs)
- return self
-
- if self.pipeline_id is not None:
- raise UnexpectedPipelineError(
- 'May only call with_params() on a Pipeline that has not yet '
- 'been scheduled for execution.')
-
- ALLOWED = ('backoff_seconds', 'backoff_factor', 'max_attempts', 'target')
- for name, value in kwargs.iteritems():
- if name not in ALLOWED:
- raise TypeError('Unexpected keyword: %s=%r' % (name, value))
- setattr(self, name, value)
- return self
-
- # Methods implemented by developers for lifecycle management. These
- # must be idempotent under all circumstances.
- def run(self, *args, **kwargs):
- """Runs this Pipeline."""
- raise NotImplementedError('Must implement "run" in Pipeline sub-class.')
-
- def run_test(self, *args, **kwargs):
- """Runs this Pipeline in test mode."""
- raise NotImplementedError(
- 'Must implement "run_test" in Pipeline sub-class.')
-
- def finalized(self):
- """Finalizes this Pipeline after execution if it's a generator.
-
- Default action as the root pipeline is to email the admins with the status.
- Implementors be sure to call 'was_aborted' to find out if the finalization
- that you're handling is for a success or error case.
- """
- if self.pipeline_id == self.root_pipeline_id:
- self.send_result_email()
-
- def finalized_test(self, *args, **kwargs):
- """Finalized this Pipeline in test mode."""
- raise NotImplementedError(
- 'Must implement "finalized_test" in Pipeline sub-class.')
-
- def callback(self, **kwargs):
- """This Pipeline received an asynchronous callback request."""
- raise NotImplementedError(
- 'Must implement "callback" in Pipeline sub-class.')
-
- def try_cancel(self):
- """This pipeline has been cancelled.
-
- Called when a pipeline is interrupted part-way through due to some kind
- of failure (an abort of the whole pipeline to the root or a forced retry on
- this child pipeline).
-
- Returns:
- True to indicate that cancellation was successful and this pipeline may
- go in the retry or aborted state; False to indicate that this pipeline
- cannot be canceled right now and must remain as-is.
- """
- return False
-
- # Internal methods.
- @classmethod
- def _set_class_path(cls, module_dict=sys.modules):
- """Sets the absolute path to this class as a string.
-
- Used by the Pipeline API to reconstruct the Pipeline sub-class object
- at execution time instead of passing around a serialized function.
-
- Args:
- module_dict: Used for testing.
- """
- # Do not traverse the class hierarchy fetching the class path attribute.
- found = cls.__dict__.get('_class_path')
- if found is not None:
- return
-
- # Do not set the _class_path for the base-class, otherwise all children's
- # lookups for _class_path will fall through and return 'Pipeline' above.
- # This situation can happen if users call the generic Pipeline.from_id
- # to get the result of a Pipeline without knowing its specific class.
- if cls is Pipeline:
- return
-
- class_path = '%s.%s' % (cls.__module__, cls.__name__)
- # When a WSGI handler is invoked as an entry point, any Pipeline class
- # defined in the same file as the handler will get __module__ set to
- # __main__. Thus we need to find out its real fully qualified path.
- if cls.__module__ == '__main__':
- for name, module in module_dict.items():
- if name == '__main__':
- continue
- found = getattr(module, cls.__name__, None)
- if found is cls:
- class_path = '%s.%s' % (name, cls.__name__)
- break
- cls._class_path = class_path
-
- def _set_values_internal(self,
- context,
- pipeline_key,
- root_pipeline_key,
- outputs,
- result_status):
- """Sets the user-visible values provided as an API by this class.
-
- Args:
- context: The _PipelineContext used for this Pipeline.
- pipeline_key: The db.Key of this pipeline.
- root_pipeline_key: The db.Key of the root pipeline.
- outputs: The PipelineFuture for this pipeline.
- result_status: The result status of this pipeline.
- """
- self._context = context
- self._pipeline_key = pipeline_key
- self._root_pipeline_key = root_pipeline_key
- self._result_status = result_status
- self.outputs = outputs
-
- def _callback_internal(self, kwargs):
- """Used to execute callbacks on asynchronous pipelines."""
- logging.debug('Callback %s(*%s, **%s)#%s with params: %r',
- self._class_path, _short_repr(self.args),
- _short_repr(self.kwargs), self._pipeline_key.name(), kwargs)
- return self.callback(**kwargs)
-
- def _run_internal(self,
- context,
- pipeline_key,
- root_pipeline_key,
- caller_output):
- """Used by the Pipeline evaluator to execute this Pipeline."""
- self._set_values_internal(
- context, pipeline_key, root_pipeline_key, caller_output,
- _PipelineRecord.RUN)
- logging.debug('Running %s(*%s, **%s)#%s',
- self._class_path, _short_repr(self.args),
- _short_repr(self.kwargs), self._pipeline_key.name())
- return self.run(*self.args, **self.kwargs)
-
- def _finalized_internal(self,
- context,
- pipeline_key,
- root_pipeline_key,
- caller_output,
- aborted):
- """Used by the Pipeline evaluator to finalize this Pipeline."""
- result_status = _PipelineRecord.RUN
- if aborted:
- result_status = _PipelineRecord.ABORTED
-
- self._set_values_internal(
- context, pipeline_key, root_pipeline_key, caller_output, result_status)
- logging.debug('Finalizing %s(*%r, **%r)#%s',
- self._class_path, _short_repr(self.args),
- _short_repr(self.kwargs), self._pipeline_key.name())
- try:
- self.finalized()
- except NotImplementedError:
- pass
-
- def __repr__(self):
- """Returns a string representation of this Pipeline."""
- return '%s(*%s, **%s)' % (
- self._class_path, _short_repr(self.args), _short_repr(self.kwargs))
-
-
-# TODO: Change InOrder and After to use a common thread-local list of
-# execution modifications to apply to the current evaluating pipeline.
-
-class After(object):
- """Causes all contained Pipelines to run after the given ones complete.
-
- Must be used in a 'with' block.
- """
-
- _local = threading.local()
-
- def __init__(self, *futures):
- """Initializer.
-
- Args:
- *futures: PipelineFutures that all subsequent pipelines should follow.
- May be empty, in which case this statement does nothing.
- """
- for f in futures:
- if not isinstance(f, PipelineFuture):
- raise TypeError('May only pass PipelineFuture instances to After(). %r',
- type(f))
- self._futures = set(futures)
-
- def __enter__(self):
- """When entering a 'with' block."""
- After._thread_init()
- After._local._after_all_futures.extend(self._futures)
-
- def __exit__(self, type, value, trace):
- """When exiting a 'with' block."""
- for future in self._futures:
- After._local._after_all_futures.remove(future)
- return False
-
- @classmethod
- def _thread_init(cls):
- """Ensure thread local is initialized."""
- if not hasattr(cls._local, '_after_all_futures'):
- cls._local._after_all_futures = []
-
-
-class InOrder(object):
- """Causes all contained Pipelines to run in order.
-
- Must be used in a 'with' block.
- """
-
- _local = threading.local()
-
- @classmethod
- def _add_future(cls, future):
- """Adds a future to the list of in-order futures thus far.
-
- Args:
- future: The future to add to the list.
- """
- if cls._local._activated:
- cls._local._in_order_futures.add(future)
-
- def __init__(self):
- """Initializer."""
-
- def __enter__(self):
- """When entering a 'with' block."""
- InOrder._thread_init()
- if InOrder._local._activated:
- raise UnexpectedPipelineError('Already in an InOrder "with" block.')
- InOrder._local._activated = True
- InOrder._local._in_order_futures.clear()
-
- def __exit__(self, type, value, trace):
- """When exiting a 'with' block."""
- InOrder._local._activated = False
- InOrder._local._in_order_futures.clear()
- return False
-
- @classmethod
- def _thread_init(cls):
- """Ensure thread local is initialized."""
- if not hasattr(cls._local, '_in_order_futures'):
- cls._local._in_order_futures = set()
- cls._local._activated = False
-
-
-################################################################################
-
-def _short_repr(obj):
- """Helper function returns a truncated repr() of an object."""
- stringified = pprint.saferepr(obj)
- if len(stringified) > 200:
- return '%s... (%d bytes)' % (stringified[:200], len(stringified))
- return stringified
-
-
-def _write_json_blob(encoded_value):
- """Writes a JSON encoded value to a Blobstore File.
-
- Args:
- encoded_value: The encoded JSON string.
-
- Returns:
- The blobstore.BlobKey for the file that was created.
- """
- file_name = files.blobstore.create(mime_type='application/json')
- handle = files.open(file_name, 'a')
- try:
- # Chunk the file into individual writes of less than 1MB, since the files
- # API does not do buffered writes implicitly.
- for start_index in xrange(0, len(encoded_value), _MAX_JSON_SIZE):
- end_index = start_index + _MAX_JSON_SIZE
- handle.write(encoded_value[start_index:end_index])
- finally:
- handle.close()
-
- files.finalize(file_name)
- return files.blobstore.get_blob_key(file_name)
-
-
-def _dereference_args(pipeline_name, args, kwargs):
- """Dereference a Pipeline's arguments that are slots, validating them.
-
- Each argument value passed in is assumed to be a dictionary with the format:
- {'type': 'value', 'value': 'serializable'} # A resolved value.
- {'type': 'slot', 'slot_key': 'str() on a db.Key'} # A pending Slot.
-
- Args:
- pipeline_name: The name of the pipeline class; used for debugging.
- args: Iterable of positional arguments.
- kwargs: Dictionary of keyword arguments.
-
- Returns:
- Tuple (args, kwargs) where:
- Args: A list of positional arguments values that are all dereferenced.
- Kwargs: A list of keyword arguments values that are all dereferenced.
-
- Raises:
- SlotNotFilledError if any of the supplied 'slot_key' records are not
- present in the Datastore or have not yet been filled.
- UnexpectedPipelineError if an unknown parameter type was passed.
- """
- lookup_slots = set()
- for arg in itertools.chain(args, kwargs.itervalues()):
- if arg['type'] == 'slot':
- lookup_slots.add(db.Key(arg['slot_key']))
-
- slot_dict = {}
- for key, slot_record in zip(lookup_slots, db.get(lookup_slots)):
- if slot_record is None or slot_record.status != _SlotRecord.FILLED:
- raise SlotNotFilledError(
- 'Slot "%s" missing its value. From %s(*args=%s, **kwargs=%s)' %
- (key, pipeline_name, _short_repr(args), _short_repr(kwargs)))
- slot_dict[key] = slot_record.value
-
- arg_list = []
- for current_arg in args:
- if current_arg['type'] == 'slot':
- arg_list.append(slot_dict[db.Key(current_arg['slot_key'])])
- elif current_arg['type'] == 'value':
- arg_list.append(current_arg['value'])
- else:
- raise UnexpectedPipelineError('Unknown parameter type: %r' % current_arg)
-
- kwarg_dict = {}
- for key, current_arg in kwargs.iteritems():
- if current_arg['type'] == 'slot':
- kwarg_dict[key] = slot_dict[db.Key(current_arg['slot_key'])]
- elif current_arg['type'] == 'value':
- kwarg_dict[key] = current_arg['value']
- else:
- raise UnexpectedPipelineError('Unknown parameter type: %r' % current_arg)
-
- return (arg_list, kwarg_dict)
-
-
-def _generate_args(pipeline, future, queue_name, base_path):
- """Generate the params used to describe a Pipeline's depedencies.
-
- The arguments passed to this method may be normal values, Slot instances
- (for named outputs), or PipelineFuture instances (for referring to the
- default output slot).
-
- Args:
- pipeline: The Pipeline instance to generate args for.
- future: The PipelineFuture for the Pipeline these arguments correspond to.
- queue_name: The queue to run the pipeline on.
- base_path: Relative URL for pipeline URL handlers.
-
- Returns:
- Tuple (dependent_slots, output_slot_keys, params_text, params_blob) where:
- dependent_slots: List of db.Key instances of _SlotRecords on which
- this pipeline will need to block before execution (passed to
- create a _BarrierRecord for running the pipeline).
- output_slot_keys: List of db.Key instances of _SlotRecords that will
- be filled by this pipeline during its execution (passed to create
- a _BarrierRecord for finalizing the pipeline).
- params_text: JSON dictionary of pipeline parameters to be serialized and
- saved in a corresponding _PipelineRecord. Will be None if the params are
- too big and must be saved in a blob instead.
- params_blob: JSON dictionary of pipeline parameters to be serialized and
- saved in a Blob file, and then attached to a _PipelineRecord. Will be
- None if the params data size was small enough to fit in the entity.
- """
- params = {
- 'args': [],
- 'kwargs': {},
- 'after_all': [],
- 'output_slots': {},
- 'class_path': pipeline._class_path,
- 'queue_name': queue_name,
- 'base_path': base_path,
- 'backoff_seconds': pipeline.backoff_seconds,
- 'backoff_factor': pipeline.backoff_factor,
- 'max_attempts': pipeline.max_attempts,
- 'task_retry': pipeline.task_retry,
- 'target': pipeline.target,
- }
- dependent_slots = set()
-
- arg_list = params['args']
- for current_arg in pipeline.args:
- if isinstance(current_arg, PipelineFuture):
- current_arg = current_arg.default
- if isinstance(current_arg, Slot):
- arg_list.append({'type': 'slot', 'slot_key': str(current_arg.key)})
- dependent_slots.add(current_arg.key)
- else:
- arg_list.append({'type': 'value', 'value': current_arg})
-
- kwarg_dict = params['kwargs']
- for name, current_arg in pipeline.kwargs.iteritems():
- if isinstance(current_arg, PipelineFuture):
- current_arg = current_arg.default
- if isinstance(current_arg, Slot):
- kwarg_dict[name] = {'type': 'slot', 'slot_key': str(current_arg.key)}
- dependent_slots.add(current_arg.key)
- else:
- kwarg_dict[name] = {'type': 'value', 'value': current_arg}
-
- after_all = params['after_all']
- for other_future in future._after_all_pipelines:
- slot_key = other_future._output_dict['default'].key
- after_all.append(str(slot_key))
- dependent_slots.add(slot_key)
-
- output_slots = params['output_slots']
- output_slot_keys = set()
- for name, slot in future._output_dict.iteritems():
- output_slot_keys.add(slot.key)
- output_slots[name] = str(slot.key)
-
- params_encoded = json.dumps(params, cls=mr_util.JsonEncoder)
- params_text = None
- params_blob = None
- if len(params_encoded) > _MAX_JSON_SIZE:
- params_blob = _write_json_blob(params_encoded)
- else:
- params_text = params_encoded
-
- return dependent_slots, output_slot_keys, params_text, params_blob
-
-
-class _PipelineContext(object):
- """Internal API for interacting with Pipeline state."""
-
- _gettime = datetime.datetime.utcnow
-
- def __init__(self,
- task_name,
- queue_name,
- base_path):
- """Initializer.
-
- Args:
- task_name: The name of the currently running task or empty if there
- is no task running.
- queue_name: The queue this pipeline should run on (may not be the
- current queue this request is on).
- base_path: Relative URL for the pipeline's handlers.
- """
- self.task_name = task_name
- self.queue_name = queue_name
- self.base_path = base_path
- self.barrier_handler_path = '%s/output' % base_path
- self.pipeline_handler_path = '%s/run' % base_path
- self.finalized_handler_path = '%s/finalized' % base_path
- self.fanout_handler_path = '%s/fanout' % base_path
- self.abort_handler_path = '%s/abort' % base_path
- self.fanout_abort_handler_path = '%s/fanout_abort' % base_path
- self.session_filled_output_names = set()
-
- @classmethod
- def from_environ(cls, environ=os.environ):
- """Constructs a _PipelineContext from the task queue environment."""
- base_path, unused = (environ['PATH_INFO'].rsplit('/', 1) + [''])[:2]
- return cls(
- environ['HTTP_X_APPENGINE_TASKNAME'],
- environ['HTTP_X_APPENGINE_QUEUENAME'],
- base_path)
-
- def fill_slot(self, filler_pipeline_key, slot, value):
- """Fills a slot, enqueueing a task to trigger pending barriers.
-
- Args:
- filler_pipeline_key: db.Key or stringified key of the _PipelineRecord
- that filled this slot.
- slot: The Slot instance to fill.
- value: The serializable value to assign.
-
- Raises:
- UnexpectedPipelineError if the _SlotRecord for the 'slot' could not
- be found in the Datastore.
- """
- if not isinstance(filler_pipeline_key, db.Key):
- filler_pipeline_key = db.Key(filler_pipeline_key)
-
- if _TEST_MODE:
- slot._set_value_test(filler_pipeline_key, value)
- else:
- encoded_value = json.dumps(value,
- sort_keys=True,
- cls=mr_util.JsonEncoder)
- value_text = None
- value_blob = None
- if len(encoded_value) <= _MAX_JSON_SIZE:
- value_text = db.Text(encoded_value)
- else:
- # The encoded value is too big. Save it as a blob.
- value_blob = _write_json_blob(encoded_value)
-
- def txn():
- slot_record = db.get(slot.key)
- if slot_record is None:
- raise UnexpectedPipelineError(
- 'Tried to fill missing slot "%s" '
- 'by pipeline ID "%s" with value: %r'
- % (slot.key, filler_pipeline_key.name(), value))
- # NOTE: Always take the override value here. If down-stream pipelines
- # need a consitent view of all up-stream outputs (meaning, all of the
- # outputs came from the same retry attempt of the upstream pipeline),
- # the down-stream pipeline must also wait for the 'default' output
- # of these up-stream pipelines.
- slot_record.filler = filler_pipeline_key
- slot_record.value_text = value_text
- slot_record.value_blob = value_blob
- slot_record.status = _SlotRecord.FILLED
- slot_record.fill_time = self._gettime()
- slot_record.put()
- task = taskqueue.Task(
- url=self.barrier_handler_path,
- params=dict(
- slot_key=slot.key,
- use_barrier_indexes=True),
- headers={'X-Ae-Slot-Key': slot.key,
- 'X-Ae-Filler-Pipeline-Key': filler_pipeline_key})
- task.add(queue_name=self.queue_name, transactional=True)
- db.run_in_transaction_options(
- db.create_transaction_options(propagation=db.ALLOWED), txn)
-
- self.session_filled_output_names.add(slot.name)
-
- def notify_barriers(self,
- slot_key,
- cursor,
- use_barrier_indexes,
- max_to_notify=_MAX_BARRIERS_TO_NOTIFY):
- """Searches for barriers affected by a slot and triggers completed ones.
-
- Args:
- slot_key: db.Key or stringified key of the _SlotRecord that was filled.
- cursor: Stringified Datastore cursor where the notification query
- should pick up.
- use_barrier_indexes: When True, use _BarrierIndex records to determine
- which _Barriers to trigger by having this _SlotRecord filled. When
- False, use the old method that queries for _BarrierRecords by
- the blocking_slots parameter.
- max_to_notify: Used for testing.
-
- Raises:
- PipelineStatusError: If any of the barriers are in a bad state.
- """
- if not isinstance(slot_key, db.Key):
- slot_key = db.Key(slot_key)
- logging.debug('Notifying slot %r', slot_key)
-
- if use_barrier_indexes:
- # Please see models.py:_BarrierIndex to understand how _BarrierIndex
- # entities relate to _BarrierRecord entities.
- query = (
- _BarrierIndex.all(cursor=cursor, keys_only=True)
- .ancestor(slot_key))
- barrier_index_list = query.fetch(max_to_notify)
- barrier_key_list = [
- _BarrierIndex.to_barrier_key(key) for key in barrier_index_list]
-
- # If there are task and pipeline kickoff retries it's possible for a
- # _BarrierIndex to exist for a _BarrierRecord that was not successfully
- # written. It's safe to ignore this because the original task that wrote
- # the _BarrierIndex and _BarrierRecord would not have made progress to
- # kick off a real pipeline or child pipeline unless all of the writes for
- # these dependent entities went through. We assume that the instigator
- # retried from scratch and somehwere there exists a good _BarrierIndex and
- # corresponding _BarrierRecord that tries to accomplish the same thing.
- barriers = db.get(barrier_key_list)
- results = []
- for barrier_key, barrier in zip(barrier_key_list, barriers):
- if barrier is None:
- logging.debug('Ignoring that Barrier "%r" is missing, '
- 'relies on Slot "%r"', barrier_key, slot_key)
- else:
- results.append(barrier)
- else:
- # TODO(user): Delete this backwards compatible codepath and
- # make use_barrier_indexes the assumed default in all cases.
- query = (
- _BarrierRecord.all(cursor=cursor)
- .filter('blocking_slots =', slot_key))
- results = query.fetch(max_to_notify)
-
- # Fetch all blocking _SlotRecords for any potentially triggered barriers.
- blocking_slot_keys = []
- for barrier in results:
- blocking_slot_keys.extend(barrier.blocking_slots)
-
- blocking_slot_dict = {}
- for slot_record in db.get(blocking_slot_keys):
- if slot_record is None:
- continue
- blocking_slot_dict[slot_record.key()] = slot_record
-
- task_list = []
- updated_barriers = []
- for barrier in results:
- ready_slots = []
- for blocking_slot_key in barrier.blocking_slots:
- slot_record = blocking_slot_dict.get(blocking_slot_key)
- if slot_record is None:
- raise UnexpectedPipelineError(
- 'Barrier "%r" relies on Slot "%r" which is missing.' %
- (barrier.key(), blocking_slot_key))
- if slot_record.status == _SlotRecord.FILLED:
- ready_slots.append(blocking_slot_key)
-
- # When all of the blocking_slots have been filled, consider the barrier
- # ready to trigger. We'll trigger it regardless of the current
- # _BarrierRecord status, since there could be task queue failures at any
- # point in this flow; this rolls forward the state and de-dupes using
- # the task name tombstones.
- pending_slots = set(barrier.blocking_slots) - set(ready_slots)
- if not pending_slots:
- if barrier.status != _BarrierRecord.FIRED:
- barrier.status = _BarrierRecord.FIRED
- barrier.trigger_time = self._gettime()
- updated_barriers.append(barrier)
-
- purpose = barrier.key().name()
- if purpose == _BarrierRecord.START:
- path = self.pipeline_handler_path
- countdown = None
- else:
- path = self.finalized_handler_path
- # NOTE: Wait one second before finalization to prevent
- # contention on the _PipelineRecord entity.
- countdown = 1
- pipeline_key = _BarrierRecord.target.get_value_for_datastore(barrier)
- logging.debug('Firing barrier %r', barrier.key())
- task_list.append(taskqueue.Task(
- url=path,
- countdown=countdown,
- name='ae-barrier-fire-%s-%s' % (pipeline_key.name(), purpose),
- params=dict(pipeline_key=pipeline_key, purpose=purpose),
- headers={'X-Ae-Pipeline-Key': pipeline_key}))
- else:
- logging.debug('Not firing barrier %r, Waiting for slots: %r',
- barrier.key(), pending_slots)
-
- # Blindly overwrite _BarrierRecords that have an updated status. This is
- # acceptable because by this point all finalization barriers for
- # generator children should have already had their final outputs assigned.
- if updated_barriers:
- db.put(updated_barriers)
-
- # Task continuation with sequence number to prevent fork-bombs.
- if len(results) == max_to_notify:
- the_match = re.match('(.*)-ae-barrier-notify-([0-9]+)', self.task_name)
- if the_match:
- prefix = the_match.group(1)
- end = int(the_match.group(2)) + 1
- else:
- prefix = self.task_name
- end = 0
- task_list.append(taskqueue.Task(
- name='%s-ae-barrier-notify-%d' % (prefix, end),
- url=self.barrier_handler_path,
- params=dict(
- slot_key=slot_key,
- cursor=query.cursor(),
- use_barrier_indexes=use_barrier_indexes)))
-
- if task_list:
- try:
- taskqueue.Queue(self.queue_name).add(task_list)
- except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError):
- pass
-
- def begin_abort(self, root_pipeline_key, abort_message):
- """Kicks off the abort process for a root pipeline and all its children.
-
- Args:
- root_pipeline_key: db.Key of the root pipeline to abort.
- abort_message: Message explaining why the abort happened, only saved
- into the root pipeline.
-
- Returns:
- True if the abort signal was sent successfully; False otherwise.
- """
- def txn():
- pipeline_record = db.get(root_pipeline_key)
- if pipeline_record is None:
- logging.warning(
- 'Tried to abort root pipeline ID "%s" but it does not exist.',
- root_pipeline_key.name())
- raise db.Rollback()
- if pipeline_record.status == _PipelineRecord.ABORTED:
- logging.warning(
- 'Tried to abort root pipeline ID "%s"; already in state: %s',
- root_pipeline_key.name(), pipeline_record.status)
- raise db.Rollback()
- if pipeline_record.abort_requested:
- logging.warning(
- 'Tried to abort root pipeline ID "%s"; abort signal already sent.',
- root_pipeline_key.name())
- raise db.Rollback()
-
- pipeline_record.abort_requested = True
- pipeline_record.abort_message = abort_message
- pipeline_record.put()
-
- task = taskqueue.Task(
- url=self.fanout_abort_handler_path,
- params=dict(root_pipeline_key=root_pipeline_key))
- task.add(queue_name=self.queue_name, transactional=True)
- return True
-
- return db.run_in_transaction(txn)
-
- def continue_abort(self,
- root_pipeline_key,
- cursor=None,
- max_to_notify=_MAX_ABORTS_TO_BEGIN):
- """Sends the abort signal to all children for a root pipeline.
-
- Args:
- root_pipeline_key: db.Key of the root pipeline to abort.
- cursor: The query cursor for enumerating _PipelineRecords when inserting
- tasks to cause child pipelines to terminate.
- max_to_notify: Used for testing.
- """
- if not isinstance(root_pipeline_key, db.Key):
- root_pipeline_key = db.Key(root_pipeline_key)
- # NOTE: The results of this query may include _PipelineRecord instances
- # that are not actually "reachable", meaning you cannot get to them by
- # starting at the root pipeline and following "fanned_out" onward. This
- # is acceptable because even these defunct _PipelineRecords will properly
- # set their status to ABORTED when the signal comes, regardless of any
- # other status they may have had.
- #
- # The only gotcha here is if a Pipeline's finalize method somehow modifies
- # its inputs (like deleting an input file). In the case there are
- # unreachable child pipelines, it will appear as if two finalize methods
- # have been called instead of just one. The saving grace here is that
- # finalize must be idempotent, so this *should* be harmless.
- query = (
- _PipelineRecord.all(cursor=cursor)
- .filter('root_pipeline =', root_pipeline_key))
- results = query.fetch(max_to_notify)
-
- task_list = []
- for pipeline_record in results:
- if pipeline_record.status not in (
- _PipelineRecord.RUN, _PipelineRecord.WAITING):
- continue
-
- pipeline_key = pipeline_record.key()
- task_list.append(taskqueue.Task(
- name='%s-%s-abort' % (self.task_name, pipeline_key.name()),
- url=self.abort_handler_path,
- params=dict(pipeline_key=pipeline_key, purpose=_BarrierRecord.ABORT),
- headers={'X-Ae-Pipeline-Key': pipeline_key}))
-
- # Task continuation with sequence number to prevent fork-bombs.
- if len(results) == max_to_notify:
- the_match = re.match('(.*)-([0-9]+)', self.task_name)
- if the_match:
- prefix = the_match.group(1)
- end = int(the_match.group(2)) + 1
- else:
- prefix = self.task_name
- end = 0
- task_list.append(taskqueue.Task(
- name='%s-%d' % (prefix, end),
- url=self.fanout_abort_handler_path,
- params=dict(root_pipeline_key=root_pipeline_key,
- cursor=query.cursor())))
-
- if task_list:
- try:
- taskqueue.Queue(self.queue_name).add(task_list)
- except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError):
- pass
-
- def start(self, pipeline, return_task=True, countdown=None, eta=None):
- """Starts a pipeline.
-
- Args:
- pipeline: Pipeline instance to run.
- return_task: When True, do not submit the task to start the pipeline
- but instead return it for someone else to enqueue.
- countdown: Time in seconds into the future that this Task should execute.
- Defaults to zero.
- eta: A datetime.datetime specifying the absolute time at which the task
- should be executed. Must not be specified if 'countdown' is specified.
- This may be timezone-aware or timezone-naive. If None, defaults to now.
- For pull tasks, no worker will be able to lease this task before the
- time indicated by eta.
-
- Returns:
- The task to start this pipeline if return_task was True.
-
- Raises:
- PipelineExistsError if the pipeline with the given ID already exists.
- """
- # Adjust all pipeline output keys for this Pipeline to be children of
- # the _PipelineRecord, that way we can write them all and submit in a
- # single transaction.
- for name, slot in pipeline.outputs._output_dict.iteritems():
- slot.key = db.Key.from_path(
- *slot.key.to_path(), **dict(parent=pipeline._pipeline_key))
-
- _, output_slots, params_text, params_blob = _generate_args(
- pipeline, pipeline.outputs, self.queue_name, self.base_path)
-
- @db.transactional(propagation=db.INDEPENDENT)
- def txn():
- pipeline_record = db.get(pipeline._pipeline_key)
- if pipeline_record is not None:
- raise PipelineExistsError(
- 'Pipeline with idempotence key "%s" already exists; params=%s' %
- (pipeline._pipeline_key.name(),
- _short_repr(pipeline_record.params)))
-
- entities_to_put = []
- for name, slot in pipeline.outputs._output_dict.iteritems():
- entities_to_put.append(_SlotRecord(
- key=slot.key,
- root_pipeline=pipeline._pipeline_key))
-
- entities_to_put.append(_PipelineRecord(
- key=pipeline._pipeline_key,
- root_pipeline=pipeline._pipeline_key,
- is_root_pipeline=True,
- # Bug in DB means we need to use the storage name here,
- # not the local property name.
- params=params_text,
- params_blob=params_blob,
- start_time=self._gettime(),
- class_path=pipeline._class_path,
- max_attempts=pipeline.max_attempts))
-
- entities_to_put.extend(_PipelineContext._create_barrier_entities(
- pipeline._pipeline_key,
- pipeline._pipeline_key,
- _BarrierRecord.FINALIZE,
- output_slots))
-
- db.put(entities_to_put)
-
- task = taskqueue.Task(
- url=self.pipeline_handler_path,
- params=dict(pipeline_key=pipeline._pipeline_key),
- headers={'X-Ae-Pipeline-Key': pipeline._pipeline_key},
- target=pipeline.target,
- countdown=countdown,
- eta=eta)
- if return_task:
- return task
- task.add(queue_name=self.queue_name, transactional=True)
-
- task = txn()
- # Immediately mark the output slots as existing so they can be filled
- # by asynchronous pipelines or used in test mode.
- for output_slot in pipeline.outputs._output_dict.itervalues():
- output_slot._exists = True
- return task
-
- def start_test(self, pipeline):
- """Starts a pipeline in the test mode.
-
- Args:
- pipeline: The Pipeline instance to test.
- """
- global _TEST_MODE, _TEST_ROOT_PIPELINE_KEY
- self.start(pipeline, return_task=True)
- _TEST_MODE = True
- _TEST_ROOT_PIPELINE_KEY = pipeline._pipeline_key
- try:
- self.evaluate_test(pipeline, root=True)
- finally:
- _TEST_MODE = False
-
- def evaluate_test(self, stage, root=False):
- """Recursively evaluates the given pipeline in test mode.
-
- Args:
- stage: The Pipeline instance to run at this stage in the flow.
- root: True if the supplied stage is the root of the pipeline.
- """
- args_adjusted = []
- for arg in stage.args:
- if isinstance(arg, PipelineFuture):
- arg = arg.default
- if isinstance(arg, Slot):
- value = arg.value
- arg._touched = True
- else:
- value = arg
- args_adjusted.append(value)
-
- kwargs_adjusted = {}
- for name, arg in stage.kwargs.iteritems():
- if isinstance(arg, PipelineFuture):
- arg = arg.default
- if isinstance(arg, Slot):
- value = arg.value
- arg._touched = True
- else:
- value = arg
- kwargs_adjusted[name] = value
-
- stage.args, stage.kwargs = args_adjusted, kwargs_adjusted
- pipeline_generator = mr_util.is_generator_function(stage.run)
- logging.debug('Running %s(*%s, **%s)', stage._class_path,
- _short_repr(stage.args), _short_repr(stage.kwargs))
-
- if stage.async:
- stage.run_test(*stage.args, **stage.kwargs)
- elif pipeline_generator:
- all_output_slots = set()
- try:
- pipeline_iter = stage.run_test(*stage.args, **stage.kwargs)
- except NotImplementedError:
- pipeline_iter = stage.run(*stage.args, **stage.kwargs)
-
- all_substages = set()
- next_value = None
- last_sub_stage = None
- while True:
- try:
- yielded = pipeline_iter.send(next_value)
- except StopIteration:
- break
-
- if isinstance(yielded, Pipeline):
- if yielded in all_substages:
- raise UnexpectedPipelineError(
- 'Already yielded pipeline object %r' % yielded)
- else:
- all_substages.add(yielded)
-
- last_sub_stage = yielded
- next_value = yielded.outputs
- all_output_slots.update(next_value._output_dict.itervalues())
- else:
- raise UnexpectedPipelineError(
- 'Yielded a disallowed value: %r' % yielded)
-
- if last_sub_stage:
- # Generator's outputs inherited from last running sub-stage.
- # If the generator changes its mind and doesn't yield anything, this
- # may not happen at all. Missing outputs will be caught when they
- # are passed to the stage as inputs, or verified from the outside by
- # the test runner.
- for slot_name, slot in last_sub_stage.outputs._output_dict.iteritems():
- stage.outputs._output_dict[slot_name] = slot
- # Any inherited slots won't be checked for declaration.
- all_output_slots.remove(slot)
- else:
- # Generator yielded no children, so treat it as a sync function.
- stage.outputs.default._set_value_test(stage._pipeline_key, None)
-
- # Enforce the policy of requiring all undeclared output slots from
- # child pipelines to be consumed by their parent generator.
- for slot in all_output_slots:
- if slot.name == 'default':
- continue
- if slot.filled and not slot._strict and not slot._touched:
- raise SlotNotDeclaredError(
- 'Undeclared output "%s"; all dynamic outputs from child '
- 'pipelines must be consumed.' % slot.name)
- else:
- try:
- result = stage.run_test(*stage.args, **stage.kwargs)
- except NotImplementedError:
- result = stage.run(*stage.args, **stage.kwargs)
- stage.outputs.default._set_value_test(stage._pipeline_key, result)
-
- # Enforce strict output usage at the top level.
- if root:
- found_outputs = set()
- for slot in stage.outputs._output_dict.itervalues():
- if slot.filled:
- found_outputs.add(slot.name)
- if slot.name == 'default':
- continue
- if slot.name not in stage.output_names:
- raise SlotNotDeclaredError(
- 'Undeclared output from root pipeline "%s"' % slot.name)
-
- missing_outputs = set(stage.output_names) - found_outputs
- if missing_outputs:
- raise SlotNotFilledError(
- 'Outputs %r were never filled.' % missing_outputs)
-
- logging.debug('Finalizing %s(*%s, **%s)', stage._class_path,
- _short_repr(stage.args), _short_repr(stage.kwargs))
- ran = False
- try:
- stage.finalized_test()
- ran = True
- except NotImplementedError:
- pass
- if not ran:
- try:
- stage.finalized()
- except NotImplementedError:
- pass
-
- def evaluate(self, pipeline_key, purpose=None, attempt=0):
- """Evaluates the given Pipeline and enqueues sub-stages for execution.
-
- Args:
- pipeline_key: The db.Key or stringified key of the _PipelineRecord to run.
- purpose: Why evaluate was called ('start', 'finalize', or 'abort').
- attempt: The attempt number that should be tried.
- """
- After._thread_init()
- InOrder._thread_init()
- InOrder._local._activated = False
-
- if not isinstance(pipeline_key, db.Key):
- pipeline_key = db.Key(pipeline_key)
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- logging.error('Pipeline ID "%s" does not exist.', pipeline_key.name())
- return
- if pipeline_record.status not in (
- _PipelineRecord.WAITING, _PipelineRecord.RUN):
- logging.error('Pipeline ID "%s" in bad state for purpose "%s": "%s"',
- pipeline_key.name(), purpose or _BarrierRecord.START,
- pipeline_record.status)
- return
-
- params = pipeline_record.params
- root_pipeline_key = \
- _PipelineRecord.root_pipeline.get_value_for_datastore(pipeline_record)
- default_slot_key = db.Key(params['output_slots']['default'])
-
- default_slot_record, root_pipeline_record = db.get([
- default_slot_key, root_pipeline_key])
- if default_slot_record is None:
- logging.error('Pipeline ID "%s" default slot "%s" does not exist.',
- pipeline_key.name(), default_slot_key)
- return
- if root_pipeline_record is None:
- logging.error('Pipeline ID "%s" root pipeline ID "%s" is missing.',
- pipeline_key.name(), root_pipeline_key.name())
- return
-
- # Always finalize if we're aborting so pipelines have a chance to cleanup
- # before they terminate. Pipelines must access 'was_aborted' to find
- # out how their finalization should work.
- abort_signal = (
- purpose == _BarrierRecord.ABORT or
- root_pipeline_record.abort_requested == True)
- finalize_signal = (
- (default_slot_record.status == _SlotRecord.FILLED and
- purpose == _BarrierRecord.FINALIZE) or abort_signal)
-
- try:
- pipeline_func_class = mr_util.for_name(pipeline_record.class_path)
- except ImportError, e:
- # This means something is wrong with the deployed code. Rely on the
- # taskqueue system to do retries.
- retry_message = '%s: %s' % (e.__class__.__name__, str(e))
- logging.exception(
- 'Could not locate %s#%s. %s',
- pipeline_record.class_path, pipeline_key.name(), retry_message)
- raise
-
- try:
- pipeline_func = pipeline_func_class.from_id(
- pipeline_key.name(),
- resolve_outputs=finalize_signal,
- _pipeline_record=pipeline_record)
- except SlotNotFilledError, e:
- logging.exception(
- 'Could not resolve arguments for %s#%s. Most likely this means there '
- 'is a bug in the Pipeline runtime or some intermediate data has been '
- 'deleted from the Datastore. Giving up.',
- pipeline_record.class_path, pipeline_key.name())
- self.transition_aborted(pipeline_key)
- return
- except Exception, e:
- retry_message = '%s: %s' % (e.__class__.__name__, str(e))
- logging.exception(
- 'Instantiating %s#%s raised exception. %s',
- pipeline_record.class_path, pipeline_key.name(), retry_message)
- self.transition_retry(pipeline_key, retry_message)
- if pipeline_record.params['task_retry']:
- raise
- else:
- return
- else:
- pipeline_generator = mr_util.is_generator_function(
- pipeline_func_class.run)
- caller_output = pipeline_func.outputs
-
- if (abort_signal and pipeline_func.async and
- pipeline_record.status == _PipelineRecord.RUN
- and not pipeline_func.try_cancel()):
- logging.warning(
- 'Could not cancel and abort mid-flight async pipeline: %r#%s',
- pipeline_func, pipeline_key.name())
- return
-
- if finalize_signal:
- try:
- pipeline_func._finalized_internal(
- self, pipeline_key, root_pipeline_key,
- caller_output, abort_signal)
- except Exception, e:
- # This means something is wrong with the deployed finalization code.
- # Rely on the taskqueue system to do retries.
- retry_message = '%s: %s' % (e.__class__.__name__, str(e))
- logging.exception('Finalizing %r#%s raised exception. %s',
- pipeline_func, pipeline_key.name(), retry_message)
- raise
- else:
- if not abort_signal:
- self.transition_complete(pipeline_key)
- return
-
- if abort_signal:
- logging.debug('Marking as aborted %s#%s', pipeline_func,
- pipeline_key.name())
- self.transition_aborted(pipeline_key)
- return
-
- if pipeline_record.current_attempt != attempt:
- logging.error(
- 'Received evaluation task for pipeline ID "%s" attempt %d but '
- 'current pending attempt is %d', pipeline_key.name(), attempt,
- pipeline_record.current_attempt)
- return
-
- if pipeline_record.current_attempt >= pipeline_record.max_attempts:
- logging.error(
- 'Received evaluation task for pipeline ID "%s" on attempt %d '
- 'but that exceeds max attempts %d', pipeline_key.name(), attempt,
- pipeline_record.max_attempts)
- return
-
- if pipeline_record.next_retry_time is not None:
- retry_time = pipeline_record.next_retry_time - _RETRY_WIGGLE_TIMEDELTA
- if self._gettime() <= retry_time:
- detail_message = (
- 'Received evaluation task for pipeline ID "%s" on attempt %d, '
- 'which will not be ready until: %s' % (pipeline_key.name(),
- pipeline_record.current_attempt, pipeline_record.next_retry_time))
- logging.warning(detail_message)
- raise UnexpectedPipelineError(detail_message)
-
- if pipeline_record.status == _PipelineRecord.RUN and pipeline_generator:
- if (default_slot_record.status == _SlotRecord.WAITING and
- not pipeline_record.fanned_out):
- # This properly handles the yield-less generator case when the
- # RUN state transition worked properly but outputting to the default
- # slot failed.
- self.fill_slot(pipeline_key, caller_output.default, None)
- return
-
- if (pipeline_record.status == _PipelineRecord.WAITING and
- pipeline_func.async):
- self.transition_run(pipeline_key)
-
- try:
- result = pipeline_func._run_internal(
- self, pipeline_key, root_pipeline_key, caller_output)
- except Exception, e:
- if self.handle_run_exception(pipeline_key, pipeline_func, e):
- raise
- else:
- return
-
- if pipeline_func.async:
- return
-
- if not pipeline_generator:
- # Catch any exceptions that are thrown when the pipeline's return
- # value is being serialized. This ensures that serialization errors
- # will cause normal abort/retry behavior.
- try:
- self.fill_slot(pipeline_key, caller_output.default, result)
- except Exception, e:
- retry_message = 'Bad return value. %s: %s' % (
- e.__class__.__name__, str(e))
- logging.exception(
- 'Generator %r#%s caused exception while serializing return '
- 'value %r. %s', pipeline_func, pipeline_key.name(), result,
- retry_message)
- self.transition_retry(pipeline_key, retry_message)
- if pipeline_func.task_retry:
- raise
- else:
- return
-
- expected_outputs = set(caller_output._output_dict.iterkeys())
- found_outputs = self.session_filled_output_names
- if expected_outputs != found_outputs:
- exception = SlotNotFilledError(
- 'Outputs %r for pipeline ID "%s" were never filled by "%s".' % (
- expected_outputs - found_outputs,
- pipeline_key.name(), pipeline_func._class_path))
- if self.handle_run_exception(pipeline_key, pipeline_func, exception):
- raise exception
- return
-
- pipeline_iter = result
- next_value = None
- last_sub_stage = None
- sub_stage = None
- sub_stage_dict = {}
- sub_stage_ordering = []
-
- while True:
- try:
- yielded = pipeline_iter.send(next_value)
- except StopIteration:
- break
- except Exception, e:
- if self.handle_run_exception(pipeline_key, pipeline_func, e):
- raise
- else:
- return
-
- if isinstance(yielded, Pipeline):
- if yielded in sub_stage_dict:
- raise UnexpectedPipelineError(
- 'Already yielded pipeline object %r with pipeline ID %s' %
- (yielded, yielded.pipeline_id))
-
- last_sub_stage = yielded
- next_value = PipelineFuture(yielded.output_names)
- next_value._after_all_pipelines.update(After._local._after_all_futures)
- next_value._after_all_pipelines.update(InOrder._local._in_order_futures)
- sub_stage_dict[yielded] = next_value
- sub_stage_ordering.append(yielded)
- InOrder._add_future(next_value)
-
- # To aid local testing, the task_retry flag (which instructs the
- # evaluator to raise all exceptions back up to the task queue) is
- # inherited by all children from the root down.
- yielded.task_retry = pipeline_func.task_retry
- else:
- raise UnexpectedPipelineError(
- 'Yielded a disallowed value: %r' % yielded)
-
- if last_sub_stage:
- # Final yielded stage inherits outputs from calling pipeline that were not
- # already filled during the generator's execution.
- inherited_outputs = params['output_slots']
- for slot_name in self.session_filled_output_names:
- del inherited_outputs[slot_name]
- sub_stage_dict[last_sub_stage]._inherit_outputs(
- pipeline_record.class_path, inherited_outputs)
- else:
- # Here the generator has yielded nothing, and thus acts as a synchronous
- # function. We can skip the rest of the generator steps completely and
- # fill the default output slot to cause finalizing.
- expected_outputs = set(caller_output._output_dict.iterkeys())
- expected_outputs.remove('default')
- found_outputs = self.session_filled_output_names
- if expected_outputs != found_outputs:
- exception = SlotNotFilledError(
- 'Outputs %r for pipeline ID "%s" were never filled by "%s".' % (
- expected_outputs - found_outputs,
- pipeline_key.name(), pipeline_func._class_path))
- if self.handle_run_exception(pipeline_key, pipeline_func, exception):
- raise exception
- else:
- self.fill_slot(pipeline_key, caller_output.default, None)
- self.transition_run(pipeline_key)
- return
-
- # Allocate any SlotRecords that do not yet exist.
- entities_to_put = []
- for future in sub_stage_dict.itervalues():
- for slot in future._output_dict.itervalues():
- if not slot._exists:
- entities_to_put.append(_SlotRecord(
- key=slot.key, root_pipeline=root_pipeline_key))
-
- # Allocate PipelineRecords and BarrierRecords for generator-run Pipelines.
- pipelines_to_run = set()
- all_children_keys = []
- all_output_slots = set()
- for sub_stage in sub_stage_ordering:
- future = sub_stage_dict[sub_stage]
-
- # Catch any exceptions that are thrown when the pipeline's parameters
- # are being serialized. This ensures that serialization errors will
- # cause normal retry/abort behavior.
- try:
- dependent_slots, output_slots, params_text, params_blob = \
- _generate_args(sub_stage, future, self.queue_name, self.base_path)
- except Exception, e:
- retry_message = 'Bad child arguments. %s: %s' % (
- e.__class__.__name__, str(e))
- logging.exception(
- 'Generator %r#%s caused exception while serializing args for '
- 'child pipeline %r. %s', pipeline_func, pipeline_key.name(),
- sub_stage, retry_message)
- self.transition_retry(pipeline_key, retry_message)
- if pipeline_func.task_retry:
- raise
- else:
- return
-
- child_pipeline_key = db.Key.from_path(
- _PipelineRecord.kind(), uuid.uuid4().hex)
- all_output_slots.update(output_slots)
- all_children_keys.append(child_pipeline_key)
-
- child_pipeline = _PipelineRecord(
- key=child_pipeline_key,
- root_pipeline=root_pipeline_key,
- # Bug in DB means we need to use the storage name here,
- # not the local property name.
- params=params_text,
- params_blob=params_blob,
- class_path=sub_stage._class_path,
- max_attempts=sub_stage.max_attempts)
- entities_to_put.append(child_pipeline)
-
- if not dependent_slots:
- # This child pipeline will run immediately.
- pipelines_to_run.add(child_pipeline_key)
- child_pipeline.start_time = self._gettime()
- else:
- entities_to_put.extend(_PipelineContext._create_barrier_entities(
- root_pipeline_key,
- child_pipeline_key,
- _BarrierRecord.START,
- dependent_slots))
-
- entities_to_put.extend(_PipelineContext._create_barrier_entities(
- root_pipeline_key,
- child_pipeline_key,
- _BarrierRecord.FINALIZE,
- output_slots))
-
- # This generator pipeline's finalization barrier must include all of the
- # outputs of any child pipelines that it runs. This ensures the finalized
- # calls will not happen until all child pipelines have completed.
- #
- # The transition_run() call below will update the FINALIZE _BarrierRecord
- # for this generator pipeline to include all of these child outputs in
- # its list of blocking_slots. That update is done transactionally to
- # make sure the _BarrierRecord only lists the slots that matter.
- #
- # However, the notify_barriers() method doesn't find _BarrierRecords
- # through the blocking_slots field. It finds them through _BarrierIndexes
- # entities. Thus, before we update the FINALIZE _BarrierRecord in
- # transition_run(), we need to write _BarrierIndexes for all child outputs.
- barrier_entities = _PipelineContext._create_barrier_entities(
- root_pipeline_key,
- pipeline_key,
- _BarrierRecord.FINALIZE,
- all_output_slots)
- # Ignore the first element which is the _BarrierRecord. That entity must
- # have already been created and put in the datastore for the parent
- # pipeline before this code generated child pipelines.
- barrier_indexes = barrier_entities[1:]
- entities_to_put.extend(barrier_indexes)
-
- db.put(entities_to_put)
-
- self.transition_run(pipeline_key,
- blocking_slot_keys=all_output_slots,
- fanned_out_pipelines=all_children_keys,
- pipelines_to_run=pipelines_to_run)
-
- @staticmethod
- def _create_barrier_entities(root_pipeline_key,
- child_pipeline_key,
- purpose,
- blocking_slot_keys):
- """Creates all of the entities required for a _BarrierRecord.
-
- Args:
- root_pipeline_key: The root pipeline this is part of.
- child_pipeline_key: The pipeline this barrier is for.
- purpose: _BarrierRecord.START or _BarrierRecord.FINALIZE.
- blocking_slot_keys: Set of db.Keys corresponding to _SlotRecords that
- this barrier should wait on before firing.
-
- Returns:
- List of entities, starting with the _BarrierRecord entity, followed by
- _BarrierIndexes used for firing when _SlotRecords are filled in the same
- order as the blocking_slot_keys list provided. All of these entities
- should be put in the Datastore to ensure the barrier fires properly.
- """
- result = []
-
- blocking_slot_keys = list(blocking_slot_keys)
-
- barrier = _BarrierRecord(
- parent=child_pipeline_key,
- key_name=purpose,
- target=child_pipeline_key,
- root_pipeline=root_pipeline_key,
- blocking_slots=blocking_slot_keys)
-
- result.append(barrier)
-
- for slot_key in blocking_slot_keys:
- barrier_index_path = []
- barrier_index_path.extend(slot_key.to_path())
- barrier_index_path.extend(child_pipeline_key.to_path())
- barrier_index_path.extend([_BarrierIndex.kind(), purpose])
- barrier_index_key = db.Key.from_path(*barrier_index_path)
- barrier_index = _BarrierIndex(
- key=barrier_index_key,
- root_pipeline=root_pipeline_key)
- result.append(barrier_index)
-
- return result
-
- def handle_run_exception(self, pipeline_key, pipeline_func, e):
- """Handles an exception raised by a Pipeline's user code.
-
- Args:
- pipeline_key: The pipeline that raised the error.
- pipeline_func: The class path name of the Pipeline that was running.
- e: The exception that was raised.
-
- Returns:
- True if the exception should be re-raised up through the calling stack
- by the caller of this method.
- """
- if isinstance(e, Retry):
- retry_message = str(e)
- logging.warning('User forced retry for pipeline ID "%s" of %r: %s',
- pipeline_key.name(), pipeline_func, retry_message)
- self.transition_retry(pipeline_key, retry_message)
- elif isinstance(e, Abort):
- abort_message = str(e)
- logging.warning('User forced abort for pipeline ID "%s" of %r: %s',
- pipeline_key.name(), pipeline_func, abort_message)
- pipeline_func.abort(abort_message)
- else:
- retry_message = '%s: %s' % (e.__class__.__name__, str(e))
- logging.exception('Generator %r#%s raised exception. %s',
- pipeline_func, pipeline_key.name(), retry_message)
- self.transition_retry(pipeline_key, retry_message)
-
- return pipeline_func.task_retry
-
- def transition_run(self,
- pipeline_key,
- blocking_slot_keys=None,
- fanned_out_pipelines=None,
- pipelines_to_run=None):
- """Marks an asynchronous or generator pipeline as running.
-
- Does nothing if the pipeline is no longer in a runnable state.
-
- Args:
- pipeline_key: The db.Key of the _PipelineRecord to update.
- blocking_slot_keys: List of db.Key instances that this pipeline's
- finalization barrier should wait on in addition to the existing one.
- This is used to update the barrier to include all child outputs. When
- None, the barrier will not be updated.
- fanned_out_pipelines: List of db.Key instances of _PipelineRecords that
- were fanned out by this generator pipeline. This is distinct from the
- 'pipelines_to_run' list because not all of the pipelines listed here
- will be immediately ready to execute. When None, then this generator
- yielded no children.
- pipelines_to_run: List of db.Key instances of _PipelineRecords that should
- be kicked off (fan-out) transactionally as part of this transition.
- When None, no child pipelines will run. All db.Keys in this list must
- also be present in the fanned_out_pipelines list.
-
- Raises:
- UnexpectedPipelineError if blocking_slot_keys was not empty and the
- _BarrierRecord has gone missing.
- """
- def txn():
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- logging.warning('Pipeline ID "%s" cannot be marked as run. '
- 'Does not exist.', pipeline_key.name())
- raise db.Rollback()
- if pipeline_record.status != _PipelineRecord.WAITING:
- logging.warning('Pipeline ID "%s" in bad state to be marked as run: %s',
- pipeline_key.name(), pipeline_record.status)
- raise db.Rollback()
-
- pipeline_record.status = _PipelineRecord.RUN
-
- if fanned_out_pipelines:
- # NOTE: We must model the pipeline relationship in a top-down manner,
- # meaning each pipeline must point forward to the pipelines that it
- # fanned out to. The reason is race conditions. If evaluate()
- # dies early, it may create many unused _PipelineRecord and _SlotRecord
- # instances that never progress. The only way we know which of these
- # are valid is by traversing the graph from the root, where the
- # fanned_out property refers to those pipelines that were run using a
- # transactional task.
- child_pipeline_list = list(fanned_out_pipelines)
- pipeline_record.fanned_out = child_pipeline_list
-
- if pipelines_to_run:
- child_indexes = [
- child_pipeline_list.index(p) for p in pipelines_to_run]
- child_indexes.sort()
- task = taskqueue.Task(
- url=self.fanout_handler_path,
- params=dict(parent_key=str(pipeline_key),
- child_indexes=child_indexes))
- task.add(queue_name=self.queue_name, transactional=True)
-
- pipeline_record.put()
-
- if blocking_slot_keys:
- # NOTE: Always update a generator pipeline's finalization barrier to
- # include all of the outputs of any pipelines that it runs, to ensure
- # that finalized calls will not happen until all child pipelines have
- # completed. This must happen transactionally with the enqueue of
- # the fan-out kickoff task above to ensure the child output slots and
- # the barrier blocking slots are the same.
- barrier_key = db.Key.from_path(
- _BarrierRecord.kind(), _BarrierRecord.FINALIZE,
- parent=pipeline_key)
- finalize_barrier = db.get(barrier_key)
- if finalize_barrier is None:
- raise UnexpectedPipelineError(
- 'Pipeline ID "%s" cannot update finalize barrier. '
- 'Does not exist.' % pipeline_key.name())
- else:
- finalize_barrier.blocking_slots = list(
- blocking_slot_keys.union(set(finalize_barrier.blocking_slots)))
- finalize_barrier.put()
-
- db.run_in_transaction(txn)
-
- def transition_complete(self, pipeline_key):
- """Marks the given pipeline as complete.
-
- Does nothing if the pipeline is no longer in a state that can be completed.
-
- Args:
- pipeline_key: db.Key of the _PipelineRecord that has completed.
- """
- def txn():
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- logging.warning(
- 'Tried to mark pipeline ID "%s" as complete but it does not exist.',
- pipeline_key.name())
- raise db.Rollback()
- if pipeline_record.status not in (
- _PipelineRecord.WAITING, _PipelineRecord.RUN):
- logging.warning(
- 'Tried to mark pipeline ID "%s" as complete, found bad state: %s',
- pipeline_key.name(), pipeline_record.status)
- raise db.Rollback()
-
- pipeline_record.status = _PipelineRecord.DONE
- pipeline_record.finalized_time = self._gettime()
- pipeline_record.put()
-
- db.run_in_transaction(txn)
-
- def transition_retry(self, pipeline_key, retry_message):
- """Marks the given pipeline as requiring another retry.
-
- Does nothing if all attempts have been exceeded.
-
- Args:
- pipeline_key: db.Key of the _PipelineRecord that needs to be retried.
- retry_message: User-supplied message indicating the reason for the retry.
- """
- def txn():
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- logging.warning(
- 'Tried to retry pipeline ID "%s" but it does not exist.',
- pipeline_key.name())
- raise db.Rollback()
- if pipeline_record.status not in (
- _PipelineRecord.WAITING, _PipelineRecord.RUN):
- logging.warning(
- 'Tried to retry pipeline ID "%s", found bad state: %s',
- pipeline_key.name(), pipeline_record.status)
- raise db.Rollback()
-
- params = pipeline_record.params
- offset_seconds = (
- params['backoff_seconds'] *
- (params['backoff_factor'] ** pipeline_record.current_attempt))
- pipeline_record.next_retry_time = (
- self._gettime() + datetime.timedelta(seconds=offset_seconds))
- pipeline_record.current_attempt += 1
- pipeline_record.retry_message = retry_message
- pipeline_record.status = _PipelineRecord.WAITING
-
- if pipeline_record.current_attempt >= pipeline_record.max_attempts:
- root_pipeline_key = (
- _PipelineRecord.root_pipeline.get_value_for_datastore(
- pipeline_record))
- logging.warning(
- 'Giving up on pipeline ID "%s" after %d attempt(s); causing abort '
- 'all the way to the root pipeline ID "%s"', pipeline_key.name(),
- pipeline_record.current_attempt, root_pipeline_key.name())
- # NOTE: We do *not* set the status to aborted here to ensure that
- # this pipeline will be finalized before it has been marked as aborted.
- pipeline_record.abort_message = (
- 'Aborting after %d attempts' % pipeline_record.current_attempt)
- task = taskqueue.Task(
- url=self.fanout_abort_handler_path,
- params=dict(root_pipeline_key=root_pipeline_key))
- task.add(queue_name=self.queue_name, transactional=True)
- else:
- task = taskqueue.Task(
- url=self.pipeline_handler_path,
- eta=pipeline_record.next_retry_time,
- params=dict(pipeline_key=pipeline_key,
- purpose=_BarrierRecord.START,
- attempt=pipeline_record.current_attempt),
- headers={'X-Ae-Pipeline-Key': pipeline_key})
- task.add(queue_name=self.queue_name, transactional=True)
-
- pipeline_record.put()
-
- db.run_in_transaction(txn)
-
- def transition_aborted(self, pipeline_key):
- """Makes the given pipeline as having aborted.
-
- Does nothing if the pipeline is in a bad state.
-
- Args:
- pipeline_key: db.Key of the _PipelineRecord that needs to be retried.
- """
- def txn():
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- logging.warning(
- 'Tried to abort pipeline ID "%s" but it does not exist.',
- pipeline_key.name())
- raise db.Rollback()
- if pipeline_record.status not in (
- _PipelineRecord.WAITING, _PipelineRecord.RUN):
- logging.warning(
- 'Tried to abort pipeline ID "%s", found bad state: %s',
- pipeline_key.name(), pipeline_record.status)
- raise db.Rollback()
-
- pipeline_record.status = _PipelineRecord.ABORTED
- pipeline_record.finalized_time = self._gettime()
- pipeline_record.put()
-
- db.run_in_transaction(txn)
-
-################################################################################
-
-
-class _BarrierHandler(webapp.RequestHandler):
- """Request handler for triggering barriers."""
-
- def post(self):
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- self.response.set_status(403)
- return
-
- context = _PipelineContext.from_environ(self.request.environ)
- context.notify_barriers(
- self.request.get('slot_key'),
- self.request.get('cursor'),
- use_barrier_indexes=self.request.get('use_barrier_indexes') == 'True')
-
-
-class _PipelineHandler(webapp.RequestHandler):
- """Request handler for running pipelines."""
-
- def post(self):
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- self.response.set_status(403)
- return
-
- context = _PipelineContext.from_environ(self.request.environ)
- context.evaluate(self.request.get('pipeline_key'),
- purpose=self.request.get('purpose'),
- attempt=int(self.request.get('attempt', '0')))
-
-
-class _FanoutAbortHandler(webapp.RequestHandler):
- """Request handler for fanning out abort notifications."""
-
- def post(self):
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- self.response.set_status(403)
- return
-
- context = _PipelineContext.from_environ(self.request.environ)
- context.continue_abort(
- self.request.get('root_pipeline_key'),
- self.request.get('cursor'))
-
-
-class _FanoutHandler(webapp.RequestHandler):
- """Request handler for fanning out pipeline children."""
-
- def post(self):
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- self.response.set_status(403)
- return
-
- context = _PipelineContext.from_environ(self.request.environ)
-
- # Set of stringified db.Keys of children to run.
- all_pipeline_keys = set()
-
- # For backwards compatibility with the old style of fan-out requests.
- all_pipeline_keys.update(self.request.get_all('pipeline_key'))
-
- # Fetch the child pipelines from the parent. This works around the 10KB
- # task payload limit. This get() is consistent-on-read and the fan-out
- # task is enqueued in the transaction that updates the parent, so the
- # fanned_out property is consistent here.
- parent_key = self.request.get('parent_key')
- child_indexes = [int(x) for x in self.request.get_all('child_indexes')]
- if parent_key:
- parent_key = db.Key(parent_key)
- parent = db.get(parent_key)
- for index in child_indexes:
- all_pipeline_keys.add(str(parent.fanned_out[index]))
-
- all_tasks = []
- all_pipelines = db.get([db.Key(pipeline_key) for pipeline_key in all_pipeline_keys])
- for child_pipeline in all_pipelines:
- if child_pipeline is None:
- continue
- all_tasks.append(taskqueue.Task(
- url=context.pipeline_handler_path,
- params=dict(pipeline_key=pipeline_key),
- target=child_pipeline.params['target'],
- headers={'X-Ae-Pipeline-Key': pipeline_key},
- name='ae-pipeline-fan-out-' + child_pipeline.key().name()))
-
- batch_size = 100 # Limit of taskqueue API bulk add.
- for i in xrange(0, len(all_tasks), batch_size):
- batch = all_tasks[i:i+batch_size]
- try:
- taskqueue.Queue(context.queue_name).add(batch)
- except (taskqueue.TombstonedTaskError, taskqueue.TaskAlreadyExistsError):
- pass
-
-
-class _CleanupHandler(webapp.RequestHandler):
- """Request handler for cleaning up a Pipeline."""
-
- def post(self):
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- self.response.set_status(403)
- return
-
- root_pipeline_key = db.Key(self.request.get('root_pipeline_key'))
- logging.debug('Cleaning up root_pipeline_key=%r', root_pipeline_key)
-
- # TODO(user): Accumulate all BlobKeys from _PipelineRecord and
- # _SlotRecord entities and delete them.
- pipeline_keys = (
- _PipelineRecord.all(keys_only=True)
- .filter('root_pipeline =', root_pipeline_key))
- db.delete(pipeline_keys)
- slot_keys = (
- _SlotRecord.all(keys_only=True)
- .filter('root_pipeline =', root_pipeline_key))
- db.delete(slot_keys)
- barrier_keys = (
- _BarrierRecord.all(keys_only=True)
- .filter('root_pipeline =', root_pipeline_key))
- db.delete(barrier_keys)
- status_keys = (
- _StatusRecord.all(keys_only=True)
- .filter('root_pipeline =', root_pipeline_key))
- db.delete(status_keys)
- barrier_index_keys = (
- _BarrierIndex.all(keys_only=True)
- .filter('root_pipeline =', root_pipeline_key))
- db.delete(barrier_index_keys)
-
-
-class _CallbackHandler(webapp.RequestHandler):
- """Receives asynchronous callback requests from humans or tasks."""
-
- def post(self):
- self.get()
-
- def get(self):
- try:
- self.run_callback()
- except _CallbackTaskError, e:
- logging.error(str(e))
- if 'HTTP_X_APPENGINE_TASKRETRYCOUNT' in self.request.environ:
- # Silently give up on tasks that have retried many times. This
- # probably means that the target pipeline has been deleted, so there's
- # no reason to keep trying this task forever.
- retry_count = int(
- self.request.environ.get('HTTP_X_APPENGINE_TASKRETRYCOUNT'))
- if retry_count > _MAX_CALLBACK_TASK_RETRIES:
- logging.error('Giving up on task after %d retries',
- _MAX_CALLBACK_TASK_RETRIES)
- return
-
- # NOTE: The undescriptive error code 400 are present to address security
- # risks of giving external users access to cause PipelineRecord lookups
- # and execution.
- self.response.set_status(400)
-
- def run_callback(self):
- """Runs the callback for the pipeline specified in the request.
-
- Raises:
- _CallbackTaskError if something was wrong with the request parameters.
- """
- pipeline_id = self.request.get('pipeline_id')
- if not pipeline_id:
- raise _CallbackTaskError('"pipeline_id" parameter missing.')
-
- pipeline_key = db.Key.from_path(_PipelineRecord.kind(), pipeline_id)
- pipeline_record = db.get(pipeline_key)
- if pipeline_record is None:
- raise _CallbackTaskError(
- 'Pipeline ID "%s" for callback does not exist.' % pipeline_id)
-
- params = pipeline_record.params
- real_class_path = params['class_path']
- try:
- pipeline_func_class = mr_util.for_name(real_class_path)
- except ImportError, e:
- raise _CallbackTaskError(
- 'Cannot load class named "%s" for pipeline ID "%s".'
- % (real_class_path, pipeline_id))
-
- if 'HTTP_X_APPENGINE_TASKNAME' not in self.request.environ:
- if pipeline_func_class.public_callbacks:
- pass
- elif pipeline_func_class.admin_callbacks:
- if not users.is_current_user_admin():
- raise _CallbackTaskError(
- 'Unauthorized callback for admin-only pipeline ID "%s"'
- % pipeline_id)
- else:
- raise _CallbackTaskError(
- 'External callback for internal-only pipeline ID "%s"'
- % pipeline_id)
-
- kwargs = {}
- for key in self.request.arguments():
- if key != 'pipeline_id':
- kwargs[str(key)] = self.request.get(key)
-
- def perform_callback():
- stage = pipeline_func_class.from_id(pipeline_id)
- if stage is None:
- raise _CallbackTaskError(
- 'Pipeline ID "%s" deleted during callback' % pipeline_id)
- return stage._callback_internal(kwargs)
-
- # callback_xg_transaction is a 3-valued setting (None=no trans,
- # False=1-eg-trans, True=xg-trans)
- if pipeline_func_class._callback_xg_transaction is not None:
- transaction_options = db.create_transaction_options(
- xg=pipeline_func_class._callback_xg_transaction)
- callback_result = db.run_in_transaction_options(transaction_options,
- perform_callback)
- else:
- callback_result = perform_callback()
-
- if callback_result is not None:
- status_code, content_type, content = callback_result
- self.response.set_status(status_code)
- self.response.headers['Content-Type'] = content_type
- self.response.out.write(content)
-
-
-################################################################################
-
-def _get_timestamp_ms(when):
- """Converts a datetime.datetime to integer milliseconds since the epoch.
-
- Requires special handling to preserve microseconds.
-
- Args:
- when: A datetime.datetime instance.
-
- Returns:
- Integer time since the epoch in milliseconds. If the supplied 'when' is
- None, the return value will be None.
- """
- if when is None:
- return None
- ms_since_epoch = float(time.mktime(when.utctimetuple()) * 1000.0)
- ms_since_epoch += when.microsecond / 1000.0
- return int(ms_since_epoch)
-
-
-def _get_internal_status(pipeline_key=None,
- pipeline_dict=None,
- slot_dict=None,
- barrier_dict=None,
- status_dict=None):
- """Gets the UI dictionary of a pipeline from a set of status dictionaries.
-
- Args:
- pipeline_key: The key of the pipeline to lookup.
- pipeline_dict: Dictionary mapping pipeline db.Key to _PipelineRecord.
- Default is an empty dictionary.
- slot_dict: Dictionary mapping slot db.Key to _SlotRecord.
- Default is an empty dictionary.
- barrier_dict: Dictionary mapping barrier db.Key to _BarrierRecord.
- Default is an empty dictionary.
- status_dict: Dictionary mapping status record db.Key to _StatusRecord.
- Default is an empty dictionary.
-
- Returns:
- Dictionary with the keys:
- classPath: The pipeline function being run.
- args: List of positional argument slot dictionaries.
- kwargs: Dictionary of keyword argument slot dictionaries.
- outputs: Dictionary of output slot dictionaries.
- children: List of child pipeline IDs.
- queueName: Queue on which this pipeline is running.
- afterSlotKeys: List of Slot Ids after which this pipeline runs.
- currentAttempt: Number of the current attempt, starting at 1.
- maxAttempts: Maximum number of attempts before aborting.
- backoffSeconds: Constant factor for backoff before retrying.
- backoffFactor: Exponential factor for backoff before retrying.
- status: Current status of the pipeline.
- startTimeMs: When this pipeline ran or will run due to retries, if present.
- endTimeMs: When this pipeline finalized, if present.
- lastRetryMessage: Why the pipeline failed during the last retry, if there
- was a failure; may be empty.
- abortMessage: For root pipelines, why the pipeline was aborted if it was
- aborted; may be empty.
-
- Dictionary will contain these keys if explicit status is set:
- statusTimeMs: When the status was set as milliseconds since the epoch.
- statusMessage: Status message, if present.
- statusConsoleUrl: The relative URL for the console of this pipeline.
- statusLinks: Dictionary mapping human-readable names to relative URLs
- for related URLs to this pipeline.
-
- Raises:
- PipelineStatusError if any input is bad.
- """
- if pipeline_dict is None:
- pipeline_dict = {}
- if slot_dict is None:
- slot_dict = {}
- if barrier_dict is None:
- barrier_dict = {}
- if status_dict is None:
- status_dict = {}
-
- pipeline_record = pipeline_dict.get(pipeline_key)
- if pipeline_record is None:
- raise PipelineStatusError(
- 'Could not find pipeline ID "%s"' % pipeline_key.name())
-
- params = pipeline_record.params
- root_pipeline_key = \
- _PipelineRecord.root_pipeline.get_value_for_datastore(pipeline_record)
- default_slot_key = db.Key(params['output_slots']['default'])
- start_barrier_key = db.Key.from_path(
- _BarrierRecord.kind(), _BarrierRecord.START, parent=pipeline_key)
- finalize_barrier_key = db.Key.from_path(
- _BarrierRecord.kind(), _BarrierRecord.FINALIZE, parent=pipeline_key)
- status_record_key = db.Key.from_path(
- _StatusRecord.kind(), pipeline_key.name())
-
- start_barrier = barrier_dict.get(start_barrier_key)
- finalize_barrier = barrier_dict.get(finalize_barrier_key)
- default_slot = slot_dict.get(default_slot_key)
- status_record = status_dict.get(status_record_key)
- if finalize_barrier is None:
- raise PipelineStatusError(
- 'Finalization barrier missing for pipeline ID "%s"' %
- pipeline_key.name())
- if default_slot is None:
- raise PipelineStatusError(
- 'Default output slot with key=%s missing for pipeline ID "%s"' % (
- default_slot_key, pipeline_key.name()))
-
- output = {
- 'classPath': pipeline_record.class_path,
- 'args': list(params['args']),
- 'kwargs': params['kwargs'].copy(),
- 'outputs': params['output_slots'].copy(),
- 'children': [key.name() for key in pipeline_record.fanned_out],
- 'queueName': params['queue_name'],
- 'afterSlotKeys': [str(key) for key in params['after_all']],
- 'currentAttempt': pipeline_record.current_attempt + 1,
- 'maxAttempts': pipeline_record.max_attempts,
- 'backoffSeconds': pipeline_record.params['backoff_seconds'],
- 'backoffFactor': pipeline_record.params['backoff_factor'],
- }
-
- # TODO(user): Truncate args, kwargs, and outputs to < 1MB each so we
- # can reasonably return the whole tree of pipelines and their outputs.
- # Coerce each value to a string to truncate if necessary. For now if the
- # params are too big it will just cause the whole status page to break.
-
- # Fix the key names in parameters to match JavaScript style.
- for value_dict in itertools.chain(
- output['args'], output['kwargs'].itervalues()):
- if 'slot_key' in value_dict:
- value_dict['slotKey'] = value_dict.pop('slot_key')
-
- # Figure out the pipeline's status.
- if pipeline_record.status in (_PipelineRecord.WAITING, _PipelineRecord.RUN):
- if default_slot.status == _SlotRecord.FILLED:
- status = 'finalizing'
- elif (pipeline_record.status == _PipelineRecord.WAITING and
- pipeline_record.next_retry_time is not None):
- status = 'retry'
- elif start_barrier and start_barrier.status == _BarrierRecord.WAITING:
- # start_barrier will be missing for root pipelines
- status = 'waiting'
- else:
- status = 'run'
- elif pipeline_record.status == _PipelineRecord.DONE:
- status = 'done'
- elif pipeline_record.status == _PipelineRecord.ABORTED:
- status = 'aborted'
-
- output['status'] = status
-
- if status_record:
- output['statusTimeMs'] = _get_timestamp_ms(status_record.status_time)
- if status_record.message:
- output['statusMessage'] = status_record.message
- if status_record.console_url:
- output['statusConsoleUrl'] = status_record.console_url
- if status_record.link_names:
- output['statusLinks'] = dict(
- zip(status_record.link_names, status_record.link_urls))
-
- # Populate status-depenedent fields.
- if status in ('run', 'finalizing', 'done', 'retry'):
- if pipeline_record.next_retry_time is not None:
- output['startTimeMs'] = _get_timestamp_ms(pipeline_record.next_retry_time)
- elif start_barrier:
- # start_barrier will be missing for root pipelines
- output['startTimeMs'] = _get_timestamp_ms(start_barrier.trigger_time)
- elif pipeline_record.start_time:
- # Assume this pipeline ran immediately upon spawning with no
- # start barrier or it's the root pipeline.
- output['startTimeMs'] = _get_timestamp_ms(pipeline_record.start_time)
-
- if status in ('finalizing',):
- output['endTimeMs'] = _get_timestamp_ms(default_slot.fill_time)
-
- if status in ('done',):
- output['endTimeMs'] = _get_timestamp_ms(pipeline_record.finalized_time)
-
- if pipeline_record.next_retry_time is not None:
- output['lastRetryMessage'] = pipeline_record.retry_message
-
- if pipeline_record.abort_message:
- output['abortMessage'] = pipeline_record.abort_message
-
- return output
-
-
-def _get_internal_slot(slot_key=None,
- filler_pipeline_key=None,
- slot_dict=None):
- """Gets information about a _SlotRecord for display in UI.
-
- Args:
- slot_key: The db.Key of the slot to fetch.
- filler_pipeline_key: In the case the slot has not yet been filled, assume
- that the given db.Key (for a _PipelineRecord) will be the filler of
- the slot in the future.
- slot_dict: The slot JSON dictionary.
-
- Returns:
- Dictionary with the keys:
- status: Slot status: 'filled' or 'waiting'
- fillTimeMs: Time in milliseconds since the epoch of when it was filled.
- value: The current value of the slot, which is a slot's JSON dictionary.
- fillerPipelineId: The pipeline ID of what stage has or should fill
- this slot.
-
- Raises:
- PipelineStatusError if any input is bad.
- """
- if slot_dict is None:
- slot_dict = {}
-
- slot_record = slot_dict.get(slot_key)
- if slot_record is None:
- raise PipelineStatusError(
- 'Could not find data for output slot key "%s".' % slot_key)
-
- output = {}
- if slot_record.status == _SlotRecord.FILLED:
- output['status'] = 'filled'
- output['fillTimeMs'] = _get_timestamp_ms(slot_record.fill_time)
- output['value'] = slot_record.value
- filler_pipeline_key = (
- _SlotRecord.filler.get_value_for_datastore(slot_record))
- else:
- output['status'] = 'waiting'
-
- if filler_pipeline_key:
- output['fillerPipelineId'] = filler_pipeline_key.name()
-
- return output
-
-
-def get_status_tree(root_pipeline_id):
- """Gets the full status tree of a pipeline.
-
- Args:
- root_pipeline_id: The pipeline ID to get status for.
-
- Returns:
- Dictionary with the keys:
- rootPipelineId: The ID of the root pipeline.
- slots: Mapping of slot IDs to result of from _get_internal_slot.
- pipelines: Mapping of pipeline IDs to result of _get_internal_status.
-
- Raises:
- PipelineStatusError if any input is bad.
- """
- root_pipeline_key = db.Key.from_path(_PipelineRecord.kind(), root_pipeline_id)
- root_pipeline_record = db.get(root_pipeline_key)
- if root_pipeline_record is None:
- raise PipelineStatusError(
- 'Could not find pipeline ID "%s"' % root_pipeline_id)
-
- # If the supplied root_pipeline_id is not actually the root pipeline that's
- # okay. We'll find the real root and override the value they passed in.
- actual_root_key = _PipelineRecord.root_pipeline.get_value_for_datastore(
- root_pipeline_record)
- if actual_root_key != root_pipeline_key:
- root_pipeline_key = actual_root_key
- root_pipeline_id = root_pipeline_key.id_or_name()
- root_pipeline_record = db.get(root_pipeline_key)
- if not root_pipeline_record:
- raise PipelineStatusError(
- 'Could not find pipeline ID "%s"' % root_pipeline_id)
-
- # Run all queries asynchronously.
- queries = {}
- for model in (_PipelineRecord, _SlotRecord, _BarrierRecord, _StatusRecord):
- queries[model] = model.all().filter(
- 'root_pipeline =', root_pipeline_key).run(batch_size=1000)
-
- found_pipeline_dict = dict(
- (stage.key(), stage) for stage in queries[_PipelineRecord])
- found_slot_dict = dict(
- (slot.key(), slot) for slot in queries[_SlotRecord])
- found_barrier_dict = dict(
- (barrier.key(), barrier) for barrier in queries[_BarrierRecord])
- found_status_dict = dict(
- (status.key(), status) for status in queries[_StatusRecord])
-
- # Breadth-first traversal of _PipelineRecord instances by following
- # _PipelineRecord.fanned_out property values.
- valid_pipeline_keys = set([root_pipeline_key])
- slot_filler_dict = {} # slot_key to pipeline_key
- expand_stack = [root_pipeline_record]
- while expand_stack:
- old_stack = expand_stack
- expand_stack = []
- for pipeline_record in old_stack:
- for child_pipeline_key in pipeline_record.fanned_out:
- # This will let us prune off those pipelines which were allocated in
- # the Datastore but were never run due to mid-flight task failures.
- child_pipeline_record = found_pipeline_dict.get(child_pipeline_key)
- if child_pipeline_record is None:
- raise PipelineStatusError(
- 'Pipeline ID "%s" points to child ID "%s" which does not exist.'
- % (pipeline_record.key().name(), child_pipeline_key.name()))
- expand_stack.append(child_pipeline_record)
- valid_pipeline_keys.add(child_pipeline_key)
-
- # Figure out the deepest pipeline that's responsible for outputting to
- # a particular _SlotRecord, so we can report which pipeline *should*
- # be the filler.
- child_outputs = child_pipeline_record.params['output_slots']
- for output_slot_key in child_outputs.itervalues():
- slot_filler_dict[db.Key(output_slot_key)] = child_pipeline_key
-
- output = {
- 'rootPipelineId': root_pipeline_id,
- 'slots': {},
- 'pipelines': {},
- }
-
- for pipeline_key in found_pipeline_dict.keys():
- if pipeline_key not in valid_pipeline_keys:
- continue
- output['pipelines'][pipeline_key.name()] = _get_internal_status(
- pipeline_key=pipeline_key,
- pipeline_dict=found_pipeline_dict,
- slot_dict=found_slot_dict,
- barrier_dict=found_barrier_dict,
- status_dict=found_status_dict)
-
- for slot_key, filler_pipeline_key in slot_filler_dict.iteritems():
- output['slots'][str(slot_key)] = _get_internal_slot(
- slot_key=slot_key,
- filler_pipeline_key=filler_pipeline_key,
- slot_dict=found_slot_dict)
-
- return output
-
-
-def get_pipeline_names():
- """Returns the class paths of all Pipelines defined in alphabetical order."""
- class_path_set = set()
- for cls in _PipelineMeta._all_classes:
- if cls.class_path is not None:
- class_path_set.add(cls.class_path)
- return sorted(class_path_set)
-
-
-def get_root_list(class_path=None, cursor=None, count=50):
- """Gets a list root Pipelines.
-
- Args:
- class_path: Optional. If supplied, only return root Pipelines with the
- given class_path. By default all root pipelines are returned.
- cursor: Optional. When supplied, the cursor returned from the last call to
- get_root_list which indicates where to pick up.
- count: How many pipeline returns to return.
-
- Returns:
- Dictionary with the keys:
- pipelines: The list of Pipeline records in the same format as
- returned by get_status_tree, but with only the roots listed.
- cursor: Cursor to pass back to this function to resume the query. Will
- only be present if there is another page of results.
-
- Raises:
- PipelineStatusError if any input is bad.
- """
- query = _PipelineRecord.all(cursor=cursor)
- if class_path:
- query.filter('class_path =', class_path)
- query.filter('is_root_pipeline =', True)
- query.order('-start_time')
-
- root_list = query.fetch(count)
-
- fetch_list = []
- for pipeline_record in root_list:
- fetch_list.append(db.Key(pipeline_record.params['output_slots']['default']))
- fetch_list.append(db.Key.from_path(
- _BarrierRecord.kind(), _BarrierRecord.FINALIZE,
- parent=pipeline_record.key()))
- fetch_list.append(db.Key.from_path(
- _StatusRecord.kind(), pipeline_record.key().name()))
-
- pipeline_dict = dict((stage.key(), stage) for stage in root_list)
- slot_dict = {}
- barrier_dict = {}
- status_dict = {}
- for entity in db.get(fetch_list):
- if isinstance(entity, _BarrierRecord):
- barrier_dict[entity.key()] = entity
- elif isinstance(entity, _SlotRecord):
- slot_dict[entity.key()] = entity
- elif isinstance(entity, _StatusRecord):
- status_dict[entity.key()] = entity
-
- results = []
- for pipeline_record in root_list:
- try:
- output = _get_internal_status(
- pipeline_record.key(),
- pipeline_dict=pipeline_dict,
- slot_dict=slot_dict,
- barrier_dict=barrier_dict,
- status_dict=status_dict)
- output['pipelineId'] = pipeline_record.key().name()
- results.append(output)
- except PipelineStatusError, e:
- output = {'status': e.message}
- output['classPath'] = ''
- output['pipelineId'] = pipeline_record.key().name()
- results.append(output)
-
- result_dict = {}
- cursor = query.cursor()
- query.with_cursor(cursor)
- if query.get(keys_only=True):
- result_dict.update(cursor=cursor)
- result_dict.update(pipelines=results)
- return result_dict
-
-################################################################################
-
-def set_enforce_auth(new_status):
- """Sets whether Pipeline API handlers rely on app.yaml for access control.
-
- Args:
- new_status: If True, then the Pipeline API will enforce its own
- access control on status and static file handlers. If False, then
- it will assume app.yaml is doing the enforcement.
- """
- global _ENFORCE_AUTH
- _ENFORCE_AUTH = new_status
-
-
-def create_handlers_map(prefix='.*'):
- """Create new handlers map.
-
- Args:
- prefix: url prefix to use.
-
- Returns:
- list of (regexp, handler) pairs for WSGIApplication constructor.
- """
- return [
- (prefix + '/output', _BarrierHandler),
- (prefix + '/run', _PipelineHandler),
- (prefix + '/finalized', _PipelineHandler),
- (prefix + '/cleanup', _CleanupHandler),
- (prefix + '/abort', _PipelineHandler),
- (prefix + '/fanout', _FanoutHandler),
- (prefix + '/fanout_abort', _FanoutAbortHandler),
- (prefix + '/callback', _CallbackHandler),
- (prefix + '/rpc/tree', status_ui._TreeStatusHandler),
- (prefix + '/rpc/class_paths', status_ui._ClassPathListHandler),
- (prefix + '/rpc/list', status_ui._RootListHandler),
- (prefix + '(/.+)', status_ui._StatusUiHandler),
- ]
diff --git a/python/demo/pipeline/status_ui.py b/python/demo/pipeline/status_ui.py
deleted file mode 100644
index 0f260d5..0000000
--- a/python/demo/pipeline/status_ui.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Status UI for Google App Engine Pipeline API."""
-
-import logging
-import os
-import pkgutil
-import traceback
-
-from google.appengine.api import users
-from google.appengine.ext import webapp
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-# Relative imports
-import util
-
-
-class _StatusUiHandler(webapp.RequestHandler):
- """Render the status UI."""
-
- _RESOURCE_MAP = {
- '/status': ('ui/status.html', 'text/html'),
- '/status.css': ('ui/status.css', 'text/css'),
- '/status.js': ('ui/status.js', 'text/javascript'),
- '/list': ('ui/root_list.html', 'text/html'),
- '/list.css': ('ui/root_list.css', 'text/css'),
- '/list.js': ('ui/root_list.js', 'text/javascript'),
- '/common.js': ('ui/common.js', 'text/javascript'),
- '/common.css': ('ui/common.css', 'text/css'),
- '/jquery-1.4.2.min.js': ('ui/jquery-1.4.2.min.js', 'text/javascript'),
- '/jquery.treeview.min.js': ('ui/jquery.treeview.min.js', 'text/javascript'),
- '/jquery.cookie.js': ('ui/jquery.cookie.js', 'text/javascript'),
- '/jquery.timeago.js': ('ui/jquery.timeago.js', 'text/javascript'),
- '/jquery.ba-hashchange.min.js': (
- 'ui/jquery.ba-hashchange.min.js', 'text/javascript'),
- '/jquery.json.min.js': ('ui/jquery.json.min.js', 'text/javascript'),
- '/jquery.treeview.css': ('ui/jquery.treeview.css', 'text/css'),
- '/treeview-default.gif': ('ui/images/treeview-default.gif', 'image/gif'),
- '/treeview-default-line.gif': (
- 'ui/images/treeview-default-line.gif', 'image/gif'),
- '/treeview-black.gif': ('ui/images/treeview-black.gif', 'image/gif'),
- '/treeview-black-line.gif': (
- 'ui/images/treeview-black-line.gif', 'image/gif'),
- '/images/treeview-default.gif': (
- 'ui/images/treeview-default.gif', 'image/gif'),
- '/images/treeview-default-line.gif': (
- 'ui/images/treeview-default-line.gif', 'image/gif'),
- '/images/treeview-black.gif': (
- 'ui/images/treeview-black.gif', 'image/gif'),
- '/images/treeview-black-line.gif': (
- 'ui/images/treeview-black-line.gif', 'image/gif'),
- }
-
- def get(self, resource=''):
- import pipeline # Break circular dependency
- if pipeline._ENFORCE_AUTH:
- if users.get_current_user() is None:
- logging.debug('User is not logged in')
- self.redirect(users.create_login_url(self.request.url))
- return
-
- if not users.is_current_user_admin():
- logging.debug('User is not admin: %r', users.get_current_user())
- self.response.out.write('Forbidden')
- self.response.set_status(403)
- return
-
- if resource not in self._RESOURCE_MAP:
- logging.debug('Could not find: %s', resource)
- self.response.set_status(404)
- self.response.out.write("Resource not found.")
- self.response.headers['Content-Type'] = 'text/plain'
- return
-
- relative_path, content_type = self._RESOURCE_MAP[resource]
- path = os.path.join(os.path.dirname(__file__), relative_path)
- if not pipeline._DEBUG:
- self.response.headers["Cache-Control"] = "public, max-age=300"
- self.response.headers["Content-Type"] = content_type
- try:
- data = pkgutil.get_data(__name__, relative_path)
- except AttributeError: # Python < 2.6.
- data = None
- self.response.out.write(data or open(path, 'rb').read())
-
-
-class _BaseRpcHandler(webapp.RequestHandler):
- """Base handler for JSON-RPC responses.
-
- Sub-classes should fill in the 'json_response' property. All exceptions will
- be returned.
- """
-
- def get(self):
- import pipeline # Break circular dependency
- if pipeline._ENFORCE_AUTH:
- if not users.is_current_user_admin():
- logging.debug('User is not admin: %r', users.get_current_user())
- self.response.out.write('Forbidden')
- self.response.set_status(403)
- return
-
- # XSRF protection
- if (not pipeline._DEBUG and
- self.request.headers.get('X-Requested-With') != 'XMLHttpRequest'):
- logging.debug('Request missing X-Requested-With header')
- self.response.out.write('Request missing X-Requested-With header')
- self.response.set_status(403)
- return
-
- self.json_response = {}
- try:
- self.handle()
- output = json.dumps(self.json_response, cls=util.JsonEncoder)
- except Exception, e:
- self.json_response.clear()
- self.json_response['error_class'] = e.__class__.__name__
- self.json_response['error_message'] = str(e)
- self.json_response['error_traceback'] = traceback.format_exc()
- output = json.dumps(self.json_response, cls=util.JsonEncoder)
-
- self.response.set_status(200)
- self.response.headers['Content-Type'] = 'application/json'
- self.response.headers['Cache-Control'] = 'no-cache'
- self.response.out.write(output)
-
- def handle(self):
- raise NotImplementedError('To be implemented by sub-classes.')
-
-
-class _TreeStatusHandler(_BaseRpcHandler):
- """RPC handler for getting the status of all children of root pipeline."""
-
- def handle(self):
- import pipeline # Break circular dependency
- self.json_response.update(
- pipeline.get_status_tree(self.request.get('root_pipeline_id')))
-
-
-class _ClassPathListHandler(_BaseRpcHandler):
- """RPC handler for getting the list of all Pipeline classes defined."""
-
- def handle(self):
- import pipeline # Break circular dependency
- self.json_response['classPaths'] = pipeline.get_pipeline_names()
-
-
-class _RootListHandler(_BaseRpcHandler):
- """RPC handler for getting the status of all root pipelines."""
-
- def handle(self):
- import pipeline # Break circular dependency
- self.json_response.update(
- pipeline.get_root_list(
- class_path=self.request.get('class_path'),
- cursor=self.request.get('cursor')))
diff --git a/python/demo/pipeline/ui/common.css b/python/demo/pipeline/ui/common.css
deleted file mode 100644
index e967126..0000000
--- a/python/demo/pipeline/ui/common.css
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2010 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-html, body {
- padding: 0;
- margin: 0;
- width: 100%;
- font-family: helvetica, sans-serif;
- font-size: 14px;
-}
-#butter {
- position: absolute;
- top: 0;
- min-width: 300px;
- background-color: #C5D7EF;
- text-align: center;
- padding: 5px;
- border-left: 1px solid #3366CC;
- border-right: 1px solid #3366CC;
- border-bottom: 1px solid #3366CC;
-}
-#butter.error {
- /* Make stack traces legible. */
- text-align: left;
-}
-
-#butter a:visited,
-#butter a:active,
-#butter a:link,
-#butter a:hover {
- color: #3366CC;
-}
diff --git a/python/demo/pipeline/ui/common.js b/python/demo/pipeline/ui/common.js
deleted file mode 100644
index 6abb12d..0000000
--- a/python/demo/pipeline/ui/common.js
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright 2010 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author: Brett Slatkin (bslatkin@google.com)
- */
-
-// Format a number with a minimum number of digits and a padding character.
-function leftPadNumber(number, minSize, paddingChar) {
- var stringified = '' + number;
- if (stringified.length < minSize) {
- for (var i = 0; i < (minSize - stringified.length); ++i) {
- stringified = paddingChar + stringified;
- }
- }
- return stringified;
-}
-
-
-// Convert milliseconds since the epoch to an ISO8601 datestring.
-function getIso8601String(timeMs) {
- var time = new Date();
- time.setTime(timeMs);
- return '' +
- time.getUTCFullYear() + '-' +
- leftPadNumber(time.getUTCMonth() + 1, 2, '0') + '-' +
- leftPadNumber(time.getUTCDate(), 2, '0') + 'T' +
- leftPadNumber(time.getUTCHours(), 2, '0') + ':' +
- leftPadNumber(time.getUTCMinutes(), 2, '0') + ':' +
- leftPadNumber(time.getUTCSeconds(), 2, '0') + 'Z';
-}
-
-
-// Get time string for job runtime. Specially handle number of days running as
-// a prefix and milliseconds as a suffix. If the runtime is less than one
-// minute, use the format "38.123 seconds" instead.
-function getElapsedTimeString(startTimestampMs, updatedTimestampMs) {
- var updatedDiff = Math.max(0, updatedTimestampMs - startTimestampMs);
- var updatedDays = Math.floor(updatedDiff / 86400000.0);
- updatedDiff -= (updatedDays * 86400000.0);
- var updatedHours = Math.floor(updatedDiff / 3600000.0);
- updatedDiff -= (updatedHours * 3600000.0);
- var updatedMinutes = Math.floor(updatedDiff / 60000.0);
- updatedDiff -= (updatedMinutes * 60000.0);
- var updatedSeconds = Math.floor(updatedDiff / 1000.0);
- updatedDiff -= (updatedSeconds * 1000.0);
- var updatedMs = Math.floor(updatedDiff / 1.0);
-
- var updatedString = '';
-
- if (updatedMinutes > 0) {
- if (updatedDays == 1) {
- updatedString = '1 day, ';
- } else if (updatedDays > 1) {
- updatedString = '' + updatedDays + ' days, ';
- }
- updatedString +=
- leftPadNumber(updatedHours, 2, '0') + ':' +
- leftPadNumber(updatedMinutes, 2, '0') + ':' +
- leftPadNumber(updatedSeconds, 2, '0');
- if (updatedMs > 0) {
- updatedString += '.' + leftPadNumber(updatedMs, 3, '0');
- }
- } else {
- updatedString += updatedSeconds;
- updatedString += '.' + leftPadNumber(updatedMs, 3, '0');
- updatedString += ' seconds';
- }
-
- return updatedString;
-}
-
-
-// Clears the status butter.
-function clearButter() {
- $('#butter').css('display', 'none');
-}
-
-
-// Sets the status butter, optionally indicating if it's an error message.
-function setButter(message, error, traceback, asHtml) {
- var butter = $('#butter');
- // Prevent flicker on butter update by hiding it first.
- butter.css('display', 'none');
- if (error) {
- butter.removeClass('info').addClass('error');
- } else {
- butter.removeClass('error').addClass('info');
- }
- butter.children().remove();
- if (asHtml) {
- butter.append($('<div>').html(message));
- } else {
- butter.append($('<div>').text(message));
- }
-
- function centerButter() {
- butter.css('left', ($(window).width() - $(butter).outerWidth()) / 2);
- }
-
- if (traceback) {
- var showDetail = $('<a href="">').text('Detail');
- showDetail.click(function(event) {
- $('#butter-detail').toggle();
- centerButter();
- event.preventDefault();
- });
- var butterDetail = $('<pre id="butter-detail">').text(traceback);
- butterDetail.css('display', 'none');
-
- butter.append(showDetail);
- butter.append(butterDetail);
- }
- centerButter();
- butter.css('display', null);
-}
-
-
-// Given an AJAX error message (which is empty or null on success) and a
-// data payload containing JSON, parses the data payload and returns the object.
-// Server-side errors and AJAX errors will be brought to the user's attention
-// if present in the response object
-function getResponseDataJson(error, data) {
- var response = null;
- try {
- response = $.parseJSON(data);
- } catch (e) {
- error = '' + e;
- }
- if (response && response.error_class) {
- error = response.error_class + ': ' + response.error_message;
- setButter('Error -- ' + error, true, response.error_traceback);
- } else if (!response) {
- setButter('Error -- Could not parse response JSON data.', true);
- } else {
- return response;
- }
- return null;
-}
diff --git a/python/demo/pipeline/ui/images/treeview-black-line.gif b/python/demo/pipeline/ui/images/treeview-black-line.gif
deleted file mode 100644
index e549687..0000000
--- a/python/demo/pipeline/ui/images/treeview-black-line.gif
+++ /dev/null
Binary files differ
diff --git a/python/demo/pipeline/ui/images/treeview-black.gif b/python/demo/pipeline/ui/images/treeview-black.gif
deleted file mode 100644
index d549b9f..0000000
--- a/python/demo/pipeline/ui/images/treeview-black.gif
+++ /dev/null
Binary files differ
diff --git a/python/demo/pipeline/ui/images/treeview-default-line.gif b/python/demo/pipeline/ui/images/treeview-default-line.gif
deleted file mode 100644
index 37114d3..0000000
--- a/python/demo/pipeline/ui/images/treeview-default-line.gif
+++ /dev/null
Binary files differ
diff --git a/python/demo/pipeline/ui/images/treeview-default.gif b/python/demo/pipeline/ui/images/treeview-default.gif
deleted file mode 100644
index a12ac52..0000000
--- a/python/demo/pipeline/ui/images/treeview-default.gif
+++ /dev/null
Binary files differ
diff --git a/python/demo/pipeline/ui/jquery-1.4.2.min.js b/python/demo/pipeline/ui/jquery-1.4.2.min.js
deleted file mode 100644
index 7c24308..0000000
--- a/python/demo/pipeline/ui/jquery-1.4.2.min.js
+++ /dev/null
@@ -1,154 +0,0 @@
-/*!
- * jQuery JavaScript Library v1.4.2
- * http://jquery.com/
- *
- * Copyright 2010, John Resig
- * Dual licensed under the MIT or GPL Version 2 licenses.
- * http://jquery.org/license
- *
- * Includes Sizzle.js
- * http://sizzlejs.com/
- * Copyright 2010, The Dojo Foundation
- * Released under the MIT, BSD, and GPL Licenses.
- *
- * Date: Sat Feb 13 22:33:48 2010 -0500
- */
-(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
-e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
-j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
-"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
-true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
-Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
-(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
-a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
-"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
-function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
-c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
-L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
-"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
-a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
-d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
-a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
-!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
-true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
-var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
-parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
-false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
-s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
-applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
-else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
-a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
-w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
-cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
-i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
-" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
-this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
-e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
-c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
-a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
-function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
-k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
-C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
-null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
-e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
-f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
-if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
-fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
-d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
-"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
-a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
-isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
-{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
-if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
-e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
-"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
-d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
-!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
-toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
-u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
-function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
-if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
-e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
-t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
-g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
-for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
-1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
-CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
-relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
-l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
-h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
-CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
-g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
-text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
-setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
-h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
-m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
-"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
-h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
-!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
-h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
-q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
-if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
-(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
-function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
-gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
-c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
-{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
-"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
-d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
-a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
-1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
-a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
-c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
-wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
-prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
-this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
-return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
-""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
-this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
-u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
-1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
-return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
-""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
-c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
-c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
-function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
-Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
-"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
-a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
-a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
-"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
-serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
-function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
-global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
-e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
-"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
-false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
-false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
-c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
-d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
-g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
-1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
-"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
-if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
-this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
-"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
-animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
-j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
-this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
-"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
-c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
-this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
-this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
-e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
-c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
-function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
-this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
-k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
-f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
-a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
-c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
-d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
-f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
-"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
-e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/python/demo/pipeline/ui/jquery.ba-hashchange.min.js b/python/demo/pipeline/ui/jquery.ba-hashchange.min.js
deleted file mode 100644
index 3c607ba..0000000
--- a/python/demo/pipeline/ui/jquery.ba-hashchange.min.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * jQuery hashchange event - v1.3 - 7/21/2010
- * http://benalman.com/projects/jquery-hashchange-plugin/
- *
- * Copyright (c) 2010 "Cowboy" Ben Alman
- * Dual licensed under the MIT and GPL licenses.
- * http://benalman.com/about/license/
- */
-(function($,e,b){var c="hashchange",h=document,f,g=$.event.special,i=h.documentMode,d="on"+c in e&&(i===b||i>7);function a(j){j=j||location.href;return"#"+j.replace(/^[^#]*#?(.*)$/,"$1")}$.fn[c]=function(j){return j?this.bind(c,j):this.trigger(c)};$.fn[c].delay=50;g[c]=$.extend(g[c],{setup:function(){if(d){return false}$(f.start)},teardown:function(){if(d){return false}$(f.stop)}});f=(function(){var j={},p,m=a(),k=function(q){return q},l=k,o=k;j.start=function(){p||n()};j.stop=function(){p&&clearTimeout(p);p=b};function n(){var r=a(),q=o(m);if(r!==m){l(m=r,q);$(e).trigger(c)}else{if(q!==m){location.href=location.href.replace(/#.*/,"")+q}}p=setTimeout(n,$.fn[c].delay)}$.browser.msie&&!d&&(function(){var q,r;j.start=function(){if(!q){r=$.fn[c].src;r=r&&r+a();q=$('<iframe tabindex="-1" title="empty"/>').hide().one("load",function(){r||l(a());n()}).attr("src",r||"javascript:0").insertAfter("body")[0].contentWindow;h.onpropertychange=function(){try{if(event.propertyName==="title"){q.document.title=h.title}}catch(s){}}}};j.stop=k;o=function(){return a(q.location.href)};l=function(v,s){var u=q.document,t=$.fn[c].domain;if(v!==s){u.title=h.title;u.open();t&&u.write('<script>document.domain="'+t+'"<\/script>');u.close();q.location.hash=v}}})();return j})()})(jQuery,this);
\ No newline at end of file
diff --git a/python/demo/pipeline/ui/jquery.cookie.js b/python/demo/pipeline/ui/jquery.cookie.js
deleted file mode 100644
index 8e8e1d9..0000000
--- a/python/demo/pipeline/ui/jquery.cookie.js
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Cookie plugin
- *
- * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
- * Dual licensed under the MIT and GPL licenses:
- * http://www.opensource.org/licenses/mit-license.php
- * http://www.gnu.org/licenses/gpl.html
- *
- */
-
-/**
- * Create a cookie with the given name and value and other optional parameters.
- *
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Set the value of a cookie.
- * @example $.cookie('the_cookie', 'the_value', {expires: 7, path: '/', domain: 'jquery.com', secure: true});
- * @desc Create a cookie with all available options.
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Create a session cookie.
- * @example $.cookie('the_cookie', null);
- * @desc Delete a cookie by passing null as value.
- *
- * @param String name The name of the cookie.
- * @param String value The value of the cookie.
- * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
- * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
- * If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
- * If set to null or omitted, the cookie will be a session cookie and will not be retained
- * when the the browser exits.
- * @option String path The value of the path atribute of the cookie (default: path of page that created the cookie).
- * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
- * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
- * require a secure protocol (like HTTPS).
- * @type undefined
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl@stilbuero.de
- */
-
-/**
- * Get the value of a cookie with the given name.
- *
- * @example $.cookie('the_cookie');
- * @desc Get the value of a cookie.
- *
- * @param String name The name of the cookie.
- * @return The value of the cookie.
- * @type String
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl@stilbuero.de
- */
-jQuery.cookie = function(name, value, options) {
- if (typeof value != 'undefined') { // name and value given, set cookie
- options = options || {};
- if (value === null) {
- value = '';
- options.expires = -1;
- }
- var expires = '';
- if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
- var date;
- if (typeof options.expires == 'number') {
- date = new Date();
- date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
- } else {
- date = options.expires;
- }
- expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
- }
- var path = options.path ? '; path=' + options.path : '';
- var domain = options.domain ? '; domain=' + options.domain : '';
- var secure = options.secure ? '; secure' : '';
- document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
- } else { // only name given, get cookie
- var cookieValue = null;
- if (document.cookie && document.cookie != '') {
- var cookies = document.cookie.split(';');
- for (var i = 0; i < cookies.length; i++) {
- var cookie = jQuery.trim(cookies[i]);
- // Does this cookie string begin with the name we want?
- if (cookie.substring(0, name.length + 1) == (name + '=')) {
- cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
- break;
- }
- }
- }
- return cookieValue;
- }
-};
\ No newline at end of file
diff --git a/python/demo/pipeline/ui/jquery.json.min.js b/python/demo/pipeline/ui/jquery.json.min.js
deleted file mode 100644
index bad4a0a..0000000
--- a/python/demo/pipeline/ui/jquery.json.min.js
+++ /dev/null
@@ -1,31 +0,0 @@
-
-(function($){$.toJSON=function(o)
-{if(typeof(JSON)=='object'&&JSON.stringify)
-return JSON.stringify(o);var type=typeof(o);if(o===null)
-return"null";if(type=="undefined")
-return undefined;if(type=="number"||type=="boolean")
-return o+"";if(type=="string")
-return $.quoteString(o);if(type=='object')
-{if(typeof o.toJSON=="function")
-return $.toJSON(o.toJSON());if(o.constructor===Date)
-{var month=o.getUTCMonth()+1;if(month<10)month='0'+month;var day=o.getUTCDate();if(day<10)day='0'+day;var year=o.getUTCFullYear();var hours=o.getUTCHours();if(hours<10)hours='0'+hours;var minutes=o.getUTCMinutes();if(minutes<10)minutes='0'+minutes;var seconds=o.getUTCSeconds();if(seconds<10)seconds='0'+seconds;var milli=o.getUTCMilliseconds();if(milli<100)milli='0'+milli;if(milli<10)milli='0'+milli;return'"'+year+'-'+month+'-'+day+'T'+
-hours+':'+minutes+':'+seconds+'.'+milli+'Z"';}
-if(o.constructor===Array)
-{var ret=[];for(var i=0;i<o.length;i++)
-ret.push($.toJSON(o[i])||"null");return"["+ret.join(",")+"]";}
-var pairs=[];for(var k in o){var name;var type=typeof k;if(type=="number")
-name='"'+k+'"';else if(type=="string")
-name=$.quoteString(k);else
-continue;if(typeof o[k]=="function")
-continue;var val=$.toJSON(o[k]);pairs.push(name+":"+val);}
-return"{"+pairs.join(", ")+"}";}};$.evalJSON=function(src)
-{if(typeof(JSON)=='object'&&JSON.parse)
-return JSON.parse(src);return eval("("+src+")");};$.secureEvalJSON=function(src)
-{if(typeof(JSON)=='object'&&JSON.parse)
-return JSON.parse(src);var filtered=src;filtered=filtered.replace(/\\["\\\/bfnrtu]/g,'@');filtered=filtered.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,']');filtered=filtered.replace(/(?:^|:|,)(?:\s*\[)+/g,'');if(/^[\],:{}\s]*$/.test(filtered))
-return eval("("+src+")");else
-throw new SyntaxError("Error parsing JSON, source is not valid.");};$.quoteString=function(string)
-{if(string.match(_escapeable))
-{return'"'+string.replace(_escapeable,function(a)
-{var c=_meta[a];if(typeof c==='string')return c;c=a.charCodeAt();return'\\u00'+Math.floor(c/16).toString(16)+(c%16).toString(16);})+'"';}
-return'"'+string+'"';};var _escapeable=/["\\\x00-\x1f\x7f-\x9f]/g;var _meta={'\b':'\\b','\t':'\\t','\n':'\\n','\f':'\\f','\r':'\\r','"':'\\"','\\':'\\\\'};})(jQuery);
\ No newline at end of file
diff --git a/python/demo/pipeline/ui/jquery.timeago.js b/python/demo/pipeline/ui/jquery.timeago.js
deleted file mode 100644
index 739c3e7..0000000
--- a/python/demo/pipeline/ui/jquery.timeago.js
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * timeago: a jQuery plugin, version: 0.9.1 (2010-08-30)
- * @requires jQuery v1.2.3 or later
- *
- * Timeago is a jQuery plugin that makes it easy to support automatically
- * updating fuzzy timestamps (e.g. "4 minutes ago" or "about 1 day ago").
- *
- * For usage and examples, visit:
- * http://timeago.yarp.com/
- *
- * Licensed under the MIT:
- * http://www.opensource.org/licenses/mit-license.php
- *
- * Copyright (c) 2008-2010, Ryan McGeary (ryanonjavascript -[at]- mcgeary [*dot*] org)
- */
-(function($) {
- $.timeago = function(timestamp) {
- if (timestamp instanceof Date) return inWords(timestamp);
- else if (typeof timestamp == "string") return inWords($.timeago.parse(timestamp));
- else return inWords($.timeago.datetime(timestamp));
- };
- var $t = $.timeago;
-
- $.extend($.timeago, {
- settings: {
- refreshMillis: 60000,
- allowFuture: false,
- strings: {
- prefixAgo: null,
- prefixFromNow: null,
- suffixAgo: "ago",
- suffixFromNow: "from now",
- seconds: "%d seconds", // Local modification.
- minute: "about a minute",
- minutes: "%d minutes",
- hour: "about an hour",
- hours: "about %d hours",
- day: "a day",
- days: "%d days",
- month: "about a month",
- months: "%d months",
- year: "about a year",
- years: "%d years",
- numbers: []
- }
- },
- inWords: function(distanceMillis) {
- var $l = this.settings.strings;
- var prefix = $l.prefixAgo;
- var suffix = $l.suffixAgo;
- if (this.settings.allowFuture) {
- if (distanceMillis < 0) {
- prefix = $l.prefixFromNow;
- suffix = $l.suffixFromNow;
- }
- distanceMillis = Math.abs(distanceMillis);
- }
-
- var seconds = distanceMillis / 1000;
- var minutes = seconds / 60;
- var hours = minutes / 60;
- var days = hours / 24;
- var years = days / 365;
-
- function substitute(stringOrFunction, number) {
- var string = $.isFunction(stringOrFunction) ? stringOrFunction(number) : stringOrFunction;
- var value = ($l.numbers && $l.numbers[number]) || number;
- return string.replace(/%d/i, value);
- }
-
- var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) ||
- seconds < 90 && substitute($l.minute, 1) ||
- minutes < 45 && substitute($l.minutes, Math.round(minutes)) ||
- minutes < 90 && substitute($l.hour, 1) ||
- hours < 24 && substitute($l.hours, Math.round(hours)) ||
- hours < 48 && substitute($l.day, 1) ||
- days < 30 && substitute($l.days, Math.floor(days)) ||
- days < 60 && substitute($l.month, 1) ||
- days < 365 && substitute($l.months, Math.floor(days / 30)) ||
- years < 2 && substitute($l.year, 1) ||
- substitute($l.years, Math.floor(years));
-
- return $.trim([prefix, words, suffix].join(" "));
- },
- parse: function(iso8601) {
- var s = $.trim(iso8601);
- s = s.replace(/\.\d\d\d+/,""); // remove milliseconds
- s = s.replace(/-/,"/").replace(/-/,"/");
- s = s.replace(/T/," ").replace(/Z/," UTC");
- s = s.replace(/([\+-]\d\d)\:?(\d\d)/," $1$2"); // -04:00 -> -0400
- return new Date(s);
- },
- datetime: function(elem) {
- // jQuery's `is()` doesn't play well with HTML5 in IE
- var isTime = $(elem).get(0).tagName.toLowerCase() == "time"; // $(elem).is("time");
- var iso8601 = isTime ? $(elem).attr("datetime") : $(elem).attr("title");
- return $t.parse(iso8601);
- }
- });
-
- $.fn.timeago = function() {
- var self = this;
- self.each(refresh);
-
- var $s = $t.settings;
- if ($s.refreshMillis > 0) {
- setInterval(function() { self.each(refresh); }, $s.refreshMillis);
- }
- return self;
- };
-
- function refresh() {
- var data = prepareData(this);
- if (!isNaN(data.datetime)) {
- $(this).text(inWords(data.datetime));
- }
- return this;
- }
-
- function prepareData(element) {
- element = $(element);
- if (!element.data("timeago")) {
- element.data("timeago", { datetime: $t.datetime(element) });
- var text = $.trim(element.text());
- if (text.length > 0) element.attr("title", text);
- }
- return element.data("timeago");
- }
-
- function inWords(date) {
- return $t.inWords(distance(date));
- }
-
- function distance(date) {
- return (new Date().getTime() - date.getTime());
- }
-
- // fix for IE6 suckage
- document.createElement("abbr");
- document.createElement("time");
-})(jQuery);
diff --git a/python/demo/pipeline/ui/jquery.treeview.css b/python/demo/pipeline/ui/jquery.treeview.css
deleted file mode 100644
index 042dae4..0000000
--- a/python/demo/pipeline/ui/jquery.treeview.css
+++ /dev/null
@@ -1,70 +0,0 @@
-.treeview, .treeview ul {
- padding: 0;
- margin: 0;
- list-style: none;
-}
-
-.treeview ul {
- background-color: white;
- /* local modification
- margin-top: 4px; */
-}
-
-.treeview .hitarea {
- background: url(images/treeview-default.gif) -64px -25px no-repeat;
- height: 16px;
- width: 16px;
- margin-left: -16px;
- float: left;
- cursor: pointer;
-}
-/* fix for IE6 */
-* html .hitarea {
- display: inline;
- float:none;
-}
-
-.treeview li {
- margin: 0;
- /* local modification
- padding: 3px 0pt 3px 16px; */
-}
-
-.treeview a.selected {
- background-color: #eee;
-}
-
-#treecontrol { margin: 1em 0; display: none; }
-
-.treeview .hover { /* local modification color: blue; */ cursor: pointer; }
-
-.treeview li { background: url(images/treeview-default-line.gif) 0 0 no-repeat; }
-.treeview li.collapsable, .treeview li.expandable { background-position: 0 -177px; /* local modification */ }
-
-.treeview .expandable-hitarea { background-position: -80px -3px; }
-
-.treeview li.last { background-position: 0 -1766px }
-.treeview li.lastCollapsable, .treeview li.lastExpandable { background-image: url(images/treeview-default.gif); }
-.treeview li.lastCollapsable { background-position: 0 -111px }
-.treeview li.lastExpandable { background-position: -32px -67px }
-
-.treeview div.lastCollapsable-hitarea, .treeview div.lastExpandable-hitarea { background-position: 0; }
-
-.treeview-red li { background-image: url(images/treeview-red-line.gif); }
-.treeview-red .hitarea, .treeview-red li.lastCollapsable, .treeview-red li.lastExpandable { background-image: url(images/treeview-red.gif); }
-
-.treeview-black li { background-image: url(images/treeview-black-line.gif); }
-.treeview-black .hitarea, .treeview-black li.lastCollapsable, .treeview-black li.lastExpandable { background-image: url(images/treeview-black.gif); }
-
-.treeview-gray li { background-image: url(images/treeview-gray-line.gif); }
-.treeview-gray .hitarea, .treeview-gray li.lastCollapsable, .treeview-gray li.lastExpandable { background-image: url(images/treeview-gray.gif); }
-
-.treeview-famfamfam li { background-image: url(images/treeview-famfamfam-line.gif); }
-.treeview-famfamfam .hitarea, .treeview-famfamfam li.lastCollapsable, .treeview-famfamfam li.lastExpandable { background-image: url(images/treeview-famfamfam.gif); }
-
-
-.filetree li { padding: 3px 0 2px 16px; }
-.filetree span.folder, .filetree span.file { padding: 1px 0 1px 16px; display: block; }
-.filetree span.folder { background: url(images/folder.gif) 0 0 no-repeat; }
-.filetree li.expandable span.folder { background: url(images/folder-closed.gif) 0 0 no-repeat; }
-.filetree span.file { background: url(images/file.gif) 0 0 no-repeat; }
diff --git a/python/demo/pipeline/ui/jquery.treeview.min.js b/python/demo/pipeline/ui/jquery.treeview.min.js
deleted file mode 100644
index 96202d9..0000000
--- a/python/demo/pipeline/ui/jquery.treeview.min.js
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
- * Treeview 1.4 - jQuery plugin to hide and show branches of a tree
- *
- * http://bassistance.de/jquery-plugins/jquery-plugin-treeview/
- * http://docs.jquery.com/Plugins/Treeview
- *
- * Copyright (c) 2007 Jörn Zaefferer
- *
- * Dual licensed under the MIT and GPL licenses:
- * http://www.opensource.org/licenses/mit-license.php
- * http://www.gnu.org/licenses/gpl.html
- *
- * Revision: $Id: jquery.treeview.js 4684 2008-02-07 19:08:06Z joern.zaefferer $
- *
- */;(function($){$.extend($.fn,{swapClass:function(c1,c2){var c1Elements=this.filter('.'+c1);this.filter('.'+c2).removeClass(c2).addClass(c1);c1Elements.removeClass(c1).addClass(c2);return this;},replaceClass:function(c1,c2){return this.filter('.'+c1).removeClass(c1).addClass(c2).end();},hoverClass:function(className){className=className||"hover";return this.hover(function(){$(this).addClass(className);},function(){$(this).removeClass(className);});},heightToggle:function(animated,callback){animated?this.animate({height:"toggle"},animated,callback):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();if(callback)callback.apply(this,arguments);});},heightHide:function(animated,callback){if(animated){this.animate({height:"hide"},animated,callback);}else{this.hide();if(callback)this.each(callback);}},prepareBranches:function(settings){if(!settings.prerendered){this.filter(":last-child:not(ul)").addClass(CLASSES.last);this.filter((settings.collapsed?"":"."+CLASSES.closed)+":not(."+CLASSES.open+")").find(">ul").hide();}return this.filter(":has(>ul)");},applyClasses:function(settings,toggler){this.filter(":has(>ul):not(:has(>a))").find(">span").click(function(event){toggler.apply($(this).next());}).add($("a",this)).hoverClass();if(!settings.prerendered){this.filter(":has(>ul:hidden)").addClass(CLASSES.expandable).replaceClass(CLASSES.last,CLASSES.lastExpandable);this.not(":has(>ul:hidden)").addClass(CLASSES.collapsable).replaceClass(CLASSES.last,CLASSES.lastCollapsable);this.prepend("<div class=\""+CLASSES.hitarea+"\"/>").find("div."+CLASSES.hitarea).each(function(){var classes="";$.each($(this).parent().attr("class").split(" "),function(){classes+=this+"-hitarea ";});$(this).addClass(classes);});}this.find("div."+CLASSES.hitarea).click(toggler);},treeview:function(settings){settings=$.extend({cookieId:"treeview"},settings);if(settings.add){return this.trigger("add",[settings.add]);}if(settings.toggle){var 
callback=settings.toggle;settings.toggle=function(){return callback.apply($(this).parent()[0],arguments);};}function treeController(tree,control){function handler(filter){return function(){toggler.apply($("div."+CLASSES.hitarea,tree).filter(function(){return filter?$(this).parent("."+filter).length:true;}));return false;};}$("a:eq(0)",control).click(handler(CLASSES.collapsable));$("a:eq(1)",control).click(handler(CLASSES.expandable));$("a:eq(2)",control).click(handler());}function toggler(){$(this).parent().find(">.hitarea").swapClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).swapClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().swapClass(CLASSES.collapsable,CLASSES.expandable).swapClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightToggle(settings.animated,settings.toggle);if(settings.unique){$(this).parent().siblings().find(">.hitarea").replaceClass(CLASSES.collapsableHitarea,CLASSES.expandableHitarea).replaceClass(CLASSES.lastCollapsableHitarea,CLASSES.lastExpandableHitarea).end().replaceClass(CLASSES.collapsable,CLASSES.expandable).replaceClass(CLASSES.lastCollapsable,CLASSES.lastExpandable).find(">ul").heightHide(settings.animated,settings.toggle);}}function serialize(){function binary(arg){return arg?1:0;}var data=[];branches.each(function(i,e){data[i]=$(e).is(":has(>ul:visible)")?1:0;});$.cookie(settings.cookieId,data.join(""));}function deserialize(){var stored=$.cookie(settings.cookieId);if(stored){var data=stored.split("");branches.each(function(i,e){$(e).find(">ul")[parseInt(data[i])?"show":"hide"]();});}}this.addClass("treeview");var branches=this.find("li").prepareBranches(settings);switch(settings.persist){case"cookie":var toggleCallback=settings.toggle;settings.toggle=function(){serialize();if(toggleCallback){toggleCallback.apply(this,arguments);}};deserialize();break;case"location":var current=this.find("a").filter(function(){return 
this.href.toLowerCase()==location.href.toLowerCase();});if(current.length){current.addClass("selected").parents("ul, li").add(current.next()).show();}break;}branches.applyClasses(settings,toggler);if(settings.control){treeController(this,settings.control);$(settings.control).show();}return this.bind("add",function(event,branches){$(branches).prev().removeClass(CLASSES.last).removeClass(CLASSES.lastCollapsable).removeClass(CLASSES.lastExpandable).find(">.hitarea").removeClass(CLASSES.lastCollapsableHitarea).removeClass(CLASSES.lastExpandableHitarea);$(branches).find("li").andSelf().prepareBranches(settings).applyClasses(settings,toggler);});}});var CLASSES=$.fn.treeview.classes={open:"open",closed:"closed",expandable:"expandable",expandableHitarea:"expandable-hitarea",lastExpandableHitarea:"lastExpandable-hitarea",collapsable:"collapsable",collapsableHitarea:"collapsable-hitarea",lastCollapsableHitarea:"lastCollapsable-hitarea",lastCollapsable:"lastCollapsable",lastExpandable:"lastExpandable",last:"last",hitarea:"hitarea"};$.fn.Treeview=$.fn.treeview;})(jQuery);
\ No newline at end of file
diff --git a/python/demo/pipeline/ui/root_list.css b/python/demo/pipeline/ui/root_list.css
deleted file mode 100644
index 2f1e50a..0000000
--- a/python/demo/pipeline/ui/root_list.css
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2012 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#content {
- margin: 10px 20px;
-}
-#root-list {
- border-collapse: collapse;
- width: 80%;
-}
-#root-list th,
-#root-list td {
- text-align: left;
- padding: 4px 8px;
- min-width: 75px;
-}
-#root-list tbody td {
- border-top: 1px solid #ccc;
-}
-#filter_form {
- margin: 0 0 1em 0;
-}
-.status {
- text-transform: capitalize;
-}
-abbr {
- cursor: help;
- border: 0;
-}
diff --git a/python/demo/pipeline/ui/root_list.html b/python/demo/pipeline/ui/root_list.html
deleted file mode 100644
index f4ffdc8..0000000
--- a/python/demo/pipeline/ui/root_list.html
+++ /dev/null
@@ -1,61 +0,0 @@
-<!doctype html>
-<html>
-<head>
- <meta charset="utf-8">
- <link rel="stylesheet" href="common.css" type="text/css" charset="utf-8">
- <link rel="stylesheet" href="list.css" type="text/css" charset="utf-8">
- <link rel="stylesheet" href="jquery.treeview.css" type="text/css" charset="utf-8">
- <script type="text/javascript" src="jquery-1.4.2.min.js"></script>
- <script type="text/javascript" src="jquery.cookie.js"></script>
- <script type="text/javascript" src="jquery.treeview.min.js"></script>
- <script type="text/javascript" src="jquery.timeago.js"></script>
- <script type="text/javascript" src="jquery.ba-hashchange.min.js"></script>
- <script type="text/javascript" src="jquery.json.min.js"></script>
- <script type="text/javascript" src="common.js"></script>
- <script type="text/javascript" src="list.js"></script>
- <script type="text/javascript">
- $(document).ready(initRootList);
- </script>
- <title>Root Pipelines</title>
-</head>
-<body>
-
-<div id="butter" style="display: none;"></div>
-
-<div id="content">
-
- <h1 class="page-title">Root Pipelines</h1>
-
- <form method="get" id="filter_form">
- Filter:
- <select name="class_path" id="filter_menu" onchange="form.submit()" autocomplete="off">
- <option value="">All</option>
- </select>
- </form>
-
- <table id="root-list" style="display: none">
- <thead>
- <tr>
- <th>Class Path</th>
- <th>Status</th>
- <th>Start Time</th>
- <th>Run Time</th>
- <th>Links</th>
- </tr>
- </thead>
- <tfoot>
- <tr>
- <td colspan="5">
- <a href="" id="next-link" style="display: none">Next »</a>
- </td>
- </tr>
- </tfoot>
- <tbody></tbody>
- </table>
-
- <div id="empty-list-message" style="display: none"></div>
-
-</div>
-
-</body>
-</html>
diff --git a/python/demo/pipeline/ui/root_list.js b/python/demo/pipeline/ui/root_list.js
deleted file mode 100644
index 39e670a..0000000
--- a/python/demo/pipeline/ui/root_list.js
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Copyright 2012 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author: Brett Slatkin (bslatkin@google.com)
- */
-
-
-function initRootList() {
- setButter('Loading root jobs...');
- $.ajax({
- type: 'GET',
- url: 'rpc/list' + window.location.search,
- dataType: 'text',
- error: function(request, textStatus) {
- getResponseDataJson(textStatus);
- },
- success: function(data, textStatus, request) {
- var response = getResponseDataJson(null, data);
- if (response) {
- clearButter();
- initRootListDone(response);
- }
- }
- });
-}
-
-
-function initRootListDone(response) {
- if (response.pipelines && response.pipelines.length > 0) {
- $('#root-list').show();
- if (response.cursor) {
- // Prepend the cursor to the next link. This may have a suffix of
- // the class_path from initRootNamesDone() below.
- var href = $('#next-link').attr('href');
- $('#next-link').attr('href', '?cursor=' + response.cursor + href);
- $('#next-link').show();
- }
-
- $.each(response.pipelines, function(index, infoMap) {
- var row = $('<tr>');
- $('<td class="class-path">').text(infoMap.classPath).appendTo(row);
- $('<td class="status">').text(infoMap.status).appendTo(row);
-
- if (infoMap.startTimeMs) {
- var sinceSpan = $('<abbr class="timeago">');
- var isoDate = getIso8601String(infoMap.startTimeMs);
- sinceSpan.attr('title', isoDate);
- sinceSpan.text(isoDate);
- sinceSpan.timeago();
- $('<td class="start-time">').append(sinceSpan).appendTo(row);
- } else {
- $('<td class="start-time">').text('-').appendTo(row);
- }
-
- if (infoMap.endTimeMs) {
- $('<td class="run-time">').text(getElapsedTimeString(
- infoMap.startTimeMs, infoMap.endTimeMs)).appendTo(row);
- } else {
- $('<td class="run-time">').text('-').appendTo(row);
- }
-
- $('<td class="links">')
- .append(
- $('<a>')
- .attr('href', 'status?root=' + infoMap.pipelineId)
- .text(infoMap.pipelineId))
- .appendTo(row);
- $('#root-list>tbody').append(row);
- });
- } else {
- $('#empty-list-message').text('No pipelines found.').show();
- }
-
- initRootNames();
-}
-
-
-function initRootNames() {
- setButter('Loading names...');
- $.ajax({
- type: 'GET',
- url: 'rpc/class_paths',
- dataType: 'text',
- error: function(request, textStatus) {
- getResponseDataJson(textStatus);
- },
- success: function(data, textStatus, request) {
- var response = getResponseDataJson(null, data);
- if (response) {
- clearButter();
- initRootNamesDone(response);
- }
- }
- });
-}
-
-
-function initRootNamesDone(response) {
- if (response.classPaths) {
- var filterMenu = $('#filter_menu');
-
- $.each(response.classPaths, function(index, path) {
- // Ignore internal pipelines.
- if (path.match(/\.?pipeline\./)) {
- return;
- }
-
- var option = $('<option>').val(path).text(path);
- if (window.location.search.indexOf(path) != -1) {
- option.attr('selected', 'selected');
- // Append the class name selected to the "next page" link. This
- // may already have a value from initRootListDone() above.
- var href = $('#next-link').attr('href');
- $('#next-link').attr('href', href + '&class_path=' + path);
- }
- option.appendTo(filterMenu);
- });
- }
-}
diff --git a/python/demo/pipeline/ui/status.css b/python/demo/pipeline/ui/status.css
deleted file mode 100644
index f4d5105..0000000
--- a/python/demo/pipeline/ui/status.css
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright 2010 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/* Major page sections. */
-html, body {
- overflow: hidden;
-}
-#sidebar {
- top: 0;
- left: 0;
- position: absolute;
- width: 30%;
- height: 100%;
- overflow: auto;
- padding: 0;
- border-right: 1px solid grey;
- margin: 0;
- background-color: #f8f8f8;
-}
-#content {
- border: 0;
- margin: 0 0 0 30%;
- padding: 0 0 0 1px; /* must match border+padding on #sidebar */
-}
-#detail {
- padding: 10px;
- overflow-y: auto;
-}
-#control {
- padding: 0;
- margin: 0;
- height: 25px;
- width: 100%;
- border-bottom: 1px solid #808080;
-}
-#control-table {
- height: 100%;
- float: right;
-}
-.control-cell {
- text-align: center;
- vertical-align: middle;
- padding: 0 4px;
-}
-#status-console {
- border-top: 1px solid grey;
- border-right: 0;
- border-bottom: 0;
- border-left: 0;
- padding: 0;
- margin: 0;
-}
-
-/* shared styling of pipeline info divs */
-.status-message {
-}
-.status-message-text {
- font-family: monospace;
-}
-abbr {
- cursor: help;
- border: 0;
-}
-.status-links > a {
- margin-right: 5px;
-}
-.status-title {
- text-transform: capitalize;
-}
-
-/* detail-specific styling */
-.status-param,
-.retry-param {
- padding-left: 1em;
- font-size: 0.85em;
-}
-.slot-detail,
-.status-param-value {
- padding-left: 1em;
- word-wrap: break-word;
-}
-.value-disclosure-more {
- display: none;
-}
-.run-after-pipeline-id,
-.child-pipeline-id,
-.child-link,
-.run-after-link {
- display: block;
- padding-left: 1em;
- font-size: 0.85em;
-}
-#detail .status-box {
- font-size: 1em;
-}
-#detail .outputs-container,
-#detail .param-container,
-#detail .child-container,
-#detail .run-after-container,
-#detail .status-retry-params {
- margin-top: 1em;
-}
-#detail .status-param-value,
-#detail .slot-value {
- font-family: monospace;
-}
-
-/* sidebar */
-#sidebar .status-title {
-}
-#sidebar .status-message {
- word-wrap: break-word;
-}
-#sidebar .status-time {
-}
-#sidebar .status-runtime {
-}
-#sidebar .status-links {
-}
-#sidebar .status-box {
- font-size: 0.85em;
- padding: 3px 3px;
- min-width: 200px;
- border-left: 1px solid #BFBFBF;
- border-right: 0;
- border-top: 0;
- border-bottom: 1px solid #BFBFBF;
-}
-#sidebar .status-waiting {
- background-color: #EDEDED;
-}
-#sidebar .status-run,
-#sidebar .status-finalizing {
- background-color: #fff;
-}
-#sidebar .status-done {
- background-color: #BEEDBE;
-}
-#sidebar .status-aborted,
-#sidebar .status-canceled {
- background-color: #EDBEC6;
-}
-#sidebar .status-retry {
- background-color: #EDE7B2;
-}
-.treeview li {
- padding: 0 0 0 16px;
-}
-.disclosure {
- text-decoration: underline;
- color: blue;
- padding-top: 0.5em;
- padding-left: 3px;
- display: block;
-}
-.selected-indicator {
- display: none;
-}
-.selected-link .selected-indicator {
- display: block;
- float: right;
- margin: 0;
- padding: 0;
- font-size: 30px;
- color: #000;
- font-weight: bold;
- line-height: 15px;
-}
-.status-links .selected-link {
- font-weight: bold !important;
-}
-.status-links a:active,
-.status-links a:link,
-.status-links a:visited {
- text-decoration: underline;
- color: blue;
- font-weight: normal;
-}
-.disclosure:hover,
-.status-links a:hover,
-.detail-link a:hover {
- text-decoration: underline;
-}
-#sidebar .detail-link {
- word-wrap: break-word;
-}
-.detail-link a {
- font-size: 1.2em;
- font-weight: bold;
- text-decoration: none;
-}
-.detail-link a:active,
-.detail-link a:link,
-.detail-link a:visited {
- color: black;
-}
diff --git a/python/demo/pipeline/ui/status.html b/python/demo/pipeline/ui/status.html
deleted file mode 100644
index 1874394..0000000
--- a/python/demo/pipeline/ui/status.html
+++ /dev/null
@@ -1,57 +0,0 @@
-<!doctype html>
-<html>
-<head>
- <meta charset="utf-8">
- <link rel="stylesheet" href="common.css" type="text/css" charset="utf-8">
- <link rel="stylesheet" href="status.css" type="text/css" charset="utf-8">
- <link rel="stylesheet" href="jquery.treeview.css" type="text/css" charset="utf-8">
- <script type="text/javascript" src="jquery-1.4.2.min.js"></script>
- <script type="text/javascript" src="jquery.cookie.js"></script>
- <script type="text/javascript" src="jquery.treeview.min.js"></script>
- <script type="text/javascript" src="jquery.timeago.js"></script>
- <script type="text/javascript" src="jquery.ba-hashchange.min.js"></script>
- <script type="text/javascript" src="jquery.json.min.js"></script>
- <script type="text/javascript" src="common.js"></script>
- <script type="text/javascript" src="status.js"></script>
- <script type="text/javascript">
- $(document).ready(initStatus);
- </script>
-</head>
-<body>
-
-<div id="butter" style="display: none;"></div>
-
-<div id="sidebar" style="display: none;">
-<!-- Tree will go here -->
-</div>
-
-<div id="content">
-
- <div id="control" style="display: none;">
- <table id="control-table" border="0" cellpadding="0" cellspacing="0">
- <tr>
- <td class="control-cell">
- <a class="abort-link" href="#" style="display: none;">Abort</a>
- </td>
- <td class="control-cell">
- <a class="delete-link" href="#" style="display: none;">Delete</a>
- </td>
- <td class="control-cell">
- <a class="refresh-link" href="#">Refresh</a>
- </td>
- <td class="control-cell">
- <label for="auto-refresh">Auto-pilot</label>
- <input type="checkbox" id="auto-refresh" checked="checked">
- </td>
- </tr>
- </table>
- </div>
-
- <div id="detail"><!-- Pipeline detail goes here --></div>
-
- <iframe id="status-console" src="javascript:''" style="display: none;" frameborder="0" border="0"></iframe>
-
-</div>
-
-</body>
-</html>
diff --git a/python/demo/pipeline/ui/status.js b/python/demo/pipeline/ui/status.js
deleted file mode 100644
index eca6da5..0000000
--- a/python/demo/pipeline/ui/status.js
+++ /dev/null
@@ -1,971 +0,0 @@
-/*
- * Copyright 2010 Google Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author: Brett Slatkin (bslatkin@google.com)
- */
-
-// Global variables.
-var AUTO_REFRESH = true;
-var ROOT_PIPELINE_ID = null;
-var STATUS_MAP = null;
-var LANG = null;
-
-
-// Adjusts the height/width of the embedded status console iframe.
-function adjustStatusConsole() {
- var statusConsole = $('#status-console');
- var detail = $('#detail');
- var sidebar = $('#sidebar');
- var control = $('#control');
-
- // NOTE: 16 px here is the height of the resize grip in most browsers.
- // Need to specify this explicitly because some browsers (eg, Firefox)
- // cause the overflow scrollbars to bounce around the page randomly when
- // they accidentally overlap the resize grip.
- if (statusConsole.css('display') == 'none') {
- var paddingAndMargin = detail.outerHeight() - detail.height() + 16;
- detail.css('max-height', (sidebar.outerHeight() - paddingAndMargin) + 'px');
- } else {
- detail.css('max-height', '200px');
- statusConsole.width(
- $(window).width() - sidebar.outerWidth());
- statusConsole.height(
- $(window).height() - (statusConsole.offset().top + 16));
- }
-}
-
-
-// Gets the ID of the pipeline info in the left-nav.
-function getTreePipelineElementId(value) {
- if (value.indexOf('#item-pipeline-') == 0) {
- return value;
- } else {
- return '#item-pipeline-' + value;
- }
-}
-
-
-// Scrolls to element of the pipeline in the tree.
-function scrollTreeToPipeline(pipelineIdOrElement) {
- var element = pipelineIdOrElement;
- if (!(pipelineIdOrElement instanceof jQuery)) {
- element = $(getTreePipelineElementId(pipelineIdOrElement));
- }
- $('#sidebar').scrollTop(element.attr('offsetTop'));
- $('#sidebar').scrollLeft(element.attr('offsetLeft'));
-}
-
-
-// Opens all pipelines down to the target one if not already expanded and
-// scroll that pipeline into view.
-function expandTreeToPipeline(pipelineId) {
- if (pipelineId == null) {
- return;
- }
- var elementId = getTreePipelineElementId(pipelineId);
- var parents = $(elementId).parents('.expandable');
- if (parents.size() > 0) {
- // The toggle function will scroll to highlight the pipeline.
- parents.children('.hitarea').click();
- } else {
- // No children, so just scroll.
- scrollTreeToPipeline(pipelineId);
- }
-}
-
-
-// Handles when the user toggles a leaf of the tree.
-function handleTreeToggle(index, element) {
- var parentItem = $(element).parent();
- var collapsing = parentItem.hasClass('expandable');
- if (collapsing) {
- } else {
- // When expanded be sure the pipeline and its children are showing.
- scrollTreeToPipeline(parentItem);
- }
-}
-
-
-// Counts the number of total and active children for the given pipeline.
-// Will include the supplied pipeline in the totals.
-function countChildren(pipelineId) {
- var current = STATUS_MAP.pipelines[pipelineId];
- if (!current) {
- return [0, 0];
- }
- var total = 1;
- var done = 0;
- if (current.status == 'done') {
- done += 1;
- }
- for (var i = 0, n = current.children.length; i < n; i++) {
- var parts = countChildren(current.children[i]);
- total += parts[0];
- done += parts[1];
- }
- return [total, done];
-}
-
-
-// Create the readable name for the pipeline name.
-function prettyName(name, sidebar) {
- var adjustedName = name;
- if (sidebar) {
- var adjustedName = name;
- var parts = name.split('.');
- if (parts.length > 0) {
- adjustedName = parts[parts.length - 1];
- }
- }
- return adjustedName.replace(/\./, '.<wbr>');
-}
-
-
-// Constructs the info div for a stage.
-function constructStageNode(pipelineId, infoMap, sidebar) {
- if (!infoMap) {
- return;
- }
- var containerDiv = $('<div class="status-box">');
- containerDiv.addClass('status-' + infoMap.status);
-
- var detailDiv = $('<div class="detail-link">');
- if (sidebar) {
- detailDiv.append($('<div class="selected-indicator">').html('‣'));
- }
-
- var detailLink = $('<a>');
- detailLink.attr('href', '#pipeline-' + pipelineId);
- detailLink.attr('title', 'ID #' + pipelineId);
- detailLink.attr('id', 'link-pipeline-' + pipelineId);
- detailLink.html(prettyName(infoMap.classPath, sidebar));
- detailDiv.append(detailLink);
- containerDiv.append(detailDiv);
-
- // ID of the pipeline
- if (!sidebar) {
- var pipelineIdDiv = $('<div class="status-pipeline-id">');
- pipelineIdDiv.text('ID #' + pipelineId);
- containerDiv.append(pipelineIdDiv);
- }
-
- // Broad status category.
- var statusTitleDiv = $('<div class="status-title">');
- if (!sidebar) {
- statusTitleDiv.append($('<span>').text('Status: '));
- }
- statusTitleDiv.append($('<span>').text(infoMap.status));
- containerDiv.append(statusTitleDiv);
-
- // Determine timing information based on state.
- var statusTimeLabel = null;
- var statusTimeMs = null;
- var statusRuntimeDiv = null;
-
- if (infoMap.status == 'done') {
- statusRuntimeDiv = $('<div class="status-runtime">');
-
- var statusTimeSpan = $('<span class="status-time-label">');
- statusTimeSpan.text('Run time: ');
- statusRuntimeDiv.append(statusTimeSpan);
-
- var runtimeSpan = $('<span class="status-runtime-value">');
- runtimeSpan.text(getElapsedTimeString(
- infoMap.startTimeMs, infoMap.endTimeMs));
- statusRuntimeDiv.append(runtimeSpan);
-
- statusTimeLabel = 'Complete';
- statusTimeMs = infoMap.endTimeMs;
- } else if (infoMap.status == 'run') {
- statusTimeLabel = 'Started';
- statusTimeMs = infoMap.startTimeMs;
- } else if (infoMap.status == 'retry') {
- statusTimeLabel = 'Will run';
- statusTimeMs = infoMap.startTimeMs;
- } else if (infoMap.status == 'finalizing') {
- statusTimeLabel = 'Complete';
- statusTimeMs = infoMap.endTimeMs;
- } else if (infoMap.status == 'aborted' ||
- infoMap.status == 'canceled') {
- statusTimeLabel = 'Aborted';
- statusTimeMs = infoMap.endTimeMs;
- } else if (infoMap.status == 'waiting') {
- // Do nothing.
- }
-
- // Last abort message, if any.
- if (infoMap.abortMessage) {
- var abortMessageDiv = $('<div class="status-message abort">');
- abortMessageDiv.append($('<span>').text('Abort Message: '));
- abortMessageDiv.append(
- $('<span class="status-message-text">').text(infoMap.abortMessage));
- containerDiv.append(abortMessageDiv);
- }
-
- // Last error message that caused a retry, if any.
- if (infoMap.lastRetryMessage) {
- var errorMessageDiv = $('<div class="status-message error">');
- errorMessageDiv.append($('<span>').text('Retry Message: '));
- errorMessageDiv.append(
- $('<span class="status-message-text">').text(infoMap.lastRetryMessage));
- containerDiv.append(errorMessageDiv);
- }
-
- // User-supplied status message.
- if (infoMap.statusMessage) {
- var statusMessageDiv = $('<div class="status-message normal">');
- statusMessageDiv.append($('<span>').text('Message: '));
- statusMessageDiv.append(
- $('<span class="status-message-text">').text(infoMap.statusMessage));
- containerDiv.append(statusMessageDiv);
- }
-
- // Completed children count.
- if (infoMap.status == 'run' || infoMap.status == 'done') {
- var counts = countChildren(pipelineId);
- var totalChildren = counts[0];
- var doneChildren = counts[1];
- // Do not count ourselves
- totalChildren--;
- if (infoMap.status == 'done') {
- doneChildren--;
- }
- if (totalChildren > 0 && doneChildren < totalChildren) {
- var doneChildrenDiv = $('<div class="active-children">');
- doneChildrenDiv.append($('<span>').text('Children: '));
- var countText = '' + doneChildren + ' / ' + totalChildren + ' done';
- doneChildrenDiv.append($('<span>').text(countText));
- containerDiv.append(doneChildrenDiv);
- }
- }
-
- // Number of attempts, if more than one.
- if (infoMap.currentAttempt > 1) {
- var attemptDiv = $('<div class="status-attempt">');
- var attemptTitle = 'Attempt: ';
- if (infoMap.status == 'retry') {
- attemptTitle = 'Next Attempt: ';
- } else if (infoMap.status == 'done') {
- attemptTitle = 'Attempts: ';
- }
- attemptDiv.append($('<span>').text(attemptTitle));
- var attemptText = '' + infoMap.currentAttempt + ' / ' +
- infoMap.maxAttempts + '';
- attemptDiv.append($('<span>').text(attemptText));
- containerDiv.append(attemptDiv);
- }
-
- // Runtime if present.
- if (statusRuntimeDiv) {
- containerDiv.append(statusRuntimeDiv);
- }
-
- // Next retry time, complete time, start time.
- if (statusTimeLabel && statusTimeMs) {
- var statusTimeDiv = $('<div class="status-time">');
-
- var statusTimeSpan = $('<span class="status-time-label">');
- statusTimeSpan.text(statusTimeLabel + ': ');
- statusTimeDiv.append(statusTimeSpan);
-
- var sinceSpan = $('<abbr class="timeago status-time-since">');
- var isoDate = getIso8601String(statusTimeMs);
- sinceSpan.attr('title', isoDate);
- sinceSpan.text(isoDate);
- sinceSpan.timeago();
- statusTimeDiv.append(sinceSpan);
-
- containerDiv.append(statusTimeDiv);
- }
-
- // User-supplied status links.
- var linksDiv = $('<div class="status-links">');
- if (!sidebar) {
- linksDiv.append($('<span>').text('Links: '));
- }
- var foundLinks = 0;
- if (infoMap.statusConsoleUrl) {
- var link = $('<a class="status-console">');
- link.attr('href', infoMap.statusConsoleUrl);
- link.text('Console');
- link.click(function(event) {
- selectPipeline(pipelineId);
- event.preventDefault();
- });
- linksDiv.append(link);
- foundLinks++;
- }
- if (infoMap.statusLinks) {
- $.each(infoMap.statusLinks, function(key, value) {
- var link = $('<a>');
- link.attr('href', value);
- link.text(key);
- link.click(function(event) {
- selectPipeline(pipelineId, key);
- event.preventDefault();
- });
- linksDiv.append(link);
- foundLinks++;
- });
- }
- if (foundLinks > 0) {
- containerDiv.append(linksDiv);
- }
-
- // Retry parameters.
- if (!sidebar) {
- var retryParamsDiv = $('<div class="status-retry-params">');
- retryParamsDiv.append(
- $('<div class="retry-params-title">').text('Retry parameters'));
-
- var backoffSecondsDiv = $('<div class="retry-param">');
- $('<span>').text('Backoff seconds: ').appendTo(backoffSecondsDiv);
- $('<span>')
- .text(infoMap.backoffSeconds)
- .appendTo(backoffSecondsDiv);
- retryParamsDiv.append(backoffSecondsDiv);
-
- var backoffFactorDiv = $('<div class="retry-param">');
- $('<span>').text('Backoff factor: ').appendTo(backoffFactorDiv);
- $('<span>')
- .text(infoMap.backoffFactor)
- .appendTo(backoffFactorDiv);
- retryParamsDiv.append(backoffFactorDiv);
-
- containerDiv.append(retryParamsDiv);
- }
-
- function renderCollapsableValue(value, container) {
- var stringValue = $.toJSON(value);
- var SPLIT_LENGTH = 200;
- if (stringValue.length < SPLIT_LENGTH) {
- container.append($('<span>').text(stringValue));
- return;
- }
-
- var startValue = stringValue.substr(0, SPLIT_LENGTH);
- var endValue = stringValue.substr(SPLIT_LENGTH);
-
- // Split the end value with <wbr> tags so it looks nice; force
- // word wrapping never works right.
- var moreSpan = $('<span class="value-disclosure-more">');
- for (var i = 0; i < endValue.length; i += SPLIT_LENGTH) {
- moreSpan.append(endValue.substr(i, SPLIT_LENGTH));
- moreSpan.append('<wbr/>');
- }
- var betweenMoreText = '...(' + endValue.length + ' more) ';
- var betweenSpan = $('<span class="value-disclosure-between">')
- .text(betweenMoreText);
- var toggle = $('<a class="value-disclosure-toggle">')
- .text('Expand')
- .attr('href', '');
- toggle.click(function(e) {
- e.preventDefault();
- if (moreSpan.css('display') == 'none') {
- betweenSpan.text(' ');
- toggle.text('Collapse');
- } else {
- betweenSpan.text(betweenMoreText);
- toggle.text('Expand');
- }
- moreSpan.toggle();
- });
- container.append($('<span>').text(startValue));
- container.append(moreSpan);
- container.append(betweenSpan);
- container.append(toggle);
- }
-
- // Slot rendering
- function renderSlot(slotKey) {
- var filledMessage = null;
- var slot = STATUS_MAP.slots[slotKey];
- var slotDetailDiv = $('<div class="slot-detail">');
- if (!slot) {
- var keyAbbr = $('<abbr>');
- keyAbbr.attr('title', slotKey);
- keyAbbr.text('Pending slot');
- slotDetailDiv.append(keyAbbr);
- return slotDetailDiv;
- }
-
- if (slot.status == 'filled') {
- var valueDiv = $('<span class="slot-value-container">');
- valueDiv.append($('<span>').text('Value: '));
- var valueContainer = $('<span class="slot-value">');
- renderCollapsableValue(slot.value, valueContainer);
- valueDiv.append(valueContainer);
- slotDetailDiv.append(valueDiv);
-
- var filledDiv = $('<div class="slot-filled">');
- filledDiv.append($('<span>').text('Filled: '));
- var isoDate = getIso8601String(slot.fillTimeMs);
- filledDiv.append(
- $('<abbr class="timeago">')
- .attr('title', isoDate)
- .text(isoDate)
- .timeago());
- slotDetailDiv.append(filledDiv);
-
- filledMessage = 'Filled by';
- } else {
- filledMessage = 'Waiting for';
- }
-
- var filledMessageDiv = $('<div class="slot-message">');
- filledMessageDiv.append(
- $('<span>').text(filledMessage + ': '));
- var otherPipeline = STATUS_MAP.pipelines[slot.fillerPipelineId];
- if (otherPipeline) {
- var fillerLink = $('<a class="slot-filler">');
- fillerLink
- .attr('title', 'ID #' + slot.fillerPipelineId)
- .attr('href', '#pipeline-' + slot.fillerPipelineId)
- .text(otherPipeline.classPath);
- fillerLink.click(function(event) {
- selectPipeline(slot.fillerPipelineId);
- event.preventDefault();
- });
- filledMessageDiv.append(fillerLink);
- } else {
- filledMessageDiv.append(
- $('<span class="status-pipeline-id">')
- .text('ID #' + slot.fillerPipelineId));
- }
- slotDetailDiv.append(filledMessageDiv);
- return slotDetailDiv;
- }
-
- // Argument/ouptut rendering
- function renderParam(key, valueDict) {
- var paramDiv = $('<div class="status-param">');
-
- var nameDiv = $('<span class="status-param-name">');
- nameDiv.text(key + ':');
- paramDiv.append(nameDiv);
-
- if (valueDict.type == 'slot' && STATUS_MAP.slots) {
- paramDiv.append(renderSlot(valueDict.slotKey));
- } else {
- var valueDiv = $('<span class="status-param-value">');
- renderCollapsableValue(valueDict.value, valueDiv);
- paramDiv.append(valueDiv);
- }
-
- return paramDiv;
- }
-
- if (!sidebar && (
- !$.isEmptyObject(infoMap.kwargs) || infoMap.args.length > 0)) {
- var paramDiv = $('<div class="param-container">');
- paramDiv.append(
- $('<div class="param-container-title">')
- .text('Parameters'));
-
- // Positional arguments
- $.each(infoMap.args, function(index, valueDict) {
- paramDiv.append(renderParam(index, valueDict));
- });
-
- // Keyword arguments in alphabetical order
- var keywordNames = [];
- $.each(infoMap.kwargs, function(key, value) {
- keywordNames.push(key);
- });
- keywordNames.sort();
- $.each(keywordNames, function(index, key) {
- paramDiv.append(renderParam(key, infoMap.kwargs[key]));
- });
-
- containerDiv.append(paramDiv);
- }
-
- // Outputs in alphabetical order, but default first
- if (!sidebar) {
- var outputContinerDiv = $('<div class="outputs-container">');
- outputContinerDiv.append(
- $('<div class="outputs-container-title">')
- .text('Outputs'));
-
- var outputNames = [];
- $.each(infoMap.outputs, function(key, value) {
- if (key != 'default') {
- outputNames.push(key);
- }
- });
- outputNames.sort();
- outputNames.unshift('default');
-
- $.each(outputNames, function(index, key) {
- outputContinerDiv.append(renderParam(
- key, {'type': 'slot', 'slotKey': infoMap.outputs[key]}));
- });
-
- containerDiv.append(outputContinerDiv);
- }
-
- // Related pipelines
- function renderRelated(relatedList, relatedTitle, classPrefix) {
- var relatedDiv = $('<div>');
- relatedDiv.addClass(classPrefix + '-container');
- relatedTitleDiv = $('<div>');
- relatedTitleDiv.addClass(classPrefix + '-container-title');
- relatedTitleDiv.text(relatedTitle);
- relatedDiv.append(relatedTitleDiv);
-
- $.each(relatedList, function(index, relatedPipelineId) {
- var relatedInfoMap = STATUS_MAP.pipelines[relatedPipelineId];
- if (relatedInfoMap) {
- var relatedLink = $('<a>');
- relatedLink
- .addClass(classPrefix + '-link')
- .attr('title', 'ID #' + relatedPipelineId)
- .attr('href', '#pipeline-' + relatedPipelineId)
- .text(relatedInfoMap.classPath);
- relatedLink.click(function(event) {
- selectPipeline(relatedPipelineId);
- event.preventDefault();
- });
- relatedDiv.append(relatedLink);
- } else {
- var relatedIdDiv = $('<div>');
- relatedIdDiv
- .addClass(classPrefix + '-pipeline-id')
- .text('ID #' + relatedPipelineId);
- relatedDiv.append(relatedIdDiv);
- }
- });
-
- return relatedDiv;
- }
-
- // Run after
- if (!sidebar && infoMap.afterSlotKeys.length > 0) {
- var foundPipelineIds = [];
- $.each(infoMap.afterSlotKeys, function(index, slotKey) {
- if (STATUS_MAP.slots[slotKey]) {
- var slotDict = STATUS_MAP.slots[slotKey];
- if (slotDict.fillerPipelineId) {
- foundPipelineIds.push(slotDict.fillerPipelineId);
- }
- }
- });
- containerDiv.append(
- renderRelated(foundPipelineIds, 'Run after', 'run-after'));
- }
-
- // Spawned children
- if (!sidebar && infoMap.children.length > 0) {
- containerDiv.append(
- renderRelated(infoMap.children, 'Children', 'child'));
- }
-
- return containerDiv;
-}
-
-
-// Recursively creates the sidebar. Use null nextPipelineId to create from root.
-function generateSidebar(statusMap, nextPipelineId, rootElement) {
- var currentElement = null;
-
- if (nextPipelineId) {
- currentElement = $('<li>');
- // Value should match return of getTreePipelineElementId
- currentElement.attr('id', 'item-pipeline-' + nextPipelineId);
- } else {
- currentElement = rootElement;
- nextPipelineId = statusMap.rootPipelineId;
- }
-
- var parentInfoMap = statusMap.pipelines[nextPipelineId];
- currentElement.append(
- constructStageNode(nextPipelineId, parentInfoMap, true));
-
- if (statusMap.pipelines[nextPipelineId]) {
- var children = statusMap.pipelines[nextPipelineId].children;
- if (children.length > 0) {
- var treeElement = null;
- if (rootElement) {
- treeElement =
- $('<ul id="pipeline-tree" class="treeview-black treeview">');
- } else {
- treeElement = $('<ul>');
- }
-
- $.each(children, function(index, childPipelineId) {
- var childElement = generateSidebar(statusMap, childPipelineId);
- treeElement.append(childElement);
- });
- currentElement.append(treeElement);
- }
- }
- return currentElement;
-}
-
-
-function selectPipeline(pipelineId, linkName) {
- if (linkName) {
- location.hash = '#pipeline-' + pipelineId + ';' + linkName;
- } else {
- location.hash = '#pipeline-' + pipelineId;
- }
-}
-
-
-// Depth-first search for active pipeline.
-function findActivePipeline(pipelineId, isRoot) {
- var infoMap = STATUS_MAP.pipelines[pipelineId];
- if (!infoMap) {
- return null;
- }
-
- // This is an active leaf node.
- if (infoMap.children.length == 0 && infoMap.status != 'done') {
- return pipelineId;
- }
-
- // Sort children by start time only.
- var children = infoMap.children.slice(0);
- children.sort(function(a, b) {
- var infoMapA = STATUS_MAP.pipelines[a];
- var infoMapB = STATUS_MAP.pipelines[b];
- if (!infoMapA || !infoMapB) {
- return 0;
- }
- if (infoMapA.startTimeMs && infoMapB.startTimeMs) {
- return infoMapA.startTimeMs - infoMapB.startTimeMs;
- } else {
- return 0;
- }
- });
-
- for (var i = 0; i < children.length; ++i) {
- var foundPipelineId = findActivePipeline(children[i], false);
- if (foundPipelineId != null) {
- return foundPipelineId;
- }
- }
-
- return null;
-}
-
-
-function getSelectedPipelineId() {
- var prefix = '#pipeline-';
- var pieces = location.hash.split(';', 2);
- if (pieces[0].indexOf(prefix) == 0) {
- return pieces[0].substr(prefix.length);
- }
- return null;
-}
-
-
-/* Event handlers */
-function handleHashChange() {
- var prefix = '#pipeline-';
- var hash = location.hash;
- var pieces = hash.split(';', 2);
- var pipelineId = null;
-
- if (pieces[0].indexOf(prefix) == 0) {
- pipelineId = pieces[0].substr(prefix.length);
- } else {
- // Bad hash, just show the root pipeline.
- location.hash = '';
- return;
- }
-
- if (!pipelineId) {
- // No hash means show the root pipeline.
- pipelineId = STATUS_MAP.rootPipelineId;
- }
- var rootMap = STATUS_MAP.pipelines[STATUS_MAP.rootPipelineId];
- var infoMap = STATUS_MAP.pipelines[pipelineId];
- if (!rootMap || !infoMap) {
- // Hash not found.
- return;
- }
-
- // Clear any selection styling.
- $('.selected-link').removeClass('selected-link');
-
- if (pieces[1]) {
- // Show a specific status link.
- var statusLink = $(getTreePipelineElementId(pipelineId))
- .find('.status-links>a:contains("' + pieces[1] + '")');
- if (statusLink.size() > 0) {
- var selectedLink = $(statusLink[0]);
- selectedLink.addClass('selected-link');
- $('#status-console').attr('src', selectedLink.attr('href'));
- $('#status-console').show();
- } else {
- // No console link for this pipeline; ignore it.
- $('#status-console').hide();
- }
- } else {
- // Show the console link.
- var consoleLink = $(getTreePipelineElementId(pipelineId))
- .find('a.status-console');
- if (consoleLink.size() > 0) {
- var selectedLink = $(consoleLink[0]);
- selectedLink.addClass('selected-link');
- $('#status-console').attr('src', selectedLink.attr('href'));
- $('#status-console').show();
- } else {
- // No console link for this pipeline; ignore it.
- $('#status-console').hide();
- }
- }
-
- // Mark the pipeline as selected.
- var selected = $('#link-pipeline-' + pipelineId);
- selected.addClass('selected-link');
- selected.parents('.status-box').addClass('selected-link');
-
- // Title is always the info for the root pipeline, to make it easier to
- // track across multiple tabs.
- document.title = rootMap.classPath + ' - ID #' + STATUS_MAP.rootPipelineId;
-
- // Update the detail status frame.
- var stageNode = constructStageNode(pipelineId, infoMap, false);
- $('#overview').remove();
- stageNode.attr('id', 'overview');
- $('#detail').append(stageNode);
-
- // Make sure everything is the right size.
- adjustStatusConsole();
-}
-
-
-function handleAutoRefreshClick(event) {
- var loc = window.location;
- var newSearch = null;
- if (!AUTO_REFRESH && event.target.checked) {
- newSearch = '?root=' + ROOT_PIPELINE_ID;
- } else if (AUTO_REFRESH && !event.target.checked) {
- newSearch = '?root=' + ROOT_PIPELINE_ID + '&auto=false';
- }
-
- if (newSearch != null) {
- loc.replace(
- loc.protocol + '//' + loc.host + loc.pathname +
- newSearch + loc.hash);
- }
-}
-
-
-function handleRefreshClick(event) {
- var loc = window.location;
- if (AUTO_REFRESH) {
- newSearch = '?root=' + ROOT_PIPELINE_ID;
- } else {
- newSearch = '?root=' + ROOT_PIPELINE_ID + '&auto=false';
- }
- loc.href = loc.protocol + '//' + loc.host + loc.pathname + newSearch;
- return false;
-}
-
-function handleDeleteClick(event) {
- var ajaxRequest = {
- type: 'GET',
- url: 'rpc/delete?root_pipeline_id=' + ROOT_PIPELINE_ID,
- dataType: 'text',
- error: function(request, textStatus) {
- if (request.status == 404) {
- setButter('Pipeline is already deleted');
- } else {
- setButter('Delete request failed: ' + textStatus);
- }
- window.setTimeout(function() {
- clearButter();
- }, 5000);
- },
- success: function(data, textStatus, request) {
- setButter('Delete request was sent');
- window.setTimeout(function() {
- window.location.href = 'list';
- }, 5000);
- }
- };
- $.ajax(jQuery.extend({}, ajaxRequest));
-}
-
-function handleAbortClick(event) {
- var ajaxRequest = {
- type: 'GET',
- url: 'rpc/abort?root_pipeline_id=' + ROOT_PIPELINE_ID,
- dataType: 'text',
- error: function(request, textStatus) {
- setButter('Abort request failed: ' + textStatus);
- window.setTimeout(function() {
- clearButter();
- }, 5000);
- },
- success: function(data, textStatus, request) {
- setButter('Abort request was sent');
- window.setTimeout(function() {
- clearButter();
- window.location.reload();
- }, 5000);
- }
- };
- if (confirm('Are you sure you want to abort the pipeline', 'Abort')) {
- $.ajax(jQuery.extend({}, ajaxRequest));
- }
-}
-
-/* Initialization. */
-function initStatus() {
- if (window.location.search.length > 0 &&
- window.location.search[0] == '?') {
- var query = window.location.search.substr(1);
- var pieces = query.split('&');
- $.each(pieces, function(index, param) {
- var mapping = param.split('=');
- if (mapping.length != 2) {
- return;
- }
- if (mapping[0] == 'auto' && mapping[1] == 'false') {
- AUTO_REFRESH = false;
- } else if (mapping[0] == 'root') {
- ROOT_PIPELINE_ID = mapping[1];
- if (ROOT_PIPELINE_ID.match(/^pipeline-/)) {
- ROOT_PIPELINE_ID = ROOT_PIPELINE_ID.substring(9);
- }
- }
- });
- }
-
- if (!Boolean(ROOT_PIPELINE_ID)) {
- setButter('Missing root param' +
- '. For a job list click <a href="list">here</a>.',
- true, null, true);
- return;
- }
-
- var loadingMsg = 'Loading... #' + ROOT_PIPELINE_ID;
- var attempts = 1;
- var ajaxRequest = {
- type: 'GET',
- url: 'rpc/tree?root_pipeline_id=' + ROOT_PIPELINE_ID,
- dataType: 'text',
- error: function(request, textStatus) {
- if (request.status == 404) {
- if (++attempts <= 5) {
- setButter(loadingMsg + ' [attempt #' + attempts + ']');
- window.setTimeout(function() {
- $.ajax(jQuery.extend({}, ajaxRequest));
- }, 2000);
- } else {
- setButter('Could not find pipeline #' + ROOT_PIPELINE_ID +
- '. For a job list click <a href="list">here</a>.',
- true, null, true);
- }
- } else if (request.status == 449) {
- var root = request.getResponseHeader('root_pipeline_id');
- var newURL = '?root=' + root + '#pipeline-' + ROOT_PIPELINE_ID;
- window.location.replace(newURL);
- } else {
- getResponseDataJson(textStatus);
- }
- },
- success: function(data, textStatus, request) {
- var response = getResponseDataJson(null, data);
- if (response) {
- clearButter();
- STATUS_MAP = response;
- LANG = request.getResponseHeader('Pipeline-Lang');
- initStatusDone();
- }
- }
- };
- setButter(loadingMsg);
- $.ajax(jQuery.extend({}, ajaxRequest));
-}
-
-
-function initStatusDone() {
- jQuery.timeago.settings.allowFuture = true;
-
- // Update the root pipeline ID to match what the server returns. This handles
- // the case where the ID specified is for a child node. We always want to
- // show status up to the root.
- ROOT_PIPELINE_ID = STATUS_MAP.rootPipelineId;
-
- // Generate the sidebar.
- generateSidebar(STATUS_MAP, null, $('#sidebar'));
-
- // Turn the sidebar into a tree.
- $('#pipeline-tree').treeview({
- collapsed: true,
- unique: false,
- cookieId: 'pipeline Id here',
- toggle: handleTreeToggle
- });
- $('#sidebar').show();
-
- var rootStatus = STATUS_MAP.pipelines[STATUS_MAP.rootPipelineId].status;
- var isFinalState = /^done$|^aborted$|^canceled$/.test(rootStatus);
-
- // Init the control panel.
- $('#auto-refresh').click(handleAutoRefreshClick);
- if (!AUTO_REFRESH) {
- $('#auto-refresh').attr('checked', '');
- } else {
- if (!isFinalState) {
- // Only do auto-refresh behavior if we're not in a terminal state.
- window.setTimeout(function() {
- var loc = window.location;
- var search = '?root=' + ROOT_PIPELINE_ID;
- loc.replace(loc.protocol + '//' + loc.host + loc.pathname + search);
- }, 30 * 1000);
- }
- }
- $('.refresh-link').click(handleRefreshClick);
- $('.abort-link').click(handleAbortClick);
- $('.delete-link').click(handleDeleteClick);
- if (LANG == 'Java') {
- if (isFinalState) {
- $('.delete-link').show();
- } else {
- $('.abort-link').show();
- }
- }
- $('#control').show();
-
- // Properly adjust the console iframe to match the window size.
- $(window).resize(adjustStatusConsole);
- window.setTimeout(adjustStatusConsole, 0);
-
- // Handle ajax-y URL fragment events.
- $(window).hashchange(handleHashChange);
- $(window).hashchange(); // Trigger for initial load.
-
- // When there's no hash selected, auto-navigate to the most active node.
- if (window.location.hash == '' || window.location.hash == '#') {
- var activePipelineId = findActivePipeline(STATUS_MAP.rootPipelineId, true);
- if (activePipelineId) {
- selectPipeline(activePipelineId);
- } else {
- // If there's nothing active, then select the root.
- selectPipeline(ROOT_PIPELINE_ID);
- }
- }
-
- // Scroll to the current active node.
- expandTreeToPipeline(getSelectedPipelineId());
-}
diff --git a/python/demo/pipeline/util.py b/python/demo/pipeline/util.py
deleted file mode 100755
index 62eba10..0000000
--- a/python/demo/pipeline/util.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2010 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utility functions for use with the Google App Engine Pipeline API."""
-
-__all__ = ["for_name",
- "JsonEncoder",
- "JsonDecoder"]
-
-#pylint: disable=g-bad-name
-
-import datetime
-import inspect
-import logging
-import os
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-# pylint: disable=protected-access
-
-
-def _get_task_target():
- """Get the default target for a pipeline task.
-
- Current version id format is: user_defined_version.minor_version_number
- Current module id is just the module's name. It could be "default"
-
- Returns:
- A complete target name is of format version.module. If module is the
- default module, just version. None if target can not be determined.
- """
- # Break circular dependency.
- # pylint: disable=g-import-not-at-top
- import pipeline
- if pipeline._TEST_MODE:
- return None
-
- # Further protect against test cases that doesn't set env vars
- # propertly.
- if ("CURRENT_VERSION_ID" not in os.environ or
- "CURRENT_MODULE_ID" not in os.environ):
- logging.warning("Running Pipeline in non TEST_MODE but important "
- "env vars are not set.")
- return None
-
- version = os.environ["CURRENT_VERSION_ID"].split(".")[0]
- module = os.environ["CURRENT_MODULE_ID"]
- if module == "default":
- return version
- return "%s.%s" % (version, module)
-
-
-def for_name(fq_name, recursive=False):
- """Find class/function/method specified by its fully qualified name.
-
- Fully qualified can be specified as:
- * <module_name>.<class_name>
- * <module_name>.<function_name>
- * <module_name>.<class_name>.<method_name> (an unbound method will be
- returned in this case).
-
- for_name works by doing __import__ for <module_name>, and looks for
- <class_name>/<function_name> in module's __dict__/attrs. If fully qualified
- name doesn't contain '.', the current module will be used.
-
- Args:
- fq_name: fully qualified name of something to find
-
- Returns:
- class object.
-
- Raises:
- ImportError: when specified module could not be loaded or the class
- was not found in the module.
- """
- fq_name = str(fq_name)
- module_name = __name__
- short_name = fq_name
-
- if fq_name.rfind(".") >= 0:
- (module_name, short_name) = (fq_name[:fq_name.rfind(".")],
- fq_name[fq_name.rfind(".") + 1:])
-
- try:
- result = __import__(module_name, None, None, [short_name])
- return result.__dict__[short_name]
- except KeyError:
- # If we're recursively inside a for_name() chain, then we want to raise
- # this error as a key error so we can report the actual source of the
- # problem. If we're *not* recursively being called, that means the
- # module was found and the specific item could not be loaded, and thus
- # we want to raise an ImportError directly.
- if recursive:
- raise
- else:
- raise ImportError("Could not find '%s' on path '%s'" % (
- short_name, module_name))
- except ImportError, e:
- # module_name is not actually a module. Try for_name for it to figure
- # out what's this.
- try:
- module = for_name(module_name, recursive=True)
- if hasattr(module, short_name):
- return getattr(module, short_name)
- else:
- # The module was found, but the function component is missing.
- raise KeyError()
- except KeyError:
- raise ImportError("Could not find '%s' on path '%s'" % (
- short_name, module_name))
- except ImportError:
- # This means recursive import attempts failed, thus we will raise the
- # first ImportError we encountered, since it's likely the most accurate.
- pass
- # Raise the original import error that caused all of this, since it is
- # likely the real cause of the overall problem.
- raise
-
-
-def is_generator_function(obj):
- """Return true if the object is a user-defined generator function.
-
- Generator function objects provides same attributes as functions.
- See isfunction.__doc__ for attributes listing.
-
- Adapted from Python 2.6.
-
- Args:
- obj: an object to test.
-
- Returns:
- true if the object is generator function.
- """
- CO_GENERATOR = 0x20
- return bool(((inspect.isfunction(obj) or inspect.ismethod(obj)) and
- obj.func_code.co_flags & CO_GENERATOR))
-
-
-class JsonEncoder(json.JSONEncoder):
- """Pipeline customized json encoder."""
-
- TYPE_ID = "__pipeline_json_type"
-
- def default(self, o):
- """Inherit docs."""
- if type(o) in _TYPE_TO_ENCODER:
- encoder = _TYPE_TO_ENCODER[type(o)]
- json_struct = encoder(o)
- json_struct[self.TYPE_ID] = type(o).__name__
- return json_struct
- return super(JsonEncoder, self).default(o)
-
-
-class JsonDecoder(json.JSONDecoder):
- """Pipeline customized json decoder."""
-
- def __init__(self, **kwargs):
- if "object_hook" not in kwargs:
- kwargs["object_hook"] = self._dict_to_obj
- super(JsonDecoder, self).__init__(**kwargs)
-
- def _dict_to_obj(self, d):
- """Converts a dictionary of json object to a Python object."""
- if JsonEncoder.TYPE_ID not in d:
- return d
-
- type_name = d.pop(JsonEncoder.TYPE_ID)
- if type_name in _TYPE_NAME_TO_DECODER:
- decoder = _TYPE_NAME_TO_DECODER[type_name]
- return decoder(d)
- else:
- raise TypeError("Invalid type %s.", type_name)
-
-
-_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
-
-
-def _json_encode_datetime(o):
- """Json encode a datetime object.
-
- Args:
- o: a datetime object.
-
- Returns:
- A dict of json primitives.
- """
- return {"isostr": o.strftime(_DATETIME_FORMAT)}
-
-
-def _json_decode_datetime(d):
- """Converts a dict of json primitives to a datetime object."""
- return datetime.datetime.strptime(d["isostr"], _DATETIME_FORMAT)
-
-
-def _register_json_primitive(object_type, encoder, decoder):
- """Extend what Pipeline can serialize.
-
- Args:
- object_type: type of the object.
- encoder: a function that takes in an object and returns
- a dict of json primitives.
- decoder: inverse function of encoder.
- """
- global _TYPE_TO_ENCODER
- global _TYPE_NAME_TO_DECODER
- if object_type not in _TYPE_TO_ENCODER:
- _TYPE_TO_ENCODER[object_type] = encoder
- _TYPE_NAME_TO_DECODER[object_type.__name__] = decoder
-
-
-_TYPE_TO_ENCODER = {}
-_TYPE_NAME_TO_DECODER = {}
-_register_json_primitive(datetime.datetime,
- _json_encode_datetime,
- _json_decode_datetime)
diff --git a/python/src/pipeline/pipeline.py b/python/src/pipeline/pipeline.py
index d4b1fbc..b2ee16f 100755
--- a/python/src/pipeline/pipeline.py
+++ b/python/src/pipeline/pipeline.py
@@ -1587,7 +1587,8 @@
continue
blocking_slot_dict[slot_record.key()] = slot_record
- barriers_to_trigger = []
+ task_list = []
+ updated_barriers = []
for barrier in results:
ready_slots = []
for blocking_slot_key in barrier.blocking_slots:
@@ -1606,44 +1607,32 @@
# the task name tombstones.
pending_slots = set(barrier.blocking_slots) - set(ready_slots)
if not pending_slots:
- barriers_to_trigger.append(barrier)
+ if barrier.status != _BarrierRecord.FIRED:
+ barrier.status = _BarrierRecord.FIRED
+ barrier.trigger_time = self._gettime()
+ updated_barriers.append(barrier)
+
+ purpose = barrier.key().name()
+ if purpose == _BarrierRecord.START:
+ path = self.pipeline_handler_path
+ countdown = None
+ else:
+ path = self.finalized_handler_path
+ # NOTE: Wait one second before finalization to prevent
+ # contention on the _PipelineRecord entity.
+ countdown = 1
+ pipeline_key = _BarrierRecord.target.get_value_for_datastore(barrier)
+ logging.debug('Firing barrier %r', barrier.key())
+ task_list.append(taskqueue.Task(
+ url=path,
+ countdown=countdown,
+ name='ae-barrier-fire-%s-%s' % (pipeline_key.name(), purpose),
+ params=dict(pipeline_key=pipeline_key, purpose=purpose),
+ headers={'X-Ae-Pipeline-Key': pipeline_key}))
else:
logging.debug('Not firing barrier %r, Waiting for slots: %r',
barrier.key(), pending_slots)
- pipeline_keys_to_trigger = [
- _BarrierRecord.target.get_value_for_datastore(barrier)
- for barrier in barriers_to_trigger]
- pipelines_to_trigger = dict(zip(
- pipeline_keys_to_trigger, db.get(pipeline_keys_to_trigger)))
- task_list = []
- updated_barriers = []
- for barrier in barriers_to_trigger:
- if barrier.status != _BarrierRecord.FIRED:
- barrier.status = _BarrierRecord.FIRED
- barrier.trigger_time = self._gettime()
- updated_barriers.append(barrier)
-
- purpose = barrier.key().name()
- if purpose == _BarrierRecord.START:
- path = self.pipeline_handler_path
- countdown = None
- else:
- path = self.finalized_handler_path
- # NOTE: Wait one second before finalization to prevent
- # contention on the _PipelineRecord entity.
- countdown = 1
- pipeline_key = _BarrierRecord.target.get_value_for_datastore(barrier)
- target = pipelines_to_trigger[pipeline_key].params.get('target')
- logging.debug('Firing barrier %r', barrier.key())
- task_list.append(taskqueue.Task(
- url=path,
- countdown=countdown,
- name='ae-barrier-fire-%s-%s' % (pipeline_key.name(), purpose),
- params=dict(pipeline_key=pipeline_key, purpose=purpose),
- headers={'X-Ae-Pipeline-Key': pipeline_key},
- target=target))
-
# Blindly overwrite _BarrierRecords that have an updated status. This is
# acceptable because by this point all finalization barriers for
# generator children should have already had their final outputs assigned.
@@ -2615,8 +2604,7 @@
params=dict(pipeline_key=pipeline_key,
purpose=_BarrierRecord.START,
attempt=pipeline_record.current_attempt),
- headers={'X-Ae-Pipeline-Key': pipeline_key},
- target=pipeline_record.params.get('target'))
+ headers={'X-Ae-Pipeline-Key': pipeline_key})
task.add(queue_name=self.queue_name, transactional=True)
pipeline_record.put()
@@ -2734,7 +2722,7 @@
all_tasks.append(taskqueue.Task(
url=context.pipeline_handler_path,
params=dict(pipeline_key=pipeline_key),
- target=child_pipeline.params.get('target'),
+ target=child_pipeline.params['target'],
headers={'X-Ae-Pipeline-Key': pipeline_key},
name='ae-pipeline-fan-out-' + child_pipeline.key().name()))
@@ -2923,7 +2911,6 @@
outputs: Dictionary of output slot dictionaries.
children: List of child pipeline IDs.
queueName: Queue on which this pipeline is running.
- target: Target version/module for the pipeline.
afterSlotKeys: List of Slot Ids after which this pipeline runs.
currentAttempt: Number of the current attempt, starting at 1.
maxAttempts: Maximum number of attempts before aborting.
@@ -2992,7 +2979,6 @@
'outputs': params['output_slots'].copy(),
'children': [key.name() for key in pipeline_record.fanned_out],
'queueName': params['queue_name'],
- 'target': params['target'],
'afterSlotKeys': [str(key) for key in params['after_all']],
'currentAttempt': pipeline_record.current_attempt + 1,
'maxAttempts': pipeline_record.max_attempts,
diff --git a/python/src/pipeline/ui/status.css b/python/src/pipeline/ui/status.css
index 1b581b6..f4d5105 100644
--- a/python/src/pipeline/ui/status.css
+++ b/python/src/pipeline/ui/status.css
@@ -83,7 +83,6 @@
/* detail-specific styling */
.status-param,
-.target-param,
.retry-param {
padding-left: 1em;
font-size: 0.85em;
@@ -111,7 +110,6 @@
#detail .param-container,
#detail .child-container,
#detail .run-after-container,
-#detail .status-target-params,
#detail .status-retry-params {
margin-top: 1em;
}
diff --git a/python/src/pipeline/ui/status.js b/python/src/pipeline/ui/status.js
index bd7fd72..eca6da5 100644
--- a/python/src/pipeline/ui/status.js
+++ b/python/src/pipeline/ui/status.js
@@ -326,29 +326,6 @@
containerDiv.append(linksDiv);
}
- // Target parameters.
- if (!sidebar) {
- var targetParamsDiv = $('<div class="status-target-params">');
- targetParamsDiv.append(
- $('<div class="target-params-title">').text('Target parameters'));
-
- var queueNameDiv = $('<div class="target-param">');
- $('<span>').text('Queue name: ').appendTo(queueNameDiv);
- $('<span>')
- .text(infoMap.queueName)
- .appendTo(queueNameDiv);
- targetParamsDiv.append(queueNameDiv);
-
- var targetDiv = $('<div class="target-param">');
- $('<span>').text('Target: ').appendTo(targetDiv);
- $('<span>')
- .text(infoMap.target || 'Unspecified')
- .appendTo(targetDiv);
- targetParamsDiv.append(targetDiv);
-
- containerDiv.append(targetParamsDiv);
- }
-
// Retry parameters.
if (!sidebar) {
var retryParamsDiv = $('<div class="status-retry-params">');