Open Source Monorail
R=jrobbins@chromium.org
BUG=monorail:1066
Review URL: https://codereview.chromium.org/1868553004
diff --git a/.coveragerc b/.coveragerc
index 510fefa..bf59e70 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -7,6 +7,10 @@
# Add chromium_build, which wasn't test.py-enabled for a long time.
./appengine/chromium_build/*
+ # Add monorail's third-party packages and worst offenders
+ ./appengine/monorail/third_party/*
+ ./appengine/monorail/testing/*
+
# Appengine third_party packages, committed into the repo as is.
./appengine/third_party/*
@@ -32,3 +36,6 @@
# Don't complain if non-runnable code isn't run:
if 0:
if __name__ == ['"]__main__['"]:
+
+[expect_tests]
+expected_coverage_min = 90
diff --git a/appengine/monorail/.expect_tests.cfg b/appengine/monorail/.expect_tests.cfg
new file mode 100644
index 0000000..1eada43
--- /dev/null
+++ b/appengine/monorail/.expect_tests.cfg
@@ -0,0 +1,4 @@
+[expect_tests]
+skip=
+ gae_ts_mon
+ third_party
diff --git a/appengine/monorail/.expect_tests_pretest.py b/appengine/monorail/.expect_tests_pretest.py
new file mode 100644
index 0000000..b8ca7a4
--- /dev/null
+++ b/appengine/monorail/.expect_tests_pretest.py
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# pylint: disable=undefined-variable
+
+import os
+import sys
+
+# Using pretest_filename is magic, because it is available in the locals() of
+# the script which execfiles this file.
+# prefixing with 'pretest' to avoid name collisions in expect_tests.
+pretest_APPENGINE_ENV_PATH = os.path.join(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.abspath(pretest_filename))))),
+ 'google_appengine')
+sys.path.append(pretest_APPENGINE_ENV_PATH)
+
+# Crazy hack, because of appengine.
+# Importing dev_appserver is probably not officially supported and fix_sys_path
+# may be an implementation detail subject to change.
+import dev_appserver as pretest_dev_appserver
+pretest_dev_appserver.fix_sys_path()
+
+# Remove google_appengine SDK from sys.path after use
+sys.path.remove(pretest_APPENGINE_ENV_PATH)
+
+SDK_LIBRARY_PATHS = [
+ # This is not added by fix_sys_path.
+ os.path.join(pretest_APPENGINE_ENV_PATH, 'lib', 'mox'),
+]
+sys.path.extend(SDK_LIBRARY_PATHS)
+
+os.environ['SERVER_SOFTWARE'] = 'test ' + os.environ.get('SERVER_SOFTWARE', '')
+os.environ['CURRENT_VERSION_ID'] = 'test.123'
diff --git a/appengine/monorail/.gitignore b/appengine/monorail/.gitignore
new file mode 100644
index 0000000..877a235
--- /dev/null
+++ b/appengine/monorail/.gitignore
@@ -0,0 +1,14 @@
+.*\.py[co]
+.*\.pyc-2.4
+.*~
+.*\.orig
+.*\.swp
+.*\#.*
+.*@.*
+index\.yaml
+REVISION
+.coverage
+htmlcov
+.DS_Store
+workspace.xml
+new_static/components/*\.vulcanized.html
diff --git a/appengine/monorail/Makefile b/appengine/monorail/Makefile
new file mode 100644
index 0000000..a770b80
--- /dev/null
+++ b/appengine/monorail/Makefile
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# Makefile to simplify some common AppEngine actions.
+# Use 'make help' for a list of commands.
+
+STAGEID= monorail-staging
+PRODID= monorail-prod
+
+GAE_PY?= python gae.py
+DEV_APPSERVER_FLAGS?=
+
+FRONTEND_MODULES?= default
+BACKEND_MODULES?= besearch
+
+default: help
+
+check:
+ifndef NPM_VERSION
+ $(error npm not found. Install from nodejs.org or see README)
+endif
+
+help:
+ @echo "Available commands:"
+ @sed -n '/^[a-zA-Z0-9_.]*:/s/:.*//p' <Makefile
+
+test:
+ ../../test.py test appengine/monorail
+
+# Commands for running locally using dev_appserver.
+serve:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver $(DEV_APPSERVER_FLAGS)
+
+serve_email:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver $(DEV_APPSERVER_FLAGS) --enable_sendmail=True
+
+# The _remote commands expose the app on 0.0.0.0, so that it is externally
+# accessible by hostname:port, rather than just localhost:port.
+serve_remote:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver -o $(DEV_APPSERVER_FLAGS)
+
+serve_remote_email:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver -o $(DEV_APPSERVER_FLAGS) --enable_sendmail=True
+
+run: serve
+
+
+# AppEngine apps can be tested locally and in non-default versions upload to
+# the main app-id, but it is still sometimes useful to have a completely
+# separate app-id. E.g., for testing inbound email, load testing, or using
+# throwaway databases.
+deploy_staging:
+ @echo "---[Staging $(STAGEID)]---"
+ $(GAE_PY) upload -A $(STAGEID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+
+
+# This is our production server that users actually use.
+deploy_prod:
+ @echo "---[Deploying prod instance $(PRODID)]---"
+ $(GAE_PY) upload -A $(PRODID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+
+
+# Note that we do not provide a command-line way to make the newly-uploaded
+# version the default version. This is for two reasons: a) You should be using
+# your browser to confirm that the new version works anyway, so just use the
+# console interface to make it the default; and b) If you really want to use
+# the command line you can use gae.py directly.
diff --git a/appengine/monorail/README.md b/appengine/monorail/README.md
index 7a4f4e6..2f7998a 100644
--- a/appengine/monorail/README.md
+++ b/appengine/monorail/README.md
@@ -1,8 +1,41 @@
-# Monorail
+# Monorail Issue Tracker
-[bugs.chromium.org](https://bugs.chromium.org)
+Monorail is the Issue Tracker used by the Chromium project and other related
+projects. It is hosted at [bugs.chromium.org](https://bugs.chromium.org).
-Monorail is the issue tracking tool for chromium-related projects, which is a port of the Google Code issue tracker to AppEngine.
+If you wish to file a bug against Monorail itself, please do so in our
+[self-hosting tracker](https://bugs.chromium.org/p/monorail/issues/entry).
+We also discuss development of Monorail at `infra-dev@chromium.org`.
+## Testing
-* [API](doc/api.md)
\ No newline at end of file
+In order to run all of the Monorail unit tests, run `make test` in this
+directory. If you wish to run just a subset of the tests, you can invoke the
+test runner directly and give it a subdirectory: `../../test.py
+appengine/monorail/tracker`.
+
+## Running Locally
+
+To run the app locally, you need to have a local MySQL database. Install MySQL
+according to the canonical instructions for your platform. Then create
+a new database and import our schema:
+
+ mysql> create database monorail;
+ mysql> source /path/to/infra/appengine/monorail/sql/framework.sql;
+ mysql> source /path/to/infra/appengine/monorail/sql/project.sql;
+ mysql> source /path/to/infra/appengine/monorail/sql/tracker.sql;
+ mysql> exit;
+
+Then you can run the development server locally with just `make serve`.
+
+## Deploying
+
+The `app.yaml` and `Makefile` files contained in this directory point at the
+official instances of Monorail maintained by the Chromium Infrastructure Team.
+If you wish (and have sufficient permissions) to deploy to one of those, simply
+run `make deploy_staging` or `make deploy_prod`. If you wish to set up your
+own instance, edit the first line of the `app.yaml` and use gae.py directly,
+or edit the `Makefile` to add an entry for your AppEngine app ID. It is likely
+that you'll also want to edit many of the values in `settings.py`, which
+specify debug email addresses, instance counts, and default Google Storage
+buckets.
diff --git a/appengine/monorail/app.yaml b/appengine/monorail/app.yaml
new file mode 100644
index 0000000..cfda623
--- /dev/null
+++ b/appengine/monorail/app.yaml
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+application: monorail-staging
+version: 2015-05-26
+runtime: python27
+api_version: 1
+threadsafe: no
+
+default_expiration: "3600d"
+
+instance_class: F4
+automatic_scaling:
+ min_idle_instances: 10
+ max_pending_latency: 0.2s
+
+handlers:
+- url: /_ah/spi/.*
+ script: monorailapp.endpoints
+
+- url: /robots.txt
+ static_files: static/robots.txt
+ upload: static/robots.txt
+
+- url: /database-maintenance
+ static_files: static/database-maintenance.html
+ upload: static/database-maintenance.html
+
+- url: /static
+ static_dir: static
+
+- url: /_ah/mail/.+
+ script: monorailapp.app
+ login: admin
+
+- url: /_task/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /_cron/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /_backend/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /.*
+ script: monorailapp.app
+ secure: always
+
+inbound_services:
+- mail
+
+libraries:
+- name: endpoints
+ version: 1.0
+- name: MySQLdb
+ version: "latest"
+- name: pycrypto
+ version: "2.6"
+- name: django
+ version: 1.4
+
+includes:
+ - gae_ts_mon
diff --git a/appengine/monorail/appengine_config.py b/appengine/monorail/appengine_config.py
new file mode 100644
index 0000000..e886e94
--- /dev/null
+++ b/appengine/monorail/appengine_config.py
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Configuration."""
+
+import os
+import sys
+
+# Enable third-party imports
+sys.path.append(os.path.join(os.path.dirname(__file__), 'third_party'))
+
+import httplib2
+import oauth2client
diff --git a/appengine/monorail/benchmark/search-urls.txt b/appengine/monorail/benchmark/search-urls.txt
new file mode 100644
index 0000000..00545cc
--- /dev/null
+++ b/appengine/monorail/benchmark/search-urls.txt
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This is a bunch of URLs that can be hit to measure performance.
+# Use 'make siege' to run it.
+# Or 'siege -c 4 -f benchmark/search-urls.txt'
+#
+# For a log of results of running performance tests see the
+# go/monorail-performance spreadsheet.
+
+
+# Use one of these lines to choose the server and project.
+ISSUE_LIST=https://monorail-staging.appspot.com/p/chromium/issues/list?disable_cache=1&
+#ISSUE_LIST=https://code.google.com/p/chromium/issues/list?
+
+# Log data shows that 68% of issue list page views have no query terms.
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+
+${ISSUE_LIST}can=1
+${ISSUE_LIST}can=1
+${ISSUE_LIST}can=3
+${ISSUE_LIST}can=4
+${ISSUE_LIST}q=label:Type-Bug
+${ISSUE_LIST}q=label:Pri-2
+${ISSUE_LIST}q=label:OS-Windows
+${ISSUE_LIST}q=status:Unconfirmed
+${ISSUE_LIST}q=status=Unconfirmed
+${ISSUE_LIST}q=status=Started
+${ISSUE_LIST}q=status=Untriaged
+${ISSUE_LIST}q=status=Started,Untriaged
+${ISSUE_LIST}q=-status=Started,Untriaged
+${ISSUE_LIST}can=1&q=status=Fixed
+${ISSUE_LIST}can=1&q=status=Duplicate
+# Actually owner:me is pretty common, but specify the owner here since we are not signed in.
+${ISSUE_LIST}q=owner=agl@chromium.org
+${ISSUE_LIST}q=owner=rch@chromium.org
+${ISSUE_LIST}q=owner=jon@chromium.org
+${ISSUE_LIST}q=owner=amit@chromium.org
+${ISSUE_LIST}q=cc:amit@chromium.org
+${ISSUE_LIST}q=owner:google.com
+${ISSUE_LIST}q=feature=nacl status:started
+${ISSUE_LIST}q=Hotlist=GoodFirstBug
+${ISSUE_LIST}q=has:restrict
+${ISSUE_LIST}q=feature=Printing
+${ISSUE_LIST}q=feature=Printing status=Available
+${ISSUE_LIST}q=feature=Printing -status=Available
+
+
+
diff --git a/appengine/monorail/codereview.settings b/appengine/monorail/codereview.settings
new file mode 100644
index 0000000..6a720d7
--- /dev/null
+++ b/appengine/monorail/codereview.settings
@@ -0,0 +1,11 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: https://codereview.chromium.org
+VIEW_VC: https://chromium.googlesource.com/infra/infra/+/
+CC_LIST: chromium-reviews@chromium.org, infra-reviews+infra@chromium.org, jrobbins+catch-up@google.com
+PROJECT: infra
+BUG_PREFIX: monorail:
diff --git a/appengine/monorail/cron.yaml b/appengine/monorail/cron.yaml
new file mode 100644
index 0000000..97f6546
--- /dev/null
+++ b/appengine/monorail/cron.yaml
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+cron:
+- description: keep the databases loaded
+ url: /p/chromium/issues/list
+ schedule: every 30 minutes synchronized
+- description: consolidate old invalidation rows
+ url: /_cron/ramCacheConsolidate
+ schedule: every 6 hours synchronized
+- description: index issues that were modified in big batches
+ url: /_cron/reindexQueue
+ schedule: every 6 minutes synchronized
+- description: get rid of doomed and deletable projects
+ url: /_cron/reap
+ schedule: every 24 hours synchronized
+- description: send ts_mon metrics
+ url: /internal/cron/ts_mon/send
+ schedule: every 1 minutes
+- description: export spam model training examples
+ url: /_cron/spamDataExport
+ timezone: US/Pacific
+ schedule: every day 01:00
diff --git a/appengine/monorail/dos.yaml b/appengine/monorail/dos.yaml
new file mode 100644
index 0000000..97e4e4d
--- /dev/null
+++ b/appengine/monorail/dos.yaml
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+blacklist:
+# Edit this file to enable GAE's built-in DoS protection.
+# Run some aggregate queries in the Cloud Console BigQuery
+# interface to see if you can block larger subnets rather
+# than individual IP addresses. Also, this list is limited
+# to 100 entries, so if it's a DDoS you might run out of
+# subnets.
+# On-Call Playbook:
+# https://docs.google.com/document/d/1acGea37jlb5FEp1BGdqca6tY_hiH1QGXKxbt4iBfAug
+#
+# See the playbook for other measures you can take, such
+# as setting ratelimiting_enabled = True in settings.py.
+
+# Example entries:
+# - subnet: 192.0.2.1
+# description: a single IP address
+# - subnet: 192.0.2.0/24
+# description: an IPv4 subnet
+# - subnet: 2001:DB8::1
+# description: an IPv6 address
+# - subnet: 2001:DB8::/32
+# description: an IPv6 subnet
diff --git a/appengine/monorail/features/__init__.py b/appengine/monorail/features/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/features/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/features/activities.py b/appengine/monorail/features/activities.py
new file mode 100644
index 0000000..e36f97a
--- /dev/null
+++ b/appengine/monorail/features/activities.py
@@ -0,0 +1,310 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Code to support project and user activies pages."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import sql
+from framework import template_helpers
+from framework import timestr
+from project import project_views
+from proto import tracker_pb2
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+UPDATES_PER_PAGE = 50
+MAX_UPDATES_PER_PAGE = 200
+
+
+class ActivityView(template_helpers.PBProxy):
+ """EZT-friendly wrapper for Activities."""
+
+ _TITLE_TEMPLATE = template_helpers.MonorailTemplate(
+ framework_constants.TEMPLATE_PATH + 'features/activity-title.ezt',
+ compress_whitespace=True, base_format=ezt.FORMAT_HTML)
+
+ _BODY_TEMPLATE = template_helpers.MonorailTemplate(
+ framework_constants.TEMPLATE_PATH + 'features/activity-body.ezt',
+ compress_whitespace=True, base_format=ezt.FORMAT_HTML)
+
+ def __init__(
+ self, pb, services, mr, prefetched_issues, users_by_id,
+ autolink=None, all_ref_artifacts=None, ending=None, highlight=None):
+ """Constructs an ActivityView out of an Activity protocol buffer.
+
+ Args:
+ pb: an IssueComment or Activity protocol buffer.
+ services: connections to backend services.
+ mr: HTTP request info, used by the artifact autolink.
+ prefetched_issues: dictionary of the issues for the comments being shown.
+ users_by_id: dict {user_id: UserView} for all relevant users.
+ autolink: Autolink instance.
+ all_ref_artifacts: list of all artifacts in the activity stream.
+ ending: ending type for activity titles, 'in_project' or 'by_user'
+ highlight: what to highlight in the middle column on user updates pages
+ i.e. 'project', 'user', or None
+ """
+ template_helpers.PBProxy.__init__(self, pb)
+
+ activity_type = 'ProjectIssueUpdate' # TODO(jrobbins): more types
+
+ self.comment = None
+ self.issue = None
+ self.field_changed = None
+ self.multiple_fields_changed = ezt.boolean(False)
+ self.project = None
+ self.user = None
+ self.timestamp = time.time() # Bogus value makes bad ones highly visible.
+
+ if isinstance(pb, tracker_pb2.IssueComment):
+ self.timestamp = pb.timestamp
+ issue = prefetched_issues[pb.issue_id]
+ if self.timestamp == issue.opened_timestamp:
+ issue_change_id = None # This comment is the description.
+ else:
+ issue_change_id = pb.id # instead of seq num.
+
+ self.comment = tracker_views.IssueCommentView(
+ mr.project_name, pb, users_by_id, autolink,
+ all_ref_artifacts, mr, issue)
+
+ # TODO(jrobbins): pass effective_ids of the commenter so that he/she
+ # can be identified as a project member or not.
+ # TODO(jrobbins): Prefetch all needed projects and configs just like the
+ # way that we batch-prefetch issues.
+ config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
+ self.issue = tracker_views.IssueView(issue, users_by_id, config)
+ self.user = self.comment.creator
+ project = services.project.GetProject(mr.cnxn, issue.project_id)
+ self.project_name = project.project_name
+ self.project = project_views.ProjectView(project)
+
+ else:
+ logging.warn('unknown activity object %r', pb)
+
+ nested_page_data = {
+ 'activity_type': activity_type,
+ 'issue_change_id': issue_change_id,
+ 'comment': self.comment,
+ 'issue': self.issue,
+ 'project': self.project,
+ 'user': self.user,
+ 'timestamp': self.timestamp,
+ 'ending_type': ending,
+ }
+
+ self.escaped_title = self._TITLE_TEMPLATE.GetResponse(
+ nested_page_data).strip()
+ self.escaped_body = self._BODY_TEMPLATE.GetResponse(
+ nested_page_data).strip()
+
+ if autolink is not None and all_ref_artifacts is not None:
+ # TODO(jrobbins): actually parse the comment text. Actually render runs.
+ runs = autolink.MarkupAutolinks(
+ mr, [template_helpers.TextRun(self.escaped_body)], all_ref_artifacts)
+ self.escaped_body = ''.join(run.content for run in runs)
+
+ self.date_bucket, self.date_relative = timestr.GetHumanScaleDate(
+ self.timestamp)
+ time_tuple = time.localtime(self.timestamp)
+ self.date_tooltip = time.asctime(time_tuple)
+
+ # We always highlight the user for starring activities
+ if activity_type.startswith('UserStar'):
+ self.highlight = 'user'
+ else:
+ self.highlight = highlight
+
+
+def GatherUpdatesData(
+ services, mr, prof, project_ids=None, user_ids=None, ending=None,
+ updates_page_url=None, autolink=None, highlight=None):
+ """Gathers and returns updates data.
+
+ Args:
+ services: Connections to backend services.
+ mr: HTTP request info, used by the artifact autolink.
+ prof: The profiler to use.
+ project_ids: List of project IDs we want updates for.
+ user_ids: List of user IDs we want updates for.
+ ending: Ending type for activity titles, 'in_project' or 'by_user'.
+ updates_page_url: The URL that will be used to create pagination links from.
+ autolink: Autolink instance.
+ highlight: What to highlight in the middle column on user updates pages
+ i.e. 'project', 'user', or None.
+ """
+ ascending = bool(mr.after)
+
+ # num should be non-negative number
+ num = mr.GetPositiveIntParam('num', UPDATES_PER_PAGE)
+ num = min(num, MAX_UPDATES_PER_PAGE)
+
+ updates_data = {
+ 'no_stars': None,
+ 'no_activities': None,
+ 'pagination': None,
+ 'updates_data': None,
+ 'ending_type': ending,
+ }
+
+ if not user_ids and not project_ids:
+ updates_data['no_stars'] = ezt.boolean(True)
+ return updates_data
+
+ with prof.Phase('get activities'):
+ # TODO(jrobbins): make this into a persist method.
+ # TODO(jrobbins): this really needs permission checking in SQL, which will
+ # be slow.
+ where_conds = [('Issue.id = Comment.issue_id', [])]
+ if project_ids is not None:
+ cond_str = 'Comment.project_id IN (%s)' % sql.PlaceHolders(project_ids)
+ where_conds.append((cond_str, project_ids))
+ if user_ids is not None:
+ cond_str = 'Comment.commenter_id IN (%s)' % sql.PlaceHolders(user_ids)
+ where_conds.append((cond_str, user_ids))
+
+ if project_ids:
+ use_clause = 'USE INDEX (project_id) USE INDEX FOR ORDER BY (project_id)'
+ elif user_ids:
+ use_clause = (
+ 'USE INDEX (commenter_id) USE INDEX FOR ORDER BY (commenter_id)')
+ else:
+ use_clause = ''
+
+ if mr.before:
+ where_conds.append(('created < %s', [mr.before]))
+ if mr.after:
+ where_conds.append(('created > %s', [mr.after]))
+ if ascending:
+ order_by = [('created', [])]
+ else:
+ order_by = [('created DESC', [])]
+
+ comments = services.issue.GetComments(
+ mr.cnxn, joins=[('Issue', [])], deleted_by=None, where=where_conds,
+ use_clause=use_clause, order_by=order_by, limit=num + 1)
+
+ # TODO(jrobbins): it would be better if we could just get the dict directly.
+ prefetched_issues_list = services.issue.GetIssues(
+ mr.cnxn, {c.issue_id for c in comments})
+ prefetched_issues = {
+ issue.issue_id: issue for issue in prefetched_issues_list}
+ needed_project_ids = {issue.project_id for issue in prefetched_issues_list}
+ prefetched_projects = services.project.GetProjects(
+ mr.cnxn, needed_project_ids)
+ prefetched_configs = services.config.GetProjectConfigs(
+ mr.cnxn, needed_project_ids)
+ viewable_issues_list = tracker_helpers.FilterOutNonViewableIssues(
+ mr.auth.effective_ids, mr.auth.user_pb, prefetched_projects,
+ prefetched_configs, prefetched_issues_list)
+ viewable_iids = {issue.issue_id for issue in viewable_issues_list}
+
+ # Filter the comments based on permission to view the issue.
+ # TODO(jrobbins): push permission checking in the query so that pagination
+ # pages never become underfilled, or use backends to shard.
+ # TODO(jrobbins): come back to this when I implement private comments.
+ comments = [
+ c for c in comments if c.issue_id in viewable_iids]
+
+ if ascending:
+ comments.reverse()
+
+ amendment_user_ids = []
+ for comment in comments:
+ for amendment in comment.amendments:
+ amendment_user_ids.extend(amendment.added_user_ids)
+ amendment_user_ids.extend(amendment.removed_user_ids)
+
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, services.user, [c.user_id for c in comments],
+ amendment_user_ids)
+ framework_views.RevealAllEmailsToMembers(mr, users_by_id)
+
+ num_results_returned = len(comments)
+ displayed_activities = comments[:UPDATES_PER_PAGE]
+
+ if not num_results_returned:
+ updates_data['no_activities'] = ezt.boolean(True)
+ return updates_data
+
+ # Get all referenced artifacts first
+ all_ref_artifacts = None
+ if autolink is not None:
+ content_list = []
+ for activity in comments:
+ content_list.append(activity.content)
+
+ all_ref_artifacts = autolink.GetAllReferencedArtifacts(
+ mr, content_list)
+
+ # Now process content and gather activities
+ today = []
+ yesterday = []
+ pastweek = []
+ pastmonth = []
+ thisyear = []
+ older = []
+
+ with prof.Phase('rendering activities'):
+ for activity in displayed_activities:
+ entry = ActivityView(
+ activity, services, mr, prefetched_issues, users_by_id,
+ autolink=autolink, all_ref_artifacts=all_ref_artifacts, ending=ending,
+ highlight=highlight)
+
+ if entry.date_bucket == 'Today':
+ today.append(entry)
+ elif entry.date_bucket == 'Yesterday':
+ yesterday.append(entry)
+ elif entry.date_bucket == 'Last 7 days':
+ pastweek.append(entry)
+ elif entry.date_bucket == 'Last 30 days':
+ pastmonth.append(entry)
+ elif entry.date_bucket == 'Earlier this year':
+ thisyear.append(entry)
+ elif entry.date_bucket == 'Older':
+ older.append(entry)
+
+ new_after = None
+ new_before = None
+ if displayed_activities:
+ new_after = displayed_activities[0].timestamp
+ new_before = displayed_activities[-1].timestamp
+
+ prev_url = None
+ next_url = None
+ if updates_page_url:
+ list_servlet_rel_url = updates_page_url.split('/')[-1]
+ if displayed_activities and (mr.before or mr.after):
+ prev_url = framework_helpers.FormatURL(
+ mr, list_servlet_rel_url, after=new_after)
+ if mr.after or len(comments) > UPDATES_PER_PAGE:
+ next_url = framework_helpers.FormatURL(
+ mr, list_servlet_rel_url, before=new_before)
+
+ if prev_url or next_url:
+ pagination = template_helpers.EZTItem(
+ start=None, last=None, prev_url=prev_url, next_url=next_url,
+ reload_url=None, visible=ezt.boolean(True), total_count=None)
+ else:
+ pagination = None
+
+ updates_data.update({
+ 'no_activities': ezt.boolean(False),
+ 'pagination': pagination,
+ 'updates_data': template_helpers.EZTItem(
+ today=today, yesterday=yesterday, pastweek=pastweek,
+ pastmonth=pastmonth, thisyear=thisyear, older=older),
+ })
+
+ return updates_data
diff --git a/appengine/monorail/features/autolink.py b/appengine/monorail/features/autolink.py
new file mode 100644
index 0000000..a50b848
--- /dev/null
+++ b/appengine/monorail/features/autolink.py
@@ -0,0 +1,465 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Autolink helps auto-link references to artifacts in text.
+
+This class maintains a registry of artifact autolink syntax specs and
+callbacks. The structure of that registry is:
+ { component_name: (lookup_callback,
+ { regex: substitution_callback, ...}),
+ ...
+ }
+
+For example:
+ { 'tracker':
+ (GetReferencedIssues,
+ ExtractProjectAndIssueIds,
+ {_ISSUE_REF_RE: ReplaceIssueRef}),
+ 'versioncontrol':
+ (GetReferencedRevisions,
+ ExtractProjectAndRevNum,
+ {_GIT_HASH_RE: ReplaceRevisionRef}),
+ }
+
+The dictionary of regexes is used here because, in the future, we
+might add more regexes for each component rather than have one complex
+regex per component.
+"""
+
+import logging
+import re
+import urllib
+import urlparse
+
+import settings
+from framework import template_helpers
+from framework import validate
+from proto import project_pb2
+from tracker import tracker_helpers
+
+
+# Matches a closing HTML tag (e.g. '</b>') at the end of a candidate link,
+# so it can be split off when autolinking runs over HTML fragments.
+_CLOSING_TAG_RE = re.compile('</[a-z0-9]+>$', re.IGNORECASE)
+
+# URL schemes that we are willing to autolink.
+_LINKIFY_SCHEMES = r'(https?://|ftp://|mailto:)'
+# Also count a start-tag '<' as a url delimiter, since the autolinker
+# is sometimes run against html fragments.
+_IS_A_LINK_RE = re.compile(r'(%s)([^\s<]+)' % _LINKIFY_SCHEMES, re.UNICODE)
+
+# These are allowed in links, but if any of the closing delimiters appear
+# at the end of the link, and the opening one is not part of the link,
+# then trim off the closing delimiters.  Each entry is
+# (opening_char_or_None, closing_char).
+_LINK_TRAILING_CHARS = [
+    (None, ':'),
+    (None, '.'),
+    (None, ','),
+    ('<', '>'),
+    ('"', '"'),
+    ('(', ')'),
+    ('[', ']'),
+    ('{', '}'),
+    ]
+
+
+def Linkify(_mr, autolink_regex_match,
+            _component_ref_artifacts):
+  """Examine a textual reference and replace it with a hyperlink or not.
+
+  This is a callback for use with the autolink feature.
+
+  Args:
+    _mr: common info parsed from the user HTTP request.
+    autolink_regex_match: regex match for the textual reference.
+    _component_ref_artifacts: unused value
+
+  Returns:
+    A list of TextRuns with tag=a for all matched ftp, http, https and mailto
+    links converted into HTML hyperlinks.
+  """
+  hyperlink = autolink_regex_match.group(0)
+
+  # Trim trailing punctuation and closing delimiters that are most likely
+  # not part of the URL: a closing delimiter is trimmed unless its opening
+  # counterpart appears within the remaining link text (e.g. a trailing ')'
+  # is kept when the URL itself contains a '(').
+  trailing = ''
+  for begin, end in _LINK_TRAILING_CHARS:
+    if hyperlink.endswith(end):
+      if not begin or hyperlink[:-len(end)].find(begin) == -1:
+        trailing = end + trailing
+        hyperlink = hyperlink[:-len(end)]
+
+  # When running over an HTML fragment, a closing tag may be glued onto the
+  # end of the URL; split it off and keep it as trailing plain text.
+  tag_match = _CLOSING_TAG_RE.search(hyperlink)
+  if tag_match:
+    trailing = hyperlink[tag_match.start(0):] + trailing
+    hyperlink = hyperlink[:tag_match.start(0)]
+
+  # Only make a link for strings that validate as a URL or email address;
+  # otherwise, return the original text as a single plain run.
+  if (not validate.IsValidURL(hyperlink) and
+      not validate.IsValidEmail(hyperlink)):
+    return [template_helpers.TextRun(hyperlink)]
+
+  result = [template_helpers.TextRun(hyperlink, tag='a', href=hyperlink)]
+  if trailing:
+    result.append(template_helpers.TextRun(trailing))
+
+  return result
+
+
+# Regular expression to detect git hashes.
+# Used to auto-link to Git hashes on crrev.com when displaying issue details.
+# Matches a 40-char hex string when it is not part of a larger word, with an
+# optional "r", "revision ", or "revision #" prefix.
+_GIT_HASH_RE = re.compile(
+    r'\b(?P<prefix>r(evision\s+#?)?)?(?P<revnum>([a-f0-9]{40}))\b',
+    re.IGNORECASE | re.MULTILINE)
+
+# This is for SVN revisions and Git commit positions.  Matches "rN",
+# "revision N", and "revision #N" where N is a decimal number of 1-7
+# digits; unlike the hash regex above, the prefix is required here.
+_SVN_REF_RE = re.compile(
+    r'\b(?P<prefix>r(evision\s+#?)?)(?P<revnum>([0-9]{1,7}))\b',
+    re.IGNORECASE | re.MULTILINE)
+
+
+def GetReferencedRevisions(_mr, _refs):
+  """Load the referenced revision objects.
+
+  Currently a no-op: any matched revision hash is autolinked without
+  checking that such a revision actually exists, so there is nothing
+  to look up.
+
+  Returns:
+    None: there are no prefetched artifacts for revision links.
+  """
+  # TODO(jrobbins): Hit crrev.com and check that the revision exists
+  # and show a rollover with revision info.
+  return None
+
+
+def ExtractRevNums(_mr, autolink_regex_match):
+  """Return internal representation of a rev reference.
+
+  Args:
+    _mr: unused info parsed from the request.
+    autolink_regex_match: regex match for one revision reference.
+
+  Returns:
+    A single-element list containing the matched revision string.
+  """
+  ref = autolink_regex_match.group('revnum')
+  logging.debug('revision ref = %s', ref)
+  return [ref]
+
+
+def ReplaceRevisionRef(
+    mr, autolink_regex_match, _component_ref_artifacts):
+  """Return HTML markup for an autolink reference to a revision.
+
+  Args:
+    mr: common info parsed from the user HTTP request.
+    autolink_regex_match: regex match for the textual reference.
+    _component_ref_artifacts: unused; revisions are not prefetched.
+
+  Returns:
+    A list containing one TextRun with tag=a linking to the revision URL.
+  """
+  prefix = autolink_regex_match.group('prefix')
+  revnum = autolink_regex_match.group('revnum')
+  url = _GetRevisionURLFormat(mr.project).format(revnum=revnum)
+  content = revnum
+  if prefix:
+    # Keep any 'r'/'revision' prefix in the displayed link text.
+    content = '%s%s' % (prefix, revnum)
+  return [template_helpers.TextRun(content, tag='a', href=url)]
+
+
+def _GetRevisionURLFormat(project):
+  """Return the format string for revision URLs in the given project.
+
+  Falls back to the site-wide default when the project has none configured.
+  """
+  # TODO(jrobbins): Expose a UI to customize it to point to whatever site
+  # hosts the source code. Also, site-wide default.
+  return (project.revision_url_format or settings.revision_url_format)
+
+
+# Regular expression to detect issue references.
+# Used to auto-link to other issues when displaying issue details.
+# Matches "issue " when "issue" is not part of a larger word, or
+# "issue #", or just a "#" when it is preceded by a space.
+_ISSUE_REF_RE = re.compile(r"""
+    (?P<prefix>\b(issues?|bugs?)[ \t]*(:|=)?)
+    ([ \t]*(?P<project_name>\b[-a-z0-9]+[:\#])?
+    (?P<number_sign>\#?)
+    (?P<local_id>\d+)\b
+    (,?[ \t]*(and|or)?)?)+""", re.IGNORECASE | re.VERBOSE)
+
+# Matches one issue reference within the (possibly multi-issue) text matched
+# above; used by ExtractProjectAndIssueIds and ReplaceIssueRef to walk each
+# individual reference.
+_SINGLE_ISSUE_REF_RE = re.compile(r"""
+    (?P<prefix>\b(issue|bug)[ \t]*)?
+    (?P<project_name>\b[-a-z0-9]+[:\#])?
+    (?P<number_sign>\#?)
+    (?P<local_id>\d+)\b""", re.IGNORECASE | re.VERBOSE)
+
+
+def CurryGetReferencedIssues(services):
+  """Return a function to get ref'd issues with these persist objects bound.
+
+  Currying is a convenient way to give the callback access to the persist
+  objects, but without requiring that all possible persist objects be passed
+  through the autolink registry and functions.
+
+  Args:
+    services: connection to issue, config, and project persistence layers.
+
+  Returns:
+    A ready-to-use function that accepts the arguments that autolink
+    expects to pass to it.
+  """
+
+  def GetReferencedIssues(mr, ref_tuples):
+    """Return lists of open and closed issues referenced by these comments.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      ref_tuples: list of (project_name, local_id) tuples for each issue
+          that is mentioned in the comment text. The project_name may be None,
+          in which case the issue is assumed to be in the current project.
+
+    Returns:
+      A two-tuple of dicts (open_dict, closed_dict), each mapping an
+      _IssueProjectKey string to the corresponding issue.
+    """
+    ref_projects = services.project.GetProjectsByName(
+        mr.cnxn,
+        [(ref_pn or mr.project_name) for ref_pn, _ in ref_tuples])
+    issue_ids = services.issue.ResolveIssueRefs(
+        mr.cnxn, ref_projects, mr.project_name, ref_tuples)
+    open_issues, closed_issues = (
+        tracker_helpers.GetAllowedOpenedAndClosedIssues(
+            mr, issue_ids, services))
+
+    open_dict = {}
+    for issue in open_issues:
+      open_dict[_IssueProjectKey(issue.project_name, issue.local_id)] = issue
+
+    closed_dict = {}
+    for issue in closed_issues:
+      closed_dict[_IssueProjectKey(issue.project_name, issue.local_id)] = issue
+
+    logging.info('autolinking dicts %r and %r', open_dict, closed_dict)
+
+    return open_dict, closed_dict
+
+  return GetReferencedIssues
+
+
+def _ParseProjectNameMatch(project_name):
+  """Process the passed project name and determine the best representation.
+
+  Args:
+    project_name: a string with the project name matched in a regex
+
+  Returns:
+    A minimal representation of the project name, None if no valid content.
+  """
+  if not project_name:
+    return None
+  # Strip the trailing ':' or '#' separator (and any stray whitespace).
+  return project_name.lstrip().rstrip('#: \t\n')
+
+
+def ExtractProjectAndIssueIds(_mr, autolink_regex_match):
+  """Convert a regex match for a textual reference into our internal form.
+
+  Args:
+    _mr: unused info parsed from the request.
+    autolink_regex_match: regex match that may cover several references,
+        e.g. 'issues 3, 4, and 5'.
+
+  Returns:
+    A list of (project_name, local_id) tuples, with project_name None
+    when the reference did not name a project.
+  """
+  whole_str = autolink_regex_match.group(0)
+  refs = []
+  for submatch in _SINGLE_ISSUE_REF_RE.finditer(whole_str):
+    ref = (_ParseProjectNameMatch(submatch.group('project_name')),
+           int(submatch.group('local_id')))
+    refs.append(ref)
+    logging.info('issue ref = %s', ref)
+
+  return refs
+
+
+# This uses project name to avoid a lookup on project ID in a function
+# that has no services object.
+def _IssueProjectKey(project_name, local_id):
+  """Make a dictionary key, e.g. 'chromium:123', for a referenced issue."""
+  return '%s:%d' % (project_name, local_id)
+
+
+class IssueRefRun(object):
+  """A text run that links to a referenced issue."""
+
+  def __init__(self, issue, is_closed, project_name, prefix):
+    # Rendered as an anchor element by the template.
+    self.tag = 'a'
+    # Closed issues get a CSS class so they can be styled differently
+    # (presumably struck through) -- confirm against the stylesheet.
+    self.css_class = 'closed_ref' if is_closed else None
+    self.title = issue.summary
+    self.href = '/p/%s/issues/detail?id=%d' % (project_name, issue.local_id)
+
+    self.content = '%s%d' % (prefix, issue.local_id)
+    if is_closed and not prefix:
+      # Pad a bare closed-issue number so the styling does not touch the
+      # surrounding text.
+      self.content = ' %s ' % self.content
+
+
+def ReplaceIssueRef(mr, autolink_regex_match, component_ref_artifacts):
+  """Examine a textual reference and replace it with an autolink or not.
+
+  Args:
+    mr: commonly used info parsed from the request
+    autolink_regex_match: regex match for the textual reference.
+    component_ref_artifacts: result of earlier call to GetReferencedIssues.
+
+  Returns:
+    A list of IssueRefRuns and TextRuns to replace the textual
+    reference.  If there is an issue to autolink to, we return an HTML
+    hyperlink.  Otherwise, the run will have the original plain text.
+  """
+  open_dict, closed_dict = component_ref_artifacts
+  original = autolink_regex_match.group(0)
+  logging.info('called ReplaceIssueRef on %r', original)
+  result_runs = []
+  pos = 0
+  for submatch in _SINGLE_ISSUE_REF_RE.finditer(original):
+    if submatch.start() >= pos:
+      # Keep any plain text between the previous reference and this one.
+      if original[pos: submatch.start()]:
+        result_runs.append(template_helpers.TextRun(
+            original[pos: submatch.start()]))
+      replacement_run = _ReplaceSingleIssueRef(
+          mr, submatch, open_dict, closed_dict)
+      result_runs.append(replacement_run)
+      pos = submatch.end()
+
+  # Keep any trailing plain text after the last reference.
+  if original[pos:]:
+    result_runs.append(template_helpers.TextRun(original[pos:]))
+
+  return result_runs
+
+
+def _ReplaceSingleIssueRef(mr, submatch, open_dict, closed_dict):
+  """Replace one issue reference with a link, or the original text.
+
+  Args:
+    mr: commonly used info parsed from the request.
+    submatch: regex match for a single issue reference.
+    open_dict: dict of open issues keyed by 'project:local_id'.
+    closed_dict: dict of closed issues keyed by 'project:local_id'.
+
+  Returns:
+    An IssueRefRun when the issue was found, otherwise a plain TextRun
+    with the original reference text.
+  """
+  prefix = submatch.group('prefix') or ''
+  project_name = submatch.group('project_name')
+  if project_name:
+    prefix += project_name
+    project_name = project_name.lstrip().rstrip(':#')
+  else:
+    # We need project_name for the URL, even if it is not in the text.
+    project_name = mr.project_name
+
+  number_sign = submatch.group('number_sign')
+  if number_sign:
+    prefix += number_sign
+  local_id = int(submatch.group('local_id'))
+  # NOTE(review): project_name is always truthy by this point, so the
+  # 'or mr.project_name' fallback below appears redundant.
+  issue_key = _IssueProjectKey(project_name or mr.project_name, local_id)
+
+  if issue_key in open_dict:
+    return IssueRefRun(open_dict[issue_key], False, project_name, prefix)
+  elif issue_key in closed_dict:
+    return IssueRefRun(closed_dict[issue_key], True, project_name, prefix)
+  else:  # Don't link to non-existent issues.
+    return template_helpers.TextRun('%s%d' % (prefix, local_id))
+
+
+class Autolink(object):
+ """Maintains a registry of autolink syntax and can apply it to comments."""
+
+ def __init__(self):
+ self.registry = {}
+
+ def RegisterComponent(self, component_name, artifact_lookup_function,
+ match_to_reference_function, autolink_re_subst_dict):
+ """Register all the autolink info for a software component.
+
+ Args:
+ component_name: string name of software component, must be unique.
+ artifact_lookup_function: function to batch lookup all artifacts that
+ might have been referenced in a set of comments:
+ function(all_matches) -> referenced_artifacts
+ the referenced_artifacts will be pased to each subst function.
+ match_to_reference_function: convert a regex match object to
+ some internal representation of the artifact reference.
+ autolink_re_subst_dict: dictionary of regular expressions and
+ the substitution function that should be called for each match:
+ function(match, referenced_artifacts) -> replacement_markup
+ """
+ self.registry[component_name] = (artifact_lookup_function,
+ match_to_reference_function,
+ autolink_re_subst_dict)
+
+ def GetAllReferencedArtifacts(self, mr, comment_text_list):
+ """Call callbacks to lookup all artifacts possibly referenced.
+
+ Args:
+ mr: information parsed out of the user HTTP request.
+ comment_text_list: list of comment content strings.
+
+ Returns:
+ Opaque object that can be pased to MarkupAutolinks. It's
+ structure happens to be {component_name: artifact_list, ...}.
+ """
+ all_referenced_artifacts = {}
+ for comp, (lookup, match_to_refs, re_dict) in self.registry.iteritems():
+ refs = set()
+ for comment_text in comment_text_list:
+ for regex in re_dict:
+ for match in regex.finditer(comment_text):
+ additional_refs = match_to_refs(mr, match)
+ if additional_refs:
+ refs.update(additional_refs)
+
+ all_referenced_artifacts[comp] = lookup(mr, refs)
+
+ return all_referenced_artifacts
+
+ def MarkupAutolinks(self, mr, text_runs, all_referenced_artifacts):
+ """Loop over components and regexes, applying all substitutions.
+
+ Args:
+ mr: info parsed from the user's HTTP request.
+ text_runs: List of text runs for the user's comment.
+ all_referenced_artifacts: result of previous call to
+ GetAllReferencedArtifacts.
+
+ Returns:
+ List of text runs for the entire user comment, some of which may have
+ attribures that cause them to render as links in render-rich-text.ezt.
+ """
+ items = self.registry.items()
+ items.sort() # Process components in determinate alphabetical order.
+ for component, (_lookup, _match_ref, re_subst_dict) in items:
+ component_ref_artifacts = all_referenced_artifacts[component]
+ for regex, subst_fun in re_subst_dict.iteritems():
+ text_runs = self._ApplySubstFunctionToRuns(
+ text_runs, regex, subst_fun, mr, component_ref_artifacts)
+
+ return text_runs
+
+ def _ApplySubstFunctionToRuns(
+ self, text_runs, regex, subst_fun, mr, component_ref_artifacts):
+ """Apply autolink regex and substitution function to each text run.
+
+ Args:
+ text_runs: list of TextRun objects with parts of the original comment.
+ regex: Regular expression for detecting textual references to artifacts.
+ subst_fun: function to return autolink markup, or original text.
+ mr: common info parsed from the user HTTP request.
+ component_ref_artifacts: already-looked-up destination artifacts to use
+ when computing substitution text.
+
+ Returns:
+ A new list with more and smaller runs, some of which may have tag
+ and link attributes set.
+ """
+ result_runs = []
+ for run in text_runs:
+ content = run.content
+ if run.tag:
+ # This chunk has already been substituted, don't allow nested
+ # autolinking to mess up our output.
+ result_runs.append(run)
+ else:
+ pos = 0
+ for match in regex.finditer(content):
+ if match.start() > pos:
+ result_runs.append(template_helpers.TextRun(
+ content[pos: match.start()]))
+ replacement_runs = subst_fun(mr, match, component_ref_artifacts)
+ result_runs.extend(replacement_runs)
+ pos = match.end()
+
+ if run.content[pos:]: # Keep any text that came after the last match
+ result_runs.append(template_helpers.TextRun(run.content[pos:]))
+
+ # TODO(jrobbins): ideally we would merge consecutive plain text runs
+ # so that regexes can match across those run boundaries.
+
+ return result_runs
+
+
+def RegisterAutolink(services):
+  """Register all the autolink hooks.
+
+  The numeric name prefixes ('01-', '02-', '03-') control ordering:
+  MarkupAutolinks processes components in sorted name order, so plain
+  URL linkification happens before issue and revision references.
+  """
+  # Plain URLs: nothing to look up or extract.
+  services.autolink.RegisterComponent(
+      '01-linkify',
+      lambda request, mr: None,
+      lambda mr, match: None,
+      {_IS_A_LINK_RE: Linkify})
+
+  # Issue references, e.g. 'issue 123' or 'projectname:456'.
+  services.autolink.RegisterComponent(
+      '02-tracker',
+      CurryGetReferencedIssues(services),
+      ExtractProjectAndIssueIds,
+      {_ISSUE_REF_RE: ReplaceIssueRef})
+
+  # Version control references: git hashes and SVN revision numbers.
+  services.autolink.RegisterComponent(
+      '03-versioncontrol',
+      GetReferencedRevisions,
+      ExtractRevNums,
+      {_GIT_HASH_RE: ReplaceRevisionRef,
+       _SVN_REF_RE: ReplaceRevisionRef})
diff --git a/appengine/monorail/features/commands.py b/appengine/monorail/features/commands.py
new file mode 100644
index 0000000..5f5ea9b
--- /dev/null
+++ b/appengine/monorail/features/commands.py
@@ -0,0 +1,305 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions that implement command-line-like issue updates."""
+
+import logging
+import re
+
+from framework import framework_bizobj
+from framework import framework_constants
+from services import user_svc
+from tracker import tracker_constants
+
+
+def ParseQuickEditCommand(
+    cnxn, cmd, issue, config, logged_in_user_id, services):
+  """Parse a quick edit command into assignments and labels.
+
+  Args:
+    cnxn: connection to SQL database.
+    cmd: string quick edit command entered by the user.
+    issue: Issue PB for the issue being edited.
+    config: the project's issue tracker configuration PB.
+    logged_in_user_id: int user ID of the user making the edit.
+    services: connections to backends.
+
+  Returns:
+    A 5-tuple (summary, status, owner_id, cc_ids, labels) of new values
+    to apply; fields not mentioned in the command keep the issue's
+    current values.
+  """
+  parts = _BreakCommandIntoParts(cmd)
+  parser = AssignmentParser(None, easier_kv_labels=True)
+
+  for key, value in parts:
+    if key:  # A key=value assignment.
+      valid_assignment = parser.ParseAssignment(
+          cnxn, key, value, config, services, logged_in_user_id)
+      if not valid_assignment:
+        logging.info('ignoring assignment: %r, %r', key, value)
+
+    elif value.startswith('-'):  # Removing a label.
+      parser.labels_remove.append(_StandardizeLabel(value[1:], config))
+
+    else:  # Adding a label.
+      value = value.strip('+')
+      parser.labels_add.append(_StandardizeLabel(value, config))
+
+  new_summary = parser.summary or issue.summary
+
+  if parser.status is None:
+    new_status = issue.status
+  else:
+    new_status = parser.status
+
+  if parser.owner_id is None:
+    new_owner_id = issue.owner_id
+  else:
+    new_owner_id = parser.owner_id
+
+  new_cc_ids = [cc for cc in list(issue.cc_ids) + list(parser.cc_add)
+                if cc not in parser.cc_remove]
+  (new_labels, _update_add,
+   _update_remove) = framework_bizobj.MergeLabels(
+       issue.labels, parser.labels_add, parser.labels_remove,
+       config.exclusive_label_prefixes)
+
+  return new_summary, new_status, new_owner_id, new_cc_ids, new_labels
+
+
+# Matches one key=value (or key:value) assignment, e.g. 'status=Fixed'.
+# The value may be a bare token, or quoted with double or single quotes.
+ASSIGN_COMMAND_RE = re.compile(
+    r'(?P<key>\w+(?:-|\w)*)(?:=|:)'
+    r'(?:(?P<value1>(?:-|\+|\.|%|@|=|,|\w)+)|'
+    r'"(?P<value2>[^"]+)"|'
+    r"'(?P<value3>[^']+)')",
+    re.UNICODE | re.IGNORECASE)
+
+# Matches one bare label token, optionally prefixed by '+' (add) or
+# '-' (remove).
+LABEL_COMMAND_RE = re.compile(
+    r'(?P<label>(?:\+|-)?\w(?:-|\w)*)',
+    re.UNICODE | re.IGNORECASE)
+
+
+def _BreakCommandIntoParts(cmd):
+  """Break a quick edit command into assignment and label parts.
+
+  Args:
+    cmd: string command entered by the user.
+
+  Returns:
+    A list of (key, value) pairs where key is the name of the field
+    being assigned or None for OneWord labels, and value is the value
+    to assign to it, or the whole label. Value may begin with a "+"
+    which is just ignored, or a "-" meaning that the label should be
+    removed, or neither.
+  """
+  parts = []
+  cmd = cmd.strip()
+  m = True  # Sentinel so the loop body runs at least once.
+
+  while m:
+    # Try a key=value assignment first; fall back to a bare label.
+    m = ASSIGN_COMMAND_RE.match(cmd)
+    if m:
+      key = m.group('key')
+      value = m.group('value1') or m.group('value2') or m.group('value3')
+      parts.append((key, value))
+      cmd = cmd[len(m.group(0)):].strip()
+    else:
+      m = LABEL_COMMAND_RE.match(cmd)
+      if m:
+        parts.append((None, m.group('label')))
+        cmd = cmd[len(m.group(0)):].strip()
+
+  return parts
+
+
+def _ParsePlusMinusList(value):
+  """Parse a string containing a series of plus/minus values.
+
+  Strings are separated by whitespace, comma and/or semi-colon.
+
+  Example:
+    value = "one +two -three"
+    plus = ['one', 'two']
+    minus = ['three']
+
+  Args:
+    value: string containing unparsed plus minus values.
+
+  Returns:
+    A tuple of (plus, minus) lists of string values.
+  """
+  plus = []
+  minus = []
+  # Treat ';' and ',' as separators (in addition to SPACE)
+  for ch in [',', ';']:
+    value = value.replace(ch, ' ')
+  terms = [i.strip() for i in value.split()]
+  for item in terms:
+    if item.startswith('-'):
+      minus.append(item.lstrip('-'))
+    else:
+      plus.append(item.lstrip('+'))  # optional leading '+'
+
+  return plus, minus
+
+
+class AssignmentParser(object):
+ """Class to parse assignment statements in quick edits or email replies."""
+
+ def __init__(self, template, easier_kv_labels=False):
+ self.cc_list = []
+ self.cc_add = []
+ self.cc_remove = []
+ self.owner_id = None
+ self.status = None
+ self.summary = None
+ self.labels_list = []
+ self.labels_add = []
+ self.labels_remove = []
+ self.branch = None
+
+ # Accept "Anything=Anything" for quick-edit, but not in commit-log-commands
+ # because it would be too error-prone when mixed with plain text comment
+ # text and without autocomplete to help users triggering it via typos.
+ self.easier_kv_labels = easier_kv_labels
+
+ if template:
+ if template.owner_id:
+ self.owner_id = template.owner_id
+ if template.summary:
+ self.summary = template.summary
+ if template.labels:
+ self.labels_list = template.labels
+ # Do not have a similar check as above for status because it could be an
+ # empty string.
+ self.status = template.status
+
+ def ParseAssignment(self, cnxn, key, value, config, services, user_id):
+ """Parse command-style text entered by the user to update an issue.
+
+ E.g., The user may want to set the issue status to "reviewed", or
+ set the owner to "me".
+
+ Args:
+ cnxn: connection to SQL database.
+ key: string name of the field to set.
+ value: string value to be interpreted.
+ config: Projects' issue tracker configuration PB.
+ services: connections to backends.
+ user_id: int user ID of the user making the change.
+
+ Returns:
+ True if the line could be parsed as an assigment, False otherwise.
+ Also, as a side-effect, the assigned values are built up in the instance
+ variables of the parser.
+ """
+ valid_line = True
+
+ if key == 'owner':
+ if framework_constants.NO_VALUE_RE.match(value):
+ self.owner_id = framework_constants.NO_USER_SPECIFIED
+ else:
+ try:
+ self.owner_id = _LookupMeOrUsername(cnxn, value, services, user_id)
+ except user_svc.NoSuchUserException:
+ logging.warning('bad owner: %r when committing to project_id %r',
+ value, config.project_id)
+ valid_line = False
+
+ elif key == 'cc':
+ try:
+ add, remove = _ParsePlusMinusList(value)
+ self.cc_add = [_LookupMeOrUsername(cnxn, cc, services, user_id)
+ for cc in add]
+ self.cc_remove = [_LookupMeOrUsername(cnxn, cc, services, user_id)
+ for cc in remove]
+ for user_id in self.cc_add:
+ if user_id not in self.cc_list:
+ self.cc_list.append(user_id)
+ self.cc_list = [user_id for user_id in self.cc_list
+ if user_id not in self.cc_remove]
+ except user_svc.NoSuchUserException:
+ logging.warning('bad cc: %r when committing to project_id %r',
+ value, config.project_id)
+ valid_line = False
+
+ elif key == 'summary':
+ self.summary = value
+
+ elif key == 'status':
+ if framework_constants.NO_VALUE_RE.match(value):
+ self.status = ''
+ else:
+ self.status = _StandardizeStatus(value, config)
+
+ elif key == 'label' or key == 'labels':
+ self.labels_add, self.labels_remove = _ParsePlusMinusList(value)
+ self.labels_add = [_StandardizeLabel(lab, config)
+ for lab in self.labels_add]
+ self.labels_remove = [_StandardizeLabel(lab, config)
+ for lab in self.labels_remove]
+ (self.labels_list, _update_add,
+ _update_remove) = framework_bizobj.MergeLabels(
+ self.labels_list, self.labels_add, self.labels_remove,
+ config.exclusive_label_prefixes)
+
+ elif (self.easier_kv_labels and
+ key not in tracker_constants.RESERVED_PREFIXES and
+ key and value):
+ if key.startswith('-'):
+ self.labels_remove.append(_StandardizeLabel(
+ '%s-%s' % (key[1:], value), config))
+ else:
+ self.labels_add.append(_StandardizeLabel(
+ '%s-%s' % (key, value), config))
+
+ else:
+ valid_line = False
+
+ return valid_line
+
+
+def _StandardizeStatus(status, config):
+  """Attempt to match a user-supplied status with standard status values.
+
+  Args:
+    status: User-supplied status string.
+    config: Project's issue tracker configuration PB.
+
+  Returns:
+    A canonicalized status string, that matches a standard project
+    value, if found.
+  """
+  # Delegate the case-insensitive matching to _StandardizeArtifact.
+  well_known_statuses = [wks.status for wks in config.well_known_statuses]
+  return _StandardizeArtifact(status, well_known_statuses)
+
+
+def _StandardizeLabel(label, config):
+  """Attempt to match a user-supplied label with standard label values.
+
+  Args:
+    label: User-supplied label string.
+    config: Project's issue tracker configuration PB.
+
+  Returns:
+    A canonicalized label string, that matches a standard project
+    value, if found.
+  """
+  # Delegate the case-insensitive matching to _StandardizeArtifact.
+  well_known_labels = [wkl.label for wkl in config.well_known_labels]
+  return _StandardizeArtifact(label, well_known_labels)
+
+
+def _StandardizeArtifact(artifact, well_known_artifacts):
+  """Attempt to match a user-supplied artifact with standard artifact values.
+
+  Args:
+    artifact: User-supplied artifact string.
+    well_known_artifacts: List of well known values of the artifact.
+
+  Returns:
+    A canonicalized artifact string, that matches a standard project
+    value, if found.
+  """
+  artifact = framework_bizobj.CanonicalizeLabel(artifact)
+  # Case-insensitive scan; return the well-known capitalization on a match.
+  for wka in well_known_artifacts:
+    if artifact.lower() == wka.lower():
+      return wka
+  # No match - use user-supplied artifact.
+  return artifact
+
+
+def _LookupMeOrUsername(cnxn, username, services, user_id):
+  """Handle the 'me' syntax or lookup a user's user ID.
+
+  Args:
+    cnxn: connection to SQL database.
+    username: string username, or the literal 'me' (case-insensitive).
+    services: connections to backends.
+    user_id: int user ID to return when username is 'me'.
+
+  Returns:
+    The int user ID of the named user.  May raise
+    user_svc.NoSuchUserException for unknown usernames (callers catch it).
+  """
+  if username.lower() == 'me':
+    return user_id
+
+  return services.user.LookupUserID(cnxn, username)
diff --git a/appengine/monorail/features/commitlogcommands.py b/appengine/monorail/features/commitlogcommands.py
new file mode 100644
index 0000000..16c6e3d
--- /dev/null
+++ b/appengine/monorail/features/commitlogcommands.py
@@ -0,0 +1,155 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implements processing of issue update command lines.
+
+This currently processes the leading command-lines that appear
+at the top of inbound email messages to update existing issues.
+
+It could also be expanded to allow new issues to be created. Or, to
+handle commands in commit-log messages if the version control system
+invokes a webhook.
+"""
+
+import logging
+import re
+
+from features import commands
+from features import notify
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_helpers
+from services import issue_svc
+
+
+# Actions have separate 'Parse' and 'Run' implementations to allow better
+# testing coverage.
+class IssueAction(object):
+  """Base class for all issue commands."""
+
+  def __init__(self):
+    self.parser = commands.AssignmentParser(None)
+    self.description = ''  # Free-form comment text from the message body.
+    self.inbound_message = None  # Full raw text, set if strip_quoted_lines.
+    self.commenter_id = None
+    self.project = None
+    self.config = None
+    self.hostport = framework_helpers.GetHostPort()
+
+  def Parse(
+      self, cnxn, project_name, commenter_id, lines, services,
+      strip_quoted_lines=False, hostport=None):
+    """Populate object from raw user input.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_name: string name of the project containing the issue.
+      commenter_id: int user ID of the user making the change.
+      lines: list of strings, one per line of the message body.
+      services: connections to backends.
+      strip_quoted_lines: if True, keep the full message text in
+          inbound_message and strip quoted-reply text from description.
+      hostport: optional 'host:port' string overriding the default.
+    """
+    self.project = services.project.GetProjectByName(cnxn, project_name)
+    self.config = services.config.GetProjectConfig(
+        cnxn, self.project.project_id)
+    self.commenter_id = commenter_id
+
+    # Process all valid key-value lines. Once we find a non key-value line,
+    # treat the rest as the 'description'.
+    for idx, line in enumerate(lines):
+      valid_line = False
+      m = re.match(r'^\s*(\w+)\s*\:\s*(.*?)\s*$', line)
+      if m:
+        # Process Key-Value
+        key = m.group(1).lower()
+        value = m.group(2)
+        valid_line = self.parser.ParseAssignment(
+            cnxn, key, value, self.config, services, self.commenter_id)
+
+      if not valid_line:
+        # Not Key-Value. Treat this line and remaining as 'description'.
+        # First strip off any trailing blank lines.
+        while lines and not lines[-1].strip():
+          lines.pop()
+        if lines:
+          self.description = '\n'.join(lines[idx:])
+        break
+
+    if strip_quoted_lines:
+      self.inbound_message = '\n'.join(lines)
+      self.description = emailfmt.StripQuotedText(self.description)
+
+    if hostport:
+      self.hostport = hostport
+
+    # Log all parsed results for debugging.
+    for key in ['owner_id', 'cc_add', 'cc_remove', 'summary',
+                'status', 'labels_add', 'labels_remove', 'branch']:
+      logging.info('\t%s: %s', key, self.parser.__dict__[key])
+
+    for key in ['commenter_id', 'description', 'hostport']:
+      logging.info('\t%s: %s', key, self.__dict__[key])
+
+  def Run(self, cnxn, services, allow_edit=True):
+    """Execute this action.  Subclasses must override this."""
+    raise NotImplementedError()
+
+
+class UpdateIssueAction(IssueAction):
+  """Implements processing email replies or the "update issue" command."""
+
+  def __init__(self, local_id):
+    super(UpdateIssueAction, self).__init__()
+    self.local_id = local_id  # ID of the issue within its project.
+
+  def Run(self, cnxn, services, allow_edit=True):
+    """Updates an issue based on the parsed commands.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: connections to backends.
+      allow_edit: if False, apply only the plain-text comment; all parsed
+          field edits are discarded.
+    """
+    try:
+      issue = services.issue.GetIssueByLocalID(
+          cnxn, self.project.project_id, self.local_id)
+    except issue_svc.NoSuchIssueException:
+      return  # Issue does not exist, so do nothing
+
+    old_owner_id = issue.owner_id
+    # For each field, fall back to the issue's current value when the
+    # parser saw no assignment for it.
+    new_summary = self.parser.summary or issue.summary
+
+    if self.parser.status is None:
+      new_status = issue.status
+    else:
+      new_status = self.parser.status
+
+    if self.parser.owner_id is None:
+      new_owner_id = issue.owner_id
+    else:
+      new_owner_id = self.parser.owner_id
+
+    new_cc_ids = [cc for cc in list(issue.cc_ids) + list(self.parser.cc_add)
+                  if cc not in self.parser.cc_remove]
+    (new_labels, _update_add,
+     _update_remove) = framework_bizobj.MergeLabels(
+         issue.labels, self.parser.labels_add,
+         self.parser.labels_remove,
+         self.config.exclusive_label_prefixes)
+
+    new_field_values = issue.field_values  # TODO(jrobbins): edit custom ones
+
+    if not allow_edit:
+      # If user can't edit, then only consider the plain-text comment,
+      # and set all other fields back to their original values.
+      logging.info('Processed reply from user who can not edit issue')
+      new_summary = issue.summary
+      new_status = issue.status
+      new_owner_id = issue.owner_id
+      new_cc_ids = issue.cc_ids
+      new_labels = issue.labels
+      new_field_values = issue.field_values
+
+    amendments, _comment_pb = services.issue.ApplyIssueComment(
+        cnxn, services, self.commenter_id,
+        self.project.project_id, issue.local_id, new_summary, new_status,
+        new_owner_id, new_cc_ids, new_labels, new_field_values,
+        issue.component_ids, issue.blocked_on_iids, issue.blocking_iids,
+        issue.dangling_blocked_on_refs, issue.dangling_blocking_refs,
+        issue.merged_into, comment=self.description,
+        inbound_message=self.inbound_message)
+
+    logging.info('Updated issue %s:%s w/ amendments %r',
+                 self.project.project_name, issue.local_id, amendments)
+
+    if amendments or self.description:  # Avoid completely empty comments.
+      cmnts = services.issue.GetCommentsForIssue(cnxn, issue.issue_id)
+      notify.PrepareAndSendIssueChangeNotification(
+          self.project.project_id, self.local_id, self.hostport,
+          self.commenter_id, len(cmnts) - 1, old_owner_id=old_owner_id)
diff --git a/appengine/monorail/features/cues.py b/appengine/monorail/features/cues.py
new file mode 100644
index 0000000..f56821d
--- /dev/null
+++ b/appengine/monorail/features/cues.py
@@ -0,0 +1,50 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Simple servlet to store the fact that a user has dismissed a cue card.
+
+Cue cards are small on-page help items that appear when the user has
+done a certain action or is viewing a project that is in a certain
+state. The cue card give the user a suggestion of what he/she should
+do next. Cue cards can be dismissed to reduce visual clutter on the
+page once the user has learned the content of the suggestion. That
+preference is recorded in the User PB, and the same cue card will not
+be presented again to the same user.
+
+Example: The logged in user has dismissed the cue card that tells him/her how
+to search for numbers in the issue tracker:
+
+ POST /hosting/cues.do
+ cue_id=search_for_numbers&token=12344354534
+"""
+
+
+import logging
+
+from framework import jsonfeed
+from framework import monorailrequest
+
+
+class SetCuesFeed(jsonfeed.JsonFeed):
+  """A class to process an AJAX request to dismiss a cue card."""
+
+  def HandleRequest(self, mr):
+    """Processes a user's POST request to dismiss a cue card.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Raises:
+      monorailrequest.InputException: if no cue_id was supplied.
+    """
+
+    cue_id = mr.GetParam('cue_id')
+    if not cue_id:
+      raise monorailrequest.InputException('no cue_id specified')
+
+    logging.info('Handling user set cue request: %r', cue_id)
+    # Append the cue to the user's persisted list so it is not shown again.
+    new_dismissed_cues = mr.auth.user_pb.dismissed_cues
+    new_dismissed_cues.append(cue_id)
+    self.services.user.UpdateUserSettings(
+        mr.cnxn, mr.auth.user_id, mr.auth.user_pb,
+        dismissed_cues=new_dismissed_cues)
+
diff --git a/appengine/monorail/features/filterrules.py b/appengine/monorail/features/filterrules.py
new file mode 100644
index 0000000..fb5f84a
--- /dev/null
+++ b/appengine/monorail/features/filterrules.py
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the filter rules feature."""
+
+import logging
+
+from features import filterrules_helpers
+from framework import jsonfeed
+from tracker import tracker_constants
+
+
class RecomputeDerivedFieldsTask(jsonfeed.InternalTask):
  """JSON servlet that recomputes derived fields on a batch of issues."""

  def HandleRequest(self, mr):
    """Recompute derived field values on one range of issues in a shard."""
    logging.info(
        'params are %r %r %r %r', mr.specified_project_id, mr.lower_bound,
        mr.upper_bound, mr.shard_id)
    project_id = mr.specified_project_id
    project = self.services.project.GetProject(mr.cnxn, project_id)
    config = self.services.config.GetProjectConfig(mr.cnxn, project_id)
    filterrules_helpers.RecomputeAllDerivedFieldsNow(
        mr.cnxn, self.services, project, config, lower_bound=mr.lower_bound,
        upper_bound=mr.upper_bound, shard_id=mr.shard_id)

    return {'success': True}
+
+
class ReindexQueueCron(jsonfeed.InternalTask):
  """JSON servlet that reindexes some issues each minute, as needed."""

  def HandleRequest(self, mr):
    """Reindex issues that are listed in the reindex table."""
    reindexed_count = self.services.issue.ReindexIssues(
        mr.cnxn, tracker_constants.MAX_ISSUES_TO_REINDEX_PER_MINUTE,
        self.services.user)

    return {'num_reindexed': reindexed_count}
diff --git a/appengine/monorail/features/filterrules_helpers.py b/appengine/monorail/features/filterrules_helpers.py
new file mode 100644
index 0000000..695d2ce
--- /dev/null
+++ b/appengine/monorail/features/filterrules_helpers.py
@@ -0,0 +1,699 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the filter rules helper functions."""
+
+import logging
+import re
+
+from google.appengine.api import taskqueue
+
+import settings
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import monorailrequest
+from framework import urls
+from framework import validate
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import query2ast
+from search import searchpipeline
+from services import user_svc
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
# Maximum number of filter rules that can be specified in a given
# project. This helps us bound the amount of time needed to
# (re)compute derived fields.
MAX_RULES = 200

# Number of issues per work item when recomputing derived fields in bulk.
BLOCK = tracker_constants.RECOMPUTE_DERIVED_FIELDS_BLOCK_SIZE
+
+
+# TODO(jrobbins): implement a more efficient way to update just those
+# issues affected by a specific component change.
def RecomputeAllDerivedFields(cnxn, services, project, config):
  """Create work items to update all issues after filter rule changes.

  Args:
    cnxn: connection to SQL database.
    services: connections to backend services.
    project: Project PB for the project that was edited.
    config: ProjectIssueConfig PB for the project that was edited,
        including the edits made.
  """
  if not settings.recompute_derived_fields_in_worker:
    # Background tasks are not enabled, just do everything in the servlet.
    RecomputeAllDerivedFieldsNow(cnxn, services, project, config)
    return

  highest_id = services.issue.GetHighestLocalID(cnxn, project.project_id)
  if highest_id == 0:
    return  # No work to do.

  # Enqueue one work item per block of issues to recompute, updating
  # higher numbered issues sooner and old issues last.
  shard_id = 0
  for lower in reversed(range(1, highest_id + 1, BLOCK)):
    params = {
        'project_id': project.project_id,
        'lower_bound': lower,
        'upper_bound': min(lower + BLOCK, highest_id + 1),
        'shard_id': shard_id,
        }
    logging.info('adding task with params %r', params)
    taskqueue.add(
        url=urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', params=params)
    # Cycle through shard_ids just to load-balance among the replicas. Each
    # block includes all issues in that local_id range, not just 1/10 of them.
    shard_id = (shard_id + 1) % settings.num_logical_shards
+
+
def RecomputeAllDerivedFieldsNow(
    cnxn, services, project, config, lower_bound=None, upper_bound=None,
    shard_id=None):
  """Re-apply all filter rules to all issues in a project.

  Args:
    cnxn: connection to SQL database.
    services: connections to persistence layer.
    project: Project PB for the project that was changed.
    config: ProjectIssueConfig for that project.
    lower_bound: optional int lowest issue ID to consider, inclusive.
    upper_bound: optional int highest issue ID to consider, exclusive.
    shard_id: optional int shard_id to read from one replica.

  SIDE-EFFECT: updates all issues in the project. Stores and re-indexes
  all those that were changed.
  """
  if lower_bound is not None and upper_bound is not None:
    issues = services.issue.GetIssuesByLocalIDs(
        cnxn, project.project_id, range(lower_bound, upper_bound),
        shard_id=shard_id)
  else:
    # No bounds given: process the whole project in one pass.
    issues = services.issue.GetAllIssuesInProject(cnxn, project.project_id)

  # Parse each rule predicate once, then reuse the ASTs for every issue.
  rules = services.features.GetFilterRules(cnxn, project.project_id)
  predicate_asts = ParsePredicateASTs(rules, config, None)
  modified_issues = []
  for issue in issues:
    if ApplyGivenRules(cnxn, services, issue, config, rules, predicate_asts):
      modified_issues.append(issue)

  services.issue.UpdateIssues(cnxn, modified_issues, just_derived=True)

  # Doing the FTS indexing can be too slow, so queue up the issues
  # that need to be re-indexed by a cron-job later.
  services.issue.EnqueueIssuesForIndexing(
      cnxn, [issue.issue_id for issue in modified_issues])
+
+
def ParsePredicateASTs(rules, config, me_user_id):
  """Parse the predicates of the given rules into QueryAST PBs.

  Args:
    rules: list of FilterRule PBs whose predicates should be parsed.
    config: ProjectIssueConfig for the project.
    me_user_id: user ID to substitute for "me" keywords, or None.

  Returns:
    A list of QueryAST PBs, one per rule, in the same order.
  """
  queries = []
  for rule in rules:
    pred = rule.predicate
    if me_user_id:
      pred = searchpipeline.ReplaceKeywordsWithUserID(me_user_id, pred)
    queries.append(pred)
  return [
      query2ast.ParseUserQuery(
          query, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
      for query in queries]
+
+
def ApplyFilterRules(cnxn, services, issue, config):
  """Apply this project's filter rules to the given issue.

  Args:
    cnxn: database connection, used to look up user IDs.
    services: persistence layer for users, issues, and projects.
    issue: An Issue PB that has just been updated with new explicit values.
    config: The project's issue tracker config PB.

  Returns:
    True if any derived_* field of the issue was changed.

  SIDE-EFFECT: update the derived_* fields of the Issue PB.
  """
  project_rules = services.features.GetFilterRules(cnxn, issue.project_id)
  asts = ParsePredicateASTs(project_rules, config, None)
  return ApplyGivenRules(cnxn, services, issue, config, project_rules, asts)
+
+
def ApplyGivenRules(cnxn, services, issue, config, rules, predicate_asts):
  """Apply the filter rules for this project to the given issue.

  Args:
    cnxn: database connection, used to look up user IDs.
    services: persistence layer for users, issues, and projects.
    issue: An Issue PB that has just been updated with new explicit values.
    config: The project's issue tracker config PB.
    rules: list of FilterRule PBs.
    predicate_asts: list of QueryAST PBs, one for each rule, in the same
        order as rules.

  Returns:
    True if any derived_* field of the issue was changed.

  SIDE-EFFECT: update the derived_* fields of the Issue PB.
  """
  (derived_owner_id, derived_status, derived_cc_ids,
   derived_labels, derived_notify_addrs) = _ComputeDerivedFields(
       cnxn, services, issue, config, rules, predicate_asts)

  # Detect whether the rules changed anything before overwriting.
  any_change = (derived_owner_id != issue.derived_owner_id or
                derived_status != issue.derived_status or
                derived_cc_ids != issue.derived_cc_ids or
                derived_labels != issue.derived_labels or
                derived_notify_addrs != issue.derived_notify_addrs)

  # Remember any derived values.
  issue.derived_owner_id = derived_owner_id
  issue.derived_status = derived_status
  issue.derived_cc_ids = derived_cc_ids
  issue.derived_labels = derived_labels
  issue.derived_notify_addrs = derived_notify_addrs

  return any_change
+
+
def _ComputeDerivedFields(cnxn, services, issue, config, rules, predicate_asts):
  """Compute derived field values for an issue based on filter rules.

  Args:
    cnxn: database connection, used to look up user IDs.
    services: persistence layer for users, issues, and projects.
    issue: the issue to examine.
    config: ProjectIssueConfig for the project containing the issue.
    rules: list of FilterRule PBs.
    predicate_asts: QueryAST PB for each rule.

  Returns:
    A 5-tuple of derived values for owner_id, status, cc_ids, labels, and
    notify_addrs. These values are the result of applying all rules in order.
    Filter rules only produce derived values that do not conflict with the
    explicit field values of the issue.
  """
  excl_prefixes = config.exclusive_label_prefixes
  # Examine the explicit labels and Cc's on the issue.
  lower_labels = [lab.lower() for lab in issue.labels]
  label_set = set(lower_labels)
  cc_set = set(issue.cc_ids)
  # Exclusive prefixes already used by the issue's explicit labels: rules
  # may never add a second label with one of these prefixes.
  excl_prefixes_used = set()
  for lab in lower_labels:
    prefix = lab.split('-')[0]
    if prefix in excl_prefixes:
      excl_prefixes_used.add(prefix)
  # Maps exclusive prefix -> label added by an earlier rule, so that a
  # later rule can replace it.
  prefix_values_added = {}

  # Start with the assumption that rules don't change anything, then
  # accumulate changes.
  derived_owner_id = framework_constants.NO_USER_SPECIFIED
  derived_status = ''
  # Get the component auto-cc's before even starting the rules.
  # TODO(jrobbins): take this out and instead get component CC IDs
  # on each access and search, but that will be a pretty big change.
  derived_cc_ids = [
      auto_cc_id
      for auto_cc_id in component_helpers.GetComponentCcIDs(issue, config)
      if auto_cc_id not in cc_set]
  derived_labels = []
  derived_notify_addrs = []

  # Apply each rule in order. Later rules see the results of earlier rules.
  # Later rules can overwrite or add to results of earlier rules.
  # TODO(jrobbins): also pass in in-progress values for owner and CCs so
  # that early rules that set those can affect later rules that check them.
  for rule, predicate_ast in zip(rules, predicate_asts):
    (rule_owner_id, rule_status, rule_add_cc_ids,
     rule_add_labels, rule_add_notify) = _ApplyRule(
         cnxn, services, rule, predicate_ast, issue, label_set, config)

    # logging.info(
    #     'rule "%s" gave %r, %r, %r, %r, %r',
    #     rule.predicate, rule_owner_id, rule_status, rule_add_cc_ids,
    #     rule_add_labels, rule_add_notify)

    # Owner and status are only derived when the issue has no explicit value.
    if rule_owner_id and not issue.owner_id:
      derived_owner_id = rule_owner_id

    if rule_status and not issue.status:
      derived_status = rule_status

    for cc_id in rule_add_cc_ids:
      if cc_id not in cc_set:
        derived_cc_ids.append(cc_id)
        cc_set.add(cc_id)

    for lab in rule_add_labels:
      lab_lower = lab.lower()
      if lab_lower in label_set:
        continue  # We already have that label.
      prefix = lab_lower.split('-')[0]
      if '-' in lab_lower and prefix in excl_prefixes:
        if prefix in excl_prefixes_used:
          continue  # Issue already has that prefix.
        # Replace any earlier-added label that had the same exclusive prefix.
        if prefix in prefix_values_added:
          label_set.remove(prefix_values_added[prefix].lower())
          derived_labels = [dl for dl in derived_labels
                            if dl != prefix_values_added[prefix]]
        prefix_values_added[prefix] = lab

      derived_labels.append(lab)
      label_set.add(lab_lower)

    for addr in rule_add_notify:
      if addr not in derived_notify_addrs:
        derived_notify_addrs.append(addr)

  return (derived_owner_id, derived_status, derived_cc_ids, derived_labels,
          derived_notify_addrs)
+
+
def EvalPredicate(
    cnxn, services, predicate_ast, issue, label_set, config, owner_id, cc_ids,
    status):
  """Return True if the given issue satisfies the given predicate.

  Args:
    cnxn: Connection to SQL database.
    services: persistence layer for users and issues.
    predicate_ast: QueryAST for rule or saved query string.
    issue: Issue PB of the issue to evaluate.
    label_set: set of lower-cased labels on the issue.
    config: ProjectIssueConfig for the project that contains the issue.
    owner_id: int user ID of the issue owner.
    cc_ids: list of int user IDs of the users Cc'd on the issue.
    status: string status value of the issue.

  Returns:
    True if the issue satisfies the predicate.

  Note: filter rule evaluation passes in only the explicit owner_id,
  cc_ids, and status whereas subscription evaluation passes in the
  combination of explicit values and derived values.
  """
  # TODO(jrobbins): Call ast2ast to simplify the predicate and do
  # most lookups. Refactor to allow that to be done once.
  project = services.project.GetProject(cnxn, config.project_id)
  # The AST is a disjunction of conjunctions: the predicate is satisfied
  # when every condition of any one conjunction holds.
  return any(
      all(_ApplyCond(cnxn, services, project, cond, issue, label_set, config,
                     owner_id, cc_ids, status)
          for cond in conj.conds)
      for conj in predicate_ast.conjunctions)
+
+
def _ApplyRule(
    cnxn, services, rule_pb, predicate_ast, issue, label_set, config):
  """Test if the given rule should fire and return its result.

  Args:
    cnxn: database connection, used to look up user IDs.
    services: persistence layer for users and issues.
    rule_pb: FilterRule PB instance with a predicate and various actions.
    predicate_ast: QueryAST for the rule predicate.
    issue: The Issue PB to be considered.
    label_set: set of lowercased labels from an issue's explicit
        label_list plus any labels that have accumulated from previous rules.
    config: ProjectIssueConfig for the project containing the issue.

  Returns:
    A 5-tuple of the results from this rule: derived owner id, status,
    cc_ids to add, labels to add, and notify addresses to add.
  """
  fired = EvalPredicate(
      cnxn, services, predicate_ast, issue, label_set, config,
      issue.owner_id, issue.cc_ids, issue.status)
  if not fired:
    return None, None, [], [], []

  logging.info('rule adds: %r', rule_pb.add_labels)
  return (rule_pb.default_owner_id, rule_pb.default_status,
          rule_pb.add_cc_ids, rule_pb.add_labels,
          rule_pb.add_notify_addrs)
+
+
def _ApplyCond(
    cnxn, services, project, term, issue, label_set, config, owner_id, cc_ids,
    status):
  """Return True if the given issue satisfied the given predicate term."""
  op = term.op
  vals = term.str_values or term.int_values
  # Since rules are per-project, there'll be exactly 1 field
  fd = term.field_defs[0]
  field = fd.field_name

  # Dispatch on the built-in field name being tested by this term.
  if field == 'label':
    return _Compare(op, vals, label_set)
  if field == 'component':
    return _CompareComponents(config, op, vals, issue.component_ids)
  if field == 'any_field':
    return _Compare(op, vals, label_set) or _Compare(op, vals, [issue.summary])
  if field == 'attachments':
    return _Compare(op, vals, [issue.attachment_count])
  if field == 'blocked':
    return _Compare(op, vals, issue.blocked_on_iids)
  if field == 'blockedon':
    return _CompareIssueRefs(
        cnxn, services, project, op, term.str_values, issue.blocked_on_iids)
  if field == 'blocking':
    return _CompareIssueRefs(
        cnxn, services, project, op, term.str_values, issue.blocking_iids)
  if field == 'cc':
    return _CompareUsers(cnxn, services.user, op, vals, cc_ids)
  if field == 'closed':
    # Unset timestamps are falsy: a never-closed issue cannot match.
    return (issue.closed_timestamp and
            _Compare(op, vals, [issue.closed_timestamp]))
  if field == 'id':
    return _Compare(op, vals, [issue.local_id])
  if field == 'mergedinto':
    return _CompareIssueRefs(
        cnxn, services, project, op, term.str_values, [issue.merged_into or 0])
  if field == 'modified':
    return (issue.modified_timestamp and
            _Compare(op, vals, [issue.modified_timestamp]))
  if field == 'open':
    # TODO(jrobbins): this just checks the explicit status, not the result
    # of any previous rules.
    return tracker_helpers.MeansOpenInProject(status, config)
  if field == 'opened':
    return (issue.opened_timestamp and
            _Compare(op, vals, [issue.opened_timestamp]))
  if field == 'owner':
    return _CompareUsers(cnxn, services.user, op, vals, [owner_id])
  if field == 'reporter':
    return _CompareUsers(cnxn, services.user, op, vals, [issue.reporter_id])
  if field == 'stars':
    return _Compare(op, vals, [issue.star_count])
  if field == 'status':
    return _Compare(op, vals, [status.lower()])
  if field == 'summary':
    return _Compare(op, vals, [issue.summary])

  # Since rules are per-project, it makes no sense to support field project.
  # We would need to load comments to support fields comment, commentby,
  # description, attachment.
  # Supporting starredby is probably not worth the complexity.

  logging.info('Rule with unsupported field %r was False', field)
  return False
+
+
def _CheckTrivialCases(op, issue_values):
  """Check has:x and -has:x terms and no values. Otherwise, return None."""
  # These operators depend only on whether the field has any value at all,
  # so no field-specific comparison or lookup is needed.
  has_values = bool(
      issue_values and issue_values != [''] and issue_values != [0])
  if op == ast_pb2.QueryOp.IS_DEFINED:
    return has_values
  if op == ast_pb2.QueryOp.IS_NOT_DEFINED:
    return not has_values
  if not has_values:
    # Only the negative operators can match an empty field.
    return op in (ast_pb2.QueryOp.NE, ast_pb2.QueryOp.NOT_TEXT_HAS)

  return None  # Caller should continue processing the term.
+
def _CompareComponents(config, op, rule_values, issue_values):
  """Compare the components specified in the rule vs those in the issue."""
  trivial = _CheckTrivialCases(op, issue_values)
  if trivial is not None:
    return trivial

  # EQ/NE require an exact component path match; TEXT_HAS allows partial.
  exact = op in (ast_pb2.QueryOp.EQ, ast_pb2.QueryOp.NE)
  rule_component_ids = set()
  for path in rule_values:
    rule_component_ids.update(
        tracker_bizobj.FindMatchingComponentIDs(path, config, exact=exact))

  if op in (ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.EQ):
    return any(cid in issue_values for cid in rule_component_ids)
  if op in (ast_pb2.QueryOp.NOT_TEXT_HAS, ast_pb2.QueryOp.NE):
    return all(cid not in issue_values for cid in rule_component_ids)

  return False
+
+
def _CompareIssueRefs(
    cnxn, services, project, op, rule_str_values, issue_values):
  """Compare the issues specified in the rule vs referenced in the issue.

  Args:
    cnxn: connection to SQL database.
    services: persistence layer for projects and issues.
    project: Project PB of the project that contains the issue.
    op: QueryOp comparison operator from the rule predicate.
    rule_str_values: list of issue reference strings from the rule
        (format as accepted by tracker_bizobj.ParseIssueRef).
    issue_values: list of global issue IDs referenced by the issue.

  Returns:
    True if the issue's references satisfy the operator.
  """
  trivial_result = _CheckTrivialCases(op, issue_values)
  if trivial_result is not None:
    return trivial_result

  rule_refs = []
  for str_val in rule_str_values:
    ref = tracker_bizobj.ParseIssueRef(str_val)
    if ref:
      rule_refs.append(ref)
  rule_ref_project_names = set(
      pn for pn, local_id in rule_refs if pn)
  rule_ref_projects_dict = services.project.GetProjectsByName(
      cnxn, rule_ref_project_names)
  # Refs without a project name are resolved in the current project.
  rule_ref_projects_dict[project.project_name] = project
  rule_iids = services.issue.ResolveIssueRefs(
      cnxn, rule_ref_projects_dict, project.project_name, rule_refs)

  # Issue references only support equality comparisons.
  if op == ast_pb2.QueryOp.TEXT_HAS:
    op = ast_pb2.QueryOp.EQ
  if op == ast_pb2.QueryOp.NOT_TEXT_HAS:
    op = ast_pb2.QueryOp.NE

  return _Compare(op, rule_iids, issue_values)
+
+
def _CompareUsers(cnxn, user_service, op, rule_values, issue_values):
  """Compare the user(s) specified in the rule and the issue."""
  # Note that all occurrences of "me" in rule_values should have already
  # been resolved to str(user_id) of the subscribing user.
  # TODO(jrobbins): Project filter rules should not be allowed to have "me".
  trivial = _CheckTrivialCases(op, issue_values)
  if trivial is not None:
    return trivial

  # Rule values are either all user ID strings or all email addresses.
  try:
    return _CompareUserIDs(op, rule_values, issue_values)
  except ValueError:
    # At least one rule value was not an integer string: match by email.
    return _CompareEmails(cnxn, user_service, op, rule_values, issue_values)
+
+
def _CompareUserIDs(op, rule_values, issue_values):
  """Compare users according to specified user ID integer strings.

  Raises:
    ValueError: if any rule value is not an integer string.
  """
  rule_user_ids = {int(uid_str) for uid_str in rule_values}

  if op in (ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.EQ):
    return any(uid in issue_values for uid in rule_user_ids)
  if op in (ast_pb2.QueryOp.NOT_TEXT_HAS, ast_pb2.QueryOp.NE):
    return all(uid not in issue_values for uid in rule_user_ids)

  logging.info('unexpected numeric user operator %r %r %r',
               op, rule_values, issue_values)
  return False
+
+
def _CompareEmails(cnxn, user_service, op, rule_values, issue_values):
  """Compare users based on email addresses."""
  issue_emails = user_service.LookupUserEmails(cnxn, issue_values).values()

  # Collect the rule values that match, then interpret the result
  # positively or negatively depending on the operator.
  if op in (ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.NOT_TEXT_HAS):
    matches = [rv for rv in rule_values if _HasText(rv, issue_emails)]
  elif op in (ast_pb2.QueryOp.EQ, ast_pb2.QueryOp.NE):
    matches = [rv for rv in rule_values if rv in issue_emails]
  else:
    logging.info('unexpected user operator %r %r %r',
                 op, rule_values, issue_values)
    return False

  if op in (ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.EQ):
    return bool(matches)
  return not matches
+
+
def _Compare(op, rule_values, issue_values):
  """Compare the values specified in the rule and the issue.

  Args:
    op: QueryOp comparison operator from the rule predicate.
    rule_values: list of values (strings or ints) from the rule.
    issue_values: collection of values from the issue's field.

  Returns:
    True if the issue values satisfy the operator vs the rule values.
  """
  trivial_result = _CheckTrivialCases(op, issue_values)
  if trivial_result is not None:
    return trivial_result

  if (op in [ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.NOT_TEXT_HAS] and
      issue_values and not isinstance(min(issue_values), basestring)):
    return False  # Empty or numeric fields cannot match substrings
  elif op == ast_pb2.QueryOp.TEXT_HAS:
    return any(_HasText(rv, issue_values) for rv in rule_values)
  elif op == ast_pb2.QueryOp.NOT_TEXT_HAS:
    return all(not _HasText(rv, issue_values) for rv in rule_values)

  # For numeric issue fields, convert rule values to ints so they compare
  # correctly against the issue's int values (Python 2 int/long).
  val_type = type(min(issue_values))
  if val_type == int or val_type == long:
    try:
      rule_values = [int(rv) for rv in rule_values]
    except ValueError:
      logging.info('rule value conversion to int failed: %r', rule_values)
      return False

  if op == ast_pb2.QueryOp.EQ:
    return any(rv in issue_values for rv in rule_values)
  elif op == ast_pb2.QueryOp.NE:
    return all(rv not in issue_values for rv in rule_values)

  if val_type != int and val_type != long:
    return False  # Inequalities only work on numeric fields

  if op == ast_pb2.QueryOp.GT:
    return min(issue_values) > min(rule_values)
  elif op == ast_pb2.QueryOp.GE:
    return min(issue_values) >= min(rule_values)
  elif op == ast_pb2.QueryOp.LT:
    return max(issue_values) < max(rule_values)
  elif op == ast_pb2.QueryOp.LE:
    return max(issue_values) <= max(rule_values)

  logging.info('unexpected operator %r %r %r', op, rule_values, issue_values)
  return False
+
+
+def _HasText(rule_text, issue_values):
+ """Return True if the issue contains the rule text, case insensitive."""
+ rule_lower = rule_text.lower()
+ for iv in issue_values:
+ if iv is not None and rule_lower in iv.lower():
+ return True
+
+ return False
+
+
def MakeRule(predicate, default_status=None, default_owner_id=None,
             add_cc_ids=None, add_labels=None, add_notify=None):
  """Construct a FilterRule PB from a predicate and optional actions.

  Args:
    predicate: string query that will trigger the rule if satisfied.
    default_status: optional default status to set if rule fires.
    default_owner_id: optional default owner_id to set if rule fires.
    add_cc_ids: optional cc ids to set if rule fires.
    add_labels: optional label strings to set if rule fires.
    add_notify: optional notify email addresses to set if rule fires.

  Returns:
    A new FilterRule PB.
  """
  rule_pb = tracker_pb2.FilterRule()
  rule_pb.predicate = predicate

  # Only the actions that were actually supplied are set on the PB;
  # falsy values are left unset.
  if default_status:
    rule_pb.default_status = default_status
  if default_owner_id:
    rule_pb.default_owner_id = default_owner_id
  if add_cc_ids:
    rule_pb.add_cc_ids = add_cc_ids
  if add_labels:
    rule_pb.add_labels = add_labels
  if add_notify:
    rule_pb.add_notify_addrs = add_notify

  return rule_pb
+
+
def ParseRules(cnxn, post_data, user_service, errors, prefix=''):
  """Parse rules from the user and return a list of FilterRule PBs.

  Args:
    cnxn: connection to database.
    post_data: dictionary of html form data.
    user_service: connection to user backend services.
    errors: EZTErrors message used to display field validation errors.
    prefix: optional string prefix used to differentiate the form fields
        for existing rules from the form fields for new rules.

  Returns:
    A list of FilterRule PBs.
  """
  rules = []
  # The best we can do for now is show all validation errors at the bottom of
  # the filter rules section, not directly on the rule that had the error :(.
  error_list = []

  for i in xrange(1, MAX_RULES + 1):
    predicate_key = '%spredicate%s' % (prefix, i)
    if predicate_key not in post_data:
      continue  # skip any entries that are blank or have no predicate.
    predicate = post_data[predicate_key].strip()
    if not predicate:
      continue
    action_type = post_data.get(
        '%saction_type%s' % (prefix, i), 'add_labels').strip()
    # Note: action_value may be '', meaning no-op.
    action_value = post_data.get('%saction_value%s' % (prefix, i), '').strip()
    rules.append(_ParseOneRule(
        cnxn, predicate, action_type, action_value, user_service, i,
        error_list))

  if error_list:
    errors.rules = error_list

  return rules
+
+
def _ParseOneRule(
    cnxn, predicate, action_type, action_value, user_service,
    rule_num, error_list):
  """Parse one FilterRule based on the action type.

  Args:
    cnxn: connection to database.
    predicate: string query that triggers the rule.
    action_type: form value naming the action: 'default_status',
        'default_owner', 'add_ccs', 'add_labels', or 'also_notify'.
    action_value: string form value holding the action's argument(s).
    user_service: connection to user backend services.
    rule_num: int rule number, used in validation error messages.
    error_list: list of strings; validation errors are appended to it.

  Returns:
    A new FilterRule PB.

  Raises:
    monorailrequest.InputException: if action_type is not recognized.
  """
  if action_type == 'default_status':
    status = framework_bizobj.CanonicalizeLabel(action_value)
    rule = MakeRule(predicate, default_status=status)

  elif action_type == 'default_owner':
    if action_value:
      try:
        user_id = user_service.LookupUserID(cnxn, action_value)
      except user_svc.NoSuchUserException:
        user_id = framework_constants.NO_USER_SPECIFIED
        error_list.append(
            'Rule %d: No such user: %s' % (rule_num, action_value))
    else:
      user_id = framework_constants.NO_USER_SPECIFIED
    rule = MakeRule(predicate, default_owner_id=user_id)

  elif action_type == 'add_ccs':
    cc_ids = []
    # Raw string so that \s is a regex whitespace class, not an escape.
    for email in re.split(r'[,;\s]+', action_value):
      email = email.strip()
      if not email:
        continue
      try:
        user_id = user_service.LookupUserID(cnxn, email, autocreate=True)
        cc_ids.append(user_id)
      except user_svc.NoSuchUserException:
        error_list.append('Rule %d: No such user: %s' % (rule_num, email))

    rule = MakeRule(predicate, add_cc_ids=cc_ids)

  elif action_type == 'add_labels':
    add_labels = framework_constants.IDENTIFIER_RE.findall(action_value)
    rule = MakeRule(predicate, add_labels=add_labels)

  elif action_type == 'also_notify':
    add_notify = []
    for addr in re.split(r'[,;\s]+', action_value):
      addr = addr.strip()
      if validate.IsValidEmail(addr):
        add_notify.append(addr)
      else:
        error_list.append(
            'Rule %d: Invalid email address: %s' % (rule_num, addr))

    rule = MakeRule(predicate, add_notify=add_notify)

  else:
    logging.info('unexpected action type, probably tampering:%r', action_type)
    raise monorailrequest.InputException()

  return rule
diff --git a/appengine/monorail/features/filterrules_views.py b/appengine/monorail/features/filterrules_views.py
new file mode 100644
index 0000000..ca21b2d
--- /dev/null
+++ b/appengine/monorail/features/filterrules_views.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to display filter rules in templates."""
+
+import logging
+
+from framework import template_helpers
+
+
class RuleView(template_helpers.PBProxy):
  """Wrapper class that makes it easier to display a Rule via EZT."""

  def __init__(self, rule_pb, users_by_id):
    """Initialize the view from a FilterRule PB.

    Args:
      rule_pb: FilterRule PB to display, or None for a blank view.
      users_by_id: dict of user views, used to show email addresses.
    """
    super(RuleView, self).__init__(rule_pb)

    # self.predicate is automatically available via PBProxy.
    self.action_type = ''
    self.action_value = ''

    if rule_pb is None:
      return  # A blank view: just leave everything as ''.

    # For the current UI, we assume that each rule has exactly
    # one action, so we can determine the text value for it here.
    if rule_pb.default_status:
      action = ('default_status', rule_pb.default_status)
    elif rule_pb.default_owner_id:
      action = ('default_owner',
                users_by_id[rule_pb.default_owner_id].email)
    elif rule_pb.add_cc_ids:
      emails = [users_by_id[cc_id].email for cc_id in rule_pb.add_cc_ids]
      action = ('add_ccs', ', '.join(emails))
    elif rule_pb.add_labels:
      action = ('add_labels', ', '.join(rule_pb.add_labels))
    elif rule_pb.add_notify_addrs:
      action = ('also_notify', ', '.join(rule_pb.add_notify_addrs))
    else:
      action = ('', '')

    self.action_type, self.action_value = action
diff --git a/appengine/monorail/features/inboundemail.py b/appengine/monorail/features/inboundemail.py
new file mode 100644
index 0000000..b25b312
--- /dev/null
+++ b/appengine/monorail/features/inboundemail.py
@@ -0,0 +1,258 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Handler to process inbound email with issue comments and commands."""
+
+import logging
+import os
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import mail
+
+import webapp2
+
+from features import commitlogcommands
+from features import notify
+from framework import emailfmt
+from framework import framework_constants
+from framework import monorailrequest
+from framework import permissions
+from framework import sql
+from framework import template_helpers
+from proto import project_pb2
+from services import issue_svc
+from services import user_svc
+
+
+TEMPLATE_PATH_BASE = framework_constants.TEMPLATE_PATH
+
+MSG_TEMPLATES = {
+ 'banned': 'features/inboundemail-banned.ezt',
+ 'body_too_long': 'features/inboundemail-body-too-long.ezt',
+ 'project_not_found': 'features/inboundemail-project-not-found.ezt',
+ 'not_a_reply': 'features/inboundemail-not-a-reply.ezt',
+ 'no_account': 'features/inboundemail-no-account.ezt',
+ 'no_artifact': 'features/inboundemail-no-artifact.ezt',
+ 'no_perms': 'features/inboundemail-no-perms.ezt',
+ 'replies_disabled': 'features/inboundemail-replies-disabled.ezt',
+ }
+
+
+class InboundEmail(webapp2.RequestHandler):
+ """Servlet to handle inbound email messages."""
+
+ def __init__(self, request, response, services=None, *args, **kwargs):
+ super(InboundEmail, self).__init__(request, response, *args, **kwargs)
+ self.services = services or self.app.config.get('services')
+ self._templates = {}
+ for name, template_path in MSG_TEMPLATES.iteritems():
+ self._templates[name] = template_helpers.MonorailTemplate(
+ TEMPLATE_PATH_BASE + template_path,
+ compress_whitespace=False, base_format=ezt.FORMAT_RAW)
+
+ def get(self, project_addr=None):
+ logging.info('\n\n\nGET for InboundEmail and project_addr is %r',
+ project_addr)
+ self.Handler(mail.InboundEmailMessage(self.request.body),
+ urllib.unquote(project_addr))
+
+ def post(self, project_addr=None):
+ logging.info('\n\n\nPOST for InboundEmail and project_addr is %r',
+ project_addr)
+ self.Handler(mail.InboundEmailMessage(self.request.body),
+ urllib.unquote(project_addr))
+
+ def Handler(self, inbound_email_message, project_addr):
+ """Process an inbound email message."""
+ msg = inbound_email_message.original
+ email_tasks = self.ProcessMail(msg, project_addr)
+
+ if email_tasks:
+ notify.AddAllEmailTasks(email_tasks)
+
+ def ProcessMail(self, msg, project_addr):
+ """Process an inbound email message."""
+ # TODO(jrobbins): If the message is HUGE, don't even try to parse
+ # it. Silently give up.
+
+ (from_addr, to_addrs, cc_addrs, references, subject,
+ body) = emailfmt.ParseEmailMessage(msg)
+
+ logging.info('Proj addr: %r', project_addr)
+ logging.info('From addr: %r', from_addr)
+ logging.info('Subject: %r', subject)
+ logging.info('To: %r', to_addrs)
+ logging.info('Cc: %r', cc_addrs)
+ logging.info('References: %r', references)
+ logging.info('Body: %r', body)
+
+ # If message body is very large, reject it and send an error email.
+ if emailfmt.IsBodyTooBigToParse(body):
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['body_too_long'])
+
+ # Make sure that the project reply-to address is in the To: line.
+ if not emailfmt.IsProjectAddressOnToLine(project_addr, to_addrs):
+ return None
+
+ # Identify the project and artifact to update.
+ project_name, local_id = emailfmt.IdentifyProjectAndIssue(
+ project_addr, subject)
+ if not project_addr or not local_id:
+ logging.info('Could not identify issue: %s %s', project_addr, subject)
+ # No error message, because message was probably not intended for us.
+ return None
+
+ cnxn = sql.MonorailConnection()
+ if self.services.cache_manager:
+ self.services.cache_manager.DoDistributedInvalidation(cnxn)
+
+ project = self.services.project.GetProjectByName(cnxn, project_name)
+
+ if not project or project.state != project_pb2.ProjectState.LIVE:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['project_not_found'])
+
+ if not project.process_inbound_email:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['replies_disabled'],
+ project_name=project_name)
+
+ # Verify that this is a reply to a notification that we could have sent.
+ if not os.environ['SERVER_SOFTWARE'].startswith('Development'):
+ for ref in references:
+ if emailfmt.ValidateReferencesHeader(ref, project, from_addr, subject):
+ break # Found a message ID that we could have sent.
+ else:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['not_a_reply'])
+
+ # Authenticate the from-addr and perm check.
+ # Note: If the issue summary line is changed, a new thread is created,
+ # and replies to the old thread will no longer work because the subject
+ # line hash will not match, which seems reasonable.
+ try:
+ auth = monorailrequest.AuthData.FromEmail(cnxn, from_addr, self.services)
+ from_user_id = auth.user_id
+ except user_svc.NoSuchUserException:
+ from_user_id = None
+ if not from_user_id:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_account'])
+
+ if auth.user_pb.banned:
+ logging.info('Banned user %s tried to post to %s',
+ from_addr, project_addr)
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['banned'])
+
+ perms = permissions.GetPermissions(
+ auth.user_pb, auth.effective_ids, project)
+
+ self.ProcessIssueReply(
+ cnxn, project, local_id, project_addr, from_addr, from_user_id,
+ auth.effective_ids, perms, body)
+
+ return None
+
+ def ProcessIssueReply(
+ self, cnxn, project, local_id, project_addr, from_addr, from_user_id,
+ effective_ids, perms, body):
+ """Examine an issue reply email body and add a comment to the issue.
+
+ Args:
+ cnxn: connection to SQL database.
+ project: Project PB for the project containing the issue.
+ local_id: int ID of the issue being replied to.
+ project_addr: string email address used for outbound emails from
+ that project.
+ from_addr: string email address of the user who sent the email
+ reply to our server.
+ from_user_id: int user ID of user who sent the reply email.
+ effective_ids: set of int user IDs for the user (including any groups),
+ or an empty set if user is not signed in.
+ perms: PermissionSet for the user who sent the reply email.
+ body: string email body text of the reply email.
+
+ Returns:
+ A list of follow-up work items, e.g., to notify other users of
+ the new comment, or to notify the user that their reply was not
+ processed.
+
+ Side-effect:
+ Adds a new comment to the issue, if no error is reported.
+ """
+ try:
+ issue = self.services.issue.GetIssueByLocalID(
+ cnxn, project.project_id, local_id)
+ except issue_svc.NoSuchIssueException:
+ issue = None
+
+ if not issue or issue.deleted:
+ # The referenced issue was not found, e.g., it might have been
+ # deleted, or someone messed with the subject line. Reject it.
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_artifact'],
+ artifact_phrase='issue %d' % local_id,
+ project_name=project.project_name)
+
+ if not perms.CanUsePerm(
+ permissions.ADD_ISSUE_COMMENT, effective_ids, project,
+ permissions.GetRestrictions(issue)):
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_perms'],
+ artifact_phrase='issue %d' % local_id,
+ project_name=project.project_name)
+ allow_edit = permissions.CanEditIssue(
+ effective_ids, perms, project, issue)
+ # TODO(jrobbins): if the user does not have EDIT_ISSUE and the inbound
+ # email tries to make an edit, send back an error message.
+
+ lines = body.strip().split('\n')
+ uia = commitlogcommands.UpdateIssueAction(local_id)
+ uia.Parse(cnxn, project.project_name, from_user_id, lines, self.services,
+ strip_quoted_lines=True)
+ uia.Run(cnxn, self.services, allow_edit=allow_edit)
+
+
+def _MakeErrorMessageReplyTask(
+ project_addr, sender_addr, template, **callers_page_data):
+ """Return a new task to send an error message email.
+
+ Args:
+ project_addr: string email address that the inbound email was delivered to.
+ sender_addr: string email address of user who sent the email that we could
+ not process.
+ template: EZT template used to generate the email error message. The
+ first line of this generated text will be used as the subject line.
+ callers_page_data: template data dict for body of the message.
+
+ Returns:
+ A list with a single Email task that can be enqueued to
+ actually send the email.
+
+ Raises:
+    ValueError: if the template does not begin with a "Subject:" line.
+ """
+ email_data = {
+ 'project_addr': project_addr,
+ 'sender_addr': sender_addr
+ }
+ email_data.update(callers_page_data)
+
+ generated_lines = template.GetResponse(email_data)
+ subject, body = generated_lines.split('\n', 1)
+ if subject.startswith('Subject: '):
+ subject = subject[len('Subject: '):]
+ else:
+ raise ValueError('Email template does not begin with "Subject:" line.')
+
+ email_task = dict(to=sender_addr, subject=subject, body=body,
+ from_addr=emailfmt.NoReplyAddress())
+ logging.info('sending email error reply: %r', email_task)
+
+ return [email_task]
diff --git a/appengine/monorail/features/notify.py b/appengine/monorail/features/notify.py
new file mode 100644
index 0000000..b6382b6
--- /dev/null
+++ b/appengine/monorail/features/notify.py
@@ -0,0 +1,928 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Task handlers for email notifications of issue changes.
+
+Email notifications are sent when an issue changes, an issue that is blocking
+another issue changes, or a bulk edit is done. The users notified include
+the project-wide mailing list, issue owners, cc'd users, starrers,
+also-notify addresses, and users who have saved queries with email notification
+set.
+"""
+
+import collections
+import logging
+
+from third_party import ezt
+
+from google.appengine.api import mail
+from google.appengine.api import taskqueue
+
+import settings
+from features import autolink
+from features import notify_helpers
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import monorailrequest
+from framework import permissions
+from framework import template_helpers
+from framework import urls
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+TEMPLATE_PATH = framework_constants.TEMPLATE_PATH
+
+
+def PrepareAndSendIssueChangeNotification(
+ project_id, local_id, hostport, commenter_id, seq_num, send_email=True,
+ old_owner_id=framework_constants.NO_USER_SPECIFIED):
+ """Create a task to notify users that an issue has changed.
+
+ Args:
+ project_id: int ID of the project containing the changed issue.
+ local_id: Issue number for the issue that was updated and saved.
+ hostport: string domain name and port number from the HTTP request.
+ commenter_id: int user ID of the user who made the comment.
+ seq_num: int index into the comments of the new comment.
+ send_email: True if email notifications should be sent.
+ old_owner_id: optional user ID of owner before the current change took
+ effect. He/she will also be notified.
+
+ Returns nothing.
+ """
+ params = dict(
+ project_id=project_id, id=local_id, commenter_id=commenter_id,
+ seq=seq_num, hostport=hostport,
+ old_owner_id=old_owner_id, send_email=int(send_email))
+ logging.info('adding notify task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_ISSUE_CHANGE_TASK + '.do', params=params)
+
+
+def PrepareAndSendIssueBlockingNotification(
+ project_id, hostport, local_id, delta_blocker_iids,
+ commenter_id, send_email=True):
+ """Create a task to follow up on an issue blocked_on change."""
+ if not delta_blocker_iids:
+ return # No notification is needed
+
+ params = dict(
+ project_id=project_id, id=local_id, commenter_id=commenter_id,
+ hostport=hostport, send_email=int(send_email),
+ delta_blocker_iids=','.join(str(iid) for iid in delta_blocker_iids))
+
+ logging.info('adding blocking task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do', params=params)
+
+
+def SendIssueBulkChangeNotification(
+ hostport, project_id, local_ids, old_owner_ids,
+ comment_text, commenter_id, amendments, send_email, users_by_id):
+ """Create a task to follow up on an issue blocked_on change."""
+ amendment_lines = []
+ for up in amendments:
+ line = ' %s: %s' % (
+ tracker_bizobj.GetAmendmentFieldName(up),
+ tracker_bizobj.AmendmentString(up, users_by_id))
+ if line not in amendment_lines:
+ amendment_lines.append(line)
+
+ params = dict(
+ project_id=project_id, commenter_id=commenter_id,
+ hostport=hostport, send_email=int(send_email),
+ ids=','.join(str(lid) for lid in local_ids),
+ old_owner_ids=','.join(str(uid) for uid in old_owner_ids),
+ comment_text=comment_text, amendments='\n'.join(amendment_lines))
+
+ logging.info('adding bulk task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_BULK_CHANGE_TASK + '.do', params=params)
+
+
+def _EnqueueOutboundEmail(message_dict):
+ """Create a task to send one email message, all fields are in the dict.
+
+ We use a separate task for each outbound email to isolate errors.
+
+ Args:
+ message_dict: dict with all needed info for the task.
+ """
+ logging.info('Queuing an email task with params %r', message_dict)
+ taskqueue.add(
+ url=urls.OUTBOUND_EMAIL_TASK + '.do', params=message_dict,
+ queue_name='outboundemail')
+
+
+def AddAllEmailTasks(tasks):
+ """Add one GAE task for each email to be sent."""
+ notified = []
+ for task in tasks:
+ _EnqueueOutboundEmail(task)
+ notified.append(task['to'])
+
+ return notified
+
+
+class NotifyTaskBase(jsonfeed.InternalTask):
+ """Abstract base class for notification task handler."""
+
+ _EMAIL_TEMPLATE = None # Subclasses must override this.
+
+ CHECK_SECURITY_TOKEN = False
+
+ def __init__(self, *args, **kwargs):
+ super(NotifyTaskBase, self).__init__(*args, **kwargs)
+
+ if not self._EMAIL_TEMPLATE:
+ raise Exception('Subclasses must override _EMAIL_TEMPLATE.'
+ ' This class must not be called directly.')
+ # We use FORMAT_RAW for emails because they are plain text, not HTML.
+ # TODO(jrobbins): consider sending HTML formatted emails someday.
+ self.email_template = template_helpers.MonorailTemplate(
+ TEMPLATE_PATH + self._EMAIL_TEMPLATE,
+ compress_whitespace=False, base_format=ezt.FORMAT_RAW)
+
+
+class NotifyIssueChangeTask(NotifyTaskBase):
+ """JSON servlet that notifies appropriate users after an issue change."""
+
+ _EMAIL_TEMPLATE = 'tracker/issue-change-notification-email.ezt'
+
+ def HandleRequest(self, mr):
+ """Process the task to notify users after an issue change.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format which is useful just for debugging.
+ The main goal is the side-effect of sending emails.
+ """
+ project_id = mr.specified_project_id
+ if project_id is None:
+ return {
+ 'params': {},
+ 'notified': [],
+ 'message': 'Cannot proceed without a valid project ID.',
+ }
+ commenter_id = mr.GetPositiveIntParam('commenter_id')
+ seq_num = mr.seq
+ omit_ids = [commenter_id]
+ hostport = mr.GetParam('hostport')
+ old_owner_id = mr.GetPositiveIntParam('old_owner_id')
+ send_email = bool(mr.GetIntParam('send_email'))
+ params = dict(
+ project_id=project_id, local_id=mr.local_id, commenter_id=commenter_id,
+ seq_num=seq_num, hostport=hostport, old_owner_id=old_owner_id,
+ omit_ids=omit_ids, send_email=send_email)
+
+ logging.info('issue change params are %r', params)
+ project = self.services.project.GetProject(mr.cnxn, project_id)
+ config = self.services.config.GetProjectConfig(mr.cnxn, project_id)
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, project_id, mr.local_id)
+
+ if issue.is_spam:
+ # Don't send email for spam issues.
+ return {
+ 'params': params,
+ 'notified': [],
+ }
+
+ all_comments = self.services.issue.GetCommentsForIssue(
+ mr.cnxn, issue.issue_id)
+ comment = all_comments[seq_num]
+
+    # Only issues that any contributor could view are sent to mailing lists.
+ contributor_could_view = permissions.CanViewIssue(
+ set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ project, issue)
+ starrer_ids = self.services.issue_star.LookupItemStarrers(
+ mr.cnxn, issue.issue_id)
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([issue]), [old_owner_id],
+ tracker_bizobj.UsersInvolvedInComment(comment),
+ issue.cc_ids, issue.derived_cc_ids, starrer_ids, omit_ids)
+
+ # Make followup tasks to send emails
+ tasks = []
+ if send_email:
+ tasks = self._MakeEmailTasks(
+ mr.cnxn, project, issue, config, old_owner_id, users_by_id,
+ all_comments, comment, starrer_ids, contributor_could_view,
+ hostport, omit_ids)
+
+ notified = AddAllEmailTasks(tasks)
+
+ return {
+ 'params': params,
+ 'notified': notified,
+ }
+
+ def _MakeEmailTasks(
+ self, cnxn, project, issue, config, old_owner_id,
+ users_by_id, all_comments, comment, starrer_ids,
+ contributor_could_view, hostport, omit_ids):
+ """Formulate emails to be sent."""
+ detail_url = framework_helpers.IssueCommentURL(
+ hostport, project, issue.local_id, seq_num=comment.sequence)
+
+ # TODO(jrobbins): avoid the need to make a MonorailRequest object.
+ mr = monorailrequest.MonorailRequest()
+ mr.project_name = project.project_name
+ mr.project = project
+
+ # We do not autolink in the emails, so just use an empty
+ # registry of autolink rules.
+ # TODO(jrobbins): offer users an HTML email option w/ autolinks.
+ autolinker = autolink.Autolink()
+
+ email_data = {
+ # Pass open_related and closed_related into this method and to
+ # the issue view so that we can show it on new issue email.
+ 'issue': tracker_views.IssueView(issue, users_by_id, config),
+ 'summary': issue.summary,
+ 'comment': tracker_views.IssueCommentView(
+ project.project_name, comment, users_by_id,
+ autolinker, {}, mr, issue),
+ 'comment_text': comment.content,
+ 'detail_url': detail_url,
+ }
+
+ # Generate two versions of email body: members version has all
+ # full email addresses exposed.
+ body_for_non_members = self.email_template.GetResponse(email_data)
+ framework_views.RevealAllEmails(users_by_id)
+ email_data['comment'] = tracker_views.IssueCommentView(
+ project.project_name, comment, users_by_id,
+ autolinker, {}, mr, issue)
+ body_for_members = self.email_template.GetResponse(email_data)
+
+ subject = 'Issue %d in %s: %s' % (
+ issue.local_id, project.project_name, issue.summary)
+
+ commenter_email = users_by_id[comment.user_id].email
+ omit_addrs = set([commenter_email] +
+ [users_by_id[omit_id].email for omit_id in omit_ids])
+
+ auth = monorailrequest.AuthData.FromUserID(
+ cnxn, comment.user_id, self.services)
+ commenter_in_project = framework_bizobj.UserIsInProject(
+ project, auth.effective_ids)
+ noisy = tracker_helpers.IsNoisy(len(all_comments) - 1, len(starrer_ids))
+
+ # Get the transitive set of owners and Cc'd users, and their proxies.
+ reporter = [issue.reporter_id] if issue.reporter_id in starrer_ids else []
+ old_direct_owners, old_transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(cnxn, [old_owner_id]))
+ direct_owners, transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(cnxn, [issue.owner_id]))
+ der_direct_owners, der_transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, [issue.derived_owner_id]))
+ direct_comp, trans_comp = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, component_helpers.GetComponentCcIDs(issue, config))
+ direct_ccs, transitive_ccs = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(issue.cc_ids))
+ # TODO(jrobbins): This will say that the user was cc'd by a rule when it
+ # was really added to the derived_cc_ids by a component.
+ der_direct_ccs, der_transitive_ccs = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(issue.derived_cc_ids)))
+ users_by_id.update(framework_views.MakeAllUserViews(
+ cnxn, self.services.user, transitive_owners, der_transitive_owners,
+ direct_comp, trans_comp, transitive_ccs, der_transitive_ccs))
+
+ # Notify interested people according to the reason for their interest:
+ # owners, component auto-cc'd users, cc'd users, starrers, and
+ # other notification addresses.
+ reporter_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, reporter, project, issue, self.services, omit_addrs, users_by_id)
+ owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_owners + transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ old_owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, old_direct_owners + old_transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ owner_addr_perm_set = set(owner_addr_perm_list)
+ old_owner_addr_perm_list = [ap for ap in old_owner_addr_perm_list
+ if ap not in owner_addr_perm_set]
+ der_owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_owners + der_transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_ccs + transitive_ccs, project, issue,
+ self.services, omit_addrs, users_by_id)
+ der_cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_ccs + der_transitive_ccs, project, issue,
+ self.services, omit_addrs, users_by_id)
+
+ starrer_addr_perm_list = []
+ sub_addr_perm_list = []
+ if not noisy or commenter_in_project:
+ # Avoid an OOM by only notifying a number of starrers that we can handle.
+ # And, we really should limit the number of emails that we send anyway.
+ max_starrers = settings.max_starrers_to_notify
+ starrer_ids = starrer_ids[-max_starrers:]
+ # Note: starrers can never be user groups.
+ starrer_addr_perm_list = (
+ notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, starrer_ids, project, issue,
+ self.services, omit_addrs, users_by_id,
+ pref_check_function=lambda u: u.notify_starred_issue_change))
+
+ sub_addr_perm_list = _GetSubscribersAddrPermList(
+ cnxn, self.services, issue, project, config, omit_addrs,
+ users_by_id)
+
+ # Get the list of addresses to notify based on filter rules.
+ issue_notify_addr_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, omit_addrs)
+ # Get the list of addresses to notify based on project settings.
+ proj_notify_addr_list = notify_helpers.ComputeProjectNotificationAddrList(
+ project, contributor_could_view, omit_addrs)
+
+ # Give each user a bullet-list of all the reasons that apply for that user.
+ group_reason_list = [
+ (reporter_addr_perm_list, 'You reported this issue'),
+ (owner_addr_perm_list, 'You are the owner of the issue'),
+ (old_owner_addr_perm_list,
+ 'You were the issue owner before this change'),
+ (der_owner_addr_perm_list, 'A rule made you owner of the issue'),
+ (cc_addr_perm_list, 'You were specifically CC\'d on the issue'),
+ (der_cc_addr_perm_list, 'A rule CC\'d you on the issue'),
+ ]
+ group_reason_list.extend(notify_helpers.ComputeComponentFieldAddrPerms(
+ cnxn, config, issue, project, self.services, omit_addrs,
+ users_by_id))
+ group_reason_list.extend(notify_helpers.ComputeCustomFieldAddrPerms(
+ cnxn, config, issue, project, self.services, omit_addrs,
+ users_by_id))
+ group_reason_list.extend([
+ (starrer_addr_perm_list, 'You starred the issue'),
+ (sub_addr_perm_list, 'Your saved query matched the issue'),
+ (issue_notify_addr_list,
+ 'A rule was set up to notify you'),
+ (proj_notify_addr_list,
+ 'The project was configured to send all issue notifications '
+ 'to this address'),
+ ])
+ commenter_view = users_by_id[comment.user_id]
+ detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, issue.project_name, urls.ISSUE_DETAIL,
+ id=issue.local_id)
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ group_reason_list, subject, body_for_non_members, body_for_members,
+ project, hostport, commenter_view, seq_num=comment.sequence,
+ detail_url=detail_url)
+
+ return email_tasks
+
+
+class NotifyBlockingChangeTask(NotifyTaskBase):
+ """JSON servlet that notifies appropriate users after a blocking change."""
+
+ _EMAIL_TEMPLATE = 'tracker/issue-blocking-change-notification-email.ezt'
+
+ def HandleRequest(self, mr):
+ """Process the task to notify users after an issue blocking change.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format which is useful just for debugging.
+ The main goal is the side-effect of sending emails.
+ """
+ project_id = mr.specified_project_id
+ if project_id is None:
+ return {
+ 'params': {},
+ 'notified': [],
+ 'message': 'Cannot proceed without a valid project ID.',
+ }
+ commenter_id = mr.GetPositiveIntParam('commenter_id')
+ omit_ids = [commenter_id]
+ hostport = mr.GetParam('hostport')
+ delta_blocker_iids = mr.GetIntListParam('delta_blocker_iids')
+ send_email = bool(mr.GetIntParam('send_email'))
+ params = dict(
+ project_id=project_id, local_id=mr.local_id, commenter_id=commenter_id,
+ hostport=hostport, delta_blocker_iids=delta_blocker_iids,
+ omit_ids=omit_ids, send_email=send_email)
+
+ logging.info('blocking change params are %r', params)
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, project_id, mr.local_id)
+ if issue.is_spam:
+ return {
+ 'params': params,
+ 'notified': [],
+ }
+
+ upstream_issues = self.services.issue.GetIssues(
+ mr.cnxn, delta_blocker_iids)
+ logging.info('updating ids %r', [up.local_id for up in upstream_issues])
+ upstream_projects = tracker_helpers.GetAllIssueProjects(
+ mr.cnxn, upstream_issues, self.services.project)
+ upstream_configs = self.services.config.GetProjectConfigs(
+ mr.cnxn, upstream_projects.keys())
+
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user, [commenter_id])
+ commenter_view = users_by_id[commenter_id]
+
+ tasks = []
+ if send_email:
+ for upstream_issue in upstream_issues:
+ one_issue_email_tasks = self._ProcessUpstreamIssue(
+ mr.cnxn, upstream_issue,
+ upstream_projects[upstream_issue.project_id],
+ upstream_configs[upstream_issue.project_id],
+ issue, omit_ids, hostport, commenter_view)
+ tasks.extend(one_issue_email_tasks)
+
+ notified = AddAllEmailTasks(tasks)
+
+ return {
+ 'params': params,
+ 'notified': notified,
+ }
+
+ def _ProcessUpstreamIssue(
+ self, cnxn, upstream_issue, upstream_project, upstream_config,
+ issue, omit_ids, hostport, commenter_view):
+ """Compute notifications for one upstream issue that is now blocking."""
+ upstream_detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, upstream_issue.project_name, urls.ISSUE_DETAIL,
+ id=upstream_issue.local_id)
+ logging.info('upstream_detail_url = %r', upstream_detail_url)
+ detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, issue.project_name, urls.ISSUE_DETAIL,
+ id=issue.local_id)
+
+ # Only issues that any contributor could view are sent to mailing lists.
+ contributor_could_view = permissions.CanViewIssue(
+ set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ upstream_project, upstream_issue)
+
+ # Now construct the e-mail to send
+
+ # Note: we purposely do not notify users who starred an issue
+ # about changes in blocking.
+ users_by_id = framework_views.MakeAllUserViews(
+ cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([upstream_issue]), omit_ids)
+
+ is_blocking = upstream_issue.issue_id in issue.blocked_on_iids
+
+ email_data = {
+ 'issue': tracker_views.IssueView(
+ upstream_issue, users_by_id, upstream_config),
+ 'summary': upstream_issue.summary,
+ 'detail_url': upstream_detail_url,
+ 'is_blocking': ezt.boolean(is_blocking),
+ 'downstream_issue_ref': tracker_bizobj.FormatIssueRef(
+ (None, issue.local_id)),
+ 'downstream_issue_url': detail_url,
+ }
+
+ # TODO(jrobbins): Generate two versions of email body: members
+    # version has other member full email addresses exposed. But, don't
+ # expose too many as we iterate through upstream projects.
+ body = self.email_template.GetResponse(email_data)
+
+ # Just use "Re:", not Message-Id and References because a blocking
+ # notification is not a comment on the issue.
+ subject = 'Re: Issue %d in %s: %s' % (
+ upstream_issue.local_id, upstream_issue.project_name,
+ upstream_issue.summary)
+
+ omit_addrs = {users_by_id[omit_id].email for omit_id in omit_ids}
+
+ # Get the transitive set of owners and Cc'd users, and their UserView's.
+ direct_owners, trans_owners = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, [tracker_bizobj.GetOwnerId(upstream_issue)])
+ direct_ccs, trans_ccs = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(upstream_issue.cc_ids))
+ # TODO(jrobbins): This will say that the user was cc'd by a rule when it
+ # was really added to the derived_cc_ids by a component.
+ der_direct_ccs, der_transitive_ccs = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(upstream_issue.derived_cc_ids)))
+ # direct owners and Ccs are already in users_by_id
+ users_by_id.update(framework_views.MakeAllUserViews(
+ cnxn, self.services.user, trans_owners, trans_ccs, der_transitive_ccs))
+
+ owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_owners + trans_owners, upstream_project, upstream_issue,
+ self.services, omit_addrs, users_by_id)
+ cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_ccs + trans_ccs, upstream_project, upstream_issue,
+ self.services, omit_addrs, users_by_id)
+ der_cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_ccs + der_transitive_ccs, upstream_project,
+ upstream_issue, self.services, omit_addrs, users_by_id)
+ sub_addr_perm_list = _GetSubscribersAddrPermList(
+ cnxn, self.services, upstream_issue, upstream_project, upstream_config,
+ omit_addrs, users_by_id)
+
+ issue_notify_addr_list = notify_helpers.ComputeIssueNotificationAddrList(
+ upstream_issue, omit_addrs)
+ proj_notify_addr_list = notify_helpers.ComputeProjectNotificationAddrList(
+ upstream_project, contributor_could_view, omit_addrs)
+
+ # Give each user a bullet-list of all the reasons that apply for that user.
+ group_reason_list = [
+ (owner_addr_perm_list, 'You are the owner of the issue'),
+ (cc_addr_perm_list, 'You were specifically CC\'d on the issue'),
+ (der_cc_addr_perm_list, 'A rule CC\'d you on the issue'),
+ ]
+ group_reason_list.extend(notify_helpers.ComputeComponentFieldAddrPerms(
+ cnxn, upstream_config, upstream_issue, upstream_project, self.services,
+ omit_addrs, users_by_id))
+ group_reason_list.extend(notify_helpers.ComputeCustomFieldAddrPerms(
+ cnxn, upstream_config, upstream_issue, upstream_project, self.services,
+ omit_addrs, users_by_id))
+ group_reason_list.extend([
+ # Starrers are not notified of blocking changes to reduce noise.
+ (sub_addr_perm_list, 'Your saved query matched the issue'),
+ (issue_notify_addr_list,
+ 'Project filter rules were setup to notify you'),
+ (proj_notify_addr_list,
+ 'The project was configured to send all issue notifications '
+ 'to this address'),
+ ])
+
+ one_issue_email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ group_reason_list, subject, body, body, upstream_project, hostport,
+ commenter_view, detail_url=detail_url)
+
+ return one_issue_email_tasks
+
+
class NotifyBulkChangeTask(NotifyTaskBase):
  """JSON servlet that notifies appropriate users after a bulk edit."""

  _EMAIL_TEMPLATE = 'tracker/issue-bulk-change-notification-email.ezt'

  def HandleRequest(self, mr):
    """Process the task to notify users after a bulk issue edit.

    Args:
      mr: common information parsed from the HTTP request.

    Returns:
      Results dictionary in JSON format which is useful just for debugging.
      The main goal is the side-effect of sending emails.
    """
    hostport = mr.GetParam('hostport')
    project_id = mr.specified_project_id
    if project_id is None:
      return {
          'params': {},
          'notified': [],
          'message': 'Cannot proceed without a valid project ID.',
          }

    local_ids = mr.local_id_list
    old_owner_ids = mr.GetIntListParam('old_owner_ids')
    comment_text = mr.GetParam('comment_text')
    commenter_id = mr.GetPositiveIntParam('commenter_id')
    amendments = mr.GetParam('amendments')
    send_email = bool(mr.GetIntParam('send_email'))
    params = dict(
        project_id=project_id, local_ids=mr.local_id_list,
        commenter_id=commenter_id, hostport=hostport,
        old_owner_ids=old_owner_ids, comment_text=comment_text,
        send_email=send_email, amendments=amendments)

    logging.info('bulk edit params are %r', params)
    # TODO(jrobbins): For cross-project bulk edits, prefetch all relevant
    # projects and configs and pass a dict of them to subroutines.
    project = self.services.project.GetProject(mr.cnxn, project_id)
    config = self.services.config.GetProjectConfig(mr.cnxn, project_id)
    issues = self.services.issue.GetIssuesByLocalIDs(
        mr.cnxn, project_id, local_ids)
    # Spam issues are excluded: no notifications are sent for them.
    issues = [issue for issue in issues if not issue.is_spam]
    anon_perms = permissions.GetPermissions(None, set(), project)

    users_by_id = framework_views.MakeAllUserViews(
        mr.cnxn, self.services.user, [commenter_id])
    # {local_id: set(user_ids)} of everyone connected to each issue.
    ids_in_issues = {}
    # {local_id: [user_ids]} of users who starred each issue.
    starrers = {}

    # Issues that an anonymous visitor could view; used to decide whether
    # the issue may be mentioned in emails to non-user addresses.
    non_private_issues = []
    for issue, old_owner_id in zip(issues, old_owner_ids):
      # TODO(jrobbins): use issue_id consistently rather than local_id.
      starrers[issue.local_id] = self.services.issue_star.LookupItemStarrers(
          mr.cnxn, issue.issue_id)
      named_ids = set()  # users named in user-value fields that notify.
      for fd in config.field_defs:
        named_ids.update(notify_helpers.ComputeNamedUserIDsToNotify(issue, fd))
      # Expand any user groups among owner/cc/named users into their members.
      direct, indirect = self.services.usergroup.ExpandAnyUserGroups(
          mr.cnxn, list(issue.cc_ids) + list(issue.derived_cc_ids) +
          [issue.owner_id, old_owner_id, issue.derived_owner_id] +
          list(named_ids))
      ids_in_issues[issue.local_id] = set(starrers[issue.local_id])
      ids_in_issues[issue.local_id].update(direct)
      ids_in_issues[issue.local_id].update(indirect)
      ids_in_issue_needing_views = (
          ids_in_issues[issue.local_id] |
          tracker_bizobj.UsersInvolvedInIssues([issue]))
      # Only look up views for users we have not already fetched.
      new_ids_in_issue = [user_id for user_id in ids_in_issue_needing_views
                          if user_id not in users_by_id]
      users_by_id.update(
          framework_views.MakeAllUserViews(
              mr.cnxn, self.services.user, new_ids_in_issue))

      anon_can_view = permissions.CanViewIssue(
          set(), anon_perms, project, issue)
      if anon_can_view:
        non_private_issues.append(issue)

    commenter_view = users_by_id[commenter_id]
    # Never notify the user who made the change.
    omit_addrs = {commenter_view.email}

    tasks = []
    if send_email:
      email_tasks = self._BulkEditEmailTasks(
          mr.cnxn, issues, old_owner_ids, omit_addrs, project,
          non_private_issues, users_by_id, ids_in_issues, starrers,
          commenter_view, hostport, comment_text, amendments, config)
      tasks = email_tasks

    notified = AddAllEmailTasks(tasks)
    return {
        'params': params,
        'notified': notified,
        }

  def _BulkEditEmailTasks(
      self, cnxn, issues, old_owner_ids, omit_addrs, project,
      non_private_issues, users_by_id, ids_in_issues, starrers,
      commenter_view, hostport, comment_text, amendments, config):
    """Generate Email PBs to notify interested users after a bulk edit."""
    # 1. Get the user IDs of everyone who could be notified,
    # and make all their user proxies. Also, build a dictionary
    # of all the users to notify and the issues that they are
    # interested in. Also, build a dictionary of additional email
    # addresses to notify and the issues to notify them of.
    # NOTE(review): this immediately discards the users_by_id argument and
    # rebuilds views below in step 2 — confirm the parameter is still needed.
    users_by_id = {}
    ids_to_notify_of_issue = {}
    additional_addrs_to_notify_of_issue = collections.defaultdict(list)

    users_to_queries = notify_helpers.GetNonOmittedSubscriptions(
        cnxn, self.services, [project.project_id], {})
    # NOTE(review): the config argument is shadowed by this fresh fetch;
    # presumably both refer to the same project — verify.
    config = self.services.config.GetProjectConfig(
        cnxn, project.project_id)
    for issue, old_owner_id in zip(issues, old_owner_ids):
      issue_participants = set(
          [tracker_bizobj.GetOwnerId(issue), old_owner_id] +
          tracker_bizobj.GetCcIds(issue))
      # users named in user-value fields that notify.
      for fd in config.field_defs:
        issue_participants.update(
            notify_helpers.ComputeNamedUserIDsToNotify(issue, fd))
      for user_id in ids_in_issues[issue.local_id]:
        # TODO(jrobbins): implement batch GetUser() for speed.
        if not user_id:
          continue
        auth = monorailrequest.AuthData.FromUserID(
            cnxn, user_id, self.services)
        if (auth.user_pb.notify_issue_change and
            not auth.effective_ids.isdisjoint(issue_participants)):
          ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
        elif (auth.user_pb.notify_starred_issue_change and
              user_id in starrers[issue.local_id]):
          # Skip users who have starred issues that they can no longer view.
          starrer_perms = permissions.GetPermissions(
              auth.user_pb, auth.effective_ids, project)
          granted_perms = tracker_bizobj.GetGrantedPerms(
              issue, auth.effective_ids, config)
          starrer_can_view = permissions.CanViewIssue(
              auth.effective_ids, starrer_perms, project, issue,
              granted_perms=granted_perms)
          if starrer_can_view:
            ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
        logging.info(
            'ids_to_notify_of_issue[%s] = %s',
            user_id,
            [i.local_id for i in ids_to_notify_of_issue.get(user_id, [])])

      # Find all subscribers that should be notified.
      subscribers_to_consider = notify_helpers.EvaluateSubscriptions(
          cnxn, issue, users_to_queries, self.services, config)
      for sub_id in subscribers_to_consider:
        auth = monorailrequest.AuthData.FromUserID(cnxn, sub_id, self.services)
        sub_perms = permissions.GetPermissions(
            auth.user_pb, auth.effective_ids, project)
        granted_perms = tracker_bizobj.GetGrantedPerms(
            issue, auth.effective_ids, config)
        sub_can_view = permissions.CanViewIssue(
            auth.effective_ids, sub_perms, project, issue,
            granted_perms=granted_perms)
        if sub_can_view:
          ids_to_notify_of_issue.setdefault(sub_id, []).append(issue)

      # "Also notify" addresses only hear about issues anyone could view.
      if issue in non_private_issues:
        for notify_addr in issue.derived_notify_addrs:
          additional_addrs_to_notify_of_issue[notify_addr].append(issue)

    # 2. Compose an email specifically for each user.
    email_tasks = []
    needed_user_view_ids = [uid for uid in ids_to_notify_of_issue
                            if uid not in users_by_id]
    users_by_id.update(framework_views.MakeAllUserViews(
        cnxn, self.services.user, needed_user_view_ids))
    for user_id in ids_to_notify_of_issue:
      if not user_id:
        continue  # Don't try to notify NO_USER_SPECIFIED
      if users_by_id[user_id].email in omit_addrs:
        logging.info('Omitting %s', user_id)
        continue
      user_issues = ids_to_notify_of_issue[user_id]
      if not user_issues:
        continue  # user's prefs indicate they don't want these notifications
      email = self._FormatBulkIssuesEmail(
          users_by_id[user_id].email, user_issues, users_by_id,
          commenter_view, hostport, comment_text, amendments, config, project)
      email_tasks.append(email)
      # Each address is emailed at most once per bulk edit.
      omit_addrs.add(users_by_id[user_id].email)
      logging.info('about to bulk notify %s (%s) of %s',
                   users_by_id[user_id].email, user_id,
                   [issue.local_id for issue in user_issues])

    # 3. Compose one email to each notify_addr with all the issues that it
    # is supposed to be notified about.
    for addr, addr_issues in additional_addrs_to_notify_of_issue.iteritems():
      email = self._FormatBulkIssuesEmail(
          addr, addr_issues, users_by_id, commenter_view, hostport,
          comment_text, amendments, config, project)
      email_tasks.append(email)
      omit_addrs.add(addr)
      logging.info('about to bulk notify additional addr %s of %s',
                   addr, [addr_issue.local_id for addr_issue in addr_issues])

    # 4. Add in the project's issue_notify_address. This happens even if it
    # is the same as the commenter's email address (which would be an unusual
    # but valid project configuration). Only issues that any contributor could
    # view are included in emails to the all-issue-activity mailing lists.
    if (project.issue_notify_address
        and project.issue_notify_address not in omit_addrs):
      non_private_issues_live = []
      for issue in issues:
        contributor_could_view = permissions.CanViewIssue(
            set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
            project, issue)
        if contributor_could_view:
          non_private_issues_live.append(issue)

      if non_private_issues_live:
        email = self._FormatBulkIssuesEmail(
            project.issue_notify_address, non_private_issues_live,
            users_by_id, commenter_view, hostport, comment_text, amendments,
            config, project)
        email_tasks.append(email)
        omit_addrs.add(project.issue_notify_address)
        logging.info('about to bulk notify all-issues %s of %s',
                     project.issue_notify_address,
                     [issue.local_id for issue in non_private_issues])

    return email_tasks

  def _FormatBulkIssuesEmail(
      self, dest_email, issues, users_by_id, commenter_view,
      hostport, comment_text, amendments, config, _project):
    """Format an email to one user listing many issues."""
    # TODO(jrobbins): Generate two versions of email body: members
    # version has full email addresses exposed. And, use the full
    # commenter email address in the From: line when sending to
    # a member.
    subject, body = self._FormatBulkIssues(
        issues, users_by_id, commenter_view, hostport, comment_text,
        amendments, config)

    from_addr = emailfmt.NoReplyAddress(commenter_view=commenter_view)
    return dict(from_addr=from_addr, to=dest_email, subject=subject, body=body)

  def _FormatBulkIssues(
      self, issues, users_by_id, commenter_view, hostport, comment_text,
      amendments, config, body_type='email'):
    """Format a subject and body for a bulk issue edit.

    Args:
      issues: list of Issue PBs to describe; must be non-empty.
      users_by_id: dict {user_id: user_view} for involved users.
      commenter_view: UserView of the user who made the change.
      hostport: string hostname and port number for links to the site.
      comment_text: string comment entered by the user, if any.
      amendments: string describing the field changes made.
      config: ProjectIssueConfig PB for the issues' project.
      body_type: 'email' or 'feed', selects rendering in the template.

    Returns:
      A (subject, body) pair of strings.
    """
    assert body_type in ('email', 'feed')
    project_name = issues[0].project_name

    issue_views = []
    for issue in issues:
      # TODO(jrobbins): choose config from dict of prefetched configs.
      issue_views.append(tracker_views.IssueView(issue, users_by_id, config))

    email_data = {
        'hostport': hostport,
        'num_issues': len(issues),
        'issues': issue_views,
        'comment_text': comment_text,
        'commenter': commenter_view,
        'amendments': amendments,
        'body_type': body_type,
        }

    if len(issues) == 1:
      subject = 'issue %s in %s: %s' % (
          issues[0].local_id, project_name, issues[0].summary)
      # TODO(jrobbins): Look up the sequence number instead and treat this
      # more like an individual change for email threading. For now, just
      # add "Re:" because bulk edits are always replies.
      subject = 'Re: ' + subject
    else:
      subject = '%d issues changed in %s' % (len(issues), project_name)

    body = self.email_template.GetResponse(email_data)

    return subject, body
+
+
class OutboundEmailTask(jsonfeed.InternalTask):
  """JSON servlet that sends one email."""

  def HandleRequest(self, mr):
    """Process the task to send one email message.

    Args:
      mr: common information parsed from the HTTP request.

    Returns:
      Results dictionary in JSON format which is useful just for debugging.
      The main goal is the side-effect of sending emails.
    """
    # If running on a GAFYD domain, you must define an app alias on the
    # Application Settings admin web page.
    sender = mr.GetParam('from_addr')
    reply_to = mr.GetParam('reply_to')
    to = mr.GetParam('to')
    if not to:
      # Cannot proceed if we cannot create a valid EmailMessage.
      return
    references = mr.GetParam('references')
    subject = mr.GetParam('subject')
    body = mr.GetParam('body')
    html_body = mr.GetParam('html_body')

    to = self._RewriteRecipient(to)

    logging.info(
        'Email:\n sender: %s\n reply_to: %s\n to: %s\n references: %s\n '
        'subject: %s\n body: %s\n html body: %s',
        sender, reply_to, to, references, subject, body, html_body)
    message = mail.EmailMessage(
        sender=sender, to=to, subject=subject, body=body)
    if html_body:
      message.html = html_body
    if reply_to:
      message.reply_to = reply_to
    if references:
      message.headers = {'References': references}
    if settings.unit_test_mode:
      logging.info('Sending message "%s" in test mode.', message.subject)
    else:
      message.send()

    return dict(
        sender=sender, to=to, subject=subject, body=body, html_body=html_body,
        reply_to=reply_to, references=references)

  def _RewriteRecipient(self, to):
    """Rewrite the To: address per site settings, if a format is configured."""
    to_format = (settings.send_dev_email_to if settings.dev_mode
                 else settings.send_all_email_to)
    if not to_format:
      return to
    to_user, to_domain = to.split('@')
    return to_format % {'user': to_user, 'domain': to_domain}
+
+
def _GetSubscribersAddrPermList(
    cnxn, services, issue, project, config, omit_addrs, users_by_id):
  """Lookup subscribers, evaluate their saved queries, and decide to notify."""
  users_to_queries = notify_helpers.GetNonOmittedSubscriptions(
      cnxn, services, [project.project_id], omit_addrs)
  # TODO(jrobbins): need to pass through the user_id to use for "me".
  subscribers_to_notify = notify_helpers.EvaluateSubscriptions(
      cnxn, issue, users_to_queries, services, config)
  # TODO(jrobbins): expand any subscribers that are user groups.
  missing_view_ids = [uid for uid in subscribers_to_notify
                      if uid not in users_by_id]
  users_by_id.update(framework_views.MakeAllUserViews(
      cnxn, services.user, missing_view_ids))
  # Subscribers opted in by saving a query, so skip the pref check.
  return notify_helpers.ComputeIssueChangeAddressPermList(
      cnxn, subscribers_to_notify, project, issue, services, omit_addrs,
      users_by_id, pref_check_function=lambda *args: True)
diff --git a/appengine/monorail/features/notify_helpers.py b/appengine/monorail/features/notify_helpers.py
new file mode 100644
index 0000000..ff37190
--- /dev/null
+++ b/appengine/monorail/features/notify_helpers.py
@@ -0,0 +1,414 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for email notifications of issue changes."""
+
+import cgi
+import logging
+import re
+
+from django.utils.html import urlize
+
+from features import filterrules_helpers
+from features import savedqueries_helpers
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import permissions
+from framework import urls
+from proto import tracker_pb2
+from search import query2ast
+from search import searchpipeline
+from tracker import component_helpers
+from tracker import tracker_bizobj
+
+
# When sending change notification emails, choose the reply-to header and
# footer message based on three levels of the recipient's permissions
# for that issue.
REPLY_NOT_ALLOWED = 'REPLY_NOT_ALLOWED'
REPLY_MAY_COMMENT = 'REPLY_MAY_COMMENT'
REPLY_MAY_UPDATE = 'REPLY_MAY_UPDATE'

# This HTML template adds mark up which enables Gmail/Inbox to display a
# convenient link that takes users to the CL directly from the inbox without
# having to click on the email.
# Documentation for this schema.org markup is here:
# https://developers.google.com/gmail/markup/reference/go-to-action
# The two %s slots are filled with the issue detail URL and the escaped body.
HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE = """
<html>
<body>
<script type="application/ld+json">
{
  "@context": "http://schema.org",
  "@type": "EmailMessage",
  "potentialAction": {
    "@type": "ViewAction",
    "name": "View Issue",
    "url": "%s"
  },
  "description": ""
}
</script>

<div style="font-family: arial, sans-serif">%s</div>
</body>
</html>
"""
+
+
def ComputeIssueChangeAddressPermList(
    cnxn, ids_to_consider, project, issue, services, omit_addrs,
    users_by_id, pref_check_function=lambda u: u.notify_issue_change):
  """Return a list of user email addresses to notify of an issue change.

  User email addresses are determined by looking up the given user IDs
  in the given users_by_id dict.

  Args:
    cnxn: connection to SQL database.
    ids_to_consider: list of user IDs for users interested in this issue.
    project: Project PB for the project containing this issue.
    issue: Issue PB for the issue that was updated.
    services: Services.
    omit_addrs: set of strings for email addresses to not notify because
        they already know.
    users_by_id: dict {user_id: user_view} user info.
    pref_check_function: optional function to use to check if a certain
        User PB has a preference set to receive the email being sent.  It
        defaults to "If I am in the issue's owner or cc field", but it
        can be set to check "If I starred the issue."

  Returns:
    A list of tuples: [(recipient_is_member, address, reply_perm), ...] where
    reply_perm is one of REPLY_NOT_ALLOWED, REPLY_MAY_COMMENT,
    REPLY_MAY_UPDATE.
  """
  # The project config is the same for every user considered, so fetch it
  # once up front rather than on each loop iteration.
  config = services.config.GetProjectConfig(cnxn, project.project_id)
  memb_addr_perm_list = []
  for user_id in ids_to_consider:
    if user_id == framework_constants.NO_USER_SPECIFIED:
      continue
    user = services.user.GetUser(cnxn, user_id)
    # Notify people who have a pref set, or if they have no User PB
    # because the pref defaults to True.
    if user and not pref_check_function(user):
      continue
    # TODO(jrobbins): doing a bulk operation would reduce DB load.
    auth = monorailrequest.AuthData.FromUserID(cnxn, user_id, services)
    perms = permissions.GetPermissions(user, auth.effective_ids, project)
    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, auth.effective_ids, config)

    # Skip anyone who cannot view the issue at all.
    if not permissions.CanViewIssue(
        auth.effective_ids, perms, project, issue,
        granted_perms=granted_perms):
      continue

    addr = users_by_id[user_id].email
    if addr in omit_addrs:
      continue

    recipient_is_member = bool(framework_bizobj.UserIsInProject(
        project, auth.effective_ids))

    # Reply permission is only meaningful if the project accepts inbound
    # email at all.
    reply_perm = REPLY_NOT_ALLOWED
    if project.process_inbound_email:
      if permissions.CanEditIssue(auth.effective_ids, perms, project, issue):
        reply_perm = REPLY_MAY_UPDATE
      elif permissions.CanCommentIssue(
          auth.effective_ids, perms, project, issue):
        reply_perm = REPLY_MAY_COMMENT

    memb_addr_perm_list.append((recipient_is_member, addr, reply_perm))

  logging.info('For %s %s, will notify: %r',
               project.project_name, issue.local_id, memb_addr_perm_list)

  return memb_addr_perm_list
+
+
def ComputeProjectNotificationAddrList(
    project, contributor_could_view, omit_addrs):
  """Return a list of non-user addresses to notify of an issue change.

  The non-user addresses are specified by email address strings, not
  user IDs. One such address can be specified in the project PB.
  It is not assumed to have permission to see all issues.

  Args:
    project: Project PB containing the issue that was updated.
    contributor_could_view: True if any project contributor should be able to
        see the notification email, e.g., in a mailing list archive or feed.
    omit_addrs: set of strings for email addresses to not notify because
        they already know.

  Returns:
    A list of tuples: [(False, email_address, reply_permission_level), ...],
    where reply_permission_level is always REPLY_NOT_ALLOWED for now.
  """
  # Restricted issues are never sent to the project's mailing list.
  if not contributor_could_view:
    return []
  ml_addr = project.issue_notify_address
  if not ml_addr or ml_addr in omit_addrs:
    return []
  return [(False, ml_addr, REPLY_NOT_ALLOWED)]
+
+
def ComputeIssueNotificationAddrList(issue, omit_addrs):
  """Return a list of non-user addresses to notify of an issue change.

  The non-user addresses are specified by email address strings, not
  user IDs. They can be set by filter rules with the "Also notify" action.
  "Also notify" addresses are assumed to have permission to see any issue,
  even a restricted one.

  Args:
    issue: Issue PB for the issue that was updated.
    omit_addrs: set of strings for email addresses to not notify because
        they already know.

  Returns:
    A list of tuples: [(False, email_address, reply_permission_level), ...],
    where reply_permission_level is always REPLY_NOT_ALLOWED for now.
  """
  return [(False, addr, REPLY_NOT_ALLOWED)
          for addr in issue.derived_notify_addrs
          if addr not in omit_addrs]
+
+
def MakeBulletedEmailWorkItems(
    group_reason_list, subject, body_for_non_members, body_for_members,
    project, hostport, commenter_view, seq_num=None, detail_url=None):
  """Make a list of dicts describing email-sending tasks to notify users.

  Args:
    group_reason_list: list of ([(is_member, addr, perm), ...], reason)
        tuples; each group shares one reason string.
    subject: string email subject line.
    body_for_non_members: string body of email to send to non-members.
    body_for_members: string body of email to send to members.
    project: Project that contains the issue.
    hostport: string hostname and port number for links to the site.
    commenter_view: UserView for the user who made the comment.
    seq_num: optional int sequence number of the comment.
    detail_url: optional str direct link to the issue.

  Returns:
    A list of dictionaries, each with all needed info to send an individual
    email to one user. Each email contains a footer that lists all the
    reasons why that user received the email.
  """
  logging.info('group_reason_list is %r', group_reason_list)
  # Invert the grouping: collect, per recipient, every reason that applies.
  reasons_by_recipient = {}
  for addr_perm_group, reason in group_reason_list:
    for memb_addr_perm in addr_perm_group:
      reasons_by_recipient.setdefault(memb_addr_perm, []).append(reason)

  return [
      _MakeEmailWorkItem(
          memb_addr_perm, reasons, subject, body_for_non_members,
          body_for_members, project, hostport, commenter_view,
          seq_num=seq_num, detail_url=detail_url)
      for memb_addr_perm, reasons in reasons_by_recipient.iteritems()]
+
+
def _MakeEmailWorkItem(
    (recipient_is_member, to_addr, reply_perm), reasons, subject,
    body_for_non_members, body_for_members, project, hostport, commenter_view,
    seq_num=None, detail_url=None):
  """Make one email task dict for one user, includes a detailed reason.

  Args:
    (recipient_is_member, to_addr, reply_perm): one recipient tuple: project
        membership flag, email address, and one of the REPLY_* constants.
    reasons: list of strings explaining why this recipient is notified.
    subject: string email subject line.
    body_for_non_members: string email body for non-members.
    body_for_members: string email body for members.
    project: Project PB for the project containing the issue.
    hostport: string hostname and port number for links to the site.
    commenter_view: UserView for the user who made the comment.
    seq_num: optional int sequence number of the comment.
    detail_url: optional str direct link to the issue.

  Returns:
    A dict with all the fields needed to send one email.
  """
  footer = _MakeNotificationFooter(reasons, reply_perm, hostport)
  # Encode to utf-8, presumably so the str concatenation with body below
  # cannot fail on a unicode footer — TODO confirm body is always a str.
  if isinstance(footer, unicode):
    footer = footer.encode('utf-8')
  if recipient_is_member:
    logging.info('got member %r', to_addr)
    body = body_for_members
  else:
    logging.info('got non-member %r', to_addr)
    body = body_for_non_members

  logging.info('sending body + footer: %r', body + footer)
  # Replies are only possible when the recipient may reply AND the project
  # processes inbound email at all.
  can_reply_to = (
      reply_perm != REPLY_NOT_ALLOWED and project.process_inbound_email)
  from_addr = emailfmt.FormatFromAddr(
      project, commenter_view=commenter_view, reveal_addr=recipient_is_member,
      can_reply_to=can_reply_to)
  if can_reply_to:
    reply_to = '%s@%s' % (project.project_name, emailfmt.MailDomain())
  else:
    reply_to = emailfmt.NoReplyAddress()
  refs = emailfmt.GetReferences(
      to_addr, subject, seq_num,
      '%s@%s' % (project.project_name, emailfmt.MailDomain()))
  # If detail_url is specified then we can use markup to display a convenient
  # link that takes users directly to the issue without clicking on the email.
  html_body = None
  if detail_url:
    # cgi.escape the body and additionally escape single quotes which are
    # occassionally used to contain HTML attributes and event handler
    # definitions.
    html_escaped_body = cgi.escape(body + footer, quote=1).replace("'", '&#39;')
    html_body = HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
        detail_url,
        _AddHTMLTags(html_escaped_body.decode('utf-8')))
  return dict(to=to_addr, subject=subject, body=body + footer,
              html_body=html_body, from_addr=from_addr, reply_to=reply_to,
              references=refs)
+
+
def _AddHTMLTags(body):
  """Adds HTML tags in the specified email body.

  Specifically does the following:
  * Detects links and adds <a href>s around the links.
  * Substitutes <br/> for all occurrences of "\n".

  See crbug.com/582463 for context.

  Args:
    body: unicode string with the escaped plain-text email body.

  Returns:
    The body with URLs hyperlinked and newlines converted to <br/> tags.
  """
  # Convert all URLs into clickable links.
  body = urlize(body)
  # The above step converts
  # '&lt;link.com&gt;' into '<<a href="link.com&gt;">link.com&gt;</a>;' and
  # '&lt;x@y.com&gt;' into '<<a href="mailto:x@y.com&gt;">x@y.com&gt;</a>;'
  # The below regex fixes this specific problem. See
  # https://bugs.chromium.org/p/monorail/issues/detail?id=1007 for more details.
  body = re.sub(r'&lt;<a href="(|mailto:)(.*?)&gt;">(.*?)&gt;</a>;',
                r'<a href="\1\2">&lt;\3&gt;</a>', body)

  # Convert all "\n"s into "<br/>"s.
  body = body.replace("\n", "<br/>")
  return body
+
+
def _MakeNotificationFooter(reasons, reply_perm, hostport):
  """Make an informative footer for a notification email.

  Args:
    reasons: a list of strings to be used as the explanation.  Empty if no
        reason is to be given.
    reply_perm: string which is one of REPLY_NOT_ALLOWED, REPLY_MAY_COMMENT,
        REPLY_MAY_UPDATE.
    hostport: string with domain_name:port_number to be used in linking to
        the user preferences page.

  Returns:
    A string to be used as the email footer.
  """
  if not reasons:
    return ''

  # Swap the domain part of hostport for the preferred domain, keeping
  # any port suffix unchanged.
  domain, colon, port = hostport.partition(':')
  hostport = framework_helpers.GetPreferredDomain(domain) + colon + port

  prefs_url = 'https://%s%s' % (hostport, urls.USER_SETTINGS)
  footer_lines = ['-- ', 'You received this message because:']
  for idx, reason in enumerate(reasons):
    footer_lines.append(' %d. %s' % (idx + 1, reason))

  footer_lines += ['', 'You may adjust your notification preferences at:',
                   prefs_url]

  if reply_perm == REPLY_MAY_COMMENT:
    footer_lines += ['', 'Reply to this email to add a comment.']
  elif reply_perm == REPLY_MAY_UPDATE:
    footer_lines += ['', 'Reply to this email to add a comment or make updates.']

  return '\n'.join(footer_lines)
+
+
def GetNonOmittedSubscriptions(cnxn, services, project_ids, omit_addrs):
  """Get a dict {user_id: [saved_query]} of subscribers not in omit_addrs."""
  users_to_queries = services.features.GetSubscriptionsInProjects(
      cnxn, project_ids)
  user_emails = services.user.LookupUserEmails(cnxn, users_to_queries.keys())
  # Drop subscribers that are already being notified for another reason.
  omitted_ids = [user_id for user_id, email in user_emails.iteritems()
                 if email in omit_addrs]
  for user_id in omitted_ids:
    del users_to_queries[user_id]

  return users_to_queries
+
+
def EvaluateSubscriptions(
    cnxn, issue, users_to_queries, services, config):
  """Determine subscribers who have subs that match the given issue.

  Args:
    cnxn: connection to SQL database.
    issue: Issue PB for the issue that changed.
    users_to_queries: dict {user_id: [saved_query]} of subscriptions
        to evaluate against the issue.
    services: connections to backend services.
    config: ProjectIssueConfig PB for the issue's project.

  Returns:
    List of user IDs that have at least one matching immediate-mode
    saved query in the issue's project.
  """
  # Note: unlike filter rules, subscriptions see explicit & derived values.
  lower_labels = [lab.lower() for lab in tracker_bizobj.GetLabels(issue)]
  label_set = set(lower_labels)

  subscribers_to_notify = []
  for uid, saved_queries in users_to_queries.iteritems():
    for sq in saved_queries:
      if sq.subscription_mode != 'immediate':
        continue
      if issue.project_id not in sq.executes_in_project_ids:
        continue
      cond = savedqueries_helpers.SavedQueryToCond(sq)
      logging.info('evaluating query %s: %r', sq.name, cond)
      # Substitute the subscriber's own user ID for any "me" keywords.
      cond = searchpipeline.ReplaceKeywordsWithUserID(uid, cond)
      cond_ast = query2ast.ParseUserQuery(
          cond, '', query2ast.BUILTIN_ISSUE_FIELDS, config)

      if filterrules_helpers.EvalPredicate(
          cnxn, services, cond_ast, issue, label_set, config,
          tracker_bizobj.GetOwnerId(issue), tracker_bizobj.GetCcIds(issue),
          tracker_bizobj.GetStatus(issue)):
        subscribers_to_notify.append(uid)
        break  # Don't bother looking at the user's other saved queries.

  return subscribers_to_notify
+
+
def ComputeCustomFieldAddrPerms(
    cnxn, config, issue, project, services, omit_addrs, users_by_id):
  """Check the reasons to notify users named in custom fields."""
  group_reason_list = []
  for fd in config.field_defs:
    named_user_ids = ComputeNamedUserIDsToNotify(issue, fd)
    if not named_user_ids:
      continue
    # Users named in a notify-on field always get the email; skip prefs.
    named_addr_perms = ComputeIssueChangeAddressPermList(
        cnxn, named_user_ids, project, issue, services, omit_addrs,
        users_by_id, pref_check_function=lambda u: True)
    group_reason_list.append(
        (named_addr_perms, 'You are named in the %s field' % fd.field_name))

  return group_reason_list
+
+
def ComputeNamedUserIDsToNotify(issue, fd):
  """Give a list of user IDs to notify because they're in a field."""
  # Only user-valued fields configured to notify on any comment apply.
  applies = (
      fd.field_type == tracker_pb2.FieldTypes.USER_TYPE and
      fd.notify_on == tracker_pb2.NotifyTriggers.ANY_COMMENT)
  if not applies:
    return []
  return [fv.user_id for fv in issue.field_values
          if fv.field_id == fd.field_id]
+
+
def ComputeComponentFieldAddrPerms(
    cnxn, config, issue, project, services, omit_addrs, users_by_id):
  """Return [(addr_perm, reason), ...] for users auto-cc'd by components."""
  component_ids = set(issue.component_ids)
  group_reason_list = []
  for cd in config.component_defs:
    if cd.component_id not in component_ids:
      continue
    # Auto-cc includes the component's own cc list plus its ancestors'.
    cc_ids = component_helpers.GetCcIDsForComponentAndAncestors(config, cd)
    comp_addr_perms = ComputeIssueChangeAddressPermList(
        cnxn, cc_ids, project, issue, services, omit_addrs,
        users_by_id, pref_check_function=lambda u: True)
    group_reason_list.append(
        (comp_addr_perms,
         'You are auto-CC\'d on all issues in component %s' % cd.path))

  return group_reason_list
diff --git a/appengine/monorail/features/prettify.py b/appengine/monorail/features/prettify.py
new file mode 100644
index 0000000..5b12b63
--- /dev/null
+++ b/appengine/monorail/features/prettify.py
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for source code syntax highlighting."""
+
+from third_party import ezt
+
+from framework import framework_constants
+
+
+# We only attempt to do client-side syntax highlighting on files that we
+# expect to be source code in languages that we support, and that are
+# reasonably sized.
+MAX_PRETTIFY_LINES = 3000
+
+
def PrepareSourceLinesForHighlighting(file_contents):
  """Parse a file into lines for highlighting.

  Args:
    file_contents: string contents of the source code file.

  Returns:
    A list of _SourceLine objects, one for each line in the source file.
  """
  source_lines = []
  for zero_based_num, text in enumerate(file_contents.splitlines()):
    source_lines.append(_SourceLine(zero_based_num + 1, text))
  return source_lines
+
+
class _SourceLine(object):
  """Convenience class to represent one line of the source code display.

  Attributes:
    num: The line's location in the source file.
    line: String source code line to display.
  """

  def __init__(self, num, line):
    # 1-based line number within the file.
    self.num = num
    # Raw text of the source line.
    self.line = line

  def __str__(self):
    return '{}: {}'.format(self.num, self.line)
+
+
def BuildPrettifyData(num_lines, path):
  """Return page data to help configure google-code-prettify.

  Args:
    num_lines: int number of lines of source code in the file.
    path: string path to the file, or just the filename.

  Returns:
    Dictionary that can be passed to EZT to render a page.
  """
  filename_lower = path[path.rfind('/') + 1:].lower()
  ext = filename_lower[filename_lower.rfind('.') + 1:]

  # Note that '' might be a valid entry in these maps.
  prettify_class = framework_constants.PRETTIFY_CLASS_MAP.get(ext)
  if prettify_class is None:
    # Fall back to matching the whole filename, e.g. 'makefile'.
    prettify_class = framework_constants.PRETTIFY_FILENAME_CLASS_MAP.get(
        filename_lower)

  # Only prettify supported languages, and only reasonably sized files.
  should_prettify = (
      prettify_class is not None and num_lines < MAX_PRETTIFY_LINES)
  return {
      'should_prettify': ezt.boolean(should_prettify),
      'prettify_class': prettify_class,
      }
diff --git a/appengine/monorail/features/savedqueries.py b/appengine/monorail/features/savedqueries.py
new file mode 100644
index 0000000..624312d
--- /dev/null
+++ b/appengine/monorail/features/savedqueries.py
@@ -0,0 +1,70 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Page for showing a user's saved queries and subscription options."""
+
+import logging
+import time
+
+from features import savedqueries_helpers
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+
+
class SavedQueries(servlet.Servlet):
  """A page class that shows the user's saved queries."""

  _PAGE_TEMPLATE = 'features/saved-queries-page.ezt'

  def AssertBasePermission(self, mr):
    """Allow only the viewed user, or a site admin, to edit saved queries."""
    super(SavedQueries, self).AssertBasePermission(mr)
    if mr.auth.user_pb.is_site_admin:
      return
    if mr.viewed_user_auth.user_id == mr.auth.user_id:
      return
    raise permissions.PermissionException(
        'User not allowed to edit this user\'s saved queries')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    saved_queries = self.services.features.GetSavedQueriesByUserID(
        mr.cnxn, mr.viewed_user_auth.user_id)
    saved_query_views = []
    for idx, sq in enumerate(saved_queries):
      saved_query_views.append(savedqueries_helpers.SavedQueryView(
          sq, idx + 1, mr.cnxn, self.services.project))

    # Form rows for brand-new queries are numbered after the existing ones.
    unused_indexes = range(
        len(saved_queries) + 1, savedqueries_helpers.MAX_QUERIES + 1)
    return {
        'canned_queries': saved_query_views,
        'new_query_indexes': unused_indexes,
        'max_queries': savedqueries_helpers.MAX_QUERIES,
        'user_tab_mode': 'st4',
        }

  def ProcessFormData(self, mr, post_data):
    """Validate and store the user's saved queries.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.
    """
    existing_queries = savedqueries_helpers.ParseSavedQueries(
        mr.cnxn, post_data, self.services.project)
    added_queries = savedqueries_helpers.ParseSavedQueries(
        mr.cnxn, post_data, self.services.project, prefix='new_')
    self.services.features.UpdateUserSavedQueries(
        mr.cnxn, mr.viewed_user_auth.user_id,
        existing_queries + added_queries)

    return framework_helpers.FormatAbsoluteURL(
        mr, '/u/%s%s' % (mr.viewed_username, urls.SAVED_QUERIES),
        include_project=False, saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/features/savedqueries_helpers.py b/appengine/monorail/features/savedqueries_helpers.py
new file mode 100644
index 0000000..036b43e
--- /dev/null
+++ b/appengine/monorail/features/savedqueries_helpers.py
@@ -0,0 +1,110 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Utility functions and classes for dealing with saved queries.
+
+Saved queries can be part of the project issue config, where they are
+called "canned queries". Or, they can be personal saved queries that
+may appear in the search scope drop-down, on the user's dashboard, or
+in the user's subscription.
+"""
+
+import logging
+import re
+
+from framework import template_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
# Maximum number of saved queries that a form may define.
MAX_QUERIES = 100


def ParseSavedQueries(cnxn, post_data, project_service, prefix=''):
  """Parse form data for the Saved Queries part of an admin form.

  Args:
    cnxn: connection to SQL database.
    post_data: HTML form data from the request.
    project_service: persistence layer for project data.
    prefix: optional string prefixed onto the form field names, e.g.,
        'new_' for rows where the user may add brand-new queries.

  Returns:
    A list of SavedQuery PBs built from the submitted form fields.
  """
  saved_queries = []
  for i in range(1, MAX_QUERIES + 1):
    name_field = '%ssavedquery_name_%s' % (prefix, i)
    if name_field not in post_data:
      continue  # skip any entries that are blank or have no predicate.

    name = post_data[name_field].strip()
    if not name:
      continue  # skip any blank entries

    id_field = '%ssavedquery_id_%s' % (prefix, i)
    if id_field in post_data:
      query_id = int(post_data[id_field])
    else:
      query_id = None  # a new query_id will be generated by the DB.

    project_names_str = post_data.get(
        '%ssavedquery_projects_%s' % (prefix, i), '')
    # The leading ']' inside the character class is a literal ']', so
    # project lists may be separated by ']', ',', ';', or whitespace.
    project_names = [pn.strip().lower()
                     for pn in re.split(r'[],;\s]+', project_names_str)
                     if pn.strip()]
    # list() keeps this a concrete list even when dict.values() is a view.
    project_ids = list(project_service.LookupProjectIDs(
        cnxn, project_names).values())

    base_id = int(post_data['%ssavedquery_base_%s' % (prefix, i)])
    query = post_data['%ssavedquery_query_%s' % (prefix, i)].strip()

    subscription_mode_field = '%ssavedquery_sub_mode_%s' % (prefix, i)
    if subscription_mode_field in post_data:
      subscription_mode = post_data[subscription_mode_field].strip()
    else:
      subscription_mode = None

    saved_queries.append(tracker_bizobj.MakeSavedQuery(
        query_id, name, base_id, query, subscription_mode=subscription_mode,
        executes_in_project_ids=project_ids))

  return saved_queries
+
+
class SavedQueryView(template_helpers.PBProxy):
  """Wrapper class that makes it easier to display SavedQuery via EZT."""

  def __init__(self, sq, idx, cnxn, project_service):
    """Store relevant values for later display by EZT.

    Args:
      sq: A SavedQuery protocol buffer.
      idx: Int index of this saved query in the list.
      cnxn: connection to SQL database.
      project_service: persistence layer for project data.
    """
    super(SavedQueryView, self).__init__(sq)

    self.idx = idx
    # Find the display name of the built-in query this one is based on.
    base_query_name = 'All issues'
    for qid, name, _base_id, _query in tracker_constants.DEFAULT_CANNED_QUERIES:
      if qid == sq.base_query_id:
        base_query_name = name

    if cnxn:
      project_name_dict = project_service.LookupProjectNames(
          cnxn, sq.executes_in_project_ids)
      self.projects = ', '.join(sorted(project_name_dict.values()))
    else:
      self.projects = ''

    self.docstring = '[%s] %s' % (base_query_name, sq.query)
+
+
def SavedQueryToCond(saved_query):
  """Convert a SavedQuery PB to a user query condition string.

  The built-in base query and the user's own terms are joined with a
  space; surrounding whitespace is trimmed from the combined string.
  """
  base_cond = tracker_bizobj.GetBuiltInQuery(saved_query.base_query_id)
  return ('%s %s' % (base_cond, saved_query.query)).strip()
+
+
def SavedQueryIDToCond(cnxn, features_service, query_id):
  """Convert a canned/saved query ID to a user query condition string."""
  built_in = tracker_bizobj.GetBuiltInQuery(query_id)
  if built_in:
    return built_in

  # Non-built-in IDs refer to queries stored by the features service.
  return SavedQueryToCond(features_service.GetSavedQuery(cnxn, query_id))
diff --git a/appengine/monorail/features/spammodel.py b/appengine/monorail/features/spammodel.py
new file mode 100644
index 0000000..3f292a9
--- /dev/null
+++ b/appengine/monorail/features/spammodel.py
@@ -0,0 +1,74 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+""" Tasks and handlers for maintaining the spam classifier model. These
+ should be run via cron and task queue rather than manually.
+"""
+
+import cgi
+import csv
+import logging
+import webapp2
+import cloudstorage
+import json
+
+from datetime import date
+from datetime import datetime
+from datetime import timedelta
+
+from framework import servlet
+from framework import urls
+from google.appengine.api import taskqueue
+from google.appengine.api import app_identity
+from framework import gcs_helpers
+
class TrainingDataExport(webapp2.RequestHandler):
  """Trigger a training data export task"""
  def get(self):
    # Enqueue the real export work on the task queue so this cron
    # request can return immediately.
    logging.info("Training data export requested.")
    taskqueue.add(url=urls.SPAM_DATA_EXPORT_TASK + '.do')

# Maximum number of labeled issues fetched per export batch.
BATCH_SIZE = 100
+
class TrainingDataExportTask(servlet.Servlet):
  """Export any human-labeled ham or spam from the previous day. These
  records will be used by a subsequent task to create an updated model.
  """
  # Task-queue requests do not carry the XSRF token that normal posts do.
  CHECK_SECURITY_TOKEN = False

  def ProcessFormData(self, mr, post_data):
    """Write the last day's labeled issues as CSV to a dated GCS file.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request (not used here).
    """
    logging.info("Training data export initiated.")

    bucket_name = app_identity.get_default_gcs_bucket_name()
    date_str = date.today().isoformat()
    export_target_path = '/' + bucket_name + '/spam_training_data/' + date_str
    total_issues = 0

    with cloudstorage.open(export_target_path, mode='w',
        content_type=None, options=None, retry_params=None) as gcs_file:

      csv_writer = csv.writer(gcs_file, delimiter=',', quotechar='"',
          quoting=csv.QUOTE_ALL, lineterminator='\n')

      # Only issues labeled since this time yesterday are exported.
      since = datetime.now() - timedelta(days=1)

      # TODO: Comments, and further pagination
      issues, first_comments, _count = (
          self.services.spam.GetTrainingIssues(
              mr.cnxn, self.services.issue, since, offset=0, limit=BATCH_SIZE))
      total_issues += len(issues)
      for issue in issues:
        # Cloud Prediction API doesn't allow newlines in the training data.
        fixed_summary = issue.summary.replace('\r\n', ' ')
        fixed_comment = first_comments[issue.issue_id].replace('\r\n', ' ')

        csv_writer.writerow([
            'spam' if issue.is_spam else 'ham',
            fixed_summary, fixed_comment,
        ])

    # Report how many rows were written, for the task-queue log/caller.
    self.response.body = json.dumps({
      "exported_issue_count": total_issues,
    })
+
diff --git a/appengine/monorail/features/stars.py b/appengine/monorail/features/stars.py
new file mode 100644
index 0000000..83d7e1c
--- /dev/null
+++ b/appengine/monorail/features/stars.py
@@ -0,0 +1,43 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This is a starring servlet for users and projects."""
+
+import logging
+
+from framework import jsonfeed
+from framework import monorailrequest
+
# Values accepted for the 'scope' parameter of the set-stars AJAX request.
USER_STARS_SCOPE = 'users'
PROJECT_STARS_SCOPE = 'projects'
+
+
class SetStarsFeed(jsonfeed.JsonFeed):
  """Process an AJAX request to (un)set a star on a project or user."""

  def HandleRequest(self, mr):
    """Set or clear one star, then echo the new state back as JSON."""
    starrer_id = mr.auth.user_id
    item = mr.GetParam('item')  # a project name or a user ID number
    scope = mr.GetParam('scope')
    starred = bool(mr.GetIntParam('starred'))
    logging.info('Handling user set star request: %r %r %r %r',
                 starrer_id, item, scope, starred)

    if scope == PROJECT_STARS_SCOPE:
      project = self.services.project.GetProjectByName(mr.cnxn, item)
      self.services.project_star.SetStar(
          mr.cnxn, project.project_id, starrer_id, starred)
    elif scope == USER_STARS_SCOPE:
      self.services.user_star.SetStar(
          mr.cnxn, int(item), starrer_id, starred)
    else:
      raise monorailrequest.InputException('unexpected star scope: %s' % scope)

    return {'starred': starred}
diff --git a/appengine/monorail/features/test/__init__.py b/appengine/monorail/features/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/features/test/__init__.py
diff --git a/appengine/monorail/features/test/activities_test.py b/appengine/monorail/features/test/activities_test.py
new file mode 100644
index 0000000..2d738a4
--- /dev/null
+++ b/appengine/monorail/features/test/activities_test.py
@@ -0,0 +1,154 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.features.activities."""
+
+import unittest
+
+import mox
+
+from features import activities
+from framework import framework_views
+from framework import profiler
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
class ActivitiesTest(unittest.TestCase):
  """Tests for activities.GatherUpdatesData() over fake services."""

  def setUp(self):
    self.services = service_manager.Services(
        config=fake.ConfigService(),
        issue=fake.IssueService(),
        user=fake.UserService(),
        project=fake.ProjectService(),
    )

    self.project_name = 'proj'
    self.project_id = 987
    self.project = self.services.project.TestAddProject(
        self.project_name, project_id=self.project_id,
        process_inbound_email=True)

    self.issue_id = 11
    self.issue_local_id = 100
    self.issue = tracker_pb2.Issue()
    self.issue.issue_id = self.issue_id
    self.issue.project_id = self.project_id
    self.issue.local_id = self.issue_local_id
    self.services.issue.TestAddIssue(self.issue)

    self.comment_id = 123
    self.comment_timestamp = 120
    self.user_id = 2
    self.mr_after = 1234

    self.mox = mox.Mox()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testActivities_NoUpdates(self):
    # With no comments stored, the page should report no activities.
    mr = testing_helpers.MakeMonorailRequest()
    updates_data = activities.GatherUpdatesData(
        self.services, mr, profiler.Profiler(), project_ids=[self.project_id],
        user_ids=None, ending=None, updates_page_url=None, autolink=None,
        highlight=None)

    self.assertIsNone(updates_data['pagination'])
    self.assertIsNone(updates_data['no_stars'])
    self.assertIsNone(updates_data['updates_data'])
    self.assertEqual('yes', updates_data['no_activities'])
    self.assertIsNone(updates_data['ending_type'])

  def createAndAssertUpdates(self, project_ids=None, user_ids=None,
                             ascending=True):
    """Mock the comment query, call GatherUpdatesData, check the results.

    Builds the exact SQL clause list that GatherUpdatesData is expected to
    pass to IssueService.GetComments, so any change in the query shape
    fails the mox verification below.
    """
    user = user_pb2.MakeUser()
    comment_1 = tracker_pb2.IssueComment(
        id=self.comment_id, issue_id=self.issue_id,
        project_id=self.project_id, user_id=self.user_id,
        content='this is the 1st comment',
        timestamp=self.comment_timestamp)
    self.mox.StubOutWithMock(self.services.issue, 'GetComments')

    created_order = 'created'
    field = 'project_id' if project_ids else 'commenter_id'
    where_clauses = [('Issue.id = Comment.issue_id', [])]
    if project_ids:
      where_clauses.append(('Comment.project_id IN (%s)', project_ids))
    if user_ids:
      where_clauses.append(('Comment.commenter_id IN (%s)', user_ids))
    if ascending:
      where_clauses.append(('created > %s', [self.mr_after]))
    else:
      created_order += ' DESC'
    self.services.issue.GetComments(
        mox.IgnoreArg(), deleted_by=None,
        joins=[('Issue', [])], limit=activities.UPDATES_PER_PAGE + 1,
        order_by=[(created_order, [])],
        use_clause='USE INDEX (%s) USE INDEX FOR ORDER BY (%s)' % (field,
                                                                   field),
        where=where_clauses).AndReturn([comment_1])

    self.mox.StubOutWithMock(framework_views, 'MakeAllUserViews')
    framework_views.MakeAllUserViews(
        mox.IgnoreArg(), self.services.user, [self.user_id], []).AndReturn(
            {self.user_id: user})

    self.mox.ReplayAll()

    mr = testing_helpers.MakeMonorailRequest()
    if ascending:
      mr.after = self.mr_after

    updates_page_url='testing/testing'
    updates_data = activities.GatherUpdatesData(
        self.services, mr, profiler.Profiler(), project_ids=project_ids,
        user_ids=user_ids, ending=None, autolink=None,
        highlight='highlightme', updates_page_url=updates_page_url)
    self.mox.VerifyAll()

    if mr.after:
      # Ascending requests get next/prev pagination links keyed off of
      # the returned comment's timestamp.
      pagination = updates_data['pagination']
      self.assertIsNone(pagination.last)
      self.assertEquals('%s?before=%d' % (updates_page_url.split('/')[-1],
                                          self.comment_timestamp),
                        pagination.next_url)
      self.assertEquals('%s?after=%d' % (updates_page_url.split('/')[-1],
                                         self.comment_timestamp),
                        pagination.prev_url)

    activity_view = updates_data['updates_data'].older[0]
    self.assertEqual(
        '<a class="ot-issue-link"\n href="/p//issues/detail?id=%s#c_id%s"\n >'
        'issue %s</a>\n\n()\n\n\n\n\n \n commented on' % (
            self.issue_local_id, self.comment_id, self.issue_local_id),
        activity_view.escaped_title)
    self.assertEqual(
        '<span class="ot-issue-comment">\n this is the 1st comment\n</span>',
        activity_view.escaped_body)
    self.assertEqual('highlightme', activity_view.highlight)
    self.assertEqual(self.project_name, activity_view.project_name)

  def testActivities_AscendingProjectUpdates(self):
    self.createAndAssertUpdates(project_ids=[self.project_id], ascending=True)

  def testActivities_DescendingProjectUpdates(self):
    self.createAndAssertUpdates(project_ids=[self.project_id], ascending=False)

  def testActivities_AscendingUserUpdates(self):
    self.createAndAssertUpdates(user_ids=[self.user_id], ascending=True)

  def testActivities_DescendingUserUpdates(self):
    self.createAndAssertUpdates(user_ids=[self.user_id], ascending=False)

  def testActivities_SpecifyProjectAndUser(self):
    self.createAndAssertUpdates(
        project_ids=[self.project_id], user_ids=[self.user_id], ascending=False)
+
diff --git a/appengine/monorail/features/test/autolink_test.py b/appengine/monorail/features/test/autolink_test.py
new file mode 100644
index 0000000..b4facd6
--- /dev/null
+++ b/appengine/monorail/features/test/autolink_test.py
@@ -0,0 +1,588 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the autolink feature."""
+
+import re
+import unittest
+
+from features import autolink
+from framework import template_helpers
+from proto import tracker_pb2
+from testing import fake
+from testing import testing_helpers
+
+
# Toy patterns used to exercise the autolink registration machinery below.
SIMPLE_EMAIL_RE = re.compile(r'([a-z]+)@([a-z]+)\.com')
OVER_AMBITIOUS_DOMAIN_RE = re.compile(r'([a-z]+)\.(com|net|org)')
+
+
class AutolinkTest(unittest.TestCase):
  """Tests the generic autolink component registry and substitution."""

  def RegisterEmailCallbacks(self, aa):
    """Register a fake component that linkifies trusted email addresses."""

    def LookupUsers(_mr, all_addresses):
      """Return user objects for only users who are at trusted domains."""
      return [addr for addr in all_addresses
              if addr.endswith('@example.com')]

    def Match2Addresses(_mr, match):
      return [match.group(0)]

    def MakeMailtoLink(_mr, match, comp_ref_artifacts):
      # Addresses resolved by LookupUsers become mailto: links; all
      # others are rendered as plain "user AT domain.com" text.
      email = match.group(0)
      if email in comp_ref_artifacts:
        return [template_helpers.TextRun(
            tag='a', href='mailto:%s' % email, content=email)]
      else:
        return [template_helpers.TextRun('%s AT %s.com' % match.group(1, 2))]

    aa.RegisterComponent('testcomp',
                         LookupUsers,
                         Match2Addresses,
                         {SIMPLE_EMAIL_RE: MakeMailtoLink})

  def RegisterDomainCallbacks(self, aa):
    """Register a fake component that hyperlinks bare domain names."""

    def LookupDomains(_mr, _all_refs):
      """Return business objects for only real domains. Always just True."""
      return True  # We don't have domain business objects, accept anything.

    def Match2Domains(_mr, match):
      return [match.group(0)]

    def MakeHyperLink(_mr, match, _comp_ref_artifacts):
      domain = match.group(0)
      return [template_helpers.TextRun(tag='a', href=domain, content=domain)]

    aa.RegisterComponent('testcomp2',
                         LookupDomains,
                         Match2Domains,
                         {OVER_AMBITIOUS_DOMAIN_RE: MakeHyperLink})

  def setUp(self):
    self.aa = autolink.Autolink()
    self.RegisterEmailCallbacks(self.aa)
    self.comment1 = ('Feel free to contact me at a@other.com, '
                     'or b@example.com, or c@example.org.')
    self.comment2 = 'no matches in this comment'
    self.comment3 = 'just matches with no ref: a@other.com, c@example.org'
    self.comments = [self.comment1, self.comment2, self.comment3]

  def testRegisterComponent(self):
    self.assertIn('testcomp', self.aa.registry)

  def testGetAllReferencedArtifacts(self):
    all_ref_artifacts = self.aa.GetAllReferencedArtifacts(
        None, self.comments)

    self.assertIn('testcomp', all_ref_artifacts)
    comp_refs = all_ref_artifacts['testcomp']
    # Only the trusted-domain address should have been resolved.
    self.assertIn('b@example.com', comp_refs)
    self.assertTrue(len(comp_refs) == 1)

  def testMarkupAutolinks(self):
    all_ref_artifacts = self.aa.GetAllReferencedArtifacts(None, self.comments)
    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment1)], all_ref_artifacts)
    self.assertEqual('Feel free to contact me at ', result[0].content)
    self.assertEqual('a AT other.com', result[1].content)
    self.assertEqual(', or ', result[2].content)
    self.assertEqual('b@example.com', result[3].content)
    self.assertEqual('mailto:b@example.com', result[3].href)
    self.assertEqual(', or c@example.org.', result[4].content)

    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment2)], all_ref_artifacts)
    self.assertEqual('no matches in this comment', result[0].content)

    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment3)], all_ref_artifacts)
    self.assertEqual('just matches with no ref: ', result[0].content)
    self.assertEqual('a AT other.com', result[1].content)
    self.assertEqual(', c@example.org', result[2].content)

  def testNonnestedAutolinks(self):
    """Test that when a substitution yields plain text, others are applied."""
    self.RegisterDomainCallbacks(self.aa)
    all_ref_artifacts = self.aa.GetAllReferencedArtifacts(None, self.comments)
    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment1)], all_ref_artifacts)
    self.assertEqual('Feel free to contact me at ', result[0].content)
    self.assertEqual('a AT ', result[1].content)
    self.assertEqual('other.com', result[2].content)
    self.assertEqual('other.com', result[2].href)
    self.assertEqual(', or ', result[3].content)
    self.assertEqual('b@example.com', result[4].content)
    self.assertEqual('mailto:b@example.com', result[4].href)
    self.assertEqual(', or c@', result[5].content)
    self.assertEqual('example.org', result[6].content)
    self.assertEqual('example.org', result[6].href)
    self.assertEqual('.', result[7].content)

    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment2)], all_ref_artifacts)
    self.assertEqual('no matches in this comment', result[0].content)
    result = self.aa.MarkupAutolinks(
        None, [template_helpers.TextRun(self.comment3)], all_ref_artifacts)
    self.assertEqual('just matches with no ref: ', result[0].content)
    self.assertEqual('a AT ', result[1].content)
    self.assertEqual('other.com', result[2].content)
    self.assertEqual('other.com', result[2].href)
    self.assertEqual(', c@', result[3].content)
    self.assertEqual('example.org', result[4].content)
    self.assertEqual('example.org', result[4].href)
+
+
class URLAutolinkTest(unittest.TestCase):
  """Tests linkification of URLs embedded in comment text."""

  def DoLinkify(self, content):
    """Calls the linkify method and returns the result.

    Args:
      content: string with a hyperlink.

    Returns:
      A list of TextRuns with some runs will have the embedded URL hyperlinked.
      Or, None if no link was detected.
    """
    match = autolink._IS_A_LINK_RE.search(content)
    if not match:
      return None

    replacement_runs = autolink.Linkify(None, match, None)
    return replacement_runs

  def testLinkify(self):
    """Test that given url is autolinked when put in the given context."""
    # Disallow the linking of URLs with user names and passwords.
    test = 'http://user:pass@www.yahoo.com'
    result = self.DoLinkify('What about %s' % test)
    self.assertEqual(None, result[0].tag)
    self.assertEqual(None, result[0].href)
    self.assertEqual(test, result[0].content)

    # Disallow the linking of non-HTTP(S) links
    test = 'nntp://news.google.com'
    result = self.DoLinkify('%s' % test)
    self.assertEqual(None, result)

    # Disallow the linking of file links
    test = 'file://C:/Windows/System32/cmd.exe'
    result = self.DoLinkify('%s' % test)
    self.assertEqual(None, result)

    # Test some known URLs
    test = 'http://www.example.com'
    result = self.DoLinkify('What about %s' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

  def testLinkify_FTP(self):
    """Test that FTP urls are linked."""
    # Check for a standard ftp link
    test = 'ftp://ftp.example.com'
    result = self.DoLinkify('%s' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

  def testLinkify_Context(self):
    """Test that surrounding syntax is not considered part of the url."""
    test = 'http://www.example.com'

    # Check for a link followed by a comma at end of English phrase.
    result = self.DoLinkify('The URL %s, points to a great website.' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual(',', result[1].content)

    # Check for a link followed by a period at end of English sentence.
    result = self.DoLinkify('The best site ever, %s.' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('.', result[1].content)

    # Check for a link in parentheses (), [], or {}
    result = self.DoLinkify('My fav site (%s).' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual(').', result[1].content)

    result = self.DoLinkify('My fav site [%s].' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('].', result[1].content)

    result = self.DoLinkify('My fav site {%s}.' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('}.', result[1].content)

    # Check for a link with trailing colon
    result = self.DoLinkify('Hit %s: you will love it.' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual(':', result[1].content)

    # Check link with commas in query string, but don't include trailing comma.
    test = 'http://www.example.com/?v=1,2,3'
    result = self.DoLinkify('Try %s, ok?' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

    # Check link surrounded by angle-brackets, or quotes.
    result = self.DoLinkify('<%s>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('>', result[1].content)

    result = self.DoLinkify('"%s"' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('"', result[1].content)

    # Check link with embedded quotes.
    test = 'http://www.example.com/?q="a+b+c"'
    result = self.DoLinkify('Try %s, ok?' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual(',', result[1].content)

    # Check link with embedded parens.
    test = 'http://www.example.com/funky(foo)and(bar).asp'
    result = self.DoLinkify('Try %s, ok?' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual(',', result[1].content)

    test = 'http://www.example.com/funky(foo)and(bar).asp'
    result = self.DoLinkify('My fav site <%s>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('>', result[1].content)

    # Check link with embedded brackets and braces.
    test = 'http://www.example.com/funky[foo]and{bar}.asp'
    result = self.DoLinkify('My fav site <%s>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('>', result[1].content)

    # Check link with mismatched delimiters inside it or outside it.
    test = 'http://www.example.com/funky"(foo]and>bar}.asp'
    result = self.DoLinkify('My fav site <%s>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('>', result[1].content)

    test = 'http://www.example.com/funky"(foo]and>bar}.asp'
    result = self.DoLinkify('My fav site {%s' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

    test = 'http://www.example.com/funky"(foo]and>bar}.asp'
    result = self.DoLinkify('My fav site %s}' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('}', result[1].content)

    # Link as part of an HTML example.
    test = 'http://www.example.com/'
    result = self.DoLinkify('<a href="%s">' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)
    self.assertEqual('">', result[1].content)

    # Link nested in an HTML tag.
    result = self.DoLinkify('<span>%s</span>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

    # Link followed by HTML tag - same bug as above.
    result = self.DoLinkify('%s<span>foo</span>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

    # Link followed by unescaped HTML tag.
    result = self.DoLinkify('%s<span>foo</span>' % test)
    self.assertEqual(test, result[0].href)
    self.assertEqual(test, result[0].content)

  def testLinkify_UnicodeContext(self):
    """Test that unicode context does not mess up the link."""
    test = 'http://www.example.com'

    # This string has a non-breaking space \xa0.
    result = self.DoLinkify(u'The correct RFC link is\xa0%s' % test)
    self.assertEqual(test, result[0].content)
    self.assertEqual(test, result[0].href)

  def testLinkify_UnicodeLink(self):
    """Test that unicode in a link is OK."""
    test = u'http://www.example.com?q=division\xc3\xb7sign'

    # The link itself contains non-ASCII bytes in its query string.
    result = self.DoLinkify(u'The unicode link is %s' % test)
    self.assertEqual(test, result[0].content)
    self.assertEqual(test, result[0].href)

  def testLinkify_LinkTextEscapingDisabled(self):
    """Test that url-like things that miss validation aren't linked."""
    # Link matched by the regex but not accepted by the validator.
    test = 'http://crash/reportdetail?reportid=35aa03e04772358b'
    result = self.DoLinkify('<span>%s</span>' % test)
    self.assertEqual(None, result[0].href)
    self.assertEqual(test, result[0].content)
+
+
def _Issue(project_name, local_id, summary, status):
  """Return a minimal Issue PB populated with the given field values."""
  issue = tracker_pb2.Issue()
  issue.project_name, issue.local_id = project_name, local_id
  issue.summary, issue.status = summary, status
  return issue
+
+
+class TrackerAutolinkTest(unittest.TestCase):
+
+ COMMENT_TEXT = (
+ 'This relates to issue 1, issue #2, and issue3 \n'
+ 'as well as bug 4, bug #5, and bug6 \n'
+ 'with issue other-project:12 and issue other-project#13. \n'
+ 'Watch out for issues 21, 22, and 23 with oxford comma. \n'
+ 'And also bugs 31, 32 and 33 with no oxford comma\n'
+ 'We do not match when an issue\n'
+ '999. Is split across lines.'
+ )
+
+ def testExtractProjectAndIssueId(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=1')
+ ref_batches = []
+ for match in autolink._ISSUE_REF_RE.finditer(self.COMMENT_TEXT):
+ new_refs = autolink.ExtractProjectAndIssueIds(mr, match)
+ ref_batches.append(new_refs)
+
+ self.assertEquals(
+ ref_batches,
+ [[(None, 1)],
+ [(None, 2)],
+ [(None, 3)],
+ [(None, 4)],
+ [(None, 5)],
+ [(None, 6)],
+ [('other-project', 12)],
+ [('other-project', 13)],
+ [(None, 21), (None, 22), (None, 23)],
+ [(None, 31), (None, 32), (None, 33)],
+ ])
+
+ def DoReplaceIssueRef(self, content):
+ """Calls the ReplaceIssueRef method and returns the result.
+
+ Args:
+ content: string that may have a textual reference to an issue.
+
+ Returns:
+ A list of TextRuns with some runs will have the reference hyperlinked.
+ Or, None if no reference detected.
+ """
+ match = autolink._ISSUE_REF_RE.search(content)
+ if not match:
+ return None
+
+ open_dict = {'proj:1': _Issue('proj', 1, 'summary-PROJ-1', 'New'),
+ # Assume there is no issue 3 in PROJ
+ 'proj:4': _Issue('proj', 4, 'summary-PROJ-4', 'New'),
+ 'proj:6': _Issue('proj', 6, 'summary-PROJ-6', 'New'),
+ 'other-project:12': _Issue('other-project', 12,
+ 'summary-OP-12', 'Accepted'),
+ }
+ closed_dict = {'proj:2': _Issue('proj', 2, 'summary-PROJ-2', 'Fixed'),
+ 'proj:5': _Issue('proj', 5, 'summary-PROJ-5', 'Fixed'),
+ 'other-project:13': _Issue('other-project', 13,
+ 'summary-OP-12', 'Invalid'),
+ }
+ comp_ref_artifacts = (open_dict, closed_dict,)
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/detail?r=1')
+ replacement_runs = autolink.ReplaceIssueRef(mr, match, comp_ref_artifacts)
+ return replacement_runs
+
+ def testReplaceIssueRef(self):
+
+ result = self.DoReplaceIssueRef('This relates to issue 1')
+ self.assertEquals('/p/proj/issues/detail?id=1', result[0].href)
+ self.assertEquals('issue 1', result[0].content)
+ self.assertEquals(None, result[0].css_class)
+ self.assertEquals('summary-PROJ-1', result[0].title)
+ self.assertEquals('a', result[0].tag)
+
+ result = self.DoReplaceIssueRef(', issue #2')
+ self.assertEquals('/p/proj/issues/detail?id=2', result[0].href)
+ self.assertEquals('issue #2', result[0].content)
+ self.assertEquals('closed_ref', result[0].css_class)
+ self.assertEquals('summary-PROJ-2', result[0].title)
+ self.assertEquals('a', result[0].tag)
+
+ result = self.DoReplaceIssueRef(', and issue3 ')
+ self.assertEquals(None, result[0].href) # There is no issue 3
+ self.assertEquals('issue3', result[0].content)
+
+ result = self.DoReplaceIssueRef('as well as bug 4')
+ self.assertEquals('/p/proj/issues/detail?id=4', result[0].href)
+ self.assertEquals('bug 4', result[0].content)
+
+ result = self.DoReplaceIssueRef(', bug #5, ')
+ self.assertEquals('/p/proj/issues/detail?id=5', result[0].href)
+ self.assertEquals('bug #5', result[0].content)
+
+ result = self.DoReplaceIssueRef('and bug6')
+ self.assertEquals('/p/proj/issues/detail?id=6', result[0].href)
+ self.assertEquals('bug6', result[0].content)
+
+ result = self.DoReplaceIssueRef('with issue other-project:12')
+ self.assertEquals('/p/other-project/issues/detail?id=12', result[0].href)
+ self.assertEquals('issue other-project:12', result[0].content)
+
+ result = self.DoReplaceIssueRef('and issue other-project#13')
+ self.assertEquals('/p/other-project/issues/detail?id=13', result[0].href)
+ self.assertEquals('issue other-project#13', result[0].content)
+
+ def testParseProjectNameMatch(self):
+ golden = 'project-name'
+ variations = ['%s', ' %s', '%s ', '%s:', '%s#', '%s#:', '%s:#', '%s :#',
+ '\t%s', '%s\t', '\t%s\t', '\t\t%s\t\t', '\n%s', '%s\n',
+ '\n%s\n', '\n\n%s\n\n', '\t\n%s', '\n\t%s', '%s\t\n',
+ '%s\n\t', '\t\n%s#', '\n\t%s#', '%s\t\n#', '%s\n\t#',
+ '\t\n%s:', '\n\t%s:', '%s\t\n:', '%s\n\t:'
+ ]
+
+ # First pass checks all valid project name results
+ for pattern in variations:
+ self.assertEquals(
+ golden, autolink._ParseProjectNameMatch(pattern % golden))
+
+ # Second pass tests all inputs that should result in None
+ for pattern in variations:
+ self.assert_(
+ autolink._ParseProjectNameMatch(pattern % '') in [None, ''])
+
+
+class VCAutolinkTest(unittest.TestCase):
+
+ GIT_HASH_1 = '1' * 40
+ GIT_HASH_2 = '2' * 40
+ GIT_HASH_3 = 'a1' * 20
+ GIT_COMMENT_TEXT = (
+ 'This is a fix for r%s and R%s, by r2d2, who also authored revision %s, '
+ 'revision #%s, revision %s, and revision %s' % (
+ GIT_HASH_1, GIT_HASH_2, GIT_HASH_3,
+ GIT_HASH_1.upper(), GIT_HASH_2.upper(), GIT_HASH_3.upper()))
+ SVN_COMMENT_TEXT = (
+ 'This is a fix for r12 and R34, by r2d2, who also authored revision r4, '
+ 'revision #1234567, revision 789, and revision 9025. If you have '
+ 'questions, call me at 18005551212')
+
+ def testGetReferencedRevisions(self):
+ refs = ['1', '2', '3']
+ # For now, we do not look up revision objects, result is always None
+ self.assertIsNone(autolink.GetReferencedRevisions(None, refs))
+
+ def testExtractGitHashes(self):
+ refs = []
+ for match in autolink._GIT_HASH_RE.finditer(self.GIT_COMMENT_TEXT):
+ new_refs = autolink.ExtractRevNums(None, match)
+ refs.extend(new_refs)
+
+ self.assertEquals(
+ refs, [self.GIT_HASH_1, self.GIT_HASH_2, self.GIT_HASH_3,
+ self.GIT_HASH_1.upper(), self.GIT_HASH_2.upper(),
+ self.GIT_HASH_3.upper()])
+
+ def testExtractRevNums(self):
+ refs = []
+ for match in autolink._SVN_REF_RE.finditer(self.SVN_COMMENT_TEXT):
+ new_refs = autolink.ExtractRevNums(None, match)
+ refs.extend(new_refs)
+
+ self.assertEquals(
+ refs, ['12', '34', '4', '1234567', '789', '9025'])
+
+
+ def DoReplaceRevisionRef(self, content, project=None):
+ """Calls the ReplaceRevisionRef method and returns the result.
+
+ Args:
+ content: string with a hyperlink.
+ project: optional project.
+
+ Returns:
+      A list of TextRuns in which some runs have the embedded URL hyperlinked.
+ Or, None if no link was detected.
+ """
+ match = autolink._GIT_HASH_RE.search(content)
+ if not match:
+ return None
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/source/detail?r=1', project=project)
+ replacement_runs = autolink.ReplaceRevisionRef(mr, match, None)
+ return replacement_runs
+
+ def testReplaceRevisionRef(self):
+ result = self.DoReplaceRevisionRef(
+ 'This is a fix for r%s' % self.GIT_HASH_1)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_1, result[0].href)
+ self.assertEquals('r%s' % self.GIT_HASH_1, result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'and R%s, by r2d2, who ' % self.GIT_HASH_2)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_2, result[0].href)
+ self.assertEquals('R%s' % self.GIT_HASH_2, result[0].content)
+
+ result = self.DoReplaceRevisionRef('by r2d2, who ')
+ self.assertEquals(None, result)
+
+ result = self.DoReplaceRevisionRef(
+ 'also authored revision %s, ' % self.GIT_HASH_3)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_3, result[0].href)
+ self.assertEquals('revision %s' % self.GIT_HASH_3, result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'revision #%s, ' % self.GIT_HASH_1.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_1.upper(), result[0].href)
+ self.assertEquals(
+ 'revision #%s' % self.GIT_HASH_1.upper(), result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'revision %s, ' % self.GIT_HASH_2.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_2.upper(), result[0].href)
+ self.assertEquals(
+ 'revision %s' % self.GIT_HASH_2.upper(), result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'and revision %s' % self.GIT_HASH_3.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_3.upper(), result[0].href)
+ self.assertEquals(
+ 'revision %s' % self.GIT_HASH_3.upper(), result[0].content)
+
+ def testReplaceRevisionRef_CustomURL(self):
+ """A project can override the URL used for revision links."""
+ project = fake.Project()
+ project.revision_url_format = 'http://example.com/+/{revnum}'
+ result = self.DoReplaceRevisionRef(
+ 'This is a fix for r%s' % self.GIT_HASH_1, project=project)
+ self.assertEquals(
+ 'http://example.com/+/%s' % self.GIT_HASH_1, result[0].href)
+ self.assertEquals('r%s' % self.GIT_HASH_1, result[0].content)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/commands_test.py b/appengine/monorail/features/test/commands_test.py
new file mode 100644
index 0000000..0c61a53
--- /dev/null
+++ b/appengine/monorail/features/test/commands_test.py
@@ -0,0 +1,231 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.features.commands."""
+
+import logging
+import unittest
+
+from features import commands
+from framework import framework_constants
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+class CommandsTest(unittest.TestCase):
+
+ def VerifyParseQuickEditCommmand(
+ self, cmd, exp_summary='sum', exp_status='New', exp_owner_id=111L,
+ exp_cc_ids=None, exp_labels=None):
+
+ issue = tracker_pb2.Issue()
+ issue.project_name = 'proj'
+ issue.local_id = 1
+ issue.summary = 'sum'
+ issue.status = 'New'
+ issue.owner_id = 111L
+ issue.cc_ids.extend([222L, 333L])
+ issue.labels.extend(['Type-Defect', 'Priority-Medium', 'Hot'])
+
+ if exp_cc_ids is None:
+ exp_cc_ids = [222L, 333L]
+ if exp_labels is None:
+ exp_labels = ['Type-Defect', 'Priority-Medium', 'Hot']
+
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ logged_in_user_id = 999L
+ services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ services.user.TestAddUser('jrobbins', 333L)
+ services.user.TestAddUser('jrobbins@jrobbins.org', 888L)
+
+ cnxn = 'fake cnxn'
+ (summary, status, owner_id, cc_ids,
+ labels) = commands.ParseQuickEditCommand(
+ cnxn, cmd, issue, config, logged_in_user_id, services)
+ self.assertEqual(exp_summary, summary)
+ self.assertEqual(exp_status, status)
+ self.assertEqual(exp_owner_id, owner_id)
+ self.assertListEqual(exp_cc_ids, cc_ids)
+ self.assertListEqual(exp_labels, labels)
+
+ def testParseQuickEditCommmand_Empty(self):
+ self.VerifyParseQuickEditCommmand('') # Nothing should change.
+
+ def testParseQuickEditCommmand_BuiltInFields(self):
+ self.VerifyParseQuickEditCommmand(
+ 'status=Fixed', exp_status='Fixed')
+ self.VerifyParseQuickEditCommmand( # Normalized capitalization.
+ 'status=fixed', exp_status='Fixed')
+ self.VerifyParseQuickEditCommmand(
+ 'status=limbo', exp_status='limbo')
+
+ self.VerifyParseQuickEditCommmand(
+ 'owner=me', exp_owner_id=999L)
+ self.VerifyParseQuickEditCommmand(
+ 'owner=jrobbins@jrobbins.org', exp_owner_id=888L)
+ self.VerifyParseQuickEditCommmand(
+ 'owner=----', exp_owner_id=framework_constants.NO_USER_SPECIFIED)
+
+ self.VerifyParseQuickEditCommmand(
+ 'summary=JustOneWord', exp_summary='JustOneWord')
+ self.VerifyParseQuickEditCommmand(
+ 'summary="quoted sentence"', exp_summary='quoted sentence')
+ self.VerifyParseQuickEditCommmand(
+ "summary='quoted sentence'", exp_summary='quoted sentence')
+
+ self.VerifyParseQuickEditCommmand(
+ 'cc=me', exp_cc_ids=[222L, 333L, 999L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=jrobbins@jrobbins.org', exp_cc_ids=[222L, 333L, 888L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=me,jrobbins@jrobbins.org',
+ exp_cc_ids=[222L, 333L, 999L, 888L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=-jrobbins,jrobbins@jrobbins.org',
+ exp_cc_ids=[222L, 888L])
+
+ def testParseQuickEditCommmand_Labels(self):
+ self.VerifyParseQuickEditCommmand(
+ 'Priority=Low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ 'priority=low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ 'priority-low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ '-priority-low', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot'])
+ self.VerifyParseQuickEditCommmand(
+ '-priority-medium', exp_labels=['Type-Defect', 'Hot'])
+
+ self.VerifyParseQuickEditCommmand(
+ 'Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '+Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '-Hot Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '-Hot', exp_labels=['Type-Defect', 'Priority-Medium'])
+
+ def testParseQuickEditCommmand_Multiple(self):
+ self.VerifyParseQuickEditCommmand(
+ 'Priority=Low -hot owner:me cc:-jrobbins summary="other summary"',
+ exp_summary='other summary', exp_owner_id=999L,
+ exp_cc_ids=[222L], exp_labels=['Type-Defect', 'Priority-Low'])
+
+ def testBreakCommandIntoParts_Empty(self):
+ self.assertListEqual(
+ [],
+ commands._BreakCommandIntoParts(''))
+
+ def testBreakCommandIntoParts_Single(self):
+ self.assertListEqual(
+ [('summary', 'new summary')],
+ commands._BreakCommandIntoParts('summary="new summary"'))
+ self.assertListEqual(
+ [('summary', 'OneWordSummary')],
+ commands._BreakCommandIntoParts('summary=OneWordSummary'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key=value'))
+ self.assertListEqual(
+ [('key', 'value-with-dashes')],
+ commands._BreakCommandIntoParts('key=value-with-dashes'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key:value'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts(' key:value '))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key:"value"'))
+ self.assertListEqual(
+ [('key', 'user@dom.com')],
+ commands._BreakCommandIntoParts('key:user@dom.com'))
+ self.assertListEqual(
+ [('key', 'a@dom.com,-b@dom.com')],
+ commands._BreakCommandIntoParts('key:a@dom.com,-b@dom.com'))
+ self.assertListEqual(
+ [(None, 'label')],
+ commands._BreakCommandIntoParts('label'))
+ self.assertListEqual(
+ [(None, '-label')],
+ commands._BreakCommandIntoParts('-label'))
+ self.assertListEqual(
+ [(None, '+label')],
+ commands._BreakCommandIntoParts('+label'))
+
+ def testBreakCommandIntoParts_Multiple(self):
+ self.assertListEqual(
+ [('summary', 'new summary'), (None, 'Hot'), (None, '-Cold'),
+ ('owner', 'me'), ('cc', '+a,-b')],
+ commands._BreakCommandIntoParts(
+ 'summary="new summary" Hot -Cold owner:me cc:+a,-b'))
+
+
+class CommandSyntaxParsingTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ user=fake.UserService())
+
+ self.services.project.TestAddProject('proj', owner_ids=[111L])
+ self.services.user.TestAddUser('a@example.com', 222L)
+
+ cnxn = 'fake connection'
+ config = self.services.config.GetProjectConfig(cnxn, 789)
+
+ for status in ['New', 'ReadyForReview']:
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status=status))
+
+ for label in ['Prioity-Low', 'Priority-High']:
+ config.well_known_labels.append(tracker_pb2.LabelDef(
+ label=label))
+
+ config.exclusive_label_prefixes.extend(
+ tracker_constants.DEFAULT_EXCL_LABEL_PREFIXES)
+
+ self.services.config.StoreConfig(cnxn, config)
+
+ def testStandardizeStatus(self):
+ config = self.services.config.GetProjectConfig('fake cnxn', 789)
+ self.assertEqual('New',
+ commands._StandardizeStatus('NEW', config))
+ self.assertEqual('New',
+ commands._StandardizeStatus('n$Ew ', config))
+ self.assertEqual(
+ 'custom-label',
+ commands._StandardizeLabel('custom=label ', config))
+
+ def testStandardizeLabel(self):
+ config = self.services.config.GetProjectConfig('fake cnxn', 789)
+ self.assertEqual(
+ 'Priority-High',
+ commands._StandardizeLabel('priority-high', config))
+ self.assertEqual(
+ 'Priority-High',
+ commands._StandardizeLabel('PRIORITY=HIGH', config))
+
+ def testLookupMeOrUsername(self):
+ self.assertEqual(
+ 123L,
+ commands._LookupMeOrUsername('fake cnxn', 'me', self.services, 123L))
+
+ self.assertEqual(
+ 222L,
+ commands._LookupMeOrUsername(
+ 'fake cnxn', 'a@example.com', self.services, 0))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/commitlogcommands_test.py b/appengine/monorail/features/test/commitlogcommands_test.py
new file mode 100644
index 0000000..4c43cd4
--- /dev/null
+++ b/appengine/monorail/features/test/commitlogcommands_test.py
@@ -0,0 +1,96 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.features.commitlogcommands."""
+
+import unittest
+
+import mox
+
+from features import commitlogcommands
+from features import notify
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class InboundEmailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ issue=fake.IssueService(),
+ project=fake.ProjectService(),
+ config=fake.ConfigService())
+
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=987, process_inbound_email=True)
+ self.issue = tracker_pb2.Issue()
+ self.issue.project_id = 987
+ self.issue.summary = 'summary'
+ self.issue.status = 'Assigned'
+ self.services.issue.TestAddIssue(self.issue)
+
+ self.uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testParse_StripQuotedLines(self):
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', '> line 1', 'line 2'], self.services,
+ hostport=80, strip_quoted_lines=True)
+ self.assertEquals('line 2', self.uia.description)
+ self.assertEquals('summary:something\n> line 1\nline 2',
+ self.uia.inbound_message)
+
+ def testParse_NoStripQuotedLines(self):
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', '> line 1', 'line 2'], self.services,
+ hostport=80)
+ self.assertEquals('> line 1\nline 2', self.uia.description)
+ self.assertIsNone(self.uia.inbound_message)
+
+ def setupAndCallRun(self, allow_edit):
+ comments = ['comment 1', 'comment 2', 'comment 3']
+
+ self.mox.StubOutWithMock(self.services.issue, 'GetCommentsForIssue')
+ self.services.issue.GetCommentsForIssue(
+ self.cnxn, self.issue.issue_id).AndReturn(comments)
+ self.mox.StubOutWithMock(notify, 'PrepareAndSendIssueChangeNotification')
+ notify.PrepareAndSendIssueChangeNotification(
+ self.project.project_id, self.issue.local_id, 80, 101,
+ len(comments) - 1, old_owner_id=self.issue.owner_id)
+ self.mox.ReplayAll()
+
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', 'status:New', '> line 1', '> line 2'],
+ self.services, hostport=80)
+ self.uia.Run(self.cnxn, self.services, allow_edit=allow_edit)
+ self.mox.VerifyAll()
+
+ def testRun_AllowEdit(self):
+ self.setupAndCallRun(allow_edit=True)
+
+ self.assertEquals('> line 1\n> line 2', self.uia.description)
+    # Assert that amendments were made to the issue.
+ self.assertEquals('something', self.issue.summary)
+ self.assertEquals('New', self.issue.status)
+
+
+ def testRun_NoAllowEdit(self):
+ self.setupAndCallRun(allow_edit=False)
+
+ self.assertEquals('> line 1\n> line 2', self.uia.description)
+    # Assert that amendments were *not* made to the issue.
+ self.assertEquals('summary', self.issue.summary)
+ self.assertEquals('Assigned', self.issue.status)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/cues_test.py b/appengine/monorail/features/test/cues_test.py
new file mode 100644
index 0000000..2c11b47
--- /dev/null
+++ b/appengine/monorail/features/test/cues_test.py
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the cues module."""
+
+import unittest
+
+from features import cues
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class CuesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService())
+ self.servlet = cues.SetCuesFeed('req', 'res', services=self.services)
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testHandleRequest(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/hosting/cues?cue_id=42', user_info={'user_id': 111L})
+
+ self.servlet.HandleRequest(mr)
+ user = self.services.user.test_users[111L]
+ self.assertTrue(user is not None)
+ dismissed_cues = user.dismissed_cues
+ self.assertTrue(dismissed_cues is not None)
+ self.assertIn('42', dismissed_cues)
+ self.assertNotIn('1492', dismissed_cues)
+
+
diff --git a/appengine/monorail/features/test/filterrules_helpers_test.py b/appengine/monorail/features/test/filterrules_helpers_test.py
new file mode 100644
index 0000000..8c48107
--- /dev/null
+++ b/appengine/monorail/features/test/filterrules_helpers_test.py
@@ -0,0 +1,633 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for filterrules_helpers feature."""
+
+import unittest
+
+import mox
+
+from google.appengine.api import taskqueue
+
+import settings
+from features import filterrules_helpers
+from framework import template_helpers
+from framework import urls
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import query2ast
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
+ORIG_SUMMARY = 'this is the orginal summary'
+ORIG_LABELS = ['one', 'two']
+
+# Fake user id mapping
+TEST_ID_MAP = {
+ 'mike.j.parent': 1,
+ 'jrobbins': 2,
+ 'ningerso': 3,
+ }
+
+
+class MockTaskQueue(object):
+ def __init__(self):
+ self.work_items = []
+
+ def add(self, **kwargs):
+ self.work_items.append(kwargs)
+
+
+class RecomputeAllDerivedFieldsTest(unittest.TestCase):
+
+ BLOCK = filterrules_helpers.BLOCK
+
+ def setUp(self):
+ self.features = fake.FeaturesService()
+ self.user = fake.UserService()
+ self.services = service_manager.Services(
+ features=self.features,
+ user=self.user,
+ issue=fake.IssueService())
+ self.project = fake.Project(project_name='proj')
+ self.config = 'fake config'
+ self.cnxn = 'fake cnxn'
+ self.mox = mox.Mox()
+ self.mock_task_queue = MockTaskQueue()
+ self.mox.StubOutWithMock(taskqueue, 'add')
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testRecomputeDerivedFields_Disabled(self):
+ """Servlet should just call RecomputeAllDerivedFieldsNow with no bounds."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = False
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_DisabledNextIDSet(self):
+ """Servlet should just call RecomputeAllDerivedFields with no bounds."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = False
+ self.services.issue.next_id = 1234
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.services.issue.UpdateIssues('no', ['1'])
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_NoIssues(self):
+ """Servlet should not call because there is no work to do."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_SomeIssues(self):
+ """Servlet should enqueue one work item rather than call directly."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.services.issue.next_id = 1234
+ num_calls = (self.services.issue.next_id // self.BLOCK + 1)
+ for _ in range(num_calls):
+ taskqueue.add(
+ params=mox.IsA(dict),
+ url='/_task/recomputeDerivedFields.do').WithSideEffects(
+ self.mock_task_queue.add)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+ work_items = self.mock_task_queue.work_items
+ self.assertEqual(num_calls, len(work_items))
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_LotsOfIssues(self):
+ """Servlet should enqueue multiple work items."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.services.issue.next_id = 12345
+ num_calls = (self.services.issue.next_id // self.BLOCK + 1)
+ for _ in range(num_calls):
+ taskqueue.add(
+ params=mox.IsA(dict),
+ url='/_task/recomputeDerivedFields.do').WithSideEffects(
+ self.mock_task_queue.add)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+
+ work_items = self.mock_task_queue.work_items
+ self.assertEqual(num_calls, len(work_items))
+ url, params = work_items[0]['url'], work_items[0]['params']
+ self.assertEqual(urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', url)
+ self.assertEqual(self.project.project_id, params['project_id'])
+ self.assertEqual(12345 // self.BLOCK * self.BLOCK + 1,
+ params['lower_bound'])
+ self.assertEqual(12345, params['upper_bound'])
+
+ url, params = work_items[-1]['url'], work_items[-1]['params']
+ self.assertEqual(urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', url)
+ self.assertEqual(self.project.project_id, params['project_id'])
+ self.assertEqual(1, params['lower_bound'])
+ self.assertEqual(self.BLOCK + 1, params['upper_bound'])
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeAllDerivedFieldsNow(self):
+ """Servlet should reapply all filter rules to project's issues."""
+ self.services.issue.next_id = 12345
+ test_issue_1 = fake.MakeTestIssue(
+ project_id=self.project.project_id, local_id=1, issue_id=1001,
+ summary='sum1', owner_id=100, status='New')
+ test_issue_2 = fake.MakeTestIssue(
+ project_id=self.project.project_id, local_id=2, issue_id=1002,
+ summary='sum2', owner_id=100, status='New')
+ test_issues = [test_issue_1, test_issue_2]
+ self.services.issue.TestAddIssue(test_issue_1)
+ self.services.issue.TestAddIssue(test_issue_2)
+
+ self.mox.StubOutWithMock(filterrules_helpers, 'ApplyGivenRules')
+ for test_issue in test_issues:
+ filterrules_helpers.ApplyGivenRules(
+ self.cnxn, self.services, test_issue, self.config,
+ [], []).AndReturn(True)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFieldsNow(
+ self.cnxn, self.services, self.project, self.config)
+
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+ self.assertEqual(test_issues, self.services.issue.updated_issues)
+ self.assertEqual([issue.issue_id for issue in test_issues],
+ self.services.issue.enqueued_issues)
+ self.mox.VerifyAll()
+
+
+class FilterRulesHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services()
+ self.services.user = fake.UserService()
+ self.services.project = fake.ProjectService()
+ self.services.issue = fake.IssueService()
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.other_project = self.services.project.TestAddProject(
+ 'otherproj', project_id=890)
+ for email, user_id in TEST_ID_MAP.iteritems():
+ self.services.user.TestAddUser(email, user_id)
+
+ def testApplyRule(self):
+ cnxn = 'fake sql connection'
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 111L, labels=ORIG_LABELS)
+ config = tracker_pb2.ProjectIssueConfig()
+ # Empty label set cannot satisfy rule looking for labels.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ pred = 'label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ # Empty label set will satisfy rule looking for missing labels.
+ pred = '-label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ # Label set has the needed labels.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ # Label set has the needed labels with test for unicode.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {u'a', u'b'},
+ config))
+
+ # Label set has the needed labels, capitalization irrelevant.
+ pred = 'label:A label:B'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ # Label set has a label, the rule negates.
+ pred = 'label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ def testComputeDerivedFields(self):
+ cnxn = 'fake sql connection'
+ rules = [
+ filterrules_helpers.MakeRule(
+ 'label:HasWorkaround', add_labels=['Priority-Low']),
+ filterrules_helpers.MakeRule(
+ 'label:Security', add_labels=['Private']),
+ filterrules_helpers.MakeRule(
+ 'label:Security', add_labels=['Priority-High'],
+ add_notify=['jrobbins@chromium.org']),
+ filterrules_helpers.MakeRule(
+ 'Priority=High label:Regression', add_labels=['Urgent']),
+ filterrules_helpers.MakeRule(
+ 'Size=L', default_owner_id=444L),
+ ]
+ excl_prefixes = ['priority', 'type', 'milestone']
+ config = tracker_pb2.ProjectIssueConfig(
+ exclusive_label_prefixes=excl_prefixes)
+ predicate_asts = filterrules_helpers.ParsePredicateASTs(rules, config, None)
+
+ # No rules fire.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=ORIG_LABELS)
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['foo', 'bar'])
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['Size-L'])
+ self.assertEquals(
+ (444L, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires, but no effect because of explicit fields.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L,
+ labels=['HasWorkaround', 'Priority-Critical'])
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires, but limited effect because of explicit exclusive label.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L,
+ labels=['Security', 'Priority-Critical'])
+ self.assertEquals(
+ (0, '', [], ['Private'], ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Multiple rules have cumulative effect.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['HasWorkaround', 'Size-L'])
+ self.assertEquals(
+ (444L, '', [], ['Priority-Low'], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, second overwrites the first.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['HasWorkaround', 'Security'])
+ self.assertEquals(
+ (0, '', [], ['Private', 'Priority-High'], ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, second triggered by the first.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['Security', 'Regression'])
+ self.assertEquals(
+ (0, '', [], ['Private', 'Priority-High', 'Urgent'],
+ ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, each one wants to add the same CC: only add once.
+ rules.append(filterrules_helpers.MakeRule('Watch', add_cc_ids=[111L]))
+ rules.append(filterrules_helpers.MakeRule('Monitor', add_cc_ids=[111L]))
+ config = tracker_pb2.ProjectIssueConfig(
+ exclusive_label_prefixes=excl_prefixes)
+ predicate_asts = filterrules_helpers.ParsePredicateASTs(rules, config, None)
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 111L, labels=['Watch', 'Monitor'])
+ self.assertEquals(
+ (0, '', [111L], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ def testCompareComponents_Trivial(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_NOT_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_DEFINED, [], []))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_NOT_DEFINED, [], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, [123], []))
+
+ def testCompareComponents_Normal(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 100, 789, 'UI', 'doc', False, [], [], 0, 0))
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 110, 789, 'UI>Help', 'doc', False, [], [], 0, 0))
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 200, 789, 'Networking', 'doc', False, [], [], 0, 0))
+
+ # Check if the issue is in a specified component or subcomponent.
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [100]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [100, 110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [200]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help'], [100]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [100]))
+
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['UI'], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['UI'], [100]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['Networking'], [100]))
+
+ # Exact vs non-exact.
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Help'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['UI'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['Help'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NOT_TEXT_HAS, ['UI'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NOT_TEXT_HAS, ['Help'], [110]))
+
+ # Multivalued issues and Quick-OR notation
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [200]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [100, 110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [100]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [110, 200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['UI', 'Networking'], [110, 200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help', 'Networking'], [110, 200]))
+
+ def testCompareIssueRefs_Trivial(self):
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_NOT_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_DEFINED, [], []))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_NOT_DEFINED, [], []))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['1'], []))
+
+ def testCompareIssueRefs_Normal(self):
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 789, 1, 'summary', 'New', 0L, issue_id=123))
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 789, 2, 'summary', 'New', 0L, issue_id=124))
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 890, 1, 'other summary', 'New', 0L, issue_id=125))
+
+    # EQ and NE, implicit references to the current project.
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['1'], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.NE, ['1'], [123]))
+
+ # EQ and NE, explicit project references.
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['proj:1'], [123]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['otherproj:1'], [125]))
+
+ # Inequalities
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['1'], [123]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['1'], [124]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['2'], [124]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GT, ['2'], [124]))
+
+ def testCompareUsers(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompareUserIDs(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompareEmails(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompare(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testParseOneRuleAddLabels(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1 label:lab2', 'add_labels', 'hot cOld, ', None, 1,
+ error_list)
+ self.assertEquals('label:lab1 label:lab2', rule_pb.predicate)
+ self.assertEquals(error_list, [])
+ self.assertEquals(len(rule_pb.add_labels), 2)
+ self.assertEquals(rule_pb.add_labels[0], 'hot')
+ self.assertEquals(rule_pb.add_labels[1], 'cOld')
+
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, '', 'default_status', 'hot cold', None, 1, error_list)
+ self.assertEquals(len(rule_pb.predicate), 0)
+ self.assertEquals(error_list, [])
+
+ def testParseOneRuleDefaultOwner(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1, label:lab2 ', 'default_owner', 'jrobbins',
+ self.services.user, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.default_owner_id, TEST_ID_MAP['jrobbins'])
+
+ def testParseOneRuleDefaultStatus(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1', 'default_status', 'InReview',
+ None, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.default_status, 'InReview')
+
+ def testParseOneRuleAddCcs(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1', 'add_ccs', 'jrobbins, mike.j.parent',
+ self.services.user, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.add_cc_ids[0], TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rule_pb.add_cc_ids[1], TEST_ID_MAP['mike.j.parent'])
+ self.assertEquals(len(rule_pb.add_cc_ids), 2)
+
+ def testParseRulesNone(self):
+ cnxn = 'fake SQL connection'
+ post_data = {}
+ rules = filterrules_helpers.ParseRules(
+ cnxn, post_data, None, template_helpers.EZTError())
+ self.assertEquals(rules, [])
+
+ def testParseRules(self):
+ cnxn = 'fake SQL connection'
+ post_data = {
+ 'predicate1': 'a, b c',
+ 'action_type1': 'default_status',
+ 'action_value1': 'Reviewed',
+ 'predicate2': 'a, b c',
+ 'action_type2': 'default_owner',
+ 'action_value2': 'jrobbins',
+ 'predicate3': 'a, b c',
+ 'action_type3': 'add_ccs',
+ 'action_value3': 'jrobbins, mike.j.parent',
+ 'predicate4': 'a, b c',
+ 'action_type4': 'add_labels',
+ 'action_value4': 'hot, cold',
+ }
+ errors = template_helpers.EZTError()
+ rules = filterrules_helpers.ParseRules(
+ cnxn, post_data, self.services.user, errors)
+ self.assertEquals(rules[0].predicate, 'a, b c')
+ self.assertEquals(rules[0].default_status, 'Reviewed')
+ self.assertEquals(rules[1].default_owner_id, TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rules[2].add_cc_ids[0], TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rules[2].add_cc_ids[1], TEST_ID_MAP['mike.j.parent'])
+ self.assertEquals(rules[3].add_labels[0], 'hot')
+ self.assertEquals(rules[3].add_labels[1], 'cold')
+ self.assertEquals(len(rules), 4)
+ self.assertFalse(errors.AnyErrors())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/filterrules_views_test.py b/appengine/monorail/features/test/filterrules_views_test.py
new file mode 100644
index 0000000..124db4f
--- /dev/null
+++ b/appengine/monorail/features/test/filterrules_views_test.py
@@ -0,0 +1,71 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for issue tracker views."""
+
+import unittest
+
+from features import filterrules_views
+from proto import tracker_pb2
+from testing import testing_helpers
+
+
+class RuleViewTest(unittest.TestCase):
+
+ def setUp(self):
+ self.rule = tracker_pb2.FilterRule()
+ self.rule.predicate = 'label:a label:b'
+
+ def testEmpty(self):
+ view = filterrules_views.RuleView(self.rule, {})
+ self.rule.predicate = ''
+ self.assertEquals('', view.predicate)
+ self.assertEquals('', view.action_type)
+ self.assertEquals('', view.action_value)
+
+ def testDefaultStatus(self):
+ self.rule.default_status = 'Unknown'
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('default_status', view.action_type)
+ self.assertEquals('Unknown', view.action_value)
+
+ def testDefaultOwner(self):
+ self.rule.default_owner_id = 111L
+ view = filterrules_views.RuleView(
+ self.rule, {
+ 111L: testing_helpers.Blank(email='jrobbins@chromium.org')})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('default_owner', view.action_type)
+ self.assertEquals('jrobbins@chromium.org', view.action_value)
+
+ def testAddCCs(self):
+ self.rule.add_cc_ids.extend([111L, 222L])
+ view = filterrules_views.RuleView(
+ self.rule, {
+ 111L: testing_helpers.Blank(email='jrobbins@chromium.org'),
+ 222L: testing_helpers.Blank(email='jrobbins@gmail.com')})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('add_ccs', view.action_type)
+ self.assertEquals('jrobbins@chromium.org, jrobbins@gmail.com',
+ view.action_value)
+
+ def testAddLabels(self):
+ self.rule.add_labels.extend(['Hot', 'Cool'])
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('add_labels', view.action_type)
+ self.assertEquals('Hot, Cool', view.action_value)
+
+ def testAlsoNotify(self):
+ self.rule.add_notify_addrs.extend(['a@dom.com', 'b@dom.com'])
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('also_notify', view.action_type)
+ self.assertEquals('a@dom.com, b@dom.com', view.action_value)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/inboundemail_test.py b/appengine/monorail/features/test/inboundemail_test.py
new file mode 100644
index 0000000..60b4e52
--- /dev/null
+++ b/appengine/monorail/features/test/inboundemail_test.py
@@ -0,0 +1,280 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.feature.inboundemail."""
+
+import unittest
+
+import mox
+
+from features import commitlogcommands
+from features import inboundemail
+from framework import emailfmt
+from framework import monorailrequest
+from framework import permissions
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class InboundEmailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService())
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=987, process_inbound_email=True)
+ self.project_addr = 'proj@monorail.example.com'
+
+ self.issue = tracker_pb2.Issue()
+ self.issue.project_id = 987
+ self.issue.local_id = 100
+ self.services.issue.TestAddIssue(self.issue)
+
+ self.msg = testing_helpers.MakeMessage(
+ testing_helpers.HEADER_LINES, 'awesome!')
+
+ request, _ = testing_helpers.GetRequestObjects()
+ self.inbound = inboundemail.InboundEmail(request, None, self.services)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testTemplates(self):
+ for name, template_path in self.inbound._templates.iteritems():
+ assert(name in inboundemail.MSG_TEMPLATES)
+ assert(
+ template_path.GetTemplatePath().endswith(
+ inboundemail.MSG_TEMPLATES[name]))
+
+ def testProcessMail_MsgTooBig(self):
+ self.mox.StubOutWithMock(emailfmt, 'IsBodyTooBigToParse')
+ emailfmt.IsBodyTooBigToParse(mox.IgnoreArg()).AndReturn(True)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Email body too long', email_task['subject'])
+
+ def testProcessMail_NoProjectOnToLine(self):
+ self.mox.StubOutWithMock(emailfmt, 'IsProjectAddressOnToLine')
+ emailfmt.IsProjectAddressOnToLine(
+ self.project_addr, [self.project_addr]).AndReturn(False)
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessMail_ProjectUnidentified(self):
+ self.mox.StubOutWithMock(emailfmt, 'IdentifyProjectAndIssue')
+ emailfmt.IdentifyProjectAndIssue(
+ self.project_addr, mox.IgnoreArg()).AndReturn((None, None))
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessMail_ProjectNotLive(self):
+ self.project.state = project_pb2.ProjectState.DELETABLE
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Project not found', email_task['subject'])
+
+ def testProcessMail_ProjectInboundEmailDisabled(self):
+ self.project.process_inbound_email = False
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Email replies are not enabled in project proj',
+ email_task['subject'])
+
+ def testProcessMail_NoRefHeader(self):
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(False)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Your message is not a reply to a notification email',
+ email_task['subject'])
+
+ def testProcessMail_NoAccount(self):
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not determine account of sender',
+ email_task['subject'])
+
+ def testProcessMail_BannedAccount(self):
+ self.services.user.TestAddUser('user@example.com', 111L)
+ class MockAuthData:
+ def __init__(self):
+ self.user_pb = user_pb2.MakeUser()
+ self.effective_ids = set([1, 2, 3])
+ self.user_id = 111L
+ mock_auth_data = MockAuthData()
+ mock_auth_data.user_pb.banned = 'banned'
+
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+ self.mox.StubOutWithMock(monorailrequest.AuthData, 'FromEmail')
+ monorailrequest.AuthData.FromEmail(
+ mox.IgnoreArg(), 'user@example.com', self.services).AndReturn(
+ mock_auth_data)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('You are banned from using this issue tracker',
+ email_task['subject'])
+
+ def testProcessMail_Success(self):
+ self.services.user.TestAddUser('user@example.com', 111L)
+ class MockAuthData:
+ def __init__(self):
+ self.user_pb = user_pb2.MakeUser()
+ self.effective_ids = set([1, 2, 3])
+ self.user_id = 111L
+ mock_auth_data = MockAuthData()
+
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+
+ self.mox.StubOutWithMock(monorailrequest.AuthData, 'FromEmail')
+ monorailrequest.AuthData.FromEmail(
+ mox.IgnoreArg(), 'user@example.com', self.services).AndReturn(
+ mock_auth_data)
+
+ self.mox.StubOutWithMock(permissions, 'GetPermissions')
+ permissions.GetPermissions(
+ mock_auth_data.user_pb, mock_auth_data.effective_ids,
+ self.project).AndReturn('test permissions')
+
+ self.mox.StubOutWithMock(self.inbound, 'ProcessIssueReply')
+ self.inbound.ProcessIssueReply(
+ mox.IgnoreArg(), self.project, 123, self.project_addr,
+ 'user@example.com', 111L, mock_auth_data.effective_ids,
+ 'test permissions', 'awesome!')
+
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessIssueReply_NoIssue(self):
+ nonexistant_local_id = 200
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, nonexistant_local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], permissions.USER_PERMISSIONSET,
+ 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not find issue %d in project %s' % (
+ nonexistant_local_id, self.project.project_name),
+ email_task['subject'])
+
+ def testProcessIssueReply_DeletedIssue(self):
+ self.issue.deleted = True
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], permissions.USER_PERMISSIONSET,
+ 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not find issue %d in project %s' % (
+ self.issue.local_id, self.project.project_name),
+ email_task['subject'])
+
+ def testProcessIssueReply_NoAddIssuePerm(self):
+ perms = permissions.READ_ONLY_PERMISSIONSET
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], perms, 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('User does not have permission to add a comment',
+ email_task['subject'])
+
+ def testProcessIssueReply_NoEditIssuePerm(self):
+ perms = permissions.USER_PERMISSIONSET
+ mock_uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+
+ self.mox.StubOutWithMock(commitlogcommands, 'UpdateIssueAction')
+ commitlogcommands.UpdateIssueAction(self.issue.local_id).AndReturn(mock_uia)
+
+ self.mox.StubOutWithMock(mock_uia, 'Parse')
+ mock_uia.Parse(
+ self.cnxn, self.project.project_name, 111L, ['awesome!'], self.services,
+ strip_quoted_lines=True)
+ self.mox.StubOutWithMock(mock_uia, 'Run')
+ # Allow edit is false here because the permission set does not contain
+ # EDIT_ISSUE.
+ mock_uia.Run(self.cnxn, self.services, allow_edit=False)
+
+ self.mox.ReplayAll()
+ ret = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'from_addr', 111L, [1, 2, 3], perms, 'awesome!')
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessIssueReply_Success(self):
+ perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ mock_uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+
+ self.mox.StubOutWithMock(commitlogcommands, 'UpdateIssueAction')
+ commitlogcommands.UpdateIssueAction(self.issue.local_id).AndReturn(mock_uia)
+
+ self.mox.StubOutWithMock(mock_uia, 'Parse')
+ mock_uia.Parse(
+ self.cnxn, self.project.project_name, 111L, ['awesome!'], self.services,
+ strip_quoted_lines=True)
+ self.mox.StubOutWithMock(mock_uia, 'Run')
+ mock_uia.Run(self.cnxn, self.services, allow_edit=True)
+
+ self.mox.ReplayAll()
+ ret = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'from_addr', 111L, [1, 2, 3], perms, 'awesome!')
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
diff --git a/appengine/monorail/features/test/notify_helpers_test.py b/appengine/monorail/features/test/notify_helpers_test.py
new file mode 100644
index 0000000..7267269
--- /dev/null
+++ b/appengine/monorail/features/test/notify_helpers_test.py
@@ -0,0 +1,384 @@
+# -*- coding: utf8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for notify_helpers.py."""
+
+import unittest
+
+from features import notify_helpers
+from framework import emailfmt
+from framework import framework_views
+from services import service_manager
+from testing import fake
+
+
+REPLY_NOT_ALLOWED = notify_helpers.REPLY_NOT_ALLOWED
+REPLY_MAY_COMMENT = notify_helpers.REPLY_MAY_COMMENT
+REPLY_MAY_UPDATE = notify_helpers.REPLY_MAY_UPDATE
+
+
+class ComputeIssueChangeAddressPermListTest(unittest.TestCase):
+
+ def setUp(self):
+ self.users_by_id = {
+ 111L: framework_views.UserView(111L, 'owner@example.com', True),
+ 222L: framework_views.UserView(222L, 'member@example.com', True),
+ 999L: framework_views.UserView(999L, 'visitor@example.com', True),
+ }
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.services.user.TestAddUser('owner@example.com', 111L)
+ self.services.user.TestAddUser('member@example.com', 222L)
+ self.services.user.TestAddUser('visitor@example.com', 999L)
+ self.project = self.services.project.TestAddProject(
+ 'proj', owner_ids=[111L], committer_ids=[222L])
+ self.project.process_inbound_email = True
+ self.issue = fake.MakeTestIssue(
+ self.project.project_id, 1, 'summary', 'New', 111L)
+
+ def testEmptyIDs(self):
+ cnxn = 'fake cnxn'
+ addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, [], self.project, self.issue, self.services, [], {},
+ pref_check_function=lambda *args: True)
+ self.assertEqual([], addr_perm_list)
+
+ def testRecipientIsMember(self):
+ cnxn = 'fake cnxn'
+ ids_to_consider = [111L, 222L, 999L]
+ addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, ids_to_consider, self.project, self.issue, self.services, set(),
+ self.users_by_id, pref_check_function=lambda *args: True)
+ self.assertEqual(
+ [(True, 'owner@example.com', REPLY_MAY_UPDATE),
+ (True, 'member@example.com', REPLY_MAY_UPDATE),
+ (False, 'visitor@example.com', REPLY_MAY_COMMENT)],
+ addr_perm_list)
+
+
+class ComputeProjectAndIssueNotificationAddrListTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.project = self.services.project.TestAddProject('project')
+ self.services.user.TestAddUser('alice@gmail.com', 111L)
+ self.services.user.TestAddUser('bob@gmail.com', 222L)
+ self.services.user.TestAddUser('fred@gmail.com', 555L)
+
+ def testNotifyAddress(self):
+ # No mailing list or filter rules are defined
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, True, set())
+ self.assertListEqual([], addr_perm_list)
+
+ # Only mailing list is notified.
+ self.project.issue_notify_address = 'mailing-list@domain.com'
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, True, set())
+ self.assertListEqual(
+ [(False, 'mailing-list@domain.com', REPLY_NOT_ALLOWED)],
+ addr_perm_list)
+
+ # No one is notified because mailing list was already notified.
+ omit_addrs = {'mailing-list@domain.com'}
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, False, omit_addrs)
+ self.assertListEqual([], addr_perm_list)
+
+ # No one is notified because anon users cannot view.
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, False, set())
+ self.assertListEqual([], addr_perm_list)
+
+ def testFilterRuleNotifyAddresses(self):
+ issue = fake.MakeTestIssue(
+ self.project.project_id, 1, 'summary', 'New', 555L)
+ issue.derived_notify_addrs.extend(['notify@domain.com'])
+
+ addr_perm_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, set())
+ self.assertListEqual(
+ [(False, 'notify@domain.com', REPLY_NOT_ALLOWED)],
+ addr_perm_list)
+
+ # Also-notify addresses can be omitted (e.g., if it is the same as
+ # the email address of the user who made the change).
+ addr_perm_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, {'notify@domain.com'})
+ self.assertListEqual([], addr_perm_list)
+
+
+class MakeBulletedEmailWorkItemsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project = fake.Project(project_name='proj1')
+ self.commenter_view = framework_views.UserView(
+ 111L, 'test@example.com', True)
+
+ def testEmptyAddrs(self):
+ """Test the case where we found zero users to notify."""
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ [], 'subject', 'body', 'body', self.project, 'example.com',
+ self.commenter_view)
+ self.assertEqual([], email_tasks)
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ [([], 'reason')], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual([], email_tasks)
+
+
+class MakeEmailWorkItemTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project = fake.Project(project_name='proj1')
+ self.project.process_inbound_email = True
+ self.commenter_view = framework_views.UserView(
+ 111L, 'test@example.com', True)
+ self.expected_html_footer = (
+ 'You received this message because:<br/> 1. reason<br/><br/>You may '
+ 'adjust your notification preferences at:<br/><a href="https://'
+ 'example.com/hosting/settings">https://example.com/hosting/settings'
+ '</a>')
+
+ def testBodySelection(self):
+ """We send non-members the email body that is indented for non-members."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+
+ self.assertEqual('a@a.com', email_task['to'])
+ self.assertEqual('subject', email_task['subject'])
+ self.assertIn('body non', email_task['body'])
+ self.assertEqual(
+ emailfmt.FormatFromAddr(self.project, commenter_view=self.commenter_view,
+ can_reply_to=False),
+ email_task['from_addr'])
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body mem', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertIn('body mem', email_task['body'])
+
+ def testHtmlBody_NoDetailUrl(self):
+    """An html body is not sent if detail_url is not specified."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view, detail_url=None)
+
+ self.assertIsNone(email_task['html_body'])
+
+ def testHtmlBody_WithDetailUrl(self):
+    """An html body is sent if a detail_url is specified."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'body non-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithUnicodeChars(self):
+    """An html body is sent if a detail_url is specified."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ unicode_content = '\xe2\x9d\xa4 â â'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', unicode_content, 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ '%s-- <br/>%s' % (unicode_content.decode('utf-8'),
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithLinks(self):
+    """An html body is sent if a detail_url is specified."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test google.com test', 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'test <a href="http://google.com">google.com</a> test-- <br/>%s' % (
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_LinkWithinTags(self):
+    """An html body is sent with correct <a href>s."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test <http://google.com> test', 'unused body',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'test <a href="http://google.com"><http://google.com></a> '
+ 'test-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_EmailWithinTags(self):
+    """An html body is sent with correct <a href>s."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test <t@chromium.org> <a@chromium.org> test',
+ 'unused body mem', self.project, 'example.com', self.commenter_view,
+ detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'test <a href="mailto:t@chromium.org"><t@chromium.org></a> '
+ '<a href="mailto:a@chromium.org"><a@chromium.org></a> '
+ 'test-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithEscapedHtml(self):
+    """An html body is sent with html content escaped."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ body_with_html_content = (
+ '<a href="http://www.google.com">test</a> \'something\'')
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', body_with_html_content, 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ escaped_body_with_html_content = (
+ '<a href="http://www.google.com">test</a> '
+ ''something'')
+ notify_helpers._MakeNotificationFooter(
+ ['reason'], REPLY_NOT_ALLOWED, 'example.com')
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ '%s-- <br/>%s' % (escaped_body_with_html_content,
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testReplyInvitation(self):
+ """We include a footer about replying that is appropriate for that user."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+ self.assertNotIn('Reply to this email', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_COMMENT),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(
+ '%s@%s' % (self.project.project_name, emailfmt.MailDomain()),
+ email_task['reply_to'])
+ self.assertIn('Reply to this email to add a comment', email_task['body'])
+ self.assertNotIn('make changes', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(
+ '%s@%s' % (self.project.project_name, emailfmt.MailDomain()),
+ email_task['reply_to'])
+ self.assertIn('Reply to this email to add a comment', email_task['body'])
+ self.assertIn('make updates', email_task['body'])
+
+ def testInboundEmailDisabled(self):
+ """We don't invite replies if they are disabled for this project."""
+ self.project.process_inbound_email = False
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+
+ def testReasons(self):
+ """The footer lists reasons why that email was sent to that user."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['Funny', 'Caring', 'Near'], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertIn('because:', email_task['body'])
+ self.assertIn('1. Funny', email_task['body'])
+ self.assertIn('2. Caring', email_task['body'])
+ self.assertIn('3. Near', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ [], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertNotIn('because', email_task['body'])
+
+
+class MakeNotificationFooterTest(unittest.TestCase):
+
+ def testMakeNotificationFooter_NoReason(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ [], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertEqual('', footer)
+
+ def testMakeNotificationFooter_WithReason(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertIn('REASON', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertIn('REASON', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ def testMakeNotificationFooter_ManyReasons(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['Funny', 'Caring', 'Warmblooded'], REPLY_NOT_ALLOWED,
+ 'example.com')
+ self.assertIn('Funny', footer)
+ self.assertIn('Caring', footer)
+ self.assertIn('Warmblooded', footer)
+
+ def testMakeNotificationFooter_WithReplyInstructions(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertNotIn('Reply', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_MAY_COMMENT, 'example.com')
+ self.assertIn('add a comment', footer)
+ self.assertNotIn('make updates', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_MAY_UPDATE, 'example.com')
+ self.assertIn('add a comment', footer)
+ self.assertIn('make updates', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/notify_test.py b/appengine/monorail/features/test/notify_test.py
new file mode 100644
index 0000000..f6a0a1a
--- /dev/null
+++ b/appengine/monorail/features/test/notify_test.py
@@ -0,0 +1,291 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for notify.py."""
+
+import os
+import unittest
+import urllib
+import webapp2
+import webtest
+
+from google.appengine.api import taskqueue
+from google.appengine.ext import testbed
+
+from features import notify
+from framework import urls
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+def MakeTestIssue(project_id, local_id, owner_id, reporter_id, is_spam=False):
+ issue = tracker_pb2.Issue()
+ issue.project_id = project_id
+ issue.local_id = local_id
+ issue.owner_id = owner_id
+ issue.reporter_id = reporter_id
+ issue.is_spam = is_spam
+ return issue
+
+
+class SendNotificationTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_taskqueue_stub()
+ self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
+ self.taskqueue_stub._root_path = os.path.dirname(
+ os.path.dirname(os.path.dirname( __file__ )))
+
+ def tearDown(self):
+ self.testbed.deactivate()
+
+ def testPrepareAndSendIssueChangeNotification(self):
+ notify.PrepareAndSendIssueChangeNotification(
+ project_id=789,
+ local_id=1,
+ hostport='testbed-test.appspotmail.com',
+ commenter_id=1,
+ seq_num=0,
+ old_owner_id=2,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_ISSUE_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+
+ def testPrepareAndSendIssueBlockingNotification(self):
+ notify.PrepareAndSendIssueBlockingNotification(
+ project_id=789,
+ hostport='testbed-test.appspotmail.com',
+ local_id=1,
+ delta_blocker_iids=[],
+ commenter_id=1,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do')
+ self.assertEqual(0, len(tasks))
+
+ notify.PrepareAndSendIssueBlockingNotification(
+ project_id=789,
+ hostport='testbed-test.appspotmail.com',
+ local_id=1,
+ delta_blocker_iids=[2],
+ commenter_id=1,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+
+ def testSendIssueBulkChangeNotification_CommentOnly(self):
+ notify.SendIssueBulkChangeNotification(
+ hostport='testbed-test.appspotmail.com',
+ project_id=789,
+ local_ids=[1],
+ old_owner_ids=[2],
+ comment_text='comment',
+ commenter_id=1,
+ amendments=[],
+ send_email=True,
+ users_by_id=2)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BULK_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+ params = dict(urllib.unquote_plus(item).split('=')
+ for item in tasks[0].payload.split('&'))
+ self.assertEqual('comment', params['comment_text'])
+ self.assertEqual('', params['amendments'])
+
+ def testSendIssueBulkChangeNotification_Normal(self):
+ notify.SendIssueBulkChangeNotification(
+ hostport='testbed-test.appspotmail.com',
+ project_id=789,
+ local_ids=[1],
+ old_owner_ids=[2],
+ comment_text='comment',
+ commenter_id=1,
+ amendments=[
+ tracker_bizobj.MakeStatusAmendment('New', 'Old'),
+ tracker_bizobj.MakeLabelsAmendment(['Added'], ['Removed']),
+ tracker_bizobj.MakeStatusAmendment('New', 'Old'),
+ ],
+ send_email=True,
+ users_by_id=2)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BULK_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+ params = dict(urllib.unquote_plus(item).split('=')
+ for item in tasks[0].payload.split('&'))
+ self.assertEqual('comment', params['comment_text'])
+ self.assertEqual(
+ [' Status: New',
+ ' Labels: -Removed Added'],
+ params['amendments'].split('\n'))
+
+ def testAddAllEmailTasks(self):
+ notify.AddAllEmailTasks(
+ tasks=[{'to': 'user'}, {'to': 'user2'}])
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.OUTBOUND_EMAIL_TASK + '.do')
+ self.assertEqual(2, len(tasks))
+
+
+class NotifyTaskHandleRequestTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_taskqueue_stub()
+ self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
+ self.taskqueue_stub._root_path = os.path.dirname(
+ os.path.dirname(os.path.dirname( __file__ )))
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ issue_star=fake.IssueStarService(),
+ features=fake.FeaturesService())
+ self.services.user.TestAddUser('requester@example.com', 1)
+ self.services.user.TestAddUser('user@example.com', 2)
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ issue1 = MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue1)
+
+ def VerifyParams(self, result, params):
+ self.assertEqual(
+ bool(params['send_email']), result['params']['send_email'])
+ if 'id' in params:
+ self.assertEqual(params['id'], result['params']['local_id'])
+ if 'ids' in params:
+ self.assertEqual([int(p) for p in params['ids'].split(',')],
+ result['params']['local_ids'])
+ self.assertEqual(params['project_id'], result['params']['project_id'])
+
+ def testNotifyIssueChangeTask(self):
+ task = notify.NotifyIssueChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'commenter_id': 2}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyIssueChangeTask_spam(self):
+ issue = MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=1, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue)
+ task = notify.NotifyIssueChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 0, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'commenter_id': 2}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(0, len(result['notified']))
+
+ def testNotifyBlockingChangeTask(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBlockingChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'delta_blocker_iids': 2, 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyBlockingChangeTask_spam(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBlockingChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'delta_blocker_iids': 2, 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(0, len(result['notified']))
+
+ def testNotifyBulkChangeTask(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBulkChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'ids': '1,2', 'seq': 0,
+ 'old_owner_ids': '1,1', 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyBulkChangeTask_spam(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBulkChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'ids': '1,2', 'seq': 0,
+ 'old_owner_ids': '1,1', 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(1, len(result['notified']))
+
+ def testOutboundEmailTask(self):
+ task = notify.OutboundEmailTask(
+ request=None, response=None, services=self.services)
+ params = {
+ 'from_addr': 'requester@example.com',
+ 'reply_to': 'user@example.com',
+ 'to': 'user@example.com',
+ 'subject': 'Test subject'}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEqual(params['from_addr'], result['sender'])
+ self.assertEqual(params['subject'], result['subject'])
diff --git a/appengine/monorail/features/test/prettify_test.py b/appengine/monorail/features/test/prettify_test.py
new file mode 100644
index 0000000..09fb403
--- /dev/null
+++ b/appengine/monorail/features/test/prettify_test.py
@@ -0,0 +1,93 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the prettify module."""
+
+import unittest
+
+from third_party import ezt
+
+from features import prettify
+
+
+class SourceBrowseTest(unittest.TestCase):
+
+ def testPrepareSourceLinesForHighlighting(self):
+ # String representing an empty source file
+ src = ''
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 0)
+
+ def testPrepareSourceLinesForHighlightingNoBreaks(self):
+ # seven lines of text with no blank lines
+ src = ' 1\n 2\n 3\n 4\n 5\n 6\n 7'
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+ out_lines = [fl.line for fl in file_lines]
+ self.assertEqual('\n'.join(out_lines), src)
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+
+ def testPrepareSourceLinesForHighlightingWithBreaks(self):
+ # seven lines of text with line 5 being blank
+ src = ' 1\n 2\n 3\n 4\n\n 6\n 7'
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+
+
+class BuildPrettifyDataTest(unittest.TestCase):
+
+ def testNonSourceFile(self):
+ prettify_data = prettify.BuildPrettifyData(0, '/dev/null')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(False),
+ prettify_class=None),
+ prettify_data)
+
+ prettify_data = prettify.BuildPrettifyData(10, 'readme.txt')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(False),
+ prettify_class=None),
+ prettify_data)
+
+ def testGenericLanguage(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/hello.php')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class=''),
+ prettify_data)
+
+ def testSpecificLanguage(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/hello.java')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-java'),
+ prettify_data)
+
+ def testThirdPartyExtensionLanguages(self):
+ for ext in ['apollo', 'agc', 'aea', 'el', 'scm', 'cl', 'lisp',
+ 'go', 'hs', 'lua', 'fs', 'ml', 'proto', 'scala',
+ 'sql', 'vb', 'vbs', 'vhdl', 'vhd', 'wiki', 'yaml',
+ 'yml', 'clj']:
+ prettify_data = prettify.BuildPrettifyData(123, '/trunk/src/hello.' + ext)
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-' + ext),
+ prettify_data)
+
+ def testExactFilename(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/Makefile')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-sh'),
+ prettify_data)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/savedqueries_helpers_test.py b/appengine/monorail/features/test/savedqueries_helpers_test.py
new file mode 100644
index 0000000..d231294
--- /dev/null
+++ b/appengine/monorail/features/test/savedqueries_helpers_test.py
@@ -0,0 +1,106 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for savedqueries_helpers feature."""
+
+import unittest
+
+import mox
+
+from features import savedqueries_helpers
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class SavedQueriesHelperTest(unittest.TestCase):
+
+ def setUp(self):
+ self.features = fake.FeaturesService()
+ self.project = fake.ProjectService()
+ self.cnxn = 'fake cnxn'
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testParseSavedQueries(self):
+ post_data = {
+ 'xyz_savedquery_name_1': '',
+ 'xyz_savedquery_name_2': 'name2',
+ 'xyz_savedquery_name_3': 'name3',
+ 'xyz_savedquery_id_1': 1,
+ 'xyz_savedquery_id_2': 2,
+ 'xyz_savedquery_id_3': 3,
+ 'xyz_savedquery_projects_1': '123',
+ 'xyz_savedquery_projects_2': 'abc',
+ 'xyz_savedquery_projects_3': 'def',
+ 'xyz_savedquery_base_1': 4,
+ 'xyz_savedquery_base_2': 5,
+ 'xyz_savedquery_base_3': 6,
+ 'xyz_savedquery_query_1': 'query1',
+ 'xyz_savedquery_query_2': 'query2',
+ 'xyz_savedquery_query_3': 'query3',
+ 'xyz_savedquery_sub_mode_1': 'sub_mode1',
+ 'xyz_savedquery_sub_mode_2': 'sub_mode2',
+ 'xyz_savedquery_sub_mode_3': 'sub_mode3',
+ }
+ self.project.TestAddProject(name='abc', project_id=1001)
+ self.project.TestAddProject(name='def', project_id=1002)
+
+ saved_queries = savedqueries_helpers.ParseSavedQueries(
+ self.cnxn, post_data, self.project, prefix='xyz_')
+ self.assertEqual(2, len(saved_queries))
+
+ # pylint: disable=unbalanced-tuple-unpacking
+ saved_query1, saved_query2 = saved_queries
+ # Assert contents of saved_query1.
+ self.assertEqual(2, saved_query1.query_id)
+ self.assertEqual('name2', saved_query1.name)
+ self.assertEqual(5, saved_query1.base_query_id)
+ self.assertEqual('query2', saved_query1.query)
+ self.assertEqual([1001], saved_query1.executes_in_project_ids)
+ self.assertEqual('sub_mode2', saved_query1.subscription_mode)
+ # Assert contents of saved_query2.
+ self.assertEqual(3, saved_query2.query_id)
+ self.assertEqual('name3', saved_query2.name)
+ self.assertEqual(6, saved_query2.base_query_id)
+ self.assertEqual('query3', saved_query2.query)
+ self.assertEqual([1002], saved_query2.executes_in_project_ids)
+ self.assertEqual('sub_mode3', saved_query2.subscription_mode)
+
+ def testSavedQueryToCond(self):
+ class MockSavedQuery:
+ def __init__(self):
+ self.base_query_id = 1
+ self.query = 'query'
+ saved_query = MockSavedQuery()
+
+ cond_with_no_base = savedqueries_helpers.SavedQueryToCond(saved_query)
+ self.assertEquals('query', cond_with_no_base)
+
+ self.mox.StubOutWithMock(tracker_bizobj, 'GetBuiltInQuery')
+ tracker_bizobj.GetBuiltInQuery(1).AndReturn('base')
+ self.mox.ReplayAll()
+ cond_with_base = savedqueries_helpers.SavedQueryToCond(saved_query)
+ self.assertEquals('base query', cond_with_base)
+ self.mox.VerifyAll()
+
+ def testSavedQueryIDToCond(self):
+ self.mox.StubOutWithMock(savedqueries_helpers, 'SavedQueryToCond')
+ savedqueries_helpers.SavedQueryToCond(mox.IgnoreArg()).AndReturn('ret')
+ self.mox.ReplayAll()
+ query_cond = savedqueries_helpers.SavedQueryIDToCond(
+ self.cnxn, self.features, 1)
+ self.assertEquals('ret', query_cond)
+ self.mox.VerifyAll()
+
+ self.mox.StubOutWithMock(tracker_bizobj, 'GetBuiltInQuery')
+ tracker_bizobj.GetBuiltInQuery(1).AndReturn('built_in_query')
+ self.mox.ReplayAll()
+ query_cond = savedqueries_helpers.SavedQueryIDToCond(
+ self.cnxn, self.features, 1)
+ self.assertEquals('built_in_query', query_cond)
+ self.mox.VerifyAll()
diff --git a/appengine/monorail/features/test/savedqueries_test.py b/appengine/monorail/features/test/savedqueries_test.py
new file mode 100644
index 0000000..8475041
--- /dev/null
+++ b/appengine/monorail/features/test/savedqueries_test.py
@@ -0,0 +1,40 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for savedqueries feature."""
+
+import unittest
+
+from features import savedqueries
+from framework import monorailrequest
+from framework import permissions
+from services import service_manager
+from testing import fake
+
+
+class SavedQueriesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService())
+ self.servlet = savedqueries.SavedQueries(
+ 'req', 'res', services=self.services)
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testAssertBasePermission(self):
+ """Only permit site admins and users viewing themselves."""
+ mr = monorailrequest.MonorailRequest()
+ mr.viewed_user_auth.user_id = 111L
+ mr.auth.user_id = 222L
+
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr.auth.user_id = 111L
+ self.servlet.AssertBasePermission(mr)
+
+ mr.auth.user_id = 222L
+ mr.auth.user_pb.is_site_admin = True
+ self.servlet.AssertBasePermission(mr)
\ No newline at end of file
diff --git a/appengine/monorail/features/test/stars_test.py b/appengine/monorail/features/test/stars_test.py
new file mode 100644
index 0000000..e96d765
--- /dev/null
+++ b/appengine/monorail/features/test/stars_test.py
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the project and user stars feature."""
+
+import unittest
+
+from features import stars
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class StarsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ project_star=fake.ProjectStarService(),
+ user_star=fake.UserStarService())
+ self.services.project.TestAddProject('proj', project_id=123)
+ self.services.user.TestAddUser('testuser', 111L)
+ self.set_stars_feed = stars.SetStarsFeed(
+ 'req', 'res', services=self.services)
+
+ def SetAndVerifyStarredItems(self, scope, item, item_id, get_star_count):
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 111L}, services=self.services, method='POST',
+ params={'scope': scope, 'item': item, 'starred': 1, 'token': 'x'})
+ result = self.set_stars_feed.HandleRequest(mr)
+
+ self.assertEqual({'starred': True}, result)
+ self.assertEqual(1, get_star_count('fake cnxn', item_id))
+
+ # The same starrer doing it again does not drive up the count more.
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': True}, result)
+ self.assertEqual(1, get_star_count('fake cnxn', item_id))
+
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 111L}, services=self.services, method='POST',
+ params={'scope': scope, 'item': item, 'starred': 0, 'token': 'x'})
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': False}, result)
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ # The same starrer doing it again does not drive down the count more.
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': False}, result)
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ def testSetAndGetStarredItems_User(self):
+ """Tests SetStarsFeed.HandleRequest method."""
+ self.SetAndVerifyStarredItems(
+ 'users', '111', 111L, self.services.user_star.CountItemStars)
+
+ def testSetAndGetStarredItems_Project(self):
+ self.SetAndVerifyStarredItems(
+ 'projects', 'proj', 123, self.services.project_star.CountItemStars)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/__init__.py b/appengine/monorail/framework/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/framework/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/framework/actionlimit.py b/appengine/monorail/framework/actionlimit.py
new file mode 100644
index 0000000..b994c1f
--- /dev/null
+++ b/appengine/monorail/framework/actionlimit.py
@@ -0,0 +1,227 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions to test action limits.
+
+Action limits help prevent an individual user from abusing the system
+by performing an excessive number of operations. E.g., creating
+thousands of projects.
+
+If the user reaches a soft limit within a given time period, the
+servlets will start demanding that the user solve a CAPTCHA.
+
+If the user reaches a hard limit within a given time period, any further
+requests to perform that type of action will fail.
+
+When the user reaches a lifetime limit, they are shown an error page.
+We can increase the lifetime limit for individual users who contact us.
+"""
+
+import logging
+import time
+
+from framework import framework_constants
+from proto import user_pb2
+
+
+# Action types
+PROJECT_CREATION = 1
+ISSUE_COMMENT = 2
+ISSUE_ATTACHMENT = 3
+ISSUE_BULK_EDIT = 4
+FLAG_SPAM = 5
+API_REQUEST = 6
+
+# Maps the string name used in admin UIs/tools to the int action type.
+ACTION_TYPE_NAMES = {
+    'project_creation': PROJECT_CREATION,
+    'issue_comment': ISSUE_COMMENT,
+    'issue_attachment': ISSUE_ATTACHMENT,
+    'issue_bulk_edit': ISSUE_BULK_EDIT,
+    'flag_spam': FLAG_SPAM,
+    'api_request': API_REQUEST,
+    }
+
+# Action Limit definitions
+# {action_type: (period, soft_limit, hard_limit, life_max),...}
+ACTION_LIMITS = {
+    PROJECT_CREATION: (framework_constants.SECS_PER_DAY, 2, 5, 25),
+    ISSUE_COMMENT: (framework_constants.SECS_PER_DAY / 4, 5, 100, 10000),
+    ISSUE_ATTACHMENT: (framework_constants.SECS_PER_DAY, 25, 100, 1000),
+    ISSUE_BULK_EDIT: (framework_constants.SECS_PER_DAY, 100, 500, 10000),
+    FLAG_SPAM: (framework_constants.SECS_PER_DAY, 100, 100, 10000),
+    API_REQUEST: (framework_constants.SECS_PER_DAY, 100000, 100000, 10000000),
+    }
+
+
+# Determine scaling of CAPTCHA frequency.
+# MAX_SOFT_LIMITS is the largest gap between any action's soft and hard limit.
+MAX_SOFT_LIMITS = max([ACTION_LIMITS[key][2] - ACTION_LIMITS[key][1]
+                       for key in ACTION_LIMITS])
+# NeedCaptcha demands a CAPTCHA when the count remaining before the hard
+# limit is in this set, so CAPTCHAs thin out as the hard limit approaches.
+SQUARES = {i**2 for i in range(1, MAX_SOFT_LIMITS)}
+SQUARES.add(1)
+
+
+def NeedCaptcha(user, action_type, now=None, skip_lifetime_check=False):
+  """Check that the user is under the limit on a given action.
+
+  Args:
+    user: instance of user_pb2.User.
+    action_type: int action type.
+    now: int time in seconds. Defaults to int(time.time()). Used for testing.
+    skip_lifetime_check: No limit for lifetime actions.
+
+  Raises:
+    ExcessiveActivityException: when user is over hard or lifetime limits.
+
+  Returns:
+    False if user is under the soft-limit. True if user is over the
+    soft-limit, but under the hard and lifetime limits.
+  """
+  if not user:  # Anything that can be done by anon users (which is not
+    return False  # much) can be done any number of times w/o CAPTCHA.
+  if not now:
+    now = int(time.time())
+
+  period, soft, hard, life_max = ACTION_LIMITS[action_type]
+  actionlimit_pb = GetLimitPB(user, action_type)
+
+  # First, users with no action limits recorded must be below limits.
+  # And, users that we explicitly trust as non-abusers are allowed to take
+  # an unlimited number of actions. And, site admins are trusted non-abusers.
+  if (not actionlimit_pb or user.ignore_action_limits or
+      user.is_site_admin):
+    return False
+
+  # Second, check if user has reached lifetime limit.
+  # Custom per-user values stored on the ActionLimit PB override defaults.
+  if actionlimit_pb.lifetime_limit:
+    life_max = actionlimit_pb.lifetime_limit
+  if actionlimit_pb.period_soft_limit:
+    soft = actionlimit_pb.period_soft_limit
+  if actionlimit_pb.period_hard_limit:
+    hard = actionlimit_pb.period_hard_limit
+  if (not skip_lifetime_check and life_max is not None
+      and actionlimit_pb.lifetime_count >= life_max):
+    raise ExcessiveActivityException()
+
+  # Third, if user can begin a new time period, they are free to go ahead.
+  if now - actionlimit_pb.reset_timestamp > period:
+    return False
+
+  # Fourth, check for hard rate limits.
+  if hard is not None and actionlimit_pb.recent_count >= hard:
+    raise ExcessiveActivityException()
+
+  # Finally, check the soft limit in this time period.
+  action_limit = False
+  if soft is not None:
+    recent_count = actionlimit_pb.recent_count
+    if recent_count == soft:
+      action_limit = True
+    elif recent_count > soft:
+      # Past the soft limit, only demand a CAPTCHA when the count remaining
+      # before the hard limit is a perfect square (see SQUARES above).
+      remaining_soft = hard - recent_count
+      if remaining_soft in SQUARES:
+        action_limit = True
+
+  if action_limit:
+    logging.info('soft limit captcha: %d', recent_count)
+  return action_limit
+
+
+def GetLimitPB(user, action_type):
+ """Return the apporiate action limit PB part of the given User PB."""
+ if action_type == PROJECT_CREATION:
+ if not user.project_creation_limit:
+ user.project_creation_limit = user_pb2.ActionLimit()
+ return user.project_creation_limit
+ elif action_type == ISSUE_COMMENT:
+ if not user.issue_comment_limit:
+ user.issue_comment_limit = user_pb2.ActionLimit()
+ return user.issue_comment_limit
+ elif action_type == ISSUE_ATTACHMENT:
+ if not user.issue_attachment_limit:
+ user.issue_attachment_limit = user_pb2.ActionLimit()
+ return user.issue_attachment_limit
+ elif action_type == ISSUE_BULK_EDIT:
+ if not user.issue_bulk_edit_limit:
+ user.issue_bulk_edit_limit = user_pb2.ActionLimit()
+ return user.issue_bulk_edit_limit
+ elif action_type == FLAG_SPAM:
+ if not user.flag_spam_limit:
+ user.flag_spam_limit = user_pb2.ActionLimit()
+ return user.flag_spam_limit
+ elif action_type == API_REQUEST:
+ if not user.api_request_limit:
+ user.api_request_limit = user_pb2.ActionLimit()
+ return user.api_request_limit
+ raise Exception('unexpected action type %r' % action_type)
+
+
+def ResetRecentActions(user, action_type):
+ """Reset the recent counter for an action.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ """
+ al = GetLimitPB(user, action_type)
+ al.recent_count = 0
+ al.reset_timestamp = 0
+
+
+def CountAction(user, action_type, delta=1, now=int(time.time())):
+ """Reset recent counter if eligible, then increment recent and lifetime.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ delta: int number to increment count by.
+ now: int time in millis. Defaults to int(time.time()). Used for testing.
+ """
+ al = GetLimitPB(user, action_type)
+ period = ACTION_LIMITS[action_type][0]
+
+ if now - al.reset_timestamp > period:
+ al.reset_timestamp = now
+ al.recent_count = 0
+
+ al.recent_count = al.recent_count + delta
+ al.lifetime_count = al.lifetime_count + delta
+
+
+def CustomizeLimit(user, action_type, soft_limit, hard_limit, lifetime_limit):
+ """Set custom action limits for a user.
+
+ The recent counters are reset to zero, so the user will not run into
+ a hard limit.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ soft_limit: soft limit of period.
+ hard_limit: hard limit of period.
+ lifetime_limit: lifetime limit.
+ """
+ al = GetLimitPB(user, action_type)
+ al.lifetime_limit = lifetime_limit
+ al.period_soft_limit = soft_limit
+ al.period_hard_limit = hard_limit
+
+ # The mutator will mark the ActionLimit as present, but does not
+ # necessarily *initialize* the protobuf. We need to ensure that the
+ # lifetime_count is set (a required field). Additional required
+ # fields will be set below.
+ if not al.lifetime_count:
+ al.lifetime_count = 0
+
+ # Clear the recent counters so the user will not hit the period limit.
+ al.recent_count = 0
+ al.reset_timestamp = 0
+
+
+class Error(Exception):
+  """Base exception class for this package."""
+
+
+class ExcessiveActivityException(Error):
+  """Raised when a user reaches a hard or lifetime action limit."""
diff --git a/appengine/monorail/framework/alerts.py b/appengine/monorail/framework/alerts.py
new file mode 100644
index 0000000..ef939b1
--- /dev/null
+++ b/appengine/monorail/framework/alerts.py
@@ -0,0 +1,54 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helpers for showing alerts at the top of the page.
+
+These alerts are then displayed by alerts.ezt.
+"""
+
+import time
+
+from third_party import ezt
+
+# Expiration time for special features of timestamped links.
+# This is not for security, just for informational messages that
+# make sense in the context of a user session, but that should
+# not appear days later if the user follows a bookmarked link.
+_LINK_EXPIRATION_SEC = 8
+
+
+class AlertsView(object):
+ """EZT object for showing alerts at the top of the page."""
+
+ def __init__(self, mr):
+ # Used to show message confirming item was updated
+ self.updated = mr.GetIntParam('updated')
+
+ # Used to show message confirming item was moved and the location of the new
+ # item.
+ self.moved_to_project = mr.GetParam('moved_to_project')
+ self.moved_to_id = mr.GetIntParam('moved_to_id')
+ self.moved = self.moved_to_project and self.moved_to_id
+
+ # Used to show message confirming item was copied and the location of the
+ # new item.
+ self.copied_from_id = mr.GetIntParam('copied_from_id')
+ self.copied_to_project = mr.GetParam('copied_to_project')
+ self.copied_to_id = mr.GetIntParam('copied_to_id')
+ self.copied = self.copied_to_project and self.copied_to_id
+
+ # Used to show message confirming items deleted
+ self.deleted = mr.GetParam('deleted')
+
+ # If present, we will show message confirming that data was saved
+ self.saved = mr.GetParam('saved')
+
+ link_generation_timestamp = mr.GetIntParam('ts', default_value=0)
+ now = int(time.time())
+ ts_links_are_valid = now - link_generation_timestamp < _LINK_EXPIRATION_SEC
+
+ show_alert = ts_links_are_valid and (
+ self.updated or self.moved or self.copied or self.deleted or self.saved)
+ self.show = ezt.boolean(show_alert)
diff --git a/appengine/monorail/framework/artifactcollision.py b/appengine/monorail/framework/artifactcollision.py
new file mode 100644
index 0000000..2cd0651
--- /dev/null
+++ b/appengine/monorail/framework/artifactcollision.py
@@ -0,0 +1,49 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Class that implements the artifact update collision page.
+
+This page is displayed only when one user views and edits an issue,
+but another user has already submitted an issue update before the
+first user submits his/her update.
+
+TODO(jrobbins): give the user better options on how to proceed.
+
+Summary of classes:
+ ArtifactCollision: Show an error message explaining the mid-air collision.
+"""
+
+import re
+
+from framework import monorailrequest
+from framework import servlet
+
+
+class ArtifactCollision(servlet.Servlet):
+ """ArtifactCollision page explains that a mid-air collision has occured."""
+
+ _PAGE_TEMPLATE = 'framework/artifact-collision-page.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_NONE
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ A dict of values used by EZT for rendering the page.
+ """
+ artifact_name = mr.GetParam('name')
+ if not artifact_name:
+ raise monorailrequest.InputException() # someone forged a link
+
+ artifact_detail_url = '/p/%s/issues/detail?id=%s' % (
+ mr.project_name, mr.continue_issue_id)
+
+ return {
+ 'artifact_name': artifact_name,
+ 'artifact_detail_url': artifact_detail_url,
+ }
diff --git a/appengine/monorail/framework/banned.py b/appengine/monorail/framework/banned.py
new file mode 100644
index 0000000..45dd326
--- /dev/null
+++ b/appengine/monorail/framework/banned.py
@@ -0,0 +1,46 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display the a message explaining that the user has been banned.
+
+We can ban a user for anti-social behavior. We indicate that the user is
+banned by adding a 'banned' field to his/her User PB in the DB. Whenever
+a user with a banned indicator visits any page, AssertBasePermission()
+checks has_banned and redirects to this page.
+"""
+
+import logging
+
+from framework import permissions
+from framework import servlet
+
+
+class Banned(servlet.Servlet):
+ """The Banned page shows a message explaining that the user is banned."""
+
+ _PAGE_TEMPLATE = 'framework/banned-page.ezt'
+
+ def AssertBasePermission(self, mr):
+ """Allow banned users to see this page, and prevent non-banned users."""
+ # Note, we do not call Servlet.AssertBasePermission because
+ # that would redirect banned users here again in an endless loop.
+
+ # We only show this page to users who are banned. If a non-banned user
+ # follows a link to this URL, don't show the banned message, because that
+ # would lead to a big misunderstanding.
+ if not permissions.IsBanned(mr.auth.user_pb, mr.auth.user_view):
+ logging.info('non-banned user: %s', mr.auth.user_pb)
+ self.abort(404)
+
+ def GatherPageData(self, _mr):
+ """Build up a dictionary of data values to use when rendering the page."""
+ return {
+ # We do not actually display the specific reason for banning.
+ # That info is available via command-line tools..
+
+ # Make the "Sign Out" link just sign out, don't try to bring the
+ # user back to this page after they sign out.
+ 'currentPageURLEncoded': None,
+ }
diff --git a/appengine/monorail/framework/captcha.py b/appengine/monorail/framework/captcha.py
new file mode 100644
index 0000000..2aa9c19
--- /dev/null
+++ b/appengine/monorail/framework/captcha.py
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A simple python interface to ReCAPTCHA."""
+
+import json
+import logging
+import urllib
+import urllib2
+
+from services import secrets_svc
+
+def Verify(remote_ip, response):
+  """Check the user's guess at a captcha solution.
+
+  Args:
+    remote_ip: user's IP address.
+    response: The user's response to the captcha challenge.
+
+  Returns:
+    A pair (correct, error_msg) where error_msg will indicate
+    why a response was deemed incorrect. It is logged so that
+    you can see, e.g., if you have the wrong private key.
+  """
+  # If the user did not enter anything, that is always incorrect
+  if not response:
+    logging.info('response was blank')
+    return False, 'incorrect-captcha-sol'
+
+  resp = _AskRecaptcha(remote_ip, response)
+  if not resp['success']:
+    # Prefer the backend's specific error codes when it supplies them.
+    if 'error-codes' in resp:
+      return False, resp['error-codes']
+    else:
+      return False, 'incorrect-captcha-sol'
+
+  return True, ''
+
+def _AskRecaptcha(remote_ip, response):
+ """Ask the ReCAPTCHA backend to verify the user's guess."""
+ recaptcha_server_request = urllib2.Request(
+ url='https://www.google.com/recaptcha/api/siteverify',
+ data=urllib.urlencode({
+ 'secret': secrets_svc.GetRecaptchaPrivateKey(),
+ 'remoteip': remote_ip,
+ 'response': response}),
+ headers={
+ 'Content-type': 'application/x-www-form-urlencoded',
+ 'User-agent': 'reCAPTCHA Python'})
+ recaptcha_server_response = urllib2.urlopen(recaptcha_server_request)
+ resp = json.loads(recaptcha_server_response.read())
+ recaptcha_server_response.close()
+
+ return resp
+
diff --git a/appengine/monorail/framework/csp_report.py b/appengine/monorail/framework/csp_report.py
new file mode 100644
index 0000000..b4dd0a4
--- /dev/null
+++ b/appengine/monorail/framework/csp_report.py
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet for Content Security Policy violation reporting.
+See http://www.html5rocks.com/en/tutorials/security/content-security-policy/
+for more information on how this mechanism works.
+"""
+
+import webapp2
+import logging
+
+
+class CSPReportPage(webapp2.RequestHandler):
+ """CSPReportPage serves CSP violation reports."""
+
+ def post(self):
+ logging.error('CSP Violation: %s' % self.request.body)
diff --git a/appengine/monorail/framework/emailfmt.py b/appengine/monorail/framework/emailfmt.py
new file mode 100644
index 0000000..d4aa955
--- /dev/null
+++ b/appengine/monorail/framework/emailfmt.py
@@ -0,0 +1,359 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Functions that format or parse email messages in Monorail.
+
+Specifically, this module has the logic for generating various email
+header lines that help match inbound and outbound email to the project
+and artifact that generated it.
+"""
+
+import hmac
+import logging
+import re
+import rfc822
+
+from google.appengine.api import app_identity
+
+import settings
+from framework import framework_constants
+from services import client_config_svc
+from services import secrets_svc
+
+# TODO(jrobbins): Parsing very large messages is slow, and we are not going
+# to handle attachments at first, so there is no reason to consider large
+# emails.
+MAX_BODY_SIZE = 100 * 1024
+MAX_HEADER_CHARS_CONSIDERED = 255
+
+
+
+def IsBodyTooBigToParse(body):
+ """Return True if the email message body is too big to process."""
+ return len(body) > MAX_BODY_SIZE
+
+
+def IsProjectAddressOnToLine(project_addr, to_addrs):
+ """Return True if an email was explicitly sent directly to us."""
+ return project_addr in to_addrs
+
+
+def ParseEmailMessage(msg):
+  """Parse the given MessageRouterMessage and return relevant fields.
+
+  Args:
+    msg: email.message.Message object for the email message sent to us.
+
+  Returns:
+    A tuple: from_addr, to_addrs, cc_addrs, references, subject, body.
+    All-empty values are returned for autoresponder messages.
+  """
+  # Ignore messages that are probably not from humans, see:
+  # http://google.com/search?q=precedence+bulk+junk
+  precedence = msg.get('precedence', '')
+  if precedence.lower() in ['bulk', 'junk']:
+    logging.info('Precedence: %r indicates an autoresponder', precedence)
+    return '', [], [], '', '', ''
+
+  # Take the first From: address; an empty header yields an empty string.
+  from_addrs = _ExtractAddrs(msg.get('from', ''))
+  if from_addrs:
+    from_addr = from_addrs[0]
+  else:
+    from_addr = ''
+
+  to_addrs = _ExtractAddrs(msg.get('to', ''))
+  cc_addrs = _ExtractAddrs(msg.get('cc', ''))
+
+  # Merge In-Reply-To into References, de-duping and dropping empties.
+  in_reply_to = msg.get('in-reply-to', '')
+  references = msg.get('references', '').split()
+  references = list({ref for ref in [in_reply_to] + references if ref})
+  subject = _StripSubjectPrefixes(msg.get('subject', ''))
+
+  body = ''
+  for part in msg.walk():
+    # We only process plain text emails.
+    if part.get_content_type() == 'text/plain':
+      body = part.get_payload(decode=True)
+      break  # Only consider the first text part.
+
+  return from_addr, to_addrs, cc_addrs, references, subject, body
+
+
+def _ExtractAddrs(header_value):
+ """Given a message header value, return email address found there."""
+ friendly_addr_pairs = list(rfc822.AddressList(header_value))
+ return [addr for _friendly, addr in friendly_addr_pairs]
+
+
+def _StripSubjectPrefixes(subject):
+ """Strip off any 'Re:', 'Fwd:', etc. subject line prefixes."""
+ prefix = _FindSubjectPrefix(subject)
+ while prefix:
+ subject = subject[len(prefix):].strip()
+ prefix = _FindSubjectPrefix(subject)
+
+ return subject
+
+
+def _FindSubjectPrefix(subject):
+ """If the given subject starts with a prefix, return that prefix."""
+ for prefix in ['re:', 'aw:', 'fwd:', 'fw:']:
+ if subject.lower().startswith(prefix):
+ return prefix
+
+ return None
+
+
+def MailDomain():
+  """Return the domain name where this app can receive email."""
+  if settings.unit_test_mode:
+    return 'testbed-test.appspotmail.com'
+
+  # If running on a GAFYD domain, you must define an app alias on the
+  # Application Settings admin web page. If you cannot reserve the matching
+  # APP_ID for the alias, then specify it in settings.mail_domain.
+  if settings.mail_domain:
+    return settings.mail_domain
+
+  # App ids can look like "domain:app-id"; keep only the part after ':'.
+  app_id = app_identity.get_application_id()
+  if ':' in app_id:
+    app_id = app_id.split(':')[-1]
+
+  return '%s.appspotmail.com' % app_id
+
+
+def FormatFriendly(commenter_view, sender, reveal_addr):
+ """Format the From: line to include the commenter's friendly name if given."""
+ if commenter_view:
+ site_name = settings.site_name
+ if commenter_view.email in client_config_svc.GetServiceAccountMap():
+ friendly = commenter_view.display_name
+ elif reveal_addr:
+ friendly = commenter_view.email
+ else:
+ friendly = commenter_view.display_name
+ return '%s via %s <%s>' % (friendly, site_name, sender)
+ else:
+ return sender
+
+
+def NoReplyAddress(commenter_view=None, reveal_addr=False):
+ """Return an address that ignores all messages sent to it."""
+ # Note: We use "no_reply" with an underscore to avoid potential conflict
+ # with any project name. Project names cannot have underscores.
+ sender = 'no_reply@%s' % MailDomain()
+ return FormatFriendly(commenter_view, sender, reveal_addr)
+
+
+def FormatFromAddr(_project, commenter_view=None, reveal_addr=False,
+ can_reply_to=True):
+ """Return a string to be used on the email From: line.
+
+ Args:
+ project: Project PB for the project that the email is sent from.
+ commenter_view: Optional UserView of the user who made a comment. We use
+ the user's (potentially obscured) email address as their friendly name.
+ reveal_addr: Optional bool. If False then the address is obscured.
+ can_reply_to: Optional bool. If True then settings.send_email_as is used,
+ otherwise settings.send_noreply_email_as is used.
+
+ Returns:
+ A string that should be used in the From: line of outbound email
+ notifications for the given project.
+ """
+ addr = (settings.send_email_as if can_reply_to
+ else settings.send_noreply_email_as)
+ return FormatFriendly(commenter_view, addr, reveal_addr)
+
+
+def NormalizeHeader(s):
+ """Make our message-ids robust against mail client spacing and truncation."""
+ words = _StripSubjectPrefixes(s).split() # Split on any runs of whitespace.
+ normalized = ' '.join(words)
+ truncated = normalized[:MAX_HEADER_CHARS_CONSIDERED]
+ return truncated
+
+
+def MakeMessageID(to_addr, subject, from_addr):
+  """Make a unique (but deterministic) email Message-Id: value."""
+  normalized_subject = NormalizeHeader(subject)
+  if isinstance(normalized_subject, unicode):
+    # hmac requires a byte string, not unicode.
+    normalized_subject = normalized_subject.encode('utf-8')
+  mail_hmac_key = secrets_svc.GetEmailKey()
+  # Keyed hashes make the ID deterministic for us but hard to forge;
+  # ValidateReferencesHeader recomputes it to check inbound replies.
+  return '<0=%s=%s=%s@%s>' % (
+      hmac.new(mail_hmac_key, to_addr).hexdigest(),
+      hmac.new(mail_hmac_key, normalized_subject).hexdigest(),
+      from_addr.split('@')[0],
+      MailDomain())
+
+
+def GetReferences(to_addr, subject, seq_num, project_from_addr):
+  """Make a References: header to make this message thread properly.
+
+  Args:
+    to_addr: address that email message will be sent to.
+    subject: subject line of email message.
+    seq_num: sequence number of message in thread, e.g., 0, 1, 2, ...,
+        or None if the message is not part of a thread.  Only None-ness
+        matters here; the specific number does not change the result.
+    project_from_addr: address that the message will be sent from.
+
+  Returns:
+    A string Message-ID that does not correspond to any actual email
+    message that was ever sent, but it does serve to unite all the
+    messages that belong together in a thread.
+  """
+  if seq_num is not None:
+    return MakeMessageID(to_addr, subject, project_from_addr)
+  else:
+    return ''
+
+
+def ValidateReferencesHeader(message_ref, project, from_addr, subject):
+ """Check that the References header is one that we could have sent.
+
+ Args:
+ message_ref: one of the References header values from the inbound email.
+ project: Project PB for the affected project.
+ from_addr: string email address that inbound email was sent from.
+ subject: string base subject line of inbound email.
+
+ Returns:
+ True if it looks like this is a reply to a message that we sent
+ to the same address that replied. Otherwise, False.
+ """
+ sender = '%s@%s' % (project.project_name, MailDomain())
+ expected_ref = MakeMessageID(from_addr, subject, sender)
+
+ # TODO(jrobbins): project option to not check from_addr.
+ # TODO(jrobbins): project inbound auth token.
+ return expected_ref == message_ref
+
+
+# Matches the "project@domain" address that inbound mail was delivered to.
+PROJECT_EMAIL_RE = re.compile(
+    r'(?P<project>[-a-z0-9]+)'
+    r'@(?P<domain>[-a-z0-9.]+)')
+
+# Matches notification subjects like "Issue 123 in some-project: summary".
+ISSUE_CHANGE_SUMMARY_RE = re.compile(
+    r'Issue (?P<local_id>[0-9]+) in '
+    r'(?P<project>[-a-z0-9]+): '
+    r'(?P<summary>.+)')
+
+
+def IdentifyProjectAndIssue(project_addr, subject):
+ """Parse the domain name, project name, and artifact id from a reply.
+
+ Args:
+ project_addr: string email address that the email was delivered to,
+ it must match the Reply-To: header sent in the notification message.
+ subject: string email subject line received, it must match the one
+ sent. Leading prefixes like "Re:" should already have been stripped.
+
+ Returns:
+ A 2-tuple: (project_name, local_id). If either or both are
+ None, they could not be determined.
+ """
+ # Ignore any inbound email sent to a "no_reply@" address.
+ if project_addr.startswith('no_reply@'):
+ return None, None
+
+ project_name = None
+
+ m = PROJECT_EMAIL_RE.match(project_addr.lower())
+ if m:
+ project_name = m.group('project')
+
+ issue_project_name, local_id_str = _MatchSubject(subject)
+
+ if project_name != issue_project_name:
+ # Something is wrong with the project name.
+ project_name = None
+
+ logging.info('project_name = %r', project_name)
+ logging.info('local_id_str = %r', local_id_str)
+
+ try:
+ local_id = int(local_id_str)
+ except ValueError:
+ local_id = None
+
+ return project_name, local_id
+
+
+def _MatchSubject(subject):
+ """Parse the project, artifact type, and artifact id from a subject line."""
+ m = ISSUE_CHANGE_SUMMARY_RE.match(subject)
+ if m:
+ return m.group('project'), m.group('local_id')
+
+ return None, None
+
+
+# TODO(jrobbins): For now, we strip out lines that look like quoted
+# text and then will give the user the option to see the whole email.
+# For 2.0 of this feature, we should change the Comment PB to have
+# runs of text with different properties so that the UI can present
+# "- Show quoted text -" and expand it in-line.
+
+# TODO(jrobbins): For now, we look for lines that indicate quoted
+# text (e.g., they start with ">").  But, we should also collapse
+# multiple lines that are identical to other lines in previous
+# non-deleted comments on the same issue, regardless of quote markers.
+
+
+# We cut off the message if we see something that looks like a signature and
+# it is near the bottom of the message.
+SIGNATURE_BOUNDARY_RE = re.compile(
+    r'^(([-_=]+ ?)+|'
+    r'cheers|(best |warm |kind )?regards|thx|thanks|thank you|'
+    r'Sent from my i?Phone|Sent from my iPod)'
+    r',? *$', re.I)
+
+# Only this many trailing lines are scanned for a signature boundary.
+MAX_SIGNATURE_LINES = 8
+
+FORWARD_OR_EXPLICIT_SIG_PATS = [
+    r'[^0-9a-z]+(forwarded|original) message[^0-9a-z]+\s*$',
+    r'Updates:\s*$',
+    r'Comment #\d+ on issue \d+ by \S+:',
+    # If we see this anywhere in the message, treat the rest as a signature.
+    r'--\s*$',
+    ]
+FORWARD_OR_EXPLICIT_SIG_PATS_AND_REST_RE = re.compile(
+    r'^(%s)(.|\n)*' % '|'.join(FORWARD_OR_EXPLICIT_SIG_PATS),
+    flags=re.MULTILINE | re.IGNORECASE)
+
+# This handles gmail well, and it's pretty broad without seeming like
+# it would cause false positives.
+# NOTE(review): in the appspotmail patterns below, the '.' characters before
+# and inside 'appspotmail.com' are unescaped, so they match any character.
+# This only over-matches slightly; confirm before tightening.
+QUOTE_PATS = [
+    r'^On .*\s+<\s*\S+?@[-a-z0-9.]+>\s*wrote:\s*$',
+    r'^On .* \S+?@[-a-z0-9.]+\s*wrote:\s*$',
+    r'^\S+?@[-a-z0-9.]+ \(\S+?@[-a-z0-9.]+\)\s*wrote:\s*$',
+    r'\S+?@[-a-z0-9]+.appspotmail.com\s.*wrote:\s*$',
+    r'\S+?@[-a-z0-9]+.appspotmail.com\s+.*a\s+\xc3\xa9crit\s*:\s*$',
+    r'^\d+/\d+/\d+ +<\S+@[-a-z0-9.]+>:?\s*$',
+    r'^>.*$',
+    ]
+QUOTED_BLOCKS_RE = re.compile(
+    r'(^\s*\n)*((%s)\n?)+(^\s*\n)*' % '|'.join(QUOTE_PATS),
+    flags=re.MULTILINE | re.IGNORECASE)
+
+
+def StripQuotedText(description):
+  """Strip all quoted text lines out of the given comment text."""
+  # If the rest of message is forwarded text, we're done.
+  description = FORWARD_OR_EXPLICIT_SIG_PATS_AND_REST_RE.sub('', description)
+  # Replace each quoted block of lines and surrounding blank lines with at
+  # most one blank line.
+  description = QUOTED_BLOCKS_RE.sub('\n', description)
+
+  new_lines = description.strip().split('\n')
+  # Make another pass over the last few lines to strip out signatures.
+  sig_zone_start = max(0, len(new_lines) - MAX_SIGNATURE_LINES)
+  for idx in range(sig_zone_start, len(new_lines)):
+    line = new_lines[idx]
+    if SIGNATURE_BOUNDARY_RE.match(line):
+      # We found the likely start of a signature, just keep the lines above it.
+      new_lines = new_lines[:idx]
+      break
+
+  return '\n'.join(new_lines).strip()
diff --git a/appengine/monorail/framework/excessiveactivity.py b/appengine/monorail/framework/excessiveactivity.py
new file mode 100644
index 0000000..fddb7e5
--- /dev/null
+++ b/appengine/monorail/framework/excessiveactivity.py
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display an error page for excessive activity.
+
+This page is shown when the user performs a given type of action
+too many times in a 24-hour period or exceeds a lifetime limit.
+"""
+
+from framework import servlet
+
+
+class ExcessiveActivity(servlet.Servlet):
+  """ExcessiveActivity page shows an error message.
+
+  All of the displayed text lives in the EZT template; this servlet only
+  selects the template and supplies no dynamic data.
+  """
+
+  _PAGE_TEMPLATE = 'framework/excessive-activity-page.ezt'
+
+  def GatherPageData(self, _mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      _mr: commonly used info parsed from the request; unused here.
+
+    Returns:
+      An empty dict: the static template needs no values.
+    """
+    return {}
diff --git a/appengine/monorail/framework/filecontent.py b/appengine/monorail/framework/filecontent.py
new file mode 100644
index 0000000..ec3c171
--- /dev/null
+++ b/appengine/monorail/framework/filecontent.py
@@ -0,0 +1,171 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Utility routines for dealing with MIME types and decoding text files."""
+
+import itertools
+import logging
+
+from framework import framework_constants
+
+
+# Maps a lowercased filename extension to the Content-Type header value used
+# when serving such a file.  Anything not listed here (and not a known text
+# extension) is served as application/octet-stream.
+_EXTENSION_TO_CTYPE_TABLE = {
+    # These are images/PDFs that we trust the browser to display.
+    'gif': 'image/gif',
+    'jpg': 'image/jpeg',
+    'jpeg': 'image/jpeg',
+    'png': 'image/png',
+    'ico': 'image/x-icon',
+    'svg': 'image/svg+xml',
+    'pdf': 'application/pdf',
+
+    # We do not serve mimetypes that cause the browser to launch a local
+    # app because that is not required for issue tracking and it is a
+    # potential security risk.
+}
+
+
def GuessContentTypeFromFilename(filename):
  """Guess a file's content type based on the filename extension.

  Args:
    filename: String name of a file.

  Returns:
    MIME type string to use when serving this file. We only use text/plain for
    text files, appropriate image content-types, or application/octet-stream
    for virtually all binary files. This limits the richness of the user's
    experience, e.g., the user cannot open an MS Office application directly
    by clicking on an attachment, but it is safer.
  """
  # rsplit grabs just the final extension; lowercase once up front (the
  # previous version redundantly lowercased ext a second time at lookup).
  ext = filename.rsplit('.', 1)[-1].lower() if ('.' in filename) else ''
  if ext in COMMON_TEXT_FILE_EXTENSIONS:
    return 'text/plain'
  return _EXTENSION_TO_CTYPE_TABLE.get(ext, 'application/octet-stream')
+
+
# Constants used in detecting if a file has binary content.
# All line lengths must be below the upper limit, and a specific ratio of
# them must be below the lower limit.
_MAX_SOURCE_LINE_LEN_LOWER = 350
_MAX_SOURCE_LINE_LEN_UPPER = 800
_SOURCE_LINE_LEN_LOWER_RATIO = 0.9

# Message to display for undecodable commit log or author values.
UNDECODABLE_LOG_CONTENT = '[Cannot be displayed]'

# How large a repository file is in bytes before we don't try to display it
SOURCE_FILE_MAX_SIZE = 1000 * 1024
SOURCE_FILE_MAX_LINES = 50000

# The source code browser will not attempt to display any filename ending
# with one of these extensions.
COMMON_BINARY_FILE_EXTENSIONS = {
    'gif', 'jpg', 'jpeg', 'psd', 'ico', 'icon', 'xbm', 'xpm', 'xwd', 'pcx',
    # BUG FIX: this previously read 'vsd,' 'mpg' -- implicit string
    # concatenation produced the single bogus entry 'vsd,mpg', so neither
    # 'vsd' nor 'mpg' was actually treated as binary.
    'bmp', 'png', 'vsd', 'mpg', 'mpeg', 'wmv', 'wmf', 'avi', 'flv', 'snd',
    'mp3', 'wma', 'exe', 'dll', 'bin', 'class', 'o', 'so', 'lib', 'dylib',
    'jar', 'ear', 'war', 'par', 'msi', 'tar', 'zip', 'rar', 'cab', 'z', 'gz',
    'bz2', 'dmg', 'iso', 'rpm', 'pdf', 'eps', 'tif', 'tiff', 'xls', 'ppt',
    # NOTE(review): 'graffie' looks like a typo for 'graffle' (OmniGraffle);
    # kept as-is pending confirmation.
    'graffie', 'violet',
    }
+
# The source code browser will display file contents as text data for files
# with the following extensions or exact filenames (assuming they decode
# correctly).
COMMON_TEXT_FILE_EXTENSIONS = (
    # set(d) iterates the dict's keys directly; identical to the former
    # set(d.iterkeys()) but also valid under Python 3.
    set(framework_constants.PRETTIFY_CLASS_MAP) |
    {'', 'ada', 'asm', 'asp', 'bat', 'cgi', 'csv', 'el', 'emacs',
     'jsp', 'log', 'markdown', 'md', 'mf', 'plist', 'properties', 'r',
     'rc', 'txt', 'vim', 'wiki', 'xemacs', 'yacc',
     })
COMMON_TEXT_FILENAMES = (
    set(framework_constants.PRETTIFY_FILENAME_CLASS_MAP) |
    {'authors', 'install', 'readme'})
+
+
def DecodeFileContents(file_contents, path=None):
  """Try converting file contents to unicode using utf-8 or latin-1.

  This is applicable to untrusted maybe-text from vcs files or inbound emails.

  We try decoding the file as utf-8, then fall back on latin-1. In the former
  case, we call the file a text file; in the latter case, we guess whether
  the file is text or binary based on line length.

  If we guess text when the file is binary, the user sees safely encoded
  gibberish. If the other way around, the user sees a message that we will
  not display the file.

  TODO(jrobbins): we could try the user-supplied encoding, iff it
  is one of the encodings that we know that we can handle.

  Args:
    file_contents: byte string from svn file. It could be text in almost
        any encoding, or binary. We cannot trust the user-supplied encoding
        in the mime-type property.
    path: string pathname of file.

  Returns:
    The tuple (unicode_string, is_binary, is_long):
      - The unicode version of the string.
      - is_binary is true if the string could not be decoded as text.
      - is_long is true if the file has more than SOURCE_FILE_MAX_LINES lines.
  """
  # If the filename is one that typically identifies a binary file, then
  # just treat it as binary without any further analysis.
  ext = None
  if path and '.' in path:
    ext = path.split('.')[-1]
    if ext.lower() in COMMON_BINARY_FILE_EXTENSIONS:
      # If the file is binary, we don't care about the length, since we don't
      # show or diff it.
      return u'', True, False

  # If the string can be decoded as utf-8, we treat it as textual.
  try:
    u_str = file_contents.decode('utf-8', 'strict')
    is_long = len(u_str.split('\n')) > SOURCE_FILE_MAX_LINES
    return u_str, False, is_long
  except UnicodeDecodeError:
    logging.info('not a utf-8 file: %s bytes', len(file_contents))

  # Fall back on latin-1. This will always succeed, since every byte maps to
  # something in latin-1, even if that something is gibberish.
  u_str = file_contents.decode('latin-1', 'strict')

  lines = u_str.split('\n')
  is_long = len(lines) > SOURCE_FILE_MAX_LINES
  # Treat decodable files with certain filenames and/or extensions as text
  # files. This avoids problems with common file types using our text/binary
  # heuristic rules below.
  if path:
    name = path.split('/')[-1]
    if (name.lower() in COMMON_TEXT_FILENAMES or
        (ext and ext.lower() in COMMON_TEXT_FILE_EXTENSIONS)):
      return u_str, False, is_long

  # HEURISTIC: Binary files can qualify as latin-1, so we need to
  # check further. Any real source code is going to be divided into
  # reasonably sized lines. All lines must be below an upper character limit,
  # and most lines must be below a lower limit. This allows some exceptions
  # to the lower limit, but is more restrictive than just using a single
  # large character limit.
  is_binary = False
  if lines:
    # Only the first SOURCE_FILE_MAX_LINES lines are examined, so the ratio
    # must be computed over that same sample.  (BUG FIX: the previous code
    # divided by the total line count, which misclassified any all-short-line
    # file longer than SOURCE_FILE_MAX_LINES as binary.)
    num_sampled = min(len(lines), SOURCE_FILE_MAX_LINES)
    lower_count = 0
    for line in itertools.islice(lines, SOURCE_FILE_MAX_LINES):
      size = len(line)
      if size <= _MAX_SOURCE_LINE_LEN_LOWER:
        lower_count += 1
      elif size > _MAX_SOURCE_LINE_LEN_UPPER:
        is_binary = True
        break

    ratio = lower_count / float(num_sampled)
    if ratio < _SOURCE_LINE_LEN_LOWER_RATIO:
      is_binary = True

  return u_str, is_binary, is_long
diff --git a/appengine/monorail/framework/framework_bizobj.py b/appengine/monorail/framework/framework_bizobj.py
new file mode 100644
index 0000000..b1478fc
--- /dev/null
+++ b/appengine/monorail/framework/framework_bizobj.py
@@ -0,0 +1,156 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Business objects for Monorail's framework.
+
+These are classes and functions that operate on the objects that
+users care about in Monorail but that are not part of just one specific
+component: e.g., projects, users, and labels.
+"""
+
+import logging
+import re
+import string
+
+import settings
+from framework import framework_constants
+
+
+# Pattern to match a valid project name. Users of this pattern MUST use
+# the re.VERBOSE flag or the whitespace and comments will be considered
+# significant and the pattern will not work. See "re" module documentation.
+_RE_PROJECT_NAME_PATTERN_VERBOSE = r"""
+ (?=[-a-z0-9]*[a-z][-a-z0-9]*) # Lookahead to make sure there is at least
+ # one letter in the whole name.
+ [a-z0-9] # Start with a letter or digit.
+ [-a-z0-9]* # Follow with any number of valid characters.
+ [a-z0-9] # End with a letter or digit.
+"""
+
+
+# Compiled regexp to match the project name and nothing more before or after.
+RE_PROJECT_NAME = re.compile(
+ '^%s$' % _RE_PROJECT_NAME_PATTERN_VERBOSE, re.VERBOSE)
+
+
def IsValidProjectName(s):
  """Return True if the given string is a valid project name."""
  # Check the cheap length bound before running the regex, and normalize the
  # result to a real bool (the previous version returned the match object on
  # success and None when the regex did not match).
  return (len(s) <= framework_constants.MAX_PROJECT_NAME_LENGTH and
          bool(RE_PROJECT_NAME.match(s)))
+
+
def UserOwnsProject(project, effective_ids):
  """Return True if any of the effective_ids is a project owner."""
  owner_ids = set(project.owner_ids or [])
  return bool(owner_ids.intersection(effective_ids))


def UserIsInProject(project, effective_ids):
  """Return True if any of the effective_ids is a project member.

  Args:
    project: Project PB for the current project.
    effective_ids: set of int user IDs for the current user (including all
        user groups). This will be an empty set for anonymous users.

  Returns:
    True if the user has any direct or indirect role in the project
    (owner, committer, or contributor); False otherwise.
  """
  if UserOwnsProject(project, effective_ids):
    return True
  committer_ids = set(project.committer_ids or [])
  contributor_ids = set(project.contributor_ids or [])
  return (bool(committer_ids.intersection(effective_ids)) or
          bool(contributor_ids.intersection(effective_ids)))
+
+
def AllProjectMembers(project):
  """Return a list of user IDs of all members in the given project."""
  # Owners first, then committers, then contributors -- the same order as
  # concatenating the three member-list fields.
  members = []
  for id_list in (project.owner_ids, project.committer_ids,
                  project.contributor_ids):
    members.extend(id_list)
  return members
+
+
def IsPriviledgedDomainUser(email):
  """Return True if the user's account is from a priviledged domain."""
  # Guard clause: anonymous users or malformed addresses are never
  # priviledged-domain users.
  if not email or '@' not in email:
    return False
  user_domain = email.split('@', 1)[1]
  return user_domain in settings.priviledged_user_domains
+
+
+
+# String translation table used by unicode.translate() to catch common typos
+# in label names: each listed punctuation/whitespace character is deleted
+# (mapped to None), and '=' is rewritten to '-'.
+_CANONICALIZATION_TRANSLATION_TABLE = {
+    ord(delete_u_char): None
+    for delete_u_char in u'!"#$%&\'()*+,/:;<>?@[\\]^`{|}~\t\n\x0b\x0c\r '
+    }
+_CANONICALIZATION_TRANSLATION_TABLE.update({ord(u'='): ord(u'-')})
+
+
def CanonicalizeLabel(user_input):
  """Canonicalize a given label or status value.

  When the user enters a string that represents a label or an enum,
  convert it to a canonical form that makes it more likely to match
  existing values.

  Args:
    user_input: string that the user typed for a label.

  Returns:
    Canonical form of that label as a unicode string, or None if the
    input was None.
  """
  if user_input is None:
    return None

  label = user_input
  if not isinstance(label, unicode):
    # Inbound text may arrive as a utf-8 byte string.
    label = label.decode('utf-8')

  return label.translate(_CANONICALIZATION_TRANSLATION_TABLE)
+
+
def MergeLabels(labels_list, labels_add, labels_remove, excl_prefixes):
  """Update a list of labels with the given add and remove label lists.

  Args:
    labels_list: list of current labels.
    labels_add: labels that the user wants to add.
    labels_remove: labels that the user wants to remove.
    excl_prefixes: prefixes that can have only one value, e.g., Priority.

  Returns:
    (merged_labels, update_labels_add, update_labels_remove):
    A new list of labels with the given labels added and removed, and
    any exclusive label prefixes taken into account.  Then two
    lists of update strings to explain the changes that were actually
    made.
  """
  existing_lower = [lab.lower() for lab in labels_list]
  # Only add labels not already present, and only remove labels that are
  # actually present (all comparisons are case-insensitive).
  labels_add = [
      lab for lab in labels_add if lab.lower() not in existing_lower]
  labels_remove = [
      lab for lab in labels_remove if lab.lower() in existing_lower]
  removals_lower = [lab.lower() for lab in labels_remove]
  excl_lower = [prefix.lower() for prefix in excl_prefixes]

  # For each newly added exclusive label (e.g., "Priority-High"), remember
  # its "prefix-" form so that old labels with that prefix are implicitly
  # dropped -- there can be only one "Priority-*" label.
  newly_set_prefixes = []
  for lab in labels_add:
    prefix = lab.split('-')[0].lower()
    if prefix in excl_lower:
      newly_set_prefixes.append('%s-' % prefix)

  surviving_old = []
  for lab in labels_list:
    if any(lab.lower().startswith(pd) for pd in newly_set_prefixes):
      # Implicitly replaced by a newly set exclusive label; intentionally
      # not recorded in labels_remove.
      continue
    surviving_old.append(lab)

  merged_labels = [
      lab for lab in surviving_old + labels_add
      if lab.lower() not in removals_lower]

  return merged_labels, labels_add, labels_remove
diff --git a/appengine/monorail/framework/framework_constants.py b/appengine/monorail/framework/framework_constants.py
new file mode 100644
index 0000000..42d3e04
--- /dev/null
+++ b/appengine/monorail/framework/framework_constants.py
@@ -0,0 +1,158 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Some constants used throughout Monorail."""
+
+import os
+import re
+
+
+# Number of seconds in various periods.
+SECS_PER_MINUTE = 60
+SECS_PER_HOUR = SECS_PER_MINUTE * 60
+SECS_PER_DAY = SECS_PER_HOUR * 24
+SECS_PER_MONTH = SECS_PER_DAY * 30
+SECS_PER_YEAR = SECS_PER_DAY * 365
+
+# When we write to memcache, let the values expire so that we don't
+# get any unexpected super-old values as we make code changes over the
+# years. Also, searches can contain date terms like [opened<today-1]
+# that would become wrong if cached for a long time.
+MEMCACHE_EXPIRATION = 6 * SECS_PER_HOUR
+
+# Fulltext indexing happens asynchronously and we get no notification
+# when the indexing operation has completed. So, when we cache searches
+# that use fulltext terms, the results might be stale. We still do
+# cache them and use the cached values, but we expire them so that the
+# results cannot be stale for a long period of time.
+FULLTEXT_MEMCACHE_EXPIRATION = 3 * SECS_PER_MINUTE
+
+# Size in bytes of the largest form submission that we will accept
+MAX_POST_BODY_SIZE = 10 * 1024 * 1024 # = 10 MB
+
+# Special user ID and name to use when no user was specified.
+NO_USER_SPECIFIED = 0
+NO_SESSION_SPECIFIED = 0
+NO_USER_NAME = '----'
+
+# String to display when some field has no value.
+NO_VALUES = '----'
+
+# If the user enters one or more dashes, that means "no value". This is useful
+# in bulk edit, inbound email, and commit log command where a blank field
+# means "keep what was there" or is ignored.
+NO_VALUE_RE = re.compile(r'^-+$')
+
+# Used to loosely validate column spec. Mainly guards against malicious input.
+COLSPEC_RE = re.compile(r'^[-.\w\s/]*$', re.UNICODE)
+COLSPEC_COL_RE = re.compile(r'[-.\w/]+', re.UNICODE)
+
+# Used to loosely validate sort spec. Mainly guards against malicious input.
+SORTSPEC_RE = re.compile(r'^[-.\w\s/]*$', re.UNICODE)
+
+# For the artifact search box autosizing when the user types a long query.
+MIN_ARTIFACT_SEARCH_FIELD_SIZE = 38
+MAX_ARTIFACT_SEARCH_FIELD_SIZE = 75
+AUTOSIZE_STEP = 3
+
+# Regular expressions used in parsing label and status configuration text
+IDENTIFIER_REGEX = r'[-.\w]+'
+IDENTIFIER_RE = re.compile(IDENTIFIER_REGEX, re.UNICODE)
+# Labels and status values that are prefixed by a pound-sign are not displayed
+# in autocomplete menus.
+IDENTIFIER_DOCSTRING_RE = re.compile(
+ r'^(#?%s)[ \t]*=?[ \t]*(.*)$' % IDENTIFIER_REGEX,
+ re.MULTILINE | re.UNICODE)
+
+# Number of label text fields that we can display on a web form for issues.
+MAX_LABELS = 24
+
+# Default number of comments to display on an artifact detail page at one time.
+# Other comments will be paginated. This happens to be the same as the max
+# set by the --max_comments_per_page flag.
+DEFAULT_COMMENTS_PER_PAGE = 500
+
+# Content type to use when serving JSON.
+CONTENT_TYPE_JSON = 'application/x-javascript; charset=UTF-8'
+
+# Maximum comments to index to keep the search index from choking. E.g., if an
+# artifact had 1200 comments, only 0..99 and 701..1200 would be indexed.
+# This mainly affects advocacy issues which are highly redundant anyway.
+INITIAL_COMMENTS_TO_INDEX = 100
+FINAL_COMMENTS_TO_INDEX = 500
+
+# This is the longest string that GAE search will accept in one field.
+# The entire search document is also limited to 1M, so our limit is 800
+# so that the comments leave room for metadata.
+MAX_FTS_FIELD_SIZE = 800 * 1024
+
+# Base path to EZT templates.
+# NOTE(review): assumes POSIX '/' path separators (true on App Engine);
+# this is effectively os.path.dirname(this_dir) + '/templates/'.
+this_dir = os.path.dirname(__file__)
+TEMPLATE_PATH = this_dir[:this_dir.rindex('/')] + '/templates/'
+
+# Defaults for dooming a project.
+DEFAULT_DOOM_REASON = 'No longer needed'
+DEFAULT_DOOM_PERIOD = SECS_PER_DAY * 90
+
+MAX_PROJECT_PEOPLE = 1000
+MAX_PROJECT_NAME_LENGTH = 63
+
+# When logging potentially long debugging strings, only show this many chars.
+LOGGING_MAX_LENGTH = 2000
+
+# Maps languages supported by google-code-prettify
+# to the class name that should be added to code blocks in that language.
+# This list should be kept in sync with the handlers registered
+# in lang-*.js and prettify.js from the prettify project.
+PRETTIFY_CLASS_MAP = {
+ ext: 'lang-' + ext
+ for ext in [
+ # Supported in lang-*.js
+ 'apollo', 'agc', 'aea', 'lisp', 'el', 'cl', 'scm',
+ 'css', 'go', 'hs', 'lua', 'fs', 'ml', 'proto', 'scala', 'sql', 'vb',
+ 'vbs', 'vhdl', 'vhd', 'wiki', 'yaml', 'yml', 'clj',
+ # Supported in prettify.js
+ 'htm', 'html', 'mxml', 'xhtml', 'xml', 'xsl',
+ 'c', 'cc', 'cpp', 'cxx', 'cyc', 'm',
+ 'json', 'cs', 'java', 'bsh', 'csh', 'sh', 'cv', 'py', 'perl', 'pl',
+ 'pm', 'rb', 'js', 'coffee',
+ ]}
+
+# Languages which are not specifically mentioned in prettify.js
+# but which render intelligibly with the default handler.
+PRETTIFY_CLASS_MAP.update(
+ (ext, '') for ext in [
+ 'hpp', 'hxx', 'hh', 'h', 'inl', 'idl', 'swig', 'd',
+ 'php', 'tcl', 'aspx', 'cfc', 'cfm',
+ 'ent', 'mod', 'as',
+ 'y', 'lex', 'awk', 'n', 'pde',
+ ])
+
+# Languages which are not specifically mentioned in prettify.js
+# but which should be rendered using a certain prettify module.
+PRETTIFY_CLASS_MAP.update({
+ 'docbook': 'lang-xml',
+ 'dtd': 'lang-xml',
+ 'duby': 'lang-rb',
+ 'mk': 'lang-sh',
+ 'mak': 'lang-sh',
+ 'make': 'lang-sh',
+ 'mirah': 'lang-rb',
+ 'ss': 'lang-lisp',
+ 'vcproj': 'lang-xml',
+ 'xsd': 'lang-xml',
+ 'xslt': 'lang-xml',
+})
+
+PRETTIFY_FILENAME_CLASS_MAP = {
+ 'makefile': 'lang-sh',
+ 'makefile.in': 'lang-sh',
+ 'doxyfile': 'lang-sh', # Key-value pairs with hash comments
+ '.checkstyle': 'lang-xml',
+ '.classpath': 'lang-xml',
+ '.project': 'lang-xml',
+}
+
+OAUTH_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
diff --git a/appengine/monorail/framework/framework_helpers.py b/appengine/monorail/framework/framework_helpers.py
new file mode 100644
index 0000000..2b30a63
--- /dev/null
+++ b/appengine/monorail/framework/framework_helpers.py
@@ -0,0 +1,671 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used throughout Monorail."""
+
+import logging
+import random
+import string
+import textwrap
+import threading
+import time
+import traceback
+import urllib
+import urlparse
+
+from google.appengine.api import app_identity
+
+from third_party import ezt
+
+import settings
+from framework import actionlimit
+from framework import framework_constants
+from framework import template_helpers
+from framework import timestr
+from framework import urls
+from services import client_config_svc
+
+
+# For random key generation
+RANDOM_KEY_LENGTH = 128
+RANDOM_KEY_CHARACTERS = string.ascii_letters + string.digits
+
+# params recognized by FormatURL, in the order they will appear in the url
+RECOGNIZED_PARAMS = ['can', 'start', 'num', 'q', 'colspec', 'groupby', 'sort',
+ 'show', 'format', 'me', 'table_title', 'projects']
+
+
def retry(tries, delay=1, backoff=2):
  """A retry decorator with exponential backoff.

  Functions are retried when Exceptions occur.

  Args:
    tries: int Number of times to retry, set to 0 to disable retry.
    delay: float Initial sleep time in seconds.
    backoff: float Must be greater than 1, further failures would sleep
        delay*=backoff seconds.

  Raises:
    ValueError: if any of the arguments is out of its valid range.
  """
  if backoff <= 1:
    raise ValueError("backoff must be greater than 1")
  if tries < 0:
    raise ValueError("tries must be 0 or greater")
  if delay <= 0:
    raise ValueError("delay must be greater than 0")

  def decorator(func):
    def wrapper(*args, **kwargs):
      sleep_secs = delay
      # The function is attempted tries + 1 times in total; attempts_left
      # counts down the retries remaining after the current attempt.
      for attempts_left in range(tries, -1, -1):
        try:
          return func(*args, **kwargs)
        except Exception:
          if attempts_left == 0:
            logging.error('Exceeded maximum number of retries for %s.',
                          func.__name__)
            raise
          trace_str = traceback.format_exc()
          logging.warning('Retrying %s due to Exception: %s',
                          func.__name__, trace_str)
          time.sleep(sleep_secs)
          sleep_secs *= backoff  # Wait longer the next time we fail.
    return wrapper
  return decorator
+
+
+class PromiseCallback(object):
+  """Executes the work of a Promise and then dereferences everything.
+
+  This object is the target of the Promise's worker thread.  Dropping all
+  references after the work finishes lets the Promise, the callback, and
+  its arguments be garbage collected even if this callable itself lives on.
+  """
+
+  def __init__(self, promise, callback, *args, **kwargs):
+    self.promise = promise
+    self.callback = callback
+    self.args = args
+    self.kwargs = kwargs
+
+  def __call__(self):
+    try:
+      # This helper exists solely to drive the Promise's internal work
+      # method, hence the private-member access.
+      self.promise._WorkOnPromise(self.callback, *self.args, **self.kwargs)
+    finally:
+      # Make sure we no longer hold onto references to anything.
+      self.promise = self.callback = self.args = self.kwargs = None
+
+
+class Promise(object):
+  """Class for promises to deliver a value in the future.
+
+  A thread is started to run callback(args), that thread
+  should return the value that it generates, or raise an exception.
+  p.WaitAndGetValue() will block until a value is available.
+  If an exception was raised, p.WaitAndGetValue() will re-raise the
+  same exception.
+  """
+
+  def __init__(self, callback, *args, **kwargs):
+    """Initialize the promise and immediately call the supplied function.
+
+    Args:
+      callback: Function that takes the args and returns the promise value.
+      *args: Any arguments to the target function.
+      **kwargs: Any keyword args for the target function.
+    """
+
+    self.has_value = False
+    self.value = None
+    # Signaled by the worker thread when the value (or exception) is ready.
+    self.event = threading.Event()
+    self.exception = None
+
+    promise_callback = PromiseCallback(self, callback, *args, **kwargs)
+
+    # Execute the callback in another thread.
+    promise_thread = threading.Thread(target=promise_callback)
+    promise_thread.start()
+
+  def _WorkOnPromise(self, callback, *args, **kwargs):
+    """Run callback to compute the promised value. Save any exceptions."""
+    try:
+      self.value = callback(*args, **kwargs)
+    except Exception as e:
+      trace_str = traceback.format_exc()
+      logging.info('Exception while working on promise: %s\n', trace_str)
+      # Add the stack trace at this point to the exception. That way, in the
+      # logs, we can see what happened further up in the call stack
+      # than WaitAndGetValue(), which re-raises exceptions.
+      e.pre_promise_trace = trace_str
+      self.exception = e
+    finally:
+      # has_value and event.set() run even on failure so waiters never hang.
+      self.has_value = True
+      self.event.set()
+
+  def WaitAndGetValue(self):
+    """Block until my value is available, then return it or raise exception."""
+    self.event.wait()
+    if self.exception:
+      raise self.exception  # pylint: disable=raising-bad-type
+    return self.value
+
+
def FormatAbsoluteURLForDomain(
    host, project_name, servlet_name, scheme='https', **kwargs):
  """A variant of FormatAbsoluteURL for when request objects are not available.

  Args:
    host: string with hostname and optional port, e.g. 'localhost:8080'.
    project_name: the destination project name, if any.
    servlet_name: site or project-local url fragment of dest page.
    scheme: url scheme, e.g., 'http' or 'https'.
    **kwargs: additional query string parameters may be specified as named
        arguments to this function.

  Returns:
    A full url beginning with 'http[s]://'.
  """
  path_and_args = FormatURL(None, servlet_name, **kwargs)

  if host:
    # Swap in the preferred domain name while keeping any ':port' suffix.
    host_parts = host.split(':')
    host_parts[0] = GetPreferredDomain(host_parts[0])
    host = ':'.join(host_parts)

  base_url = '%s://%s' % (scheme, host)
  if project_name:
    return '%s/p/%s%s' % (base_url, project_name, path_and_args)
  return base_url + path_and_args
+
+
+def FormatAbsoluteURL(
+    mr, servlet_name, include_project=True, project_name=None,
+    scheme=None, copy_params=True, **kwargs):
+  """Return an absolute URL to a servlet with old and new params.
+
+  Args:
+    mr: info parsed from the current request.  Required: the host (and the
+        scheme, when none is given) are always read from it.
+    servlet_name: site or project-local url fragment of dest page.
+    include_project: if True, include the project home url as part of the
+        destination URL (as long as it is specified either in mr
+        or as the project_name param.)
+    project_name: the destination project name, to override
+        mr.project_name if include_project is True.
+    scheme: either 'http' or 'https', to override mr.request.scheme.
+    copy_params: if True, copy well-known parameters from the existing request.
+    **kwargs: additional query string parameters may be specified as named
+        arguments to this function.
+
+  Returns:
+    A full url beginning with 'http[s]://'.  The destination URL will be in
+    the same domain as the current request.
+  """
+  path_and_args = FormatURL(
+      mr if copy_params else None, servlet_name, **kwargs)
+  scheme = scheme or mr.request.scheme
+
+  project_base = ''
+  if include_project:
+    project_base = '/p/%s' % (project_name or mr.project_name)
+
+  return '%s://%s%s%s' % (scheme, mr.request.host, project_base, path_and_args)
+
+
+def FormatMovedProjectURL(mr, moved_to):
+  """Return a transformation of the given url into the given project.
+
+  Args:
+    mr: common information parsed from the HTTP request.
+    moved_to: A string from a project's moved_to field that matches
+        framework_bizobj.RE_PROJECT_NAME.
+
+  Returns:
+    The url transposed into the given destination project.
+  """
+  project_name = moved_to
+  _, _, path, parameters, query, fragment_identifier = urlparse.urlparse(
+      mr.current_page_url)
+  # Strip off leading "/p/<moved from project>".  The path always looks like
+  # "/p/<project>/<rest>", so splitting on the first three '/' characters
+  # and keeping the fourth piece drops the old project prefix.
+  path = '/' + path.split('/', 3)[3]
+  rest_of_url = urlparse.urlunparse(
+    ('', '', path, parameters, query, fragment_identifier))
+  return '/p/%s%s' % (project_name, rest_of_url)
+
+
+def FormatURL(mr, servlet_path, **kwargs):
+  """Return a project relative URL to a servlet with old and new params.
+
+  Args:
+    mr: commonly used info parsed from the request, or None to skip copying
+        the standard recognized parameters from the current request.
+    servlet_path: url fragment of the destination servlet.
+    **kwargs: query string parameters to add or override.
+
+  Returns:
+    servlet_path plus an encoded query string.
+  """
+  # Standard params not overridden in **kwargs come first, followed by kwargs.
+  # The exception is the 'id' param. If present then the 'id' param always comes
+  # first. See bugs.chromium.org/p/monorail/issues/detail?id=374
+  all_params = []
+  if kwargs.get('id'):
+    all_params.append(('id', kwargs['id']))
+  if mr:
+    all_params.extend(
+        (name, mr.GetParam(name)) for name in RECOGNIZED_PARAMS
+        if name not in kwargs)
+
+  all_params.extend(
+      # Ignore the 'id' param since we already added it above.
+      sorted([kwarg for kwarg in kwargs.items() if kwarg[0] != 'id']))
+  return _FormatQueryString(servlet_path, all_params)
+
+
def _FormatQueryString(url, params):
  """URLencode a list of parameters and attach them to the end of a URL."""
  # Params whose value is None are omitted entirely.
  encoded_pairs = [
      '%s=%s' % (name, urllib.quote(unicode(value).encode('utf-8')))
      for name, value in params if value is not None]
  param_string = '&'.join(encoded_pairs)

  if not param_string:
    separator = ''
  else:
    # Append with '&' if the URL already has a query string.
    separator = '&' if '?' in url else '?'
  return '%s%s%s' % (url, separator, param_string)
+
+
+def WordWrapSuperLongLines(s, max_cols=100):
+  """Reformat input that was not word-wrapped by the browser.
+
+  Args:
+    s: the string to be word-wrapped, it may have embedded newlines.
+    max_cols: int maximum line length.
+
+  Returns:
+    Wrapped text string.
+
+  Rather than wrap the whole thing, we only wrap super-long lines and keep
+  all the reasonable lines formatted as-is.
+  """
+  # NOTE(review): textwrap.fill also normalizes tabs/whitespace on every line
+  # it processes, so lines shorter than max_cols are not strictly untouched
+  # -- confirm this is acceptable before relying on exact preservation.
+  lines = [textwrap.fill(line, max_cols) for line in s.splitlines()]
+  wrapped_text = '\n'.join(lines)
+
+  # The split/join logic above can lose one final blank line.
+  if s.endswith('\n') or s.endswith('\r'):
+    wrapped_text += '\n'
+
+  return wrapped_text
+
+
def StaticCacheHeaders():
  """Returns HTTP headers for static content, based on the current time."""
  max_age = framework_constants.SECS_PER_YEAR
  year_from_now = int(time.time()) + max_age
  headers = [
      ('Cache-Control', 'max-age=%d, private' % max_age),
      ('Last-Modified', timestr.TimeForHTMLHeader()),
      ('Expires', timestr.TimeForHTMLHeader(when=year_from_now)),
  ]
  logging.info('static headers are %r', headers)
  return headers
+
+
def ComputeListDeltas(old_list, new_list):
  """Given an old and new list, return the items added and removed.

  Args:
    old_list: old list of values for comparison.
    new_list: new list of values for comparison.

  Returns:
    Two lists: one with all the values added (present in new_list but not
    in old_list), and one with all the values removed (absent from new_list
    but present in old_list).
  """
  # A common case: nothing was added or removed.
  if old_list == new_list:
    return [], []

  old_set = set(old_list)
  new_set = set(new_list)
  return list(new_set - old_set), list(old_set - new_set)
+
+
def GetRoleName(effective_ids, project):
  """Determines the name of the role a member has for a given project.

  Args:
    effective_ids: set of user IDs to get the role name for.
    project: Project PB containing the various member ID lists.

  Returns:
    The name of the highest role held ('Owner', 'Committer', or
    'Contributor'), or None if the user has no role in the project.
  """
  # Checked in order of decreasing privilege.
  role_checks = (
      ('Owner', project.owner_ids),
      ('Committer', project.committer_ids),
      ('Contributor', project.contributor_ids),
  )
  for role_name, member_ids in role_checks:
    if effective_ids.intersection(member_ids):
      return role_name
  return None
+
+
+class UserSettings(object):
+ """Abstract class providing static methods for user settings forms."""
+
+ @classmethod
+ def GatherUnifiedSettingsPageData(
+ cls, logged_in_user_id, settings_user_view, settings_user):
+ """Gather EZT variables needed for the unified user settings form.
+
+ Args:
+ logged_in_user_id: The user ID of the acting user.
+ settings_user_view: The UserView of the target user.
+ settings_user: The User PB of the target user.
+
+ Returns:
+ A dictionary giving the names and values of all the variables to
+ be exported to EZT to support the unified user settings form template.
+ """
+
+ def ActionLastReset(action_limit):
+ """Return a formatted time string for the last action limit reset."""
+ if action_limit:
+ return time.asctime(time.localtime(action_limit.reset_timestamp))
+ return 'Never'
+
+ def DefaultLifetimeLimit(action_type):
+ """Return the deault lifetime limit for the give type of action."""
+ return actionlimit.ACTION_LIMITS[action_type][3]
+
+ def DefaultPeriodSoftLimit(action_type):
+ """Return the deault period soft limit for the give type of action."""
+ return actionlimit.ACTION_LIMITS[action_type][1]
+
+ def DefaultPeriodHardLimit(action_type):
+ """Return the deault period jard limit for the give type of action."""
+ return actionlimit.ACTION_LIMITS[action_type][2]
+
+ project_creation_lifetime_limit = (
+ (settings_user.project_creation_limit and
+ settings_user.project_creation_limit.lifetime_limit) or
+ DefaultLifetimeLimit(actionlimit.PROJECT_CREATION))
+ project_creation_soft_limit = (
+ (settings_user.project_creation_limit and
+ settings_user.project_creation_limit.period_soft_limit) or
+ DefaultPeriodSoftLimit(actionlimit.PROJECT_CREATION))
+ project_creation_hard_limit = (
+ (settings_user.project_creation_limit and
+ settings_user.project_creation_limit.period_hard_limit) or
+ DefaultPeriodHardLimit(actionlimit.PROJECT_CREATION))
+ issue_comment_lifetime_limit = (
+ (settings_user.issue_comment_limit and
+ settings_user.issue_comment_limit.lifetime_limit) or
+ DefaultLifetimeLimit(actionlimit.ISSUE_COMMENT))
+ issue_comment_soft_limit = (
+ (settings_user.issue_comment_limit and
+ settings_user.issue_comment_limit.period_soft_limit) or
+ DefaultPeriodSoftLimit(actionlimit.ISSUE_COMMENT))
+ issue_comment_hard_limit = (
+ (settings_user.issue_comment_limit and
+ settings_user.issue_comment_limit.period_hard_limit) or
+ DefaultPeriodHardLimit(actionlimit.ISSUE_COMMENT ))
+ issue_attachment_lifetime_limit = (
+ (settings_user.issue_attachment_limit and
+ settings_user.issue_attachment_limit.lifetime_limit) or
+ DefaultLifetimeLimit(actionlimit.ISSUE_ATTACHMENT))
+ issue_attachment_soft_limit = (
+ (settings_user.issue_attachment_limit and
+ settings_user.issue_attachment_limit.period_soft_limit) or
+ DefaultPeriodSoftLimit(actionlimit.ISSUE_ATTACHMENT))
+ issue_attachment_hard_limit = (
+ (settings_user.issue_attachment_limit and
+ settings_user.issue_attachment_limit.period_hard_limit) or
+ DefaultPeriodHardLimit(actionlimit.ISSUE_ATTACHMENT))
+ issue_bulk_edit_lifetime_limit = (
+ (settings_user.issue_bulk_edit_limit and
+ settings_user.issue_bulk_edit_limit.lifetime_limit) or
+ DefaultLifetimeLimit(actionlimit.ISSUE_BULK_EDIT))
+ issue_bulk_edit_soft_limit = (
+ (settings_user.issue_bulk_edit_limit and
+ settings_user.issue_bulk_edit_limit.period_soft_limit) or
+ DefaultPeriodSoftLimit(actionlimit.ISSUE_BULK_EDIT))
+ issue_bulk_edit_hard_limit = (
+ (settings_user.issue_bulk_edit_limit and
+ settings_user.issue_bulk_edit_limit.period_hard_limit) or
+ DefaultPeriodHardLimit(actionlimit.ISSUE_BULK_EDIT))
+ api_request_lifetime_limit = (
+ (settings_user.api_request_limit and
+ settings_user.api_request_limit.lifetime_limit) or
+ DefaultLifetimeLimit(actionlimit.API_REQUEST))
+ api_request_soft_limit = (
+ (settings_user.api_request_limit and
+ settings_user.api_request_limit.period_soft_limit) or
+ DefaultPeriodSoftLimit(actionlimit.API_REQUEST))
+ api_request_hard_limit = (
+ (settings_user.api_request_limit and
+ settings_user.api_request_limit.period_hard_limit) or
+ DefaultPeriodHardLimit(actionlimit.API_REQUEST))
+
+ return {
+ 'settings_user': settings_user_view,
+ 'settings_user_pb': template_helpers.PBProxy(settings_user),
+ 'settings_user_is_banned': ezt.boolean(settings_user.banned),
+ 'settings_user_ignore_action_limits': (
+ ezt.boolean(settings_user.ignore_action_limits)),
+ 'self': ezt.boolean(logged_in_user_id == settings_user_view.user_id),
+ 'project_creation_reset': (
+ ActionLastReset(settings_user.project_creation_limit)),
+ 'issue_comment_reset': (
+ ActionLastReset(settings_user.issue_comment_limit)),
+ 'issue_attachment_reset': (
+ ActionLastReset(settings_user.issue_attachment_limit)),
+ 'issue_bulk_edit_reset': (
+ ActionLastReset(settings_user.issue_bulk_edit_limit)),
+ 'api_request_reset': (
+ ActionLastReset(settings_user.api_request_limit)),
+ 'project_creation_lifetime_limit': project_creation_lifetime_limit,
+ 'project_creation_soft_limit': project_creation_soft_limit,
+ 'project_creation_hard_limit': project_creation_hard_limit,
+ 'issue_comment_lifetime_limit': issue_comment_lifetime_limit,
+ 'issue_comment_soft_limit': issue_comment_soft_limit,
+ 'issue_comment_hard_limit': issue_comment_hard_limit,
+ 'issue_attachment_lifetime_limit': issue_attachment_lifetime_limit,
+ 'issue_attachment_soft_limit': issue_attachment_soft_limit,
+ 'issue_attachment_hard_limit': issue_attachment_hard_limit,
+ 'issue_bulk_edit_lifetime_limit': issue_bulk_edit_lifetime_limit,
+ 'issue_bulk_edit_soft_limit': issue_bulk_edit_soft_limit,
+ 'issue_bulk_edit_hard_limit': issue_bulk_edit_hard_limit,
+ 'api_request_lifetime_limit': api_request_lifetime_limit,
+ 'api_request_soft_limit': api_request_soft_limit,
+ 'api_request_hard_limit': api_request_hard_limit,
+ 'profile_url_fragment': (
+ settings_user_view.profile_url[len('/u/'):]),
+ 'preview_on_hover': ezt.boolean(settings_user.preview_on_hover),
+ }
+
+ @classmethod
+ def ProcessSettingsForm(
+ cls, cnxn, user_service, post_data, user_id, user, admin=False):
+ """Process the posted form data from the unified user settings form.
+
+ Args:
+ cnxn: connection to the SQL database.
+ user_service: An instance of UserService for saving changes.
+ post_data: The parsed post data from the form submission request.
+ user_id: The user id of the target user.
+ user: The user PB of the target user.
+ admin: Whether settings reserved for admins are supported.
+ """
+ obscure_email = 'obscure_email' in post_data
+
+ kwargs = {}
+ if admin:
+ kwargs.update(is_site_admin='site_admin' in post_data,
+ ignore_action_limits='ignore_action_limits' in post_data)
+ kwargs.update(is_banned='banned' in post_data,
+ banned_reason=post_data.get('banned_reason', ''))
+
+ # action limits
+ action_limit_updates = {}
+ for action_name in actionlimit.ACTION_TYPE_NAMES.iterkeys():
+ reset_input = 'reset_' + action_name
+ lifetime_input = action_name + '_lifetime_limit'
+ soft_input = action_name + '_soft_limit'
+ hard_input = action_name + '_hard_limit'
+ pb_getter = action_name + '_limit'
+ old_lifetime_limit = getattr(user, pb_getter).lifetime_limit
+ old_soft_limit = getattr(user, pb_getter).period_soft_limit
+ old_hard_limit = getattr(user, pb_getter).period_hard_limit
+
+ # Try and get the new limit from post data.
+ # If the user doesn't use an integer, act as if no change requested.
+ def _GetLimit(post_data, limit_input, old_limit):
+ try:
+ new_limit = int(post_data[limit_input])
+ except (KeyError, ValueError):
+ new_limit = old_limit
+ return new_limit
+
+ new_lifetime_limit = _GetLimit(post_data, lifetime_input,
+ old_lifetime_limit)
+ new_soft_limit = _GetLimit(post_data, soft_input,
+ old_soft_limit)
+ new_hard_limit = _GetLimit(post_data, hard_input,
+ old_hard_limit)
+
+ if ((new_lifetime_limit >= 0 and
+ new_lifetime_limit != old_lifetime_limit) or
+ (new_soft_limit >= 0 and new_soft_limit != old_soft_limit) or
+ (new_hard_limit >= 0 and new_hard_limit != old_hard_limit)):
+ action_limit_updates[action_name] = (
+ new_soft_limit, new_hard_limit, new_lifetime_limit)
+ elif reset_input in post_data:
+ action_limit_updates[action_name] = None
+ kwargs.update(action_limit_updates=action_limit_updates)
+
+ user_service.UpdateUserSettings(
+ cnxn, user_id, user, notify='notify' in post_data,
+ notify_starred='notify_starred' in post_data,
+ preview_on_hover='preview_on_hover' in post_data,
+ obscure_email=obscure_email, **kwargs)
+
+
+def GetHostPort():
+ """Get string domain name and port number."""
+
+ app_id = app_identity.get_application_id()
+ if ':' in app_id:
+ domain, app_id = app_id.split(':')
+ else:
+ domain = ''
+
+ if domain.startswith('google'):
+ hostport = '%s.googleplex.com' % app_id
+ else:
+ hostport = '%s.appspot.com' % app_id
+
+ return GetPreferredDomain(hostport)
+
+
+def IssueCommentURL(hostport, project, local_id, seq_num=None):
+ """Return a URL pointing directly to the specified comment."""
+ detail_url = FormatAbsoluteURLForDomain(
+ hostport, project.project_name, urls.ISSUE_DETAIL, id=local_id)
+ if seq_num:
+ detail_url += '#c%d' % seq_num
+
+ return detail_url
+
+
+def MurmurHash3_x86_32(key, seed=0x0):
+ """Implements the x86/32-bit version of Murmur Hash 3.0.
+
+ MurmurHash3 is written by Austin Appleby, and is placed in the public
+ domain. See https://code.google.com/p/smhasher/ for details.
+
+ This pure python implementation of the x86/32 bit version of MurmurHash3 is
+ written by Fredrik Kihlander and also placed in the public domain.
+ See https://github.com/wc-duck/pymmh3 for details.
+
+ The MurmurHash3 algorithm is chosen for these reasons:
+ * It is fast, even when implemented in pure python.
+ * It is remarkably well distributed, and unlikely to cause collisions.
+ * It is stable and unchanging (any improvements will be in MurmurHash4).
+ * It is well-tested, and easily usable in other contexts (such as bulk
+ data imports).
+
+ Args:
+ key (string): the data that you want hashed
+ seed (int): An offset, treated as essentially part of the key.
+
+ Returns:
+ A 32-bit integer (can be interpreted as either signed or unsigned).
+ """
+ key = bytearray(key.encode('utf-8'))
+
+ def fmix(h):
+ h ^= h >> 16
+ h = (h * 0x85ebca6b) & 0xFFFFFFFF
+ h ^= h >> 13
+ h = (h * 0xc2b2ae35) & 0xFFFFFFFF
+ h ^= h >> 16
+ return h;
+
+ length = len(key)
+ nblocks = int(length / 4)
+
+ h1 = seed;
+
+ c1 = 0xcc9e2d51
+ c2 = 0x1b873593
+
+ # body
+ for block_start in xrange(0, nblocks * 4, 4):
+ k1 = key[ block_start + 3 ] << 24 | \
+ key[ block_start + 2 ] << 16 | \
+ key[ block_start + 1 ] << 8 | \
+ key[ block_start + 0 ]
+
+ k1 = c1 * k1 & 0xFFFFFFFF
+ k1 = (k1 << 15 | k1 >> 17) & 0xFFFFFFFF
+ k1 = (c2 * k1) & 0xFFFFFFFF;
+
+ h1 ^= k1
+ h1 = ( h1 << 13 | h1 >> 19 ) & 0xFFFFFFFF
+ h1 = ( h1 * 5 + 0xe6546b64 ) & 0xFFFFFFFF
+
+ # tail
+ tail_index = nblocks * 4
+ k1 = 0
+ tail_size = length & 3
+
+ if tail_size >= 3:
+ k1 ^= key[ tail_index + 2 ] << 16
+ if tail_size >= 2:
+ k1 ^= key[ tail_index + 1 ] << 8
+ if tail_size >= 1:
+ k1 ^= key[ tail_index + 0 ]
+
+ if tail_size != 0:
+ k1 = ( k1 * c1 ) & 0xFFFFFFFF
+ k1 = ( k1 << 15 | k1 >> 17 ) & 0xFFFFFFFF
+ k1 = ( k1 * c2 ) & 0xFFFFFFFF
+ h1 ^= k1
+
+ return fmix( h1 ^ length )
+
+
+def MakeRandomKey(length=RANDOM_KEY_LENGTH, chars=RANDOM_KEY_CHARACTERS):
+ """Return a string with lots of random characters."""
+ chars = [random.choice(chars) for _ in range(length)]
+ return ''.join(chars)
+
+
+def IsServiceAccount(email):
+ """Return a boolean value whether this email is a service account."""
+ if email.endswith('gserviceaccount.com'):
+ return True
+ _, client_emails = (
+ client_config_svc.GetClientConfigSvc().GetClientIDEmails())
+ return email in client_emails
+
+
+def GetPreferredDomain(domain):
+ """Get preferred domain to display.
+
+ The preferred domain replaces app_id for default version of monorail-prod
+ and monorail-staging.
+ """
+ return settings.preferred_domains.get(domain, domain)
diff --git a/appengine/monorail/framework/framework_views.py b/appengine/monorail/framework/framework_views.py
new file mode 100644
index 0000000..2b9453c
--- /dev/null
+++ b/appengine/monorail/framework/framework_views.py
@@ -0,0 +1,214 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""View classes to make it easy to display framework objects in EZT."""
+
+from third_party import ezt
+
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import permissions
+from framework import template_helpers
+from services import client_config_svc
+import settings
+
+
+_LABEL_DISPLAY_CHARS = 30
+_LABEL_PART_DISPLAY_CHARS = 15
+
+
+class LabelView(object):
+ """Wrapper class that makes it easier to display a label via EZT."""
+
+ def __init__(self, label, config):
+ """Make several values related to this label available as attrs.
+
+ Args:
+ label: artifact label string. E.g., 'Priority-High' or 'Frontend'.
+ config: PB with a well_known_labels list, or None.
+ """
+ self.name = label
+ self.tooltip = label
+ self.is_restrict = ezt.boolean(permissions.IsRestrictLabel(label))
+
+ self.docstring = ''
+ if config:
+ for wkl in config.well_known_labels:
+ if label.lower() == wkl.label.lower():
+ self.docstring = wkl.label_docstring
+
+ if '-' in label:
+ self.prefix, self.value = label.split('-', 1)
+ else:
+ self.prefix, self.value = '', label
+
+
+class StatusView(object):
+ """Wrapper class that makes it easier to display a status via EZT."""
+
+ def __init__(self, status, config):
+ """Make several values related to this status available as attrs.
+
+ Args:
+ status: artifact status string. E.g., 'New' or 'Accepted'.
+ config: PB with a well_known_statuses list, or None.
+ """
+
+ self.name = status
+ self.tooltip = status
+
+ self.docstring = ''
+ self.means_open = ezt.boolean(True)
+ if config:
+ for wks in config.well_known_statuses:
+ if status.lower() == wks.status.lower():
+ self.docstring = wks.status_docstring
+ self.means_open = ezt.boolean(wks.means_open)
+
+
+class UserView(object):
+ """Wrapper class to easily display basic user information in a template."""
+
+ def __init__(self, user_id, email, obscure_email):
+ email = email or ''
+ self.user_id = user_id
+ self.email = email
+ self.profile_url = '/u/%s/' % user_id
+ self.obscure_email = obscure_email
+ self.banned = ''
+
+ (self.username, self.domain,
+ self.obscured_username) = ParseAndObscureAddress(email)
+ # No need to obfuscate or reveal client email.
+ # Instead display a human-readable username.
+ if not self.email:
+ self.display_name = 'a deleted user'
+ self.obscure_email = ''
+ self.profile_url = ''
+ elif self.email in client_config_svc.GetServiceAccountMap():
+ self.display_name = client_config_svc.GetServiceAccountMap()[self.email]
+ elif not self.obscure_email:
+ self.display_name = email
+ else:
+ self.display_name = '%s...@%s' % (self.obscured_username, self.domain)
+
+ def RevealEmail(self):
+ if not self.email:
+ return
+ if self.email not in client_config_svc.GetServiceAccountMap():
+ self.obscure_email = False
+ self.display_name = self.email
+ self.profile_url = '/u/%s/' % self.email
+
+
+def MakeAllUserViews(cnxn, user_service, *list_of_user_id_lists):
+ """Make a dict {user_id: user_view, ...} for all user IDs given."""
+ distinct_user_ids = set()
+ distinct_user_ids.update(*list_of_user_id_lists)
+ user_dict = user_service.GetUsersByIDs(cnxn, distinct_user_ids)
+ return {user_id: UserView(user_id, user_pb.email, user_pb.obscure_email)
+ for user_id, user_pb in user_dict.iteritems()}
+
+
+def MakeUserView(cnxn, user_service, user_id):
+ """Make a UserView for the given user ID."""
+ user = user_service.GetUser(cnxn, user_id)
+ return UserView(user_id, user.email, user.obscure_email)
+
+
+def ParseAndObscureAddress(email):
+ """Break the given email into username and domain, and obscure.
+
+ Args:
+ email: string email address to process
+
+ Returns:
+ A 3-tuple (username, domain, obscured_username).
+    The obscured_username is truncated the same way that Google Groups does it.
+ """
+ if '@' in email:
+ username, user_domain = email.split('@', 1)
+ else: # don't fail if User table has unexpected email address format.
+ username, user_domain = email, ''
+
+ base_username = username.split('+')[0]
+ cutoff_point = min(8, max(1, len(base_username) - 3))
+ obscured_username = base_username[:cutoff_point]
+
+ return username, user_domain, obscured_username
+
+
+def _ShouldRevealEmail(auth, project, viewed_email):
+ """Decide whether to publish a user's email address.
+
+ Args:
+ auth: The AuthData of the user viewing the email addresses.
+ project: The project to which the viewed users belong.
+ viewed_email: The email of the viewed user.
+
+ Returns:
+ True if email addresses should be published to the logged-in user.
+ """
+ # Case 1: Anon users don't see anything revealed.
+ if auth.user_pb is None:
+ return False
+
+ # Case 2: site admins always see unobscured email addresses.
+ if auth.user_pb.is_site_admin:
+ return True
+
+ # Case 3: Domain users in same-org-only projects always see unobscured addrs.
+ # TODO(jrobbins): re-implement same_org
+
+ # Case 4: Project members see the unobscured email of everyone in a project.
+ if project and framework_bizobj.UserIsInProject(project, auth.effective_ids):
+ return True
+
+  # Case 5: Emails that end in privileged user domains see unobscured email
+ # addresses.
+ if framework_bizobj.IsPriviledgedDomainUser(auth.user_pb.email):
+ return True
+
+ # Case 6: Do not obscure your own email.
+ if viewed_email and auth.user_pb.email == viewed_email:
+ return True
+
+ return False
+
+
+def RevealAllEmailsToMembers(mr, users_by_id):
+ """Allow project members to see unobscured email addresses in that project.
+
+ Non project member addresses will be obscured.
+ Site admins can see all email addresses unobscured.
+
+ Args:
+ mr: common info parsed from the user's request.
+ users_by_id: dictionary of UserView's that will be displayed.
+
+ Returns:
+ Nothing, but the UserViews in users_by_id may be modified to
+ publish email address.
+ """
+ for user_view in users_by_id.itervalues():
+ if _ShouldRevealEmail(mr.auth, mr.project, user_view.email):
+ user_view.RevealEmail()
+
+
+def RevealAllEmails(users_by_id):
+ """Allow anyone to see unobscured email addresses of project members.
+
+ The modified view objects should only be used to generate views for other
+ project members.
+
+ Args:
+ users_by_id: dictionary of UserViews that will be displayed.
+
+ Returns:
+ Nothing, but the UserViews in users_by_id may be modified to
+ publish email address.
+ """
+ for user_view in users_by_id.itervalues():
+ user_view.RevealEmail()
diff --git a/appengine/monorail/framework/gcs_helpers.py b/appengine/monorail/framework/gcs_helpers.py
new file mode 100644
index 0000000..843eac9
--- /dev/null
+++ b/appengine/monorail/framework/gcs_helpers.py
@@ -0,0 +1,118 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Set of helpers for interacting with Google Cloud Storage."""
+
+import base64
+import logging
+import os
+import time
+import urllib
+import uuid
+
+from datetime import datetime, timedelta
+
+from google.appengine.api import app_identity
+from google.appengine.api import images
+from third_party import cloudstorage
+
+from framework import filecontent
+
+
+ATTACHMENT_TTL = timedelta(seconds=30)
+
+IS_DEV_APPSERVER = (
+ 'development' in os.environ.get('SERVER_SOFTWARE', '').lower())
+
+RESIZABLE_MIME_TYPES = ['image/png', 'image/jpg', 'image/jpeg', 'image/gif']
+
+DEFAULT_THUMB_WIDTH = 250
+DEFAULT_THUMB_HEIGHT = 200
+LOGO_THUMB_WIDTH = 110
+LOGO_THUMB_HEIGHT = 30
+
+
+def _Now():
+ return datetime.utcnow()
+
+
+class UnsupportedMimeType(Exception):
+ pass
+
+
+def DeleteObjectFromGCS(object_id):
+ object_path = ('/' + app_identity.get_default_gcs_bucket_name() + object_id)
+ cloudstorage.delete(object_path)
+
+
+def StoreObjectInGCS(
+ content, mime_type, project_id, thumb_width=DEFAULT_THUMB_WIDTH,
+ thumb_height=DEFAULT_THUMB_HEIGHT):
+ bucket_name = app_identity.get_default_gcs_bucket_name()
+ guid = uuid.uuid4()
+ object_id = '/%s/attachments/%s' % (project_id, guid)
+ object_path = '/' + bucket_name + object_id
+ with cloudstorage.open(object_path, 'w', mime_type) as f:
+ f.write(content)
+
+ if mime_type in RESIZABLE_MIME_TYPES:
+ # Create and save a thumbnail too.
+ thumb_content = None
+ try:
+ thumb_content = images.resize(content, thumb_width, thumb_height)
+ except Exception, e:
+ # Do not raise exception for incorrectly formed images.
+ # See https://bugs.chromium.org/p/monorail/issues/detail?id=597 for more
+ # detail.
+ logging.exception(e)
+ if thumb_content:
+ thumb_path = '%s-thumbnail' % object_path
+ with cloudstorage.open(thumb_path, 'w', 'image/png') as f:
+ f.write(thumb_content)
+
+ return object_id
+
+
+def CheckMimeTypeResizable(mime_type):
+ if mime_type not in RESIZABLE_MIME_TYPES:
+ raise UnsupportedMimeType(
+ 'Please upload a logo with one of the following mime types:\n%s' %
+ ', '.join(RESIZABLE_MIME_TYPES))
+
+
+def StoreLogoInGCS(file_name, content, project_id):
+ mime_type = filecontent.GuessContentTypeFromFilename(file_name)
+ CheckMimeTypeResizable(mime_type)
+ if '\\' in file_name: # IE insists on giving us the whole path.
+ file_name = file_name[file_name.rindex('\\') + 1:]
+ return StoreObjectInGCS(
+ content, mime_type, project_id, thumb_width=LOGO_THUMB_WIDTH,
+ thumb_height=LOGO_THUMB_HEIGHT)
+
+
+def SignUrl(gcs_filename):
+ expiration_dt = _Now() + ATTACHMENT_TTL
+ expiration = int(time.mktime(expiration_dt.timetuple()))
+ signature_string = '\n'.join([
+ 'GET',
+ '', # Optional MD5, which we don't have.
+ '', # Optional content-type, which only applies to uploads.
+ str(expiration),
+ gcs_filename]).encode('utf-8')
+
+ signature_bytes = app_identity.sign_blob(signature_string)[1]
+
+ query_params = {'GoogleAccessId': app_identity.get_service_account_name(),
+ 'Expires': str(expiration),
+ 'Signature': base64.b64encode(signature_bytes)}
+
+ result = 'https://storage.googleapis.com{resource}?{querystring}'
+
+ if IS_DEV_APPSERVER:
+ result = '/_ah/gcs{resource}?{querystring}'
+
+ return result.format(
+ resource=gcs_filename, querystring=urllib.urlencode(query_params))
+
diff --git a/appengine/monorail/framework/grid_view_helpers.py b/appengine/monorail/framework/grid_view_helpers.py
new file mode 100644
index 0000000..833d2ae
--- /dev/null
+++ b/appengine/monorail/framework/grid_view_helpers.py
@@ -0,0 +1,275 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions for displaying grids of project artifacts.
+
+A grid is a two-dimensional display of items where the user can choose
+the X and Y axes.
+"""
+
+import collections
+import logging
+
+from framework import framework_constants
+from framework import sorting
+from framework import template_helpers
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
+# We shorten long attribute values to fit into the table cells.
+_MAX_CELL_DISPLAY_CHARS = 70
+
+
+def SortGridHeadings(col_name, heading_value_list, users_by_id, config,
+ asc_accessors):
+ """Sort the grid headings according to well-known status and label order.
+
+ Args:
+ col_name: String column name that is used on that grid axis.
+ heading_value_list: List of grid row or column heading values.
+ users_by_id: Dict mapping user_ids to UserViews.
+ config: ProjectIssueConfig PB for the current project.
+ asc_accessors: Dict (col_name -> function()) for special columns.
+
+ Returns:
+ The same heading values, but sorted in a logical order.
+ """
+ decorated_list = []
+ fd = tracker_bizobj.FindFieldDef(col_name, config)
+ if fd: # Handle fields.
+ for value in heading_value_list:
+ field_value = tracker_bizobj.GetFieldValueWithRawValue(
+ fd.field_type, None, users_by_id, value)
+ decorated_list.append([field_value, field_value])
+ elif col_name == 'status':
+ wk_statuses = [wks.status.lower()
+ for wks in config.well_known_statuses]
+ decorated_list = [(_WKSortingValue(value.lower(), wk_statuses), value)
+ for value in heading_value_list]
+
+ elif col_name in asc_accessors: # Special cols still sort alphabetically.
+ decorated_list = [(value, value)
+ for value in heading_value_list]
+
+ else: # Anything else is assumed to be a label prefix
+ wk_labels = [wkl.label.lower().split('-', 1)[-1]
+ for wkl in config.well_known_labels]
+ decorated_list = [(_WKSortingValue(value.lower(), wk_labels), value)
+ for value in heading_value_list]
+
+ decorated_list.sort()
+ result = [decorated_tuple[1] for decorated_tuple in decorated_list]
+ logging.info('Headers for %s are: %r', col_name, result)
+ return result
+
+
+def _WKSortingValue(value, well_known_list):
+ """Return a value used to sort headings so that well-known ones are first."""
+ if not value:
+ return sorting.MAX_STRING # Undefined values sort last.
+ try:
+ # well-known values sort by index
+ return well_known_list.index(value)
+ except ValueError:
+ return value # odd-ball values lexicographically after all well-known ones
+
+
+def MakeGridData(
+ artifacts, x_attr, x_headings, y_attr, y_headings, users_by_id,
+ artifact_view_factory, all_label_values, config):
+ """Return a list of grid row items for display by EZT.
+
+ Args:
+ artifacts: a list of issues to consider showing.
+ x_attr: lowercase name of the attribute that defines the x-axis.
+ x_headings: list of values for column headings.
+ y_attr: lowercase name of the attribute that defines the y-axis.
+ y_headings: list of values for row headings.
+ users_by_id: dict {user_id: user_view, ...} for referenced users.
+ artifact_view_factory: constructor for grid tiles.
+ all_label_values: pre-parsed dictionary of values from the key-value
+ labels on each issue: {issue_id: {key: [val,...], ...}, ...}
+ config: ProjectIssueConfig PB for the current project.
+
+ Returns:
+ A list of EZTItems, each representing one grid row, and each having
+ a nested list of grid cells.
+
+ Each grid row has a row name, and a list of cells. Each cell has a
+ list of tiles. Each tile represents one artifact. Artifacts are
+ represented once in each cell that they match, so one artifact that
+ has multiple values for a certain attribute can occur in multiple cells.
+ """
+ x_attr = x_attr.lower()
+ y_attr = y_attr.lower()
+
+ # A flat dictionary {(x, y): [cell, ...], ...] for the whole grid.
+ x_y_data = collections.defaultdict(list)
+
+ # Put each issue into the grid cell(s) where it belongs.
+ for art in artifacts:
+ label_value_dict = all_label_values[art.local_id]
+ x_vals = GetArtifactAttr(
+ art, x_attr, users_by_id, label_value_dict, config)
+ y_vals = GetArtifactAttr(
+ art, y_attr, users_by_id, label_value_dict, config)
+ tile = artifact_view_factory(art)
+
+ # Put the current issue into each cell where it belongs, which will usually
+ # be exactly 1 cell, but it could be a few.
+ if x_attr != '--' and y_attr != '--': # User specified both axes.
+ for x in x_vals:
+ for y in y_vals:
+ x_y_data[x, y].append(tile)
+ elif y_attr != '--': # User only specified Y axis.
+ for y in y_vals:
+ x_y_data['All', y].append(tile)
+ elif x_attr != '--': # User only specified X axis.
+ for x in x_vals:
+ x_y_data[x, 'All'].append(tile)
+ else: # User specified neither axis.
+ x_y_data['All', 'All'].append(tile)
+
+ # Convert the dictionary to a list-of-lists so that EZT can iterate over it.
+ grid_data = []
+ for y in y_headings:
+ cells_in_row = []
+ for x in x_headings:
+ tiles = x_y_data[x, y]
+
+ drill_down = ''
+ if x_attr != '--':
+ drill_down = MakeDrillDownSearch(x_attr, x)
+ if y_attr != '--':
+ drill_down += MakeDrillDownSearch(y_attr, y)
+
+ cells_in_row.append(template_helpers.EZTItem(
+ tiles=tiles, count=len(tiles), drill_down=drill_down))
+ grid_data.append(template_helpers.EZTItem(
+ grid_y_heading=y, cells_in_row=cells_in_row))
+
+ return grid_data
+
+
+def MakeDrillDownSearch(attr, value):
+ """Constructs search term for drill-down.
+
+ Args:
+ attr: lowercase name of the attribute to narrow the search on.
+ value: value to narrow the search to.
+
+ Returns:
+ String with user-query term to narrow a search to the given attr value.
+ """
+ if value == framework_constants.NO_VALUES:
+ return '-has:%s ' % attr
+ else:
+ return '%s=%s ' % (attr, value)
+
+
+def MakeLabelValuesDict(art):
+ """Return a dict of label values and a list of one-word labels.
+
+ Args:
+ art: artifact object, e.g., an issue PB.
+
+ Returns:
+ A dict {prefix: [suffix,...], ...} for each key-value label.
+ """
+ label_values = collections.defaultdict(list)
+ for label_name in tracker_bizobj.GetLabels(art):
+ if '-' in label_name:
+ key, value = label_name.split('-', 1)
+ label_values[key.lower()].append(value)
+
+ return label_values
+
+
+def GetArtifactAttr(
+ art, attribute_name, users_by_id, label_attr_values_dict, config):
+ """Return the requested attribute values of the given artifact.
+
+ Args:
+ art: a tracked artifact with labels, local_id, summary, stars, and owner.
+ attribute_name: lowercase string name of attribute to get.
+ users_by_id: dictionary of UserViews already created.
+ label_attr_values_dict: dictionary {'key': [value, ...], }.
+ config: ProjectIssueConfig PB for the current project.
+
+ Returns:
+ A list of string attribute values, or [framework_constants.NO_VALUES]
+ if the artifact has no value for that attribute.
+ """
+ if attribute_name == '--':
+ return []
+ if attribute_name == 'id':
+ return [art.local_id]
+ if attribute_name == 'summary':
+ return [art.summary]
+ if attribute_name == 'status':
+ return [tracker_bizobj.GetStatus(art)]
+ if attribute_name == 'stars':
+ return [art.star_count]
+ if attribute_name == 'attachments':
+ return [art.attachment_count]
+ # TODO(jrobbins): support blocked on, blocking, and mergedinto.
+ if attribute_name == 'reporter':
+ return [users_by_id[art.reporter_id].display_name]
+ if attribute_name == 'owner':
+ owner_id = tracker_bizobj.GetOwnerId(art)
+ if not owner_id:
+ return [framework_constants.NO_VALUES]
+ else:
+ return [users_by_id[owner_id].display_name]
+ if attribute_name == 'cc':
+ cc_ids = tracker_bizobj.GetCcIds(art)
+ if not cc_ids:
+ return [framework_constants.NO_VALUES]
+ else:
+ return [users_by_id[cc_id].display_name for cc_id in cc_ids]
+ if attribute_name == 'component':
+ comp_ids = list(art.component_ids) + list(art.derived_component_ids)
+ if not comp_ids:
+ return [framework_constants.NO_VALUES]
+ else:
+ paths = []
+ for comp_id in comp_ids:
+ cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
+ if cd:
+ paths.append(cd.path)
+ return paths
+
+ # Check to see if it is a field. Process as field only if it is not an enum
+ # type because enum types are stored as key-value labels.
+ fd = tracker_bizobj.FindFieldDef(attribute_name, config)
+ if fd and fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
+ values = []
+ for fv in art.field_values:
+ if fv.field_id == fd.field_id:
+ value = tracker_bizobj.GetFieldValueWithRawValue(
+ fd.field_type, fv, users_by_id, None)
+ values.append(value)
+ return values
+
+ # Since it is not a built-in attribute or a field, it must be a key-value
+ # label.
+ return label_attr_values_dict.get(
+ attribute_name, [framework_constants.NO_VALUES])
+
+
+def AnyArtifactHasNoAttr(
+ artifacts, attr_name, users_by_id, all_label_values, config):
+ """Return true if any artifact does not have a value for attr_name."""
+ # TODO(jrobbins): all_label_values needs to be keyed by issue_id to allow
+ # cross-project grid views.
+ for art in artifacts:
+ vals = GetArtifactAttr(
+ art, attr_name.lower(), users_by_id, all_label_values[art.local_id],
+ config)
+ if framework_constants.NO_VALUES in vals:
+ return True
+
+ return False
diff --git a/appengine/monorail/framework/jsonfeed.py b/appengine/monorail/framework/jsonfeed.py
new file mode 100644
index 0000000..a2cb6d5
--- /dev/null
+++ b/appengine/monorail/framework/jsonfeed.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file defines a subclass of Servlet for JSON feeds.
+
+A "feed" is a servlet that is accessed by another part of our system and that
+responds with a JSON value rather than HTML to display in a browser.
+"""
+
+import httplib
+import json
+import logging
+
+from google.appengine.api import app_identity
+
+import settings
+
+from framework import framework_constants
+from framework import permissions
+from framework import servlet
+from framework import xsrf
+
+# This causes a JS error for a hacker trying to do a cross-site inclusion.
+XSSI_PREFIX = ")]}'\n"
+
+
class JsonFeed(servlet.Servlet):
  """A convenient base class for JSON feeds.

  Subclasses implement HandleRequest() to return a dict; this base class
  handles XSRF validation, the same-app check, permission checks, and
  serializing the result as JSON with an XSSI-protection prefix.
  """

  # By default, JSON output is compact. Subclasses can set this to
  # an integer, like 4, for pretty-printed output.
  JSON_INDENT = None

  # Some JSON handlers can only be accessed from our own app.
  CHECK_SAME_APP = False

  def HandleRequest(self, _mr):
    """Override this method to implement handling of the request.

    Args:
      _mr: common information parsed from the HTTP request.

    Returns:
      A dictionary of json data.

    Raises:
      servlet.MethodNotSupportedError: if the subclass did not override this.
    """
    raise servlet.MethodNotSupportedError()

  def _DoRequestHandling(self, request, mr):
    """Do permission checking, page processing, and response formatting."""
    try:
      if self.CHECK_SECURITY_TOKEN and mr.auth.user_id:
        # Validate the XSRF token with the specific request path for this
        # servlet. But, not every XHR request has a distinct token, so just
        # use 'xhr' for ones that don't.
        # TODO(jrobbins): make specific tokens for:
        # user and project stars, issue options, check names.
        try:
          logging.info('request in jsonfeed is %r', request)
          xsrf.ValidateToken(mr.token, mr.auth.user_id, request.path)
        except xsrf.TokenIncorrect:
          # Fall back to the generic XHR token path before rejecting.
          logging.info('using token path "xhr"')
          xsrf.ValidateToken(mr.token, mr.auth.user_id, xsrf.XHR_SERVLET_PATH)

      if self.CHECK_SAME_APP and not settings.dev_mode:
        # Reject calls that did not originate from this same GAE app.
        calling_app_id = request.headers.get('X-Appengine-Inbound-Appid')
        if calling_app_id != app_identity.get_application_id():
          self.response.status = httplib.FORBIDDEN
          return

      self._CheckForMovedProject(mr, request)
      self.AssertBasePermission(mr)

      json_data = self.HandleRequest(mr)

      self._RenderJsonResponse(json_data)
      # NOTE(review): raising here presumably tells the Servlet base class
      # that the response was already written -- confirm against servlet.py.
      raise servlet.AlreadySentResponseException()

    except permissions.PermissionException as e:
      logging.info('Trapped PermissionException %s', e)
      self.response.status = httplib.FORBIDDEN

  # pylint: disable=unused-argument
  # pylint: disable=arguments-differ
  def get(self, project_name=None, viewed_username=None):
    """Collect page-specific and generic info, then render the page.

    Args:
      project_name: string project name parsed from the URL by webapp2,
          but we also parse it out in our code.
      viewed_username: string user email parsed from the URL by webapp2,
          but we also parse it out in our code.
    """
    self._DoRequestHandling(self.mr.request, self.mr)

  # pylint: disable=unused-argument
  # pylint: disable=arguments-differ
  def post(self, project_name=None, viewed_username=None):
    """Parse the request, check base perms, and call form-specific code."""
    self._DoRequestHandling(self.mr.request, self.mr)

  def _RenderJsonResponse(self, json_data):
    """Serialize the data as JSON so that it can be sent to the browser."""
    json_str = json.dumps(json_data, indent=self.JSON_INDENT)
    logging.debug(
        'Sending JSON response: %r length: %r',
        json_str[:framework_constants.LOGGING_MAX_LENGTH], len(json_str))
    self.response.content_type = framework_constants.CONTENT_TYPE_JSON
    # The XSSI prefix breaks naive cross-site <script> inclusion of this feed.
    self.response.write(XSSI_PREFIX)
    self.response.write(json_str)
+
+
class InternalTask(JsonFeed):
  """Internal tasks are JSON feeds that can only be reached by our own code."""

  # Task queue and cron requests do not carry a signed-in user's session,
  # so there is no XSRF token to validate.
  # NOTE(review): CHECK_SAME_APP is not enabled here -- confirm whether
  # subclasses are expected to set it themselves.
  CHECK_SECURITY_TOKEN = False
diff --git a/appengine/monorail/framework/monorailrequest.py b/appengine/monorail/framework/monorailrequest.py
new file mode 100644
index 0000000..8e4dcd0
--- /dev/null
+++ b/appengine/monorail/framework/monorailrequest.py
@@ -0,0 +1,691 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to hold information parsed from a request.
+
+To simplify our servlets and avoid duplication of code, we parse some
+info out of the request as soon as we get it and then pass a MonorailRequest
+object to the servlet-specific request handler methods.
+"""
+
+import endpoints
+import logging
+import re
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import app_identity
+from google.appengine.api import oauth
+from google.appengine.api import users
+
+import webapp2
+
+import settings
+from framework import framework_constants
+from framework import framework_views
+from framework import permissions
+from framework import sql
+from framework import template_helpers
+from proto import api_pb2_v1
+from proto import user_pb2
+from services import user_svc
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
# Matches a bare hostname optionally followed by a port, e.g. "a.b.c:8080".
# Raw string avoids relying on '\d' surviving as a literal backslash escape.
_HOSTPORT_RE = re.compile(r'^[-a-z0-9.]+(:\d+)?$', re.I)
+
+
class AuthData(object):
  """This object holds authentication data about a user.

  This is used by MonorailRequest as it determines which user the
  requester is authenticated as and fetches the user's data. It can
  also be used to lookup perms for user IDs specified in issue fields.

  Attributes:
    user_id: The user ID of the user (or 0 if not signed in).
    effective_ids: A set of user IDs that includes the signed in user's
        direct user ID and the user IDs of all their user groups.
        This set will be empty for anonymous users.
    user_view: UserView object for the signed-in user.
    user_pb: User object for the signed-in user.
    email: email address for the user, or None.
  """

  def __init__(self):
    # Defaults represent an anonymous (not signed-in) user.
    self.user_id = 0
    self.effective_ids = set()
    self.user_view = None
    self.user_pb = user_pb2.MakeUser()
    self.email = None

  @classmethod
  def FromRequest(cls, cnxn, services):
    """Determine auth information from the request and fetch user data.

    If everything works and the user is signed in, then all of the public
    attributes of the AuthData instance will be filled in appropriately.

    Args:
      cnxn: connection to the SQL database.
      services: Interface to all persistence storage backends.

    Returns:
      A new AuthData object.
    """
    user = users.get_current_user()
    if user is None:
      return cls()
    else:
      # We create a User row for each user who visits the site.
      # TODO(jrobbins): we should really only do it when they take action.
      return cls.FromEmail(cnxn, user.email(), services, autocreate=True)

  @classmethod
  def FromEmail(cls, cnxn, email, services, autocreate=False):
    """Determine auth information for the given user email address.

    Args:
      cnxn: monorail connection to the database.
      email: string email address of the user.
      services: connections to backend servers.
      autocreate: set to True to create a new row in the Users table if needed.

    Returns:
      A new AuthData object.

    Raises:
      user_svc.NoSuchUserException: If the user of the email does not exist.
    """
    auth = cls()
    auth.email = email
    if email:
      auth.user_id = services.user.LookupUserID(
          cnxn, email, autocreate=autocreate)
      assert auth.user_id

    cls._FinishInitialization(cnxn, auth, services)
    return auth

  @classmethod
  def FromUserID(cls, cnxn, user_id, services):
    """Determine auth information for the given user ID.

    Args:
      cnxn: monorail connection to the database.
      user_id: int user ID of the user.
      services: connections to backend servers.

    Returns:
      A new AuthData object.
    """
    auth = cls()
    auth.user_id = user_id
    if auth.user_id:
      auth.email = services.user.LookupUserEmail(cnxn, user_id)

    cls._FinishInitialization(cnxn, auth, services)
    return auth

  @classmethod
  def _FinishInitialization(cls, cnxn, auth, services):
    """Fill in the rest of the fields based on the user_id."""
    # TODO(jrobbins): re-implement same_org
    if auth.user_id:
      # effective_ids is the direct user ID plus all of their group IDs.
      auth.effective_ids = services.usergroup.LookupMemberships(
          cnxn, auth.user_id)
      auth.effective_ids.add(auth.user_id)
      auth.user_pb = services.user.GetUser(cnxn, auth.user_id)
      if auth.user_pb:
        auth.user_view = framework_views.UserView(
            auth.user_id, auth.email,
            auth.user_pb.obscure_email)
+
+
class MonorailApiRequest(object):
  """A class to hold information parsed from the Endpoints API request."""

  # pylint: disable=attribute-defined-outside-init
  def __init__(self, request, services):
    """Parse the API request and look up the requester, project, and issue.

    Args:
      request: the protorpc request message from the Endpoints API.
      services: connections to backend servers including DB.

    Raises:
      InputException: if the request specifies an unsupported canned query.
    """
    requester = (
        endpoints.get_current_user() or
        oauth.get_current_user(
            framework_constants.OAUTH_SCOPE))
    requester_email = requester.email().lower()
    self.cnxn = sql.MonorailConnection()
    self.auth = AuthData.FromEmail(
        self.cnxn, requester_email, services)
    self.me_user_id = self.auth.user_id
    self.viewed_username = None
    self.viewed_user_auth = None
    self.project_name = None
    self.project = None
    self.issue = None
    self.config = None
    self.granted_perms = set()

    # query parameters
    self.params = {
        'can': 1,
        'start': 0,
        'num': 100,
        'q': '',
        'sort': '',
        'groupby': '',
        'projects': []}
    self.use_cached_searches = True
    self.warnings = []
    self.errors = template_helpers.EZTError()
    self.mode = None

    if hasattr(request, 'projectId'):
      self.project_name = request.projectId
      self.project = services.project.GetProjectByName(
          self.cnxn, self.project_name)
      self.params['projects'].append(self.project_name)
      self.config = services.config.GetProjectConfig(
          self.cnxn, self.project_id)
      if hasattr(request, 'additionalProject'):
        self.params['projects'].extend(request.additionalProject)
        self.params['projects'] = list(set(self.params['projects']))
      if hasattr(request, 'issueId'):
        self.issue = services.issue.GetIssueByLocalID(
            self.cnxn, self.project_id, request.issueId)
        self.granted_perms = tracker_bizobj.GetGrantedPerms(
            self.issue, self.auth.effective_ids, self.config)
    if hasattr(request, 'userId'):
      self.viewed_username = request.userId.lower()
      if self.viewed_username == 'me':
        self.viewed_username = requester_email
      self.viewed_user_auth = AuthData.FromEmail(
          self.cnxn, self.viewed_username, services)
    elif hasattr(request, 'groupName'):
      self.viewed_username = request.groupName.lower()
      try:
        self.viewed_user_auth = AuthData.FromEmail(
            self.cnxn, self.viewed_username, services)
      except user_svc.NoSuchUserException:
        self.viewed_user_auth = None
    self.perms = permissions.GetPermissions(
        self.auth.user_pb, self.auth.effective_ids, self.project)

    # Build q from the structured query fields supported by the API.
    if hasattr(request, 'q') and request.q:
      self.params['q'] = request.q
    if hasattr(request, 'publishedMax') and request.publishedMax:
      self.params['q'] += ' opened<=%d' % request.publishedMax
    if hasattr(request, 'publishedMin') and request.publishedMin:
      self.params['q'] += ' opened>=%d' % request.publishedMin
    if hasattr(request, 'updatedMax') and request.updatedMax:
      self.params['q'] += ' modified<=%d' % request.updatedMax
    if hasattr(request, 'updatedMin') and request.updatedMin:
      self.params['q'] += ' modified>=%d' % request.updatedMin
    if hasattr(request, 'owner') and request.owner:
      self.params['q'] += ' owner:%s' % request.owner
    if hasattr(request, 'status') and request.status:
      self.params['q'] += ' status:%s' % request.status
    if hasattr(request, 'label') and request.label:
      self.params['q'] += ' label:%s' % request.label

    if hasattr(request, 'can') and request.can:
      # Map the API's CannedQuery enum values to internal canned query IDs.
      canned_query_ids = {
          api_pb2_v1.CannedQuery.all: 1,
          api_pb2_v1.CannedQuery.new: 6,
          api_pb2_v1.CannedQuery.open: 2,
          api_pb2_v1.CannedQuery.owned: 3,
          api_pb2_v1.CannedQuery.reported: 4,
          api_pb2_v1.CannedQuery.starred: 5,
          api_pb2_v1.CannedQuery.to_verify: 7,
          }
      try:
        self.params['can'] = canned_query_ids[request.can]
      except KeyError:  # Endpoints should have caught this.
        # Bug fix: interpolate the value into the message; previously it was
        # passed as a second exception argument and never formatted.
        raise InputException(
            'Canned query %s is not supported.' % request.can)
    if hasattr(request, 'startIndex') and request.startIndex:
      self.params['start'] = request.startIndex
    if hasattr(request, 'maxResults') and request.maxResults:
      self.params['num'] = request.maxResults
    if hasattr(request, 'sort') and request.sort:
      self.params['sort'] = request.sort

    # Mirror commonly used params as attributes for servlet-style access.
    self.query_project_names = self.GetParam('projects')
    self.group_by_spec = self.GetParam('groupby')
    self.sort_spec = self.GetParam('sort')
    self.query = self.GetParam('q')
    self.can = self.GetParam('can')
    self.start = self.GetParam('start')
    self.num = self.GetParam('num')

  @property
  def project_id(self):
    """Return the current project's int ID, or None if no project."""
    return self.project.project_id if self.project else None

  def GetParam(self, query_param_name, default_value=None,
               _antitamper_re=None):
    """Return the named query param; signature mirrors MonorailRequest."""
    return self.params.get(query_param_name, default_value)

  def GetPositiveIntParam(self, query_param_name, default_value=None):
    """Returns 0 if the user-provided value is less than 0."""
    return max(self.GetParam(query_param_name, default_value=default_value),
               0)
+
+
class MonorailRequest(object):
  """A class to hold information parsed from the HTTP request.

  The goal of MonorailRequest is to do almost all URL path and query string
  processing in one place, which makes the servlet code simpler.

  Attributes:
    cnxn: connection to the SQL databases.
    logged_in_user_id: int user ID of the signed-in user, or None.
    effective_ids: set of signed-in user ID and all their user group IDs.
    user_pb: User object for the signed in user.
    project_name: string name of the current project.
    project_id: int ID of the current project.
    viewed_username: string username of the user whose profile is being viewed.
    can: int "canned query" number to scope the user's search.
    num: int number of results to show per pagination page.
    start: int position in result set to show on this pagination page.
    etc: there are many more, all read-only.
  """

  # pylint: disable=attribute-defined-outside-init
  def __init__(self, params=None):
    """Initialize the MonorailRequest object."""
    # params, when given, pre-seeds form_overrides (used in testing).
    self.form_overrides = {}
    if params:
      self.form_overrides.update(params)
    self.warnings = []
    self.errors = template_helpers.EZTError()
    self.debug_enabled = False
    self.use_cached_searches = True
    self.cnxn = sql.MonorailConnection()

    self.auth = AuthData()  # Authentication info for logged-in user

    self.project_name = None
    self.project = None

    self.viewed_username = None
    self.viewed_user_auth = AuthData()

  @property
  def project_id(self):
    # Derived from the looked-up Project object; None before _LookupProject.
    return self.project.project_id if self.project else None

  def CleanUp(self):
    """Close the database connection so that the app does not run out."""
    if self.cnxn:
      self.cnxn.Close()
      self.cnxn = None

  def ParseRequest(self, request, services, prof, do_user_lookups=True):
    """Parse tons of useful info from the given request object.

    Args:
      request: webapp2 Request object w/ path and query params.
      services: connections to backend servers including DB.
      prof: Profiler instance.
      do_user_lookups: Set to False to disable lookups during testing.
    """
    with prof.Phase('basic parsing'):
      self.request = request
      self.current_page_url = request.url
      self.current_page_url_encoded = urllib.quote_plus(self.current_page_url)

      # Only accept a hostport from the request that looks valid.
      if not _HOSTPORT_RE.match(request.host):
        # NOTE(review): the second arg is stored on the exception but not
        # interpolated into the message; '%' formatting would be clearer.
        raise InputException('request.host looks funny: %r', request.host)

      logging.info('Request: %s', self.current_page_url)

    with prof.Phase('path parsing'):
      viewed_user_val, self.project_name = _ParsePathIdentifiers(
          self.request.path)
      self.viewed_username = _GetViewedEmail(
          viewed_user_val, self.cnxn, services)
    with prof.Phase('qs parsing'):
      self._ParseQueryParameters()
    with prof.Phase('overrides parsing'):
      self._ParseFormOverrides()

    if not self.project:  # It can be already set in unit tests.
      self._LookupProject(services, prof)
    if do_user_lookups:
      if self.viewed_username:
        self._LookupViewedUser(services, prof)
      self._LookupLoggedInUser(services, prof)
      # TODO(jrobbins): re-implement HandleLurkerViewingSelf()

    # Debug output is allowed only in dev mode or for users with VIEW_DEBUG.
    prod_debug_allowed = self.perms.HasPerm(
        permissions.VIEW_DEBUG, self.auth.user_id, None)
    self.debug_enabled = (request.params.get('debug') and
                          (settings.dev_mode or prod_debug_allowed))
    # temporary option for perf testing on staging instance.
    if request.params.get('disable_cache'):
      if settings.dev_mode or 'staging' in request.host:
        self.use_cached_searches = False

  def _ParseQueryParameters(self):
    """Parse and convert all the query string params used in any servlet."""
    self.start = self.GetPositiveIntParam('start', default_value=0)
    self.num = self.GetPositiveIntParam('num', default_value=100)
    # Prevent DoS attacks that try to make us serve really huge result pages.
    self.num = min(self.num, settings.max_artifact_search_results_per_page)

    self.invalidation_timestep = self.GetIntParam(
        'invalidation_timestep', default_value=0)

    self.continue_issue_id = self.GetIntParam(
        'continue_issue_id', default_value=0)
    self.redir = self.GetParam('redir')

    # Search scope, a.k.a., canned query ID
    # TODO(jrobbins): make configurable
    self.can = self.GetIntParam(
        'can', default_value=tracker_constants.OPEN_ISSUES_CAN)

    # Search query
    self.query = self.GetParam('q', default_value='').strip()

    # Sorting of search results (needed for result list and flipper)
    self.sort_spec = self.GetParam(
        'sort', default_value='',
        antitamper_re=framework_constants.SORTSPEC_RE)

    # Note: This is set later in request handling by ComputeColSpec().
    self.col_spec = None

    # Grouping of search results (needed for result list and flipper)
    self.group_by_spec = self.GetParam(
        'groupby', default_value='',
        antitamper_re=framework_constants.SORTSPEC_RE)

    # For issue list and grid mode.
    self.cursor = self.GetParam('cursor')
    self.preview = self.GetParam('preview')
    self.mode = self.GetParam('mode', default_value='list')
    self.x = self.GetParam('x', default_value='')
    self.y = self.GetParam('y', default_value='')
    self.cells = self.GetParam('cells', default_value='ids')

    # For the dashboard and issue lists included in the dashboard.
    self.ajah = self.GetParam('ajah')  # AJAH = Asynchronous Javascript And HTML
    self.table_title = self.GetParam('table_title')
    self.panel_id = self.GetIntParam('panel')

    # For pagination of updates lists
    self.before = self.GetPositiveIntParam('before')
    self.after = self.GetPositiveIntParam('after')

    # For cron tasks and backend calls
    self.lower_bound = self.GetIntParam('lower_bound')
    self.upper_bound = self.GetIntParam('upper_bound')
    self.shard_id = self.GetIntParam('shard_id')

    # For specifying which objects to operate on
    self.local_id = self.GetIntParam('id')
    self.local_id_list = self.GetIntListParam('ids')
    self.seq = self.GetIntParam('seq')
    self.aid = self.GetIntParam('aid')
    self.specified_user_id = self.GetIntParam('u', default_value=0)
    self.specified_logged_in_user_id = self.GetIntParam(
        'logged_in_user_id', default_value=0)
    self.specified_me_user_id = self.GetIntParam(
        'me_user_id', default_value=0)
    self.specified_project = self.GetParam('project')
    self.specified_project_id = self.GetIntParam('project_id')
    self.query_project_names = self.GetListParam('projects', default_value=[])
    self.template_name = self.GetParam('template')
    self.component_path = self.GetParam('component')
    self.field_name = self.GetParam('field')

    # For image attachments
    self.inline = bool(self.GetParam('inline'))
    self.thumb = bool(self.GetParam('thumb'))

    # For JS callbacks
    self.token = self.GetParam('token')
    self.starred = bool(self.GetIntParam('starred'))

    # For issue reindexing utility servlet
    self.auto_submit = self.GetParam('auto_submit')

  def _ParseFormOverrides(self):
    """Support deep linking by allowing the user to set form fields via QS."""
    allowed_overrides = {
        'template_name': self.GetParam('template_name'),
        'initial_summary': self.GetParam('summary'),
        'initial_description': (self.GetParam('description') or
                                self.GetParam('comment')),
        'initial_comment': self.GetParam('comment'),
        'initial_status': self.GetParam('status'),
        'initial_owner': self.GetParam('owner'),
        'initial_cc': self.GetParam('cc'),
        'initial_blocked_on': self.GetParam('blockedon'),
        'initial_blocking': self.GetParam('blocking'),
        'initial_merge_into': self.GetIntParam('mergeinto'),
        'initial_components': self.GetParam('components'),

        # For the people pages
        'initial_add_members': self.GetParam('add_members'),
        'initially_expanded_form': ezt.boolean(self.GetParam('expand_form')),

        # For user group admin pages
        'initial_name': (self.GetParam('group_name') or
                         self.GetParam('proposed_project_name')),
        }

    # Only keep the overrides that were actually provided in the query string.
    self.form_overrides.update(
        (k, v) for (k, v) in allowed_overrides.iteritems()
        if v is not None)

  def _LookupViewedUser(self, services, prof):
    """Get information about the viewed user (if any) from the request."""
    try:
      with prof.Phase('get viewed user, if any'):
        self.viewed_user_auth = AuthData.FromEmail(
            self.cnxn, self.viewed_username, services, autocreate=False)
    except user_svc.NoSuchUserException:
      logging.info('could not find user %r', self.viewed_username)
      webapp2.abort(404, 'user not found')

    if not self.viewed_user_auth.user_id:
      webapp2.abort(404, 'user not found')

  def _LookupProject(self, services, prof):
    """Get information about the current project (if any) from the request."""
    with prof.Phase('get current project, if any'):
      if not self.project_name:
        logging.info('no project_name, so no project')
      else:
        self.project = services.project.GetProjectByName(
            self.cnxn, self.project_name)
        if not self.project:
          webapp2.abort(404, 'invalid project')

  def _LookupLoggedInUser(self, services, prof):
    """Get information about the signed-in user (if any) from the request."""
    with prof.Phase('get user info, if any'):
      self.auth = AuthData.FromRequest(self.cnxn, services)
    # "me" in queries refers to: the ?me= param, else the viewed user,
    # else the signed-in user.
    self.me_user_id = (self.GetIntParam('me') or
                       self.viewed_user_auth.user_id or self.auth.user_id)

    with prof.Phase('looking up signed in user permissions'):
      self.perms = permissions.GetPermissions(
          self.auth.user_pb, self.auth.effective_ids, self.project)

  def ComputeColSpec(self, config):
    """Set col_spec based on param, default in the config, or site default."""
    if self.col_spec is not None:
      return  # Already set.
    default_col_spec = ''
    if config:
      default_col_spec = config.default_col_spec

    col_spec = self.GetParam(
        'colspec', default_value=default_col_spec,
        antitamper_re=framework_constants.COLSPEC_RE)

    if not col_spec:
      # If col spec is still empty then default to the global col spec.
      col_spec = tracker_constants.DEFAULT_COL_SPEC

    self.col_spec = ' '.join(ParseColSpec(col_spec))

  def PrepareForReentry(self, echo_data):
    """Expose the results of form processing as if it was a new GET.

    This method is called only when the user submits a form with invalid
    information which they are being asked to correct it. Updating the MR
    object allows the normal servlet get() method to populate the form with
    the entered values and error messages.

    Args:
      echo_data: dict of {page_data_key: value_to_reoffer, ...} that will
          override whatever HTML form values are normally shown to the
          user when they initially view the form. This allows them to
          fix user input that was not valid.
    """
    self.form_overrides.update(echo_data)

  def GetParam(self, query_param_name, default_value=None,
               antitamper_re=None):
    """Get a query parameter from the URL as a utf8 string."""
    value = self.request.params.get(query_param_name)
    assert value is None or isinstance(value, unicode)
    using_default = value is None
    if using_default:
      value = default_value

    # NOTE(review): if antitamper_re is given and the resolved value is
    # None (missing param with a None default), .match(None) would raise
    # TypeError rather than InputException -- confirm callers always pass
    # a non-None default alongside antitamper_re.
    if antitamper_re and not antitamper_re.match(value):
      if using_default:
        logging.error('Default value fails antitamper for %s field: %s',
                      query_param_name, value)
      else:
        logging.info('User seems to have tampered with %s field: %s',
                     query_param_name, value)
      raise InputException()

    return value

  def GetIntParam(self, query_param_name, default_value=None):
    """Get an integer param from the URL or default."""
    value = self.request.params.get(query_param_name)
    if value is None:
      return default_value

    try:
      return int(value)
    except (TypeError, ValueError):
      # Unparsable values fall back to the default rather than raising.
      return default_value

  def GetPositiveIntParam(self, query_param_name, default_value=None):
    """Returns 0 if the user-provided value is less than 0."""
    return max(self.GetIntParam(query_param_name, default_value=default_value),
               0)

  def GetListParam(self, query_param_name, default_value=None):
    """Get a list of strings from the URL or default."""
    params = self.request.params.get(query_param_name)
    if params is None:
      return default_value
    if not params:
      return []
    return params.split(',')

  def GetIntListParam(self, query_param_name, default_value=None):
    """Get a list of ints from the URL or default."""
    param_list = self.GetListParam(query_param_name)
    if param_list is None:
      return default_value

    try:
      return [int(p) for p in param_list]
    except (TypeError, ValueError):
      # Any non-integer element invalidates the whole list.
      return default_value
+
+
def _ParsePathIdentifiers(path):
  """Parse out the workspace being requested (if any).

  Args:
    path: A string beginning with the request's path info.

  Returns:
    (viewed_user_val, project_name).
  """
  viewed_user_val = None
  project_name = None

  # Drop the leading slash and any query string, then split on '/'.
  parts = path.lstrip('/').split('?')[0].split('/')

  if len(parts) >= 2:
    prefix, ident = parts[0], parts[1]
    if prefix == 'p':
      project_name = ident
    elif prefix in ('u', 'g'):
      # User and group pages both name a viewed user in the second segment.
      viewed_user_val = urllib.unquote(ident)

  return viewed_user_val, project_name
+
+
def _GetViewedEmail(viewed_user_val, cnxn, services):
  """Return the email address of the user being viewed.

  Args:
    viewed_user_val: Could be either int (user_id) or str (email).
    cnxn: connection to the SQL database.
    services: Interface to all persistence storage backends.

  Returns:
    The viewed user's email address, or None if viewed_user_val is empty.
  """
  if not viewed_user_val:
    return None

  try:
    # Numeric values are user IDs that must be resolved to an email.
    user_id = int(viewed_user_val)
    email = services.user.LookupUserEmail(cnxn, user_id)
    if not email:
      logging.info('userID %s not found', user_id)
      webapp2.abort(404, 'user not found')
  except ValueError:
    # Not an int, so it must already be an email address.
    email = viewed_user_val

  return email
+
+
def ParseColSpec(col_spec):
  """Split a string column spec into a list of column names.

  Args:
    col_spec: a unicode string containing a list of column names.

  Returns:
    A list of the extracted column names. Non-alphanumeric
    characters other than the period will be stripped from the text.
  """
  # COLSPEC_COL_RE defines what counts as a single column token.
  return framework_constants.COLSPEC_COL_RE.findall(col_spec)
+
+
class Error(Exception):
  """Base exception class for all errors raised by this module."""
+
+
class InputException(Error):
  """Raised when something is wrong with user input processing."""
diff --git a/appengine/monorail/framework/paginate.py b/appengine/monorail/framework/paginate.py
new file mode 100644
index 0000000..fde7ebe
--- /dev/null
+++ b/appengine/monorail/framework/paginate.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that help display pagination widgets for result sets."""
+
+import logging
+
+from third_party import ezt
+
+import settings
+from framework import framework_helpers
+
+
class VirtualPagination(object):
  """Class to calc Prev and Next pagination links based on result counts."""

  def __init__(self, mr, total_count, items_per_page,
               list_page_url=None, count_up=True,
               start_param='start', num_param='num', max_num=None):
    """Given 'num' and 'start' params, determine Prev and Next links.

    Args:
      mr: commonly used info parsed from the request.
      total_count: total number of artifacts that satisfy the query.
      items_per_page: number of items to display on each page, e.g., 25.
      list_page_url: URL of the web application page that is displaying
          the list of artifacts. Used to build the Prev and Next URLs.
          If None, no URLs will be built.
      count_up: if False, count down from total_count.
      start_param: query string parameter name to use for the start
          of the pagination page.
      num_param: query string parameter name to use for the number of items
          to show on a pagination page.
      max_num: optional limit on the value of the num param. If not given,
          settings.max_artifact_search_results_per_page is used.
    """
    self.total_count = total_count
    # URL attributes default to '' so templates can test them for truth.
    self.prev_url = ''
    self.reload_url = ''
    self.next_url = ''

    if max_num is None:
      max_num = settings.max_artifact_search_results_per_page

    # Clamp the user-supplied page size to the configured maximum.
    self.num = mr.GetPositiveIntParam(num_param, items_per_page)
    self.num = min(self.num, max_num)

    if count_up:
      # Ascending: start defaults to 0 and pages move forward.
      self.start = mr.GetPositiveIntParam(start_param, 0)
      self.last = min(self.total_count, self.start + self.num)
      prev_start = max(0, self.start - self.num)
      next_start = self.start + self.num
    else:
      # Descending: start defaults to total_count and pages move backward.
      self.start = mr.GetPositiveIntParam(start_param, self.total_count)
      self.last = max(0, self.start - self.num)
      prev_start = min(self.total_count, self.start + self.num)
      next_start = self.start - self.num

    if list_page_url:
      if mr.project_name:
        list_servlet_rel_url = '/p/%s%s' % (
            mr.project_name, list_page_url)
      else:
        list_servlet_rel_url = list_page_url

      self.reload_url = framework_helpers.FormatURL(
          mr, list_servlet_rel_url,
          **{start_param: self.start, num_param: self.num})

      # Only emit Prev/Next URLs when there is actually a page to go to.
      if prev_start != self.start:
        self.prev_url = framework_helpers.FormatURL(
            mr, list_servlet_rel_url,
            **{start_param: prev_start, num_param: self.num})
      if ((count_up and next_start < self.total_count) or
          (not count_up and next_start >= 1)):
        self.next_url = framework_helpers.FormatURL(
            mr, list_servlet_rel_url,
            **{start_param: next_start, num_param: self.num})

    # The widget is visible only when the current page is non-empty.
    self.visible = ezt.boolean(self.last != self.start)

    # Adjust indices to one-based values for display to users.
    if count_up:
      self.start += 1
    else:
      self.last += 1

  def DebugString(self):
    """Return a string that is useful in on-page debugging."""
    return '%s - %s of %s; prev_url:%s; next_url:%s' % (
        self.start, self.last, self.total_count, self.prev_url, self.next_url)
+
+
class ArtifactPagination(VirtualPagination):
  """Class to calc Prev and Next pagination links based on a results list."""

  def __init__(
      self, mr, results, items_per_page, list_page_url, total_count=None,
      limit_reached=False, skipped=0):
    """Given 'num' and 'start' params, determine Prev and Next links.

    Args:
      mr: commonly used info parsed from the request.
      results: a list of artifact ids that satisfy the query.
      items_per_page: number of items to display on each page, e.g., 25.
      list_page_url: URL of the web application page that is displaying
          the list of artifacts. Used to build the Prev and Next URLs.
      total_count: specify total result count rather than the length of results
      limit_reached: optional boolean that indicates that more results could
          not be fetched because a limit was reached.
      skipped: optional int number of items that were skipped and left off the
          front of results.
    """
    if total_count is None:
      total_count = skipped + len(results)
    super(ArtifactPagination, self).__init__(
        mr, total_count, items_per_page, list_page_url=list_page_url)

    self.limit_reached = ezt.boolean(limit_reached)
    # Determine which of those results should be visible on the current page.
    # self.start is one-based after the parent's display adjustment, hence -1;
    # skipped items are not present in results, so shift the window left.
    range_start = self.start - 1 - skipped
    range_end = range_start + self.num
    # NOTE: this sanity check is stripped when running with python -O.
    assert 0 <= range_start <= range_end
    self.visible_results = results[range_start:range_end]
diff --git a/appengine/monorail/framework/pbproxy_test_pb2.py b/appengine/monorail/framework/pbproxy_test_pb2.py
new file mode 100644
index 0000000..48bf399
--- /dev/null
+++ b/appengine/monorail/framework/pbproxy_test_pb2.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Message classes for use by template_helpers_test."""
+
+from protorpc import messages
+
+
+class PBProxyExample(messages.Message):
+  """A simple protocol buffer to test template_helpers.PBProxy."""
+  # Simple scalar fields exercised by the PBProxy tests.
+  foo = messages.StringField(1)
+  bar = messages.BooleanField(2, default=False)
+
+
+class PBProxyNested(messages.Message):
+  """A simple protocol buffer to test template_helpers.PBProxy."""
+  # A singular nested message field.
+  nested = messages.MessageField(PBProxyExample, 1)
+  # Repeated scalar and repeated message fields.
+  multiple_strings = messages.StringField(2, repeated=True)
+  multiple_pbes = messages.MessageField(PBProxyExample, 3, repeated=True)
diff --git a/appengine/monorail/framework/permissions.py b/appengine/monorail/framework/permissions.py
new file mode 100644
index 0000000..e5b8404
--- /dev/null
+++ b/appengine/monorail/framework/permissions.py
@@ -0,0 +1,959 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions to implement permission checking.
+
+The main data structure is a simple map from (user role, project status,
+project_access_level) to specific perms.
+
+A perm is simply a string that indicates that the user has a given
+permission. The servlets and templates can test whether the current
+user has permission to see a UI element or perform an action by
+testing for the presence of the corresponding perm in the user's
+permission set.
+
+The user role is one of admin, owner, member, outsider user, or anon.
+The project status is one of the project states defined in project_pb2,
+or a special constant defined below. Likewise for access level.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+import settings
+from framework import framework_bizobj
+from framework import framework_constants
+from proto import project_pb2
+from proto import site_pb2
+from proto import usergroup_pb2
+from tracker import tracker_bizobj
+
+# Constants that define permissions.
+# Note that perms with a leading "_" can never be granted
+# to users who are not site admins.
+VIEW = 'View'
+EDIT_PROJECT = 'EditProject'
+CREATE_PROJECT = 'CreateProject'
+PUBLISH_PROJECT = '_PublishProject' # for making "doomed" projects LIVE
+VIEW_DEBUG = '_ViewDebug' # on-page debugging info
+EDIT_OTHER_USERS = '_EditOtherUsers' # can edit other user's prefs, ban, etc.
+CUSTOMIZE_PROCESS = 'CustomizeProcess' # can use some enterprise features
+VIEW_EXPIRED_PROJECT = '_ViewExpiredProject' # view long-deleted projects
+# View the list of contributors even in hub-and-spoke projects.
+VIEW_CONTRIBUTOR_LIST = 'ViewContributorList'
+
+# Quota
+VIEW_QUOTA = 'ViewQuota'
+EDIT_QUOTA = 'EditQuota'
+
+# Permissions for editing user groups
+CREATE_GROUP = 'CreateGroup'
+EDIT_GROUP = 'EditGroup'
+DELETE_GROUP = 'DeleteGroup'
+VIEW_GROUP = 'ViewGroup'
+
+# Perms for Source tools
+# TODO(jrobbins): Monorail is just issue tracking with no version control, so
+# phase out use of the term "Commit", sometime after Monorail's initial launch.
+COMMIT = 'Commit'
+
+# Perms for issue tracking
+CREATE_ISSUE = 'CreateIssue'
+EDIT_ISSUE = 'EditIssue'
+EDIT_ISSUE_OWNER = 'EditIssueOwner'
+EDIT_ISSUE_SUMMARY = 'EditIssueSummary'
+EDIT_ISSUE_STATUS = 'EditIssueStatus'
+EDIT_ISSUE_CC = 'EditIssueCc'
+DELETE_ISSUE = 'DeleteIssue'
+ADD_ISSUE_COMMENT = 'AddIssueComment'
+VIEW_INBOUND_MESSAGES = 'ViewInboundMessages'
+# Note, there is no separate DELETE_ATTACHMENT perm. We
+# allow a user to delete an attachment iff they could soft-delete
+# the comment that holds the attachment.
+
+# Note: the "_" in the perm name makes it impossible for a
+# project owner to grant it to anyone as an extra perm.
+ADMINISTER_SITE = '_AdministerSite'
+
+# Permissions to soft-delete artifact comment
+DELETE_ANY = 'DeleteAny'
+DELETE_OWN = 'DeleteOwn'
+
+# Granting this allows owners to delegate some team management work.
+EDIT_ANY_MEMBER_NOTES = 'EditAnyMemberNotes'
+
+# Permission to star/unstar any artifact.
+SET_STAR = 'SetStar'
+
+# Permission to flag any artifact as spam.
+FLAG_SPAM = 'FlagSpam'
+VERDICT_SPAM = 'VerdictSpam'
+MODERATE_SPAM = 'ModerateSpam'
+
+# Groupings of the standard perms; their union (STANDARD_PERMISSIONS below)
+# is used to distinguish built-in perms from project-defined custom perms.
+STANDARD_ADMIN_PERMISSIONS = [
+    EDIT_PROJECT, CREATE_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
+    EDIT_OTHER_USERS, CUSTOMIZE_PROCESS,
+    VIEW_QUOTA, EDIT_QUOTA, ADMINISTER_SITE,
+    EDIT_ANY_MEMBER_NOTES, VERDICT_SPAM, MODERATE_SPAM]
+
+STANDARD_ISSUE_PERMISSIONS = [
+    VIEW, EDIT_ISSUE, ADD_ISSUE_COMMENT, DELETE_ISSUE, FLAG_SPAM]
+
+# Monorail has no source control, but keep COMMIT for backward compatibility.
+STANDARD_SOURCE_PERMISSIONS = [COMMIT]
+
+STANDARD_COMMENT_PERMISSIONS = [DELETE_OWN, DELETE_ANY]
+
+STANDARD_OTHER_PERMISSIONS = [CREATE_ISSUE, FLAG_SPAM, SET_STAR]
+
+STANDARD_PERMISSIONS = (STANDARD_ADMIN_PERMISSIONS +
+                        STANDARD_ISSUE_PERMISSIONS +
+                        STANDARD_SOURCE_PERMISSIONS +
+                        STANDARD_COMMENT_PERMISSIONS +
+                        STANDARD_OTHER_PERMISSIONS)
+
+# roles
+SITE_ADMIN_ROLE = 'admin'
+OWNER_ROLE = 'owner'
+COMMITTER_ROLE = 'committer'
+CONTRIBUTOR_ROLE = 'contributor'
+USER_ROLE = 'user'
+ANON_ROLE = 'anon'
+
+# Project state out-of-band values for keys
+UNDEFINED_STATUS = 'undefined_status'
+UNDEFINED_ACCESS = 'undefined_access'
+WILDCARD_ACCESS = 'wildcard_access'
+
+class PermissionSet(object):
+  """Class to represent the set of permissions available to the user."""
+
+  def __init__(self, perm_names, consider_restrictions=True):
+    """Create a PermissionSet with the given permissions.
+
+    Args:
+      perm_names: a list of permission name strings.
+      consider_restrictions: if true, the user's permissions can be blocked
+          by restriction labels on an artifact. Project owners and site
+          admins do not consider restrictions so that they cannot
+          "lock themselves out" of editing an issue.
+    """
+    # Perms are stored lowercased so all later checks are case-insensitive.
+    self.perm_names = frozenset(p.lower() for p in perm_names)
+    self.consider_restrictions = consider_restrictions
+
+  def __getattr__(self, perm_name):
+    """Easy permission testing in EZT. E.g., [if-any perms.format_drive]."""
+    # Any attribute not found on the instance lands here and is treated as
+    # a permission name, checked outside of any project/user context.
+    return ezt.boolean(self.HasPerm(perm_name, None, None))
+
+  def CanUsePerm(
+      self, perm_name, effective_ids, project, restriction_labels,
+      granted_perms=None):
+    """Return True if the user can use the given permission.
+
+    Args:
+      perm_name: string name of permission, e.g., 'EditIssue'.
+      effective_ids: set of int user IDs for the user (including any groups),
+          or an empty set if user is not signed in.
+      project: Project PB for the project being accessed, or None if not
+          in a project.
+      restriction_labels: list of strings that restrict permission usage.
+      granted_perms: optional list of lowercase strings of permissions that the
+          user is granted only within the scope of one issue, e.g., by being
+          named in a user-type custom field that grants permissions.
+
+    Restriction labels have 3 parts, e.g.:
+    'Restrict-EditIssue-InnerCircle' blocks the use of just the
+    EditIssue permission, unless the user also has the InnerCircle
+    permission. This allows fine-grained restrictions on specific
+    actions, such as editing, commenting, or deleting.
+
+    Restriction labels and permissions are case-insensitive.
+
+    Returns:
+      True if the user can use the given permission, or False
+      if they cannot (either because they don't have that permission
+      or because it is blocked by a relevant restriction label).
+    """
+    # TODO(jrobbins): room for performance improvement: avoid set creation and
+    # repeated string operations.
+    granted_perms = granted_perms or set()
+    perm_lower = perm_name.lower()
+    if perm_lower in granted_perms:
+      return True
+
+    # Start with the requested perm, then add any perms that restriction
+    # labels demand as prerequisites for it.
+    needed_perms = {perm_lower}
+    if self.consider_restrictions:
+      for label in restriction_labels:
+        label = label.lower()
+        # format: Restrict-Action-ToThisPerm
+        # NOTE(review): assumes each label has at least two dashes —
+        # callers pass labels filtered by IsRestrictLabel, which checks that.
+        _kw, requested_perm, needed_perm = label.split('-', 2)
+        if requested_perm == perm_lower and needed_perm not in granted_perms:
+          needed_perms.add(needed_perm)
+
+    if not effective_ids:
+      effective_ids = {framework_constants.NO_USER_SPECIFIED}
+    # Id X might have perm A and Y might have B, if both A and B are needed
+    # True should be returned.
+    for perm in needed_perms:
+      if not any(
+          self.HasPerm(perm, user_id, project) for user_id in effective_ids):
+        return False
+
+    return True
+
+  def HasPerm(self, perm_name, user_id, project):
+    """Return True if the user has the given permission (ignoring user groups).
+
+    Args:
+      perm_name: string name of permission, e.g., 'EditIssue'.
+      user_id: int user id of the user, or None if user is not signed in.
+      project: Project PB for the project being accessed, or None if not
+          in a project.
+
+    Returns:
+      True if the user has the given perm.
+    """
+    # TODO(jrobbins): room for performance improvement: pre-compute
+    # extra perms (maybe merge them into the perms object), avoid
+    # redundant call to lower().
+    extra_perms = [p.lower() for p in GetExtraPerms(project, user_id)]
+    perm_name = perm_name.lower()
+    return perm_name in self.perm_names or perm_name in extra_perms
+
+  def DebugString(self):
+    """Return a useful string to show when debugging."""
+    return 'PermissionSet(%s)' % ', '.join(sorted(self.perm_names))
+
+  def __repr__(self):
+    return '%s(%r)' % (self.__class__.__name__, self.perm_names)
+
+
+EMPTY_PERMISSIONSET = PermissionSet([])
+
+READ_ONLY_PERMISSIONSET = PermissionSet([VIEW])
+
+# Perms for a signed-in user with no role in the current project.
+USER_PERMISSIONSET = PermissionSet([
+    VIEW, FLAG_SPAM, SET_STAR,
+    CREATE_ISSUE, ADD_ISSUE_COMMENT,
+    DELETE_OWN])
+
+CONTRIBUTOR_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW,
+     FLAG_SPAM, SET_STAR,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT,
+     DELETE_OWN])
+
+CONTRIBUTOR_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW])
+
+COMMITTER_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, COMMIT, VIEW_CONTRIBUTOR_LIST,
+     FLAG_SPAM, SET_STAR, VIEW_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, VIEW_INBOUND_MESSAGES,
+     DELETE_OWN])
+
+COMMITTER_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST,
+     VIEW_INBOUND_MESSAGES, VIEW_QUOTA])
+
+# Owners and admins pass consider_restrictions=False so that restriction
+# labels can never lock them out of an artifact.
+OWNER_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST, EDIT_PROJECT, COMMIT,
+     FLAG_SPAM, VERDICT_SPAM, SET_STAR, VIEW_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE,
+     VIEW_INBOUND_MESSAGES,
+     DELETE_ANY, EDIT_ANY_MEMBER_NOTES],
+    consider_restrictions=False)
+
+OWNER_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST, EDIT_PROJECT,
+     VIEW_INBOUND_MESSAGES, VIEW_QUOTA],
+    consider_restrictions=False)
+
+ADMIN_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST,
+     CREATE_PROJECT, EDIT_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
+     COMMIT, CUSTOMIZE_PROCESS, FLAG_SPAM, VERDICT_SPAM, SET_STAR,
+     ADMINISTER_SITE, VIEW_EXPIRED_PROJECT, EDIT_OTHER_USERS,
+     VIEW_QUOTA, EDIT_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE,
+     VIEW_INBOUND_MESSAGES,
+     DELETE_ANY, EDIT_ANY_MEMBER_NOTES,
+     CREATE_GROUP, EDIT_GROUP, DELETE_GROUP, VIEW_GROUP,
+     MODERATE_SPAM],
+    consider_restrictions=False)
+
+GROUP_IMPORT_BORG_PERMISSIONSET = PermissionSet(
+    [CREATE_GROUP, VIEW_GROUP, EDIT_GROUP])
+
+
+# Permissions for project pages, e.g., the project summary page
+# Keyed by (role, project status, project access level); see _LookupPermset
+# for the fallback rules applied when an exact key is not present.
+_PERMISSIONS_TABLE = {
+
+    # Project owners can view and edit artifacts in a LIVE project.
+    (OWNER_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      OWNER_ACTIVE_PERMISSIONSET,
+
+    # Project owners can view, but not edit artifacts in ARCHIVED.
+    # Note: EDIT_PROJECT is not enough permission to change an ARCHIVED project
+    # back to LIVE if a delete_time was set.
+    (OWNER_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      OWNER_INACTIVE_PERMISSIONSET,
+
+    # Project members can view their own project, regardless of state.
+    (COMMITTER_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      COMMITTER_ACTIVE_PERMISSIONSET,
+    (COMMITTER_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      COMMITTER_INACTIVE_PERMISSIONSET,
+
+    # Project contributors can view their own project, regardless of state.
+    (CONTRIBUTOR_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+    (CONTRIBUTOR_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      CONTRIBUTOR_INACTIVE_PERMISSIONSET,
+
+    # Non-members users can read and comment in projects with access == ANYONE
+    (USER_ROLE, project_pb2.ProjectState.LIVE,
+     project_pb2.ProjectAccess.ANYONE):
+      USER_PERMISSIONSET,
+
+    # Anonymous users can only read projects with access == ANYONE.
+    (ANON_ROLE, project_pb2.ProjectState.LIVE,
+     project_pb2.ProjectAccess.ANYONE):
+      READ_ONLY_PERMISSIONSET,
+
+    # Permissions for site pages, e.g., creating a new project
+    (USER_ROLE, UNDEFINED_STATUS, UNDEFINED_ACCESS):
+      PermissionSet([CREATE_PROJECT, CREATE_GROUP]),
+    }
+
+
+def GetPermissions(user, effective_ids, project):
+ """Return a permission set appropriate for the user and project.
+
+ Args:
+ user: The User PB for the signed-in user, or None for anon users.
+ effective_ids: set of int user IDs for the current user and all user
+ groups that s/he is a member of. This will be an empty set for
+ anonymous users.
+ project: either a Project protobuf, or None for a page whose scope is
+ wider than a single project.
+
+ Returns:
+ a PermissionSet object for the current user and project (or for
+ site-wide operations if project is None).
+
+ If an exact match for the user's role and project status is found, that is
+ returned. Otherwise, we look for permissions for the user's role that is
+ not specific to any project status, or not specific to any project access
+ level. If neither of those are defined, we give the user an empty
+ permission set.
+ """
+ # Site admins get ADMIN_PERMISSIONSET regardless of groups or projects.
+ if user and user.is_site_admin:
+ return ADMIN_PERMISSIONSET
+
+ # Grant the borg job permission to view/edit groups
+ if user and user.email == settings.borg_service_account:
+ return GROUP_IMPORT_BORG_PERMISSIONSET
+
+ # Anon users don't need to accumulate anything.
+ if not effective_ids:
+ role, status, access = _GetPermissionKey(None, project)
+ return _LookupPermset(role, status, access)
+
+ effective_perms = set()
+ consider_restrictions = True
+
+ # Check for signed-in user with no roles in the current project.
+ if not project or not framework_bizobj.UserIsInProject(
+ project, effective_ids):
+ role, status, access = _GetPermissionKey(None, project)
+ return _LookupPermset(USER_ROLE, status, access)
+
+ # Signed-in user gets the union of all his/her PermissionSets from the table.
+ for user_id in effective_ids:
+ role, status, access = _GetPermissionKey(user_id, project)
+ role_perms = _LookupPermset(role, status, access)
+ # Accumulate a union of all the user's permissions.
+ effective_perms.update(role_perms.perm_names)
+ # If any role allows the user to ignore restriction labels, then
+ # ignore them overall.
+ if not role_perms.consider_restrictions:
+ consider_restrictions = False
+
+ return PermissionSet(
+ effective_perms, consider_restrictions=consider_restrictions)
+
+
+def _LookupPermset(role, status, access):
+ """Lookup the appropriate PermissionSet in _PERMISSIONS_TABLE.
+
+ Args:
+ role: a string indicating the user's role in the project.
+ status: a Project PB status value, or UNDEFINED_STATUS.
+ access: a Project PB access value, or UNDEFINED_ACCESS.
+
+ Returns:
+ A PermissionSet that is appropriate for that kind of user in that
+ project context.
+ """
+ if (role, status, access) in _PERMISSIONS_TABLE:
+ return _PERMISSIONS_TABLE[(role, status, access)]
+ elif (role, status, WILDCARD_ACCESS) in _PERMISSIONS_TABLE:
+ return _PERMISSIONS_TABLE[(role, status, WILDCARD_ACCESS)]
+ else:
+ return EMPTY_PERMISSIONSET
+
+
+def _GetPermissionKey(user_id, project, expired_before=None):
+ """Return a permission lookup key appropriate for the user and project."""
+ if user_id is None:
+ role = ANON_ROLE
+ elif project and IsExpired(project, expired_before=expired_before):
+ role = USER_ROLE # Do not honor roles in expired projects.
+ elif project and user_id in project.owner_ids:
+ role = OWNER_ROLE
+ elif project and user_id in project.committer_ids:
+ role = COMMITTER_ROLE
+ elif project and user_id in project.contributor_ids:
+ role = CONTRIBUTOR_ROLE
+ else:
+ role = USER_ROLE
+
+ # TODO(jrobbins): re-implement same_org
+
+ if project is None:
+ status = UNDEFINED_STATUS
+ else:
+ status = project.state
+
+ if project is None:
+ access = UNDEFINED_ACCESS
+ else:
+ access = project.access
+
+ return role, status, access
+
+
+def GetExtraPerms(project, member_id):
+ """Return a list of extra perms for the user in the project.
+
+ Args:
+ project: Project PB for the current project.
+ member_id: user id of a project owner, member, or contributor.
+
+ Returns:
+ A list of strings for the extra perms granted to the
+ specified user in this project. The list will often be empty.
+ """
+
+ extra_perms = FindExtraPerms(project, member_id)
+
+ if extra_perms:
+ return list(extra_perms.perms)
+ else:
+ return []
+
+
+def FindExtraPerms(project, member_id):
+ """Return a ExtraPerms PB for the given user in the project.
+
+ Args:
+ project: Project PB for the current project, or None if the user is
+ not currently in a project.
+ member_id: user ID of a project owner, member, or contributor.
+
+ Returns:
+ An ExtraPerms PB, or None.
+ """
+ if not project:
+ # TODO(jrobbins): maybe define extra perms for site-wide operations.
+ return None
+
+ # Users who have no current role cannot have any extra perms. Don't
+ # consider effective_ids (which includes user groups) for this check.
+ if not framework_bizobj.UserIsInProject(project, {member_id}):
+ return None
+
+ for extra_perms in project.extra_perms:
+ if extra_perms.member_id == member_id:
+ return extra_perms
+
+ return None
+
+
+def GetCustomPermissions(project):
+ """Return a sorted iterable of custom perms granted in a project."""
+ custom_permissions = set()
+ for extra_perms in project.extra_perms:
+ for perm in extra_perms.perms:
+ if perm not in STANDARD_PERMISSIONS:
+ custom_permissions.add(perm)
+
+ return sorted(custom_permissions)
+
+
+def UserCanViewProject(user, effective_ids, project, expired_before=None):
+ """Return True if the user can view the given project.
+
+ Args:
+ user: User protobuf for the user trying to view the project.
+ effective_ids: set of int user IDs of the user trying to view the project
+ (including any groups), or an empty set for anonymous users.
+ project: the Project protobuf to check.
+ expired_before: option time value for testing.
+
+ Returns:
+ True if the user should be allowed to view the project.
+ """
+ perms = GetPermissions(user, effective_ids, project)
+
+ if IsExpired(project, expired_before=expired_before):
+ needed_perm = VIEW_EXPIRED_PROJECT
+ else:
+ needed_perm = VIEW
+
+ return perms.CanUsePerm(needed_perm, effective_ids, project, [])
+
+
+def IsExpired(project, expired_before=None):
+ """Return True if a project deletion has been pending long enough already.
+
+ Args:
+ project: The project being viewed.
+ expired_before: If supplied, this method will return True only if the
+ project expired before the given time.
+
+ Returns:
+ True if the project is eligible for reaping.
+ """
+ if project.state != project_pb2.ProjectState.ARCHIVED:
+ return False
+
+ if expired_before is None:
+ expired_before = int(time.time())
+
+ return project.delete_time and project.delete_time < expired_before
+
+
+def CanDelete(logged_in_user_id, effective_ids, perms, deleted_by_user_id,
+              creator_user_id, project, restrictions, granted_perms=None):
+  """Returns true if user has delete permission.
+
+  Args:
+    logged_in_user_id: int user id of the logged in user.
+    effective_ids: set of int user IDs for the user (including any groups),
+        or an empty set if user is not signed in.
+    perms: instance of PermissionSet describing the current user's permissions.
+    deleted_by_user_id: int user ID of the user having previously deleted this
+        comment, or None, if the comment has never been deleted.
+    creator_user_id: int user ID of the user having created this comment.
+    project: Project PB for the project being accessed, or None if not
+        in a project.
+    restrictions: list of strings that restrict permission usage.
+    granted_perms: optional list of strings of permissions that the user is
+        granted only within the scope of one issue, e.g., by being named in
+        a user-type custom field that grants permissions.
+
+  Returns:
+    True if the logged in user has delete permissions.
+  """
+
+  # User is not logged in or has no permissions.
+  if not logged_in_user_id or not perms:
+    return False
+
+  # Site admin or project owners can delete any comment.
+  permit_delete_any = perms.CanUsePerm(
+      DELETE_ANY, effective_ids, project, restrictions,
+      granted_perms=granted_perms)
+  if permit_delete_any:
+    return True
+
+  # Users cannot undelete unless they deleted.
+  if deleted_by_user_id and deleted_by_user_id != logged_in_user_id:
+    return False
+
+  # Users can delete their own items.
+  # NOTE(review): granted_perms is not passed to this DELETE_OWN check,
+  # unlike the DELETE_ANY check above — confirm this asymmetry is intended.
+  permit_delete_own = perms.CanUsePerm(
+      DELETE_OWN, effective_ids, project, restrictions)
+  if permit_delete_own and creator_user_id == logged_in_user_id:
+    return True
+
+  return False
+
+
+def CanView(effective_ids, perms, project, restrictions, granted_perms=None):
+ """Checks if user has permission to view an issue."""
+ return perms.CanUsePerm(
+ VIEW, effective_ids, project, restrictions, granted_perms=granted_perms)
+
+
+def CanCreateProject(perms):
+ """Return True if the given user may create a project.
+
+ Args:
+ perms: Permissionset for the current user.
+
+ Returns:
+ True if the user should be allowed to create a project.
+ """
+ # "ANYONE" means anyone who has the needed perm.
+ if (settings.project_creation_restriction ==
+ site_pb2.UserTypeRestriction.ANYONE):
+ return perms.HasPerm(CREATE_PROJECT, None, None)
+
+ if (settings.project_creation_restriction ==
+ site_pb2.UserTypeRestriction.ADMIN_ONLY):
+ return perms.HasPerm(ADMINISTER_SITE, None, None)
+
+ return False
+
+
+def CanCreateGroup(perms):
+ """Return True if the given user may create a user group.
+
+ Args:
+ perms: Permissionset for the current user.
+
+ Returns:
+ True if the user should be allowed to create a group.
+ """
+ # "ANYONE" means anyone who has the needed perm.
+ if (settings.group_creation_restriction ==
+ site_pb2.UserTypeRestriction.ANYONE):
+ return perms.HasPerm(CREATE_GROUP, None, None)
+
+ if (settings.group_creation_restriction ==
+ site_pb2.UserTypeRestriction.ADMIN_ONLY):
+ return perms.HasPerm(ADMINISTER_SITE, None, None)
+
+ return False
+
+
+def CanEditGroup(perms, effective_ids, group_owner_ids):
+ """Return True if the given user may edit a user group.
+
+ Args:
+ perms: Permissionset for the current user.
+ effective_ids: set of user IDs for the logged in user.
+ group_owner_ids: set of user IDs of the user group owners.
+
+ Returns:
+ True if the user should be allowed to edit the group.
+ """
+ return (perms.HasPerm(EDIT_GROUP, None, None) or
+ not effective_ids.isdisjoint(group_owner_ids))
+
+
+def CanViewGroup(perms, effective_ids, group_settings, member_ids, owner_ids,
+ user_project_ids):
+ """Return True if the given user may view a user group.
+
+ Args:
+ perms: Permissionset for the current user.
+ effective_ids: set of user IDs for the logged in user.
+ group_settings: PB of UserGroupSettings.
+ member_ids: A list of member ids of this user group.
+ owner_ids: A list of owner ids of this user group.
+ user_project_ids: A list of project ids which the user has a role.
+
+ Returns:
+ True if the user should be allowed to view the group.
+ """
+ if perms.HasPerm(VIEW_GROUP, None, None):
+ return True
+ # The user could view this group with membership of some projects which are
+ # friends of the group.
+ if (group_settings.friend_projects and user_project_ids
+ and (set(group_settings.friend_projects) & set(user_project_ids))):
+ return True
+ visibility = group_settings.who_can_view_members
+ if visibility == usergroup_pb2.MemberVisibility.OWNERS:
+ return not effective_ids.isdisjoint(owner_ids)
+ elif visibility == usergroup_pb2.MemberVisibility.MEMBERS:
+ return (not effective_ids.isdisjoint(member_ids) or
+ not effective_ids.isdisjoint(owner_ids))
+ else:
+ return True
+
+
+def IsBanned(user, user_view):
+ """Return True if this user is banned from using our site."""
+ if user is None:
+ return False # Anyone is welcome to browse
+
+ if user.banned:
+ return True # We checked the "Banned" checkbox for this user.
+
+ if user_view:
+ if user_view.domain in settings.banned_user_domains:
+ return True # Some spammers create many accounts with the same domain.
+
+ return False
+
+
+def CanViewContributorList(mr):
+ """Return True if we should display the list project contributors.
+
+ This is used on the project summary page, when deciding to offer the
+ project People page link, and when generating autocomplete options
+ that include project members.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ True if we should display the project contributor list.
+ """
+ if not mr.project:
+ return False # We are not even in a project context.
+
+ if not mr.project.only_owners_see_contributors:
+ return True # Contributor list is not resticted.
+
+ # If it is hub-and-spoke, check for the perm that allows the user to
+ # view it anyway.
+ return mr.perms.HasPerm(
+ VIEW_CONTRIBUTOR_LIST, mr.auth.user_id, mr.project)
+
+
+def ShouldCheckForAbandonment(mr):
+ """Return True if user should be warned before changing/deleting their role.
+
+ Args:
+ mr: common info parsed from the user's request.
+
+ Returns:
+ True if user should be warned before changing/deleting their role.
+ """
+ # Note: No need to warn admins because they won't lose access anyway.
+ if mr.perms.CanUsePerm(
+ ADMINISTER_SITE, mr.auth.effective_ids, mr.project, []):
+ return False
+
+ return mr.perms.CanUsePerm(
+ EDIT_PROJECT, mr.auth.effective_ids, mr.project, [])
+
+
+# For speed, we remember labels that we have already classified as being
+# restriction labels or not being restriction labels. These sets are for
+# restrictions in general, not for any particular perm.
+# NOTE(review): these caches grow without bound over the process lifetime;
+# no eviction is visible in this module.
+_KNOWN_RESTRICTION_LABELS = set()
+_KNOWN_NON_RESTRICTION_LABELS = set()
+
+
+def IsRestrictLabel(label, perm=''):
+  """Returns True if a given label is a restriction label.
+
+  Args:
+    label: string for the label to examine.
+    perm: a permission that can be restricted (e.g. 'View' or 'Edit').
+      Defaults to '' to mean 'any'.
+
+  Returns:
+    True if a given label is a restriction label (of the specified perm)
+  """
+  if label in _KNOWN_NON_RESTRICTION_LABELS:
+    return False
+  # The positive cache can only be trusted in the 'any perm' case: a label
+  # may be a restriction in general but not of the specific perm asked for.
+  if not perm and label in _KNOWN_RESTRICTION_LABELS:
+    return True
+
+  prefix = ('restrict-%s-' % perm.lower()) if perm else 'restrict-'
+  is_restrict = label.lower().startswith(prefix) and label.count('-') >= 2
+
+  if is_restrict:
+    # A restriction of a specific perm is also a restriction in general,
+    # so caching here is safe even when perm was specified.
+    _KNOWN_RESTRICTION_LABELS.add(label)
+  elif not perm:
+    # Only cache negatives for the 'any perm' case; a label that does not
+    # restrict this perm could still restrict some other perm.
+    _KNOWN_NON_RESTRICTION_LABELS.add(label)
+
+  return is_restrict
+
+
+def HasRestrictions(issue, perm=''):
+ """Return True if the issue has any restrictions (on the specified perm)."""
+ return (
+ any(IsRestrictLabel(lab, perm=perm) for lab in issue.labels) or
+ any(IsRestrictLabel(lab, perm=perm) for lab in issue.derived_labels))
+
+
+def GetRestrictions(issue):
+ """Return a list of restriction labels on the given issue."""
+ if not issue:
+ return []
+
+ return [lab.lower() for lab in tracker_bizobj.GetLabels(issue)
+ if IsRestrictLabel(lab)]
+
+
+def CanViewIssue(
+ effective_ids, perms, project, issue, allow_viewing_deleted=False,
+ granted_perms=None):
+ """Checks if user has permission to view an artifact.
+
+ Args:
+ effective_ids: set of user IDs for the logged in user and any user
+ group memberships. Should be an empty set for anon users.
+ perms: PermissionSet for the user.
+ project: Project PB for the project that contains this issue.
+ issue: Issue PB for the issue being viewed.
+ allow_viewing_deleted: True if the user should be allowed to view
+ deleted artifacts.
+ granted_perms: optional list of strings of permissions that the user is
+ granted only within the scope of one issue, e.g., by being named in
+ a user-type custom field that grants permissions.
+
+ Returns:
+ True iff the user can view the specified issue.
+ """
+ if issue.deleted and not allow_viewing_deleted:
+ # No one can view a deleted issue. If the user can undelete, that
+ # goes through the custom 404 page.
+ return False
+
+ # Check to see if the user can view anything in the project.
+ if not perms.CanUsePerm(VIEW, effective_ids, project, []):
+ return False
+
+ if not HasRestrictions(issue):
+ return True
+
+ return CanViewRestrictedIssueInVisibleProject(
+ effective_ids, perms, project, issue, granted_perms=granted_perms)
+
+
+def CanViewRestrictedIssueInVisibleProject(
+ effective_ids, perms, project, issue, granted_perms=None):
+ """Return True if the user can view this issue. Assumes project is OK."""
+ # The reporter, owner, and CC'd users can always see the issue.
+ # In effect, these fields override artifact restriction labels.
+ if effective_ids:
+ if (issue.reporter_id in effective_ids or
+ tracker_bizobj.GetOwnerId(issue) in effective_ids or
+ not effective_ids.isdisjoint(tracker_bizobj.GetCcIds(issue))):
+ return True
+
+ # Otherwise, apply the usual permission checking.
+ return CanView(
+ effective_ids, perms, project, GetRestrictions(issue),
+ granted_perms=granted_perms)
+
+
+def CanEditIssue(effective_ids, perms, project, issue, granted_perms=None):
+ """Return True if a user can edit an issue.
+
+ Args:
+ effective_ids: set of user IDs for the logged in user and any user
+ group memberships. Should be an empty set for anon users.
+ perms: PermissionSet for the user.
+ project: Project PB for the project that contains this issue.
+ issue: Issue PB for the issue being viewed.
+ granted_perms: optional list of strings of permissions that the user is
+ granted only within the scope of one issue, e.g., by being named in
+ a user-type custom field that grants permissions.
+
+ Returns:
+ True iff the user can edit the specified issue.
+ """
+ # TODO(jrobbins): We need to actually grant View+EditIssue in most cases.
+ # So, always grant View whenever there is any granted perm.
+ if not CanViewIssue(
+ effective_ids, perms, project, issue, granted_perms=granted_perms):
+ return False
+
+ # The issue owner can always edit the issue.
+ if effective_ids:
+ if tracker_bizobj.GetOwnerId(issue) in effective_ids:
+ return True
+
+ # Otherwise, apply the usual permission checking.
+ return perms.CanUsePerm(
+ EDIT_ISSUE, effective_ids, project, GetRestrictions(issue),
+ granted_perms=granted_perms)
+
+
+def CanCommentIssue(effective_ids, perms, project, issue, granted_perms=None):
+ """Return True if a user can comment on an issue."""
+
+ return perms.CanUsePerm(
+ ADD_ISSUE_COMMENT, effective_ids, project,
+ GetRestrictions(issue), granted_perms=granted_perms)
+
+
+def CanViewComponentDef(effective_ids, perms, project, component_def):
+ """Return True if a user can view the given component definition."""
+ if not effective_ids.isdisjoint(component_def.admin_ids):
+ return True # Component admins can view that component.
+
+ # TODO(jrobbins): check restrictions on the component definition.
+ return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditComponentDef(effective_ids, perms, project, component_def, config):
+ """Return True if a user can edit the given component definition."""
+ if not effective_ids.isdisjoint(component_def.admin_ids):
+ return True # Component admins can edit that component.
+
+ # Check to see if user is admin of any parent component.
+ parent_components = tracker_bizobj.FindAncestorComponents(
+ config, component_def)
+ for parent in parent_components:
+ if not effective_ids.isdisjoint(parent.admin_ids):
+ return True
+
+ return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+def CanViewFieldDef(effective_ids, perms, project, field_def):
+ """Return True if a user can view the given field definition."""
+ if not effective_ids.isdisjoint(field_def.admin_ids):
+ return True # Field admins can view that field.
+
+ # TODO(jrobbins): check restrictions on the field definition.
+ return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditFieldDef(effective_ids, perms, project, field_def):
+ """Return True if a user can edit the given field definition."""
+ if not effective_ids.isdisjoint(field_def.admin_ids):
+ return True # Field admins can edit that field.
+
+ return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+def CanViewTemplate(effective_ids, perms, project, template):
+ """Return True if a user can view the given issue template."""
+ if not effective_ids.isdisjoint(template.admin_ids):
+ return True # template admins can view that template.
+
+ # Members-only templates are only shown to members, other templates are
+ # shown to any user that is generally allowed to view project content.
+ if template.members_only:
+ return framework_bizobj.UserIsInProject(project, effective_ids)
+ else:
+ return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditTemplate(effective_ids, perms, project, template):
+ """Return True if a user can edit the given field definition."""
+ if not effective_ids.isdisjoint(template.admin_ids):
+ return True # Template admins can edit that template.
+
+ return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+# Module exception hierarchy: all permission errors derive from Error.
+class Error(Exception):
+  """Base class for errors from this module."""
+
+
+class PermissionException(Error):
+  """The user is not authorized to make the current request."""
+
+
+class BannedUserException(Error):
+  """The user has been banned from using our service."""
diff --git a/appengine/monorail/framework/profiler.py b/appengine/monorail/framework/profiler.py
new file mode 100644
index 0000000..7c34891
--- /dev/null
+++ b/appengine/monorail/framework/profiler.py
@@ -0,0 +1,92 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A simple profiler object to track how time is spent on a request.
+
+The profiler is called from application code at the beginning and
+end of each major phase and subphase of processing. The profiler
+object keeps track of how much time was spent on each phase or subphase.
+
+This class is useful when developers need to understand where
+server-side time is being spent. It includes durations in
+milliseconds, and a simple bar chart on the HTML page.
+
+On-page debugging and performance info is useful because it makes it easier
+to explore performance interactively.
+"""
+
+import logging
+import time
+
+from contextlib import contextmanager
+
+
class Profiler(object):
  """Records how request-processing time is spent, for display and debugging.

  A profile is a tree of _Phase objects: the top phase spans the whole
  request and each nested Phase() context opens a subphase beneath the
  phase that is currently open.
  """

  # Hex RGB shorthand colors cycled through for the on-page bar chart.
  # NOTE(review): '630' appears twice; '603' may have been intended — confirm.
  _COLORS = ['900', '090', '009', '360', '306', '036',
             '630', '630', '063', '333']

  def __init__(self):
    """Each request processing profile begins with an empty list of phases."""
    self.top_phase = _Phase('overall profile', -1, None)
    self.current_phase = self.top_phase
    self.next_color = 0

  @contextmanager
  def Phase(self, name='unspecified phase'):
    """Context manager that opens a (sub)phase and closes it on exit."""
    chosen_color = self._COLORS[self.next_color % len(self._COLORS)]
    self.next_color += 1
    self.current_phase = _Phase(name, chosen_color, self.current_phase)
    try:
      yield
    finally:
      # End() returns the parent phase, which becomes current again.
      self.current_phase = self.current_phase.End()

  def LogStats(self):
    """Log sufficiently-long phases and subphases, for debugging purposes."""
    self.top_phase.LogStats()
+
+
+class _Phase(object):
+ """A _Phase instance represents a period of time during request processing."""
+
+ def __init__(self, name, color, parent):
+ """Initialize a (sub)phase with the given name and current system clock."""
+ self.start = time.time()
+ self.name = name
+ self.color = color
+ self.subphases = []
+ self.elapsed_seconds = None
+ self.ms = 'in_progress' # shown if the phase never records a finish.
+ self.uncategorized_ms = None
+ self.parent = parent
+ if self.parent is not None:
+ self.parent._RegisterSubphase(self)
+
+ def _RegisterSubphase(self, subphase):
+ """Add a subphase to this phase."""
+ self.subphases.append(subphase)
+
+ def End(self):
+ """Record the time between the start and end of this (sub)phase."""
+ self.elapsed_seconds = time.time() - self.start
+ self.ms = str(int(self.elapsed_seconds * 1000))
+ categorized = sum(sub.elapsed_seconds for sub in self.subphases)
+ self.uncategorized_ms = int((self.elapsed_seconds - categorized) * 1000)
+ return self.parent
+
+ def LogStats(self):
+ # Phases that took longer than 30ms are interesting.
+ if self.elapsed_seconds > 0.03:
+ logging.info('%5s: %s', self.ms, self.name)
+ for subphase in self.subphases:
+ subphase.LogStats()
diff --git a/appengine/monorail/framework/ratelimiter.py b/appengine/monorail/framework/ratelimiter.py
new file mode 100644
index 0000000..0c968df
--- /dev/null
+++ b/appengine/monorail/framework/ratelimiter.py
@@ -0,0 +1,178 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Request rate limiting implementation.
+
+This is intended to be used for automatic DDoS protection.
+
+"""
+
+import datetime
+import logging
+import settings
+import time
+
+from infra_libs import ts_mon
+
+from google.appengine.api import memcache
+from google.appengine.api.modules import modules
+from google.appengine.api import users
+
N_MINUTES = 5  # Length of the sliding window: five one-minute buckets.
EXPIRE_AFTER_SECS = 60 * 60  # Memcache TTL for the counter buckets.
DEFAULT_LIMIT = 300  # 300 requests in 5 minutes is 1 QPS.

ANON_USER = 'anon'  # Placeholder "email" used for signed-out requesters.

COUNTRY_HEADER = 'X-AppEngine-Country'

COUNTRY_LIMITS = {
    # Two-letter country code: max requests per N_MINUTES
    # This limit will apply to all requests coming
    # from this country.
    # To add a country code, see GAE logs and use the
    # appropriate code from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
    # E.g., 'cn': 300,  # Limit to 1 QPS.
}

# Modules not in this list will not have rate limiting applied by this
# class.
MODULE_WHITELIST = ['default']
+
def _CacheKeys(request, now_sec):
  """Build the sets of memcache keys for this request's rate-limit buckets.

  Returns a tuple (keysets, country, ip, user_email).  keysets is a list of
  lists of strings: each inner list holds keys sharing one prefix, differing
  only in a per-minute timestamp suffix, most recent first.
  """
  now = datetime.datetime.fromtimestamp(now_sec)
  country = request.headers.get(COUNTRY_HEADER, 'ZZ')
  ip = request.remote_addr
  minute_buckets = [now - datetime.timedelta(minutes=m)
                    for m in range(N_MINUTES)]
  user = users.get_current_user()
  user_email = user.email() if user else ANON_USER

  # Each prefix is an <IP, country, user_email> triple.
  if user:
    # All logged-in users get a per-user rate limit, regardless of IP
    # and country.
    prefixes = [['ALL', 'ALL', user.email()]]
  else:
    # All anon requests get a per-IP ratelimit.
    prefixes = [[ip, 'ALL', 'ALL']]

  # All requests from a problematic country get a per-country rate limit,
  # regardless of the user (even a non-logged-in one) or IP.
  if country in COUNTRY_LIMITS:
    prefixes.append(['ALL', country, 'ALL'])

  keysets = []
  for prefix in prefixes:
    joined_prefix = '-'.join(prefix)
    keysets.append([
        'ratelimit-%s-%s' % (
            joined_prefix, str(bucket.replace(second=0, microsecond=0)))
        for bucket in minute_buckets])

  return keysets, country, ip, user_email
+
class RateLimiter:
  """Enforces request rate limits per user, per IP, and per country.

  Request counts are kept in memcache in one-minute buckets (see
  _CacheKeys).  A request is rejected when the sum over the last N_MINUTES
  buckets exceeds the applicable limit.
  """

  blocked_requests = ts_mon.CounterMetric(
      'monorail/ratelimiter/blocked_request')
  limit_exceeded = ts_mon.CounterMetric(
      'monorail/ratelimiter/rate_exceeded')
  cost_thresh_exceeded = ts_mon.CounterMetric(
      'monorail/ratelimiter/cost_thresh_exceeded')
  checks = ts_mon.CounterMetric(
      'monorail/ratelimiter/check')

  def __init__(self, _cache=memcache, fail_open=True, **_kwargs):
    # fail_open: if True, let requests through when memcache is
    # unavailable; if False, reject them.
    self.fail_open = fail_open

  def CheckStart(self, request, now=None):
    """Count this request and reject it if it is over the limit.

    Args:
      request: the incoming request object.
      now: float seconds since the epoch; defaults to the current time.

    Raises:
      RateLimitExceeded: a bucket went over its limit (and rate limiting
          is enabled), or memcache failed while configured to fail closed.
    """
    if (modules.get_current_module_name() not in MODULE_WHITELIST or
        users.is_current_user_admin()):
      return
    logging.info('X-AppEngine-Country: %s' %
        request.headers.get(COUNTRY_HEADER, 'ZZ'))

    if now is None:
      now = time.time()

    keysets, country, ip, user_email = _CacheKeys(request, now)
    # There are either two or three sets of keys in keysets.
    # Three if the user's country is in COUNTRY_LIMITS, otherwise two.
    for keys in keysets:
      count = 0
      try:
        counters = memcache.get_multi(keys)
        count = sum(counters.values())
        self.checks.increment({'type': 'success'})
      except Exception as e:
        logging.error(e)
        if not self.fail_open:
          # BUGFIX: rejecting here means failing *closed*; the metric
          # labels in these two branches were previously swapped.
          self.checks.increment({'type': 'fail_closed'})
          raise RateLimitExceeded(country=country, ip=ip, user_email=user_email)
        # Memcache failed but we are configured to fail open, so let the
        # request through with whatever partial count we have.
        self.checks.increment({'type': 'fail_open'})

      limit = COUNTRY_LIMITS.get(country, DEFAULT_LIMIT)
      if count > limit:
        # Since webapp2 won't let us return a 429 error code
        # <http://tools.ietf.org/html/rfc6585#section-4>, we can't
        # monitor rate limit exceeded events with our standard tools.
        # We return a 400 with a custom error message to the client,
        # and this logging is so we can monitor it internally.
        logging.info('Rate Limit Exceeded: %s, %s, %s, %d' % (
            country, ip, user_email, count))

        self.limit_exceeded.increment()

        if settings.ratelimiting_enabled:
          self.blocked_requests.increment()
          raise RateLimitExceeded(country=country, ip=ip, user_email=user_email)

      # Only update the latest *time* bucket for each prefix (reverse chron).
      k = keys[0]
      memcache.add(k, 0, time=EXPIRE_AFTER_SECS)
      memcache.incr(k, initial_value=0)

  def CheckEnd(self, request, now, start_time):
    """If a request was expensive to process, charge some extra points
    against this set of buckets.

    We pass in both now and start_time so we can update the buckets
    based on keys created from start_time instead of now.
    now and start_time are float seconds.
    """
    if (modules.get_current_module_name() not in MODULE_WHITELIST or
        not settings.ratelimiting_cost_enabled):
      return

    elapsed_ms = (now - start_time) * 1000
    if elapsed_ms < settings.ratelimiting_cost_thresh_ms:
      return

    # TODO: Look into caching the keys instead of generating them twice
    # for every request. Say, return them from CheckStart so they can
    # be passed back in here later.
    keysets, country, ip, user_email = _CacheKeys(request, start_time)
    # Log once per request, not once per keyset (was duplicated in a loop).
    logging.info('Rate Limit Cost Threshold Exceeded: %s, %s, %s' % (
        country, ip, user_email))
    for keys in keysets:
      # NOTE(review): the metric is incremented once per keyset (2-3x per
      # request), and memcache is only incremented by 1, not by the
      # penalty — confirm both are intended.
      self.cost_thresh_exceeded.increment_by(settings.ratelimiting_cost_penalty)

      # Only update the latest *time* bucket for each prefix (reverse chron).
      k = keys[0]
      memcache.add(k, 0, time=EXPIRE_AFTER_SECS)
      memcache.incr(k, initial_value=0)
+
class RateLimitExceeded(Exception):
  """Raised when a request is over its rate limit and should be rejected."""

  def __init__(self, country=None, ip=None, user_email=None, **_kwargs):
    Exception.__init__(self)
    self.country = country
    self.ip = ip
    self.user_email = user_email

  def __str__(self):
    details = (self.country, self.ip, self.user_email)
    return 'RateLimitExceeded: %s, %s, %s' % details
diff --git a/appengine/monorail/framework/reap.py b/appengine/monorail/framework/reap.py
new file mode 100644
index 0000000..4c2bbd7
--- /dev/null
+++ b/appengine/monorail/framework/reap.py
@@ -0,0 +1,119 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to handle cron requests to expunge doomed and deletable projects."""
+
+import logging
+import time
+
+from framework import jsonfeed
+
+RUN_DURATION_LIMIT = 50 * 60 # 50 minutes
+
+
class Reap(jsonfeed.InternalTask):
  """Cron task handler: find doomed and deletable projects and delete them."""

  def HandleRequest(self, mr):
    """Update/Delete doomed and deletable projects as needed.

    Args:
      mr: common information parsed from the HTTP request.

    Returns:
      Results dictionary in JSON format. The JSON will look like this:
      {
        'doomed_project_ids': <int>,
        'expunged_project_ids': <int>
      }
      doomed_project_ids are the projects which have been marked as deletable.
      expunged_project_ids are the projects that have either been completely
      expunged or are in the midst of being expunged.
    """
    doomed_project_ids = self._MarkDoomedProjects(mr.cnxn)
    expunged_project_ids = self._ExpungeDeletableProjects(mr.cnxn)
    return {
        'doomed_project_ids': doomed_project_ids,
        'expunged_project_ids': expunged_project_ids,
        }

  def _MarkDoomedProjects(self, cnxn):
    """No longer needed projects get doomed, and this marks them deletable."""
    now = int(time.time())
    doomed_project_rows = self.services.project.project_tbl.Select(
        cnxn, cols=['project_id'],
        # We only match projects with real timestamps and not delete_time = 0.
        where=[('delete_time < %s', [now]), ('delete_time != %s', [0])],
        state='archived', limit=1000)
    doomed_project_ids = [row[0] for row in doomed_project_rows]
    for project_id in doomed_project_ids:
      self.services.project.MarkProjectDeletable(
          cnxn, project_id, self.services.config)

    return doomed_project_ids

  def _ExpungeDeletableProjects(self, cnxn):
    """Chip away at deletable projects until they are gone."""
    # Stay within the request deadline; unfinished work is picked up by
    # the next cron run.
    request_deadline = time.time() + RUN_DURATION_LIMIT

    deletable_project_rows = self.services.project.project_tbl.Select(
        cnxn, cols=['project_id'], state='deletable', limit=100)
    deletable_project_ids = [row[0] for row in deletable_project_rows]
    # expunged_project_ids will contain projects that have either been
    # completely expunged or are in the midst of being expunged.
    expunged_project_ids = set()
    for project_id in deletable_project_ids:
      for _part in self._ExpungeParts(cnxn, project_id):
        expunged_project_ids.add(project_id)
        if time.time() > request_deadline:
          return list(expunged_project_ids)

    return list(expunged_project_ids)

  def _ExpungeParts(self, cnxn, project_id):
    """Delete all data from the specified project, one part at a time.

    This method purges all data associated with the specified project. The
    following is purged:
    * All issues of the project.
    * Project config.
    * Saved queries.
    * Filter rules.
    * Former locations.
    * Local ID counters.
    * Quick edit history.
    * Item stars.
    * Project from the DB.

    Returns a generator whose return values can be either issue
    ids or the specified project id. The returned values are intended to be
    iterated over and not read.
    """
    # Purge all issues of the project.
    # NOTE(review): this loop always breaks after one pass, so at most 1000
    # issues are purged per call; later cron runs chip away at the rest —
    # confirm that is intended.
    while True:
      issue_id_rows = self.services.issue.issue_tbl.Select(
          cnxn, cols=['id'], project_id=project_id, limit=1000)
      issue_ids = [row[0] for row in issue_id_rows]
      for issue_id in issue_ids:
        self.services.issue_star.ExpungeStars(cnxn, issue_id)
      self.services.issue.ExpungeIssues(cnxn, issue_ids)
      yield issue_ids
      break

    # All project purge functions are called with cnxn and project_id.
    project_purge_functions = (
      self.services.config.ExpungeConfig,
      self.services.features.ExpungeSavedQueriesExecuteInProject,
      self.services.features.ExpungeFilterRules,
      self.services.issue.ExpungeFormerLocations,
      self.services.issue.ExpungeLocalIDCounters,
      self.services.features.ExpungeQuickEditHistory,
      self.services.project_star.ExpungeStars,
      self.services.project.ExpungeProject,
      )

    for f in project_purge_functions:
      f(cnxn, project_id)
      yield project_id
diff --git a/appengine/monorail/framework/registerpages_helpers.py b/appengine/monorail/framework/registerpages_helpers.py
new file mode 100644
index 0000000..5a86336
--- /dev/null
+++ b/appengine/monorail/framework/registerpages_helpers.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file sets up all the urls for monorail pages."""
+
+
+import httplib
+import logging
+
+import webapp2
+
+
def MakeRedirect(redirect_to_this_uri, permanent=True):
  """Return a new request handler class that redirects to the given URL.

  Args:
    redirect_to_this_uri: target URL sent in the Location header.
    permanent: True for an HTTP 301 response, False for an HTTP 302.

  Returns:
    A webapp2.RequestHandler subclass that issues the redirect.
  """

  class Redirect(webapp2.RequestHandler):
    """Response handler that redirects every GET to another URI."""

    def get(self, **_kw):
      """Send the 301/302 response code and write the Location: redirect."""
      status = httplib.MOVED_PERMANENTLY if permanent else httplib.FOUND
      self.response.location = redirect_to_this_uri
      self.response.headers.add('Strict-Transport-Security',
                                'max-age=31536000; includeSubDomains')
      self.response.status = status

  return Redirect
+
+
def MakeRedirectInScope(uri_in_scope, scope, permanent=True):
  """Redirect to a URI within a given scope, e.g., per project or user.

  Args:
    uri_in_scope: a uri within a project or user starting with a slash.
    scope: a string indicating the uri-space scope:
      p for project pages
      u for user pages
      g for group pages
    permanent: True for a HTTP 301 permanently moved response code,
      otherwise a HTTP 302 temporarily moved response will be used.

  Example:
    self._SetupProjectPage(
        redirect.MakeRedirectInScope('/newpage', 'p'), '/oldpage')

  Returns:
    A class that can be used with webapp2.
  """
  assert uri_in_scope.startswith('/')

  class RedirectInScope(webapp2.RequestHandler):
    """A handler that redirects to another URI in the same scope."""

    def get(self, **_kw):
      """Send the 301/302 response code and write the Location: redirect."""
      # Paths look like /p/<project_or_user>/..., so index 1 names the
      # project, user, or group being viewed.
      parts = self.request.path.lstrip('/').split('/')
      if len(parts) > 1:
        destination = '//%s/%s/%s%s' % (
            self.request.host, scope, parts[1], uri_in_scope)
      else:
        # No scope instance in the path; fall back to the site home page.
        destination = '/'
      self.response.location = destination

      self.response.headers.add('Strict-Transport-Security',
                                'max-age=31536000; includeSubDomains')
      self.response.status = (
          httplib.MOVED_PERMANENTLY if permanent else httplib.FOUND)

  return RedirectInScope
diff --git a/appengine/monorail/framework/servlet.py b/appengine/monorail/framework/servlet.py
new file mode 100644
index 0000000..ce1e25e
--- /dev/null
+++ b/appengine/monorail/framework/servlet.py
@@ -0,0 +1,909 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Base classes for Monorail servlets.
+
+This base class provides HTTP get() and post() methods that
+conveniently drive the process of parsing the request, checking base
+permissions, gathering common page information, gathering
+page-specific information, and adding on-page debugging information
+(when appropriate). Subclasses can simply implement the page-specific
+logic.
+
+Summary of page classes:
+ Servlet: abstract base class for all Monorail servlets.
+ _ContextDebugItem: displays page_data elements for on-page debugging.
+"""
+
+import httplib
+import json
+import logging
+import os
+import time
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import users
+
+import webapp2
+
+import settings
+from features import savedqueries_helpers
+from framework import actionlimit
+from framework import alerts
+from framework import captcha
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import permissions
+from framework import profiler
+from framework import ratelimiter
+from framework import servlet_helpers
+from framework import template_helpers
+from framework import urls
+from framework import xsrf
+from proto import project_pb2
+from search import query2ast
+from services import issue_svc
+from services import project_svc
+from services import secrets_svc
+from services import user_svc
+from tracker import tracker_views
+
NONCE_LENGTH = 32  # Length of the per-request Content-Security-Policy nonce.

if not settings.unit_test_mode:
  import MySQLdb  # Not available (or needed) when running unit tests.
+
+
class MethodNotSupportedError(NotImplementedError):
  """An exception class for indicating that the method is not supported.

  Used by GatherPageData and ProcessFormData to indicate that GET and POST,
  respectively, are not supported methods on the given Servlet.
  """
+
+
+class Servlet(webapp2.RequestHandler):
+ """Base class for all Monorail servlets.
+
+ Defines a framework of methods that build up parts of the EZT page data.
+
+ Subclasses should override GatherPageData and/or ProcessFormData to
+ handle requests.
+ """
+
  _MAIN_TAB_MODE = None  # Normally overridden in subclasses to be one of these:

  MAIN_TAB_NONE = 't0'
  MAIN_TAB_DASHBOARD = 't1'
  MAIN_TAB_ISSUES = 't2'
  MAIN_TAB_PEOPLE = 't3'
  MAIN_TAB_PROCESS = 't4'
  MAIN_TAB_UPDATES = 't5'
  MAIN_TAB_ADMIN = 't6'
  PROCESS_TAB_SUMMARY = 'st1'
  PROCESS_TAB_STATUSES = 'st3'
  PROCESS_TAB_LABELS = 'st4'
  PROCESS_TAB_RULES = 'st5'
  PROCESS_TAB_TEMPLATES = 'st6'
  PROCESS_TAB_COMPONENTS = 'st7'
  PROCESS_TAB_VIEWS = 'st8'
  ADMIN_TAB_META = 'st1'
  ADMIN_TAB_ADVANCED = 'st9'

  # Most forms require a security token, however if a form is really
  # just redirecting to a search GET request without writing any data,
  # subclass can override this to allow anonymous use.
  CHECK_SECURITY_TOKEN = True

  # Most forms just ignore fields that have value "".  Subclasses can override
  # if needed.
  KEEP_BLANK_FORM_VALUES = False

  # Most forms use regular forms, but subclasses that accept attached files can
  # override this to be True.
  MULTIPART_POST_BODY = False

  # This value should not typically be overridden.
  _TEMPLATE_PATH = framework_constants.TEMPLATE_PATH

  _PAGE_TEMPLATE = None  # Normally overridden in subclasses.
  _ELIMINATE_BLANK_LINES = False

  _CAPTCHA_ACTION_TYPES = []  # Override this in subclass to add captcha.

  # EZT template rendered on PermissionException for logged-in users.
  _MISSING_PERMISSIONS_TEMPLATE = 'sitewide/403-page.ezt'
+
+ def __init__(self, request, response, services=None,
+ content_type='text/html; charset=UTF-8'):
+ """Load and parse the template, saving it for later use."""
+ super(Servlet, self).__init__(request, response)
+ if self._PAGE_TEMPLATE: # specified in subclasses
+ template_path = self._TEMPLATE_PATH + self._PAGE_TEMPLATE
+ self.template = template_helpers.GetTemplate(
+ template_path, eliminate_blank_lines=self._ELIMINATE_BLANK_LINES)
+ else:
+ self.template = None
+
+ self._missing_permissions_template = template_helpers.MonorailTemplate(
+ self._TEMPLATE_PATH + self._MISSING_PERMISSIONS_TEMPLATE)
+ self.services = services or self.app.config.get('services')
+ self.content_type = content_type
+ self.profiler = profiler.Profiler()
+ self.mr = None
+ self.ratelimiter = ratelimiter.RateLimiter()
+
  def dispatch(self):
    """Do common stuff then dispatch the request to get() or put() methods."""
    handler_start_time = time.time()

    logging.info('\n\n\nRequest handler: %r', self)

    self.mr = monorailrequest.MonorailRequest()

    # May raise ratelimiter.RateLimitExceeded, which is handled below.
    self.ratelimiter.CheckStart(self.request)
    self.response.headers.add('Strict-Transport-Security',
        'max-age=31536000; includeSubDomains')

    if self.services.cache_manager:
      # TODO(jrobbins): don't do this step if invalidation_timestep was
      # passed via the request and matches our last timestep
      try:
        with self.profiler.Phase('distributed invalidation'):
          self.services.cache_manager.DoDistributedInvalidation(self.mr.cnxn)

      except MySQLdb.OperationalError as e:
        # The database is unreachable; send users to the maintenance page.
        logging.exception(e)
        self.redirect('/database-maintenance', abort=True)

    try:
      with self.profiler.Phase('parsing request and doing lookups'):
        self.mr.ParseRequest(self.request, self.services, self.profiler)

      self.response.headers['X-Frame-Options'] = 'SAMEORIGIN'
      webapp2.RequestHandler.dispatch(self)

    except user_svc.NoSuchUserException as e:
      logging.warning('Trapped NoSuchUserException %s', e)
      self.abort(404, 'user not found')

    except monorailrequest.InputException as e:
      logging.info('Rejecting invalid input: %r', e)
      self.response.status = httplib.BAD_REQUEST

    except project_svc.NoSuchProjectException as e:
      logging.info('Rejecting invalid request: %r', e)
      self.response.status = httplib.BAD_REQUEST

    except xsrf.TokenIncorrect as e:
      logging.info('Bad XSRF token: %r', e.message)
      self.response.status = httplib.BAD_REQUEST

    except AlreadySentResponseException:
      # If servlet already sent response, then do nothing more.  E.g.,
      # when serving attachment content, we do not use templates.
      pass

    except permissions.BannedUserException as e:
      logging.warning('The user has been banned')
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, urls.BANNED, include_project=False, copy_params=False)
      self.redirect(url, abort=True)

    except actionlimit.ExcessiveActivityException:
      logging.info('Excessive Activity Exception %r', self.mr.auth.user_id)
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, urls.EXCESSIVE_ACTIVITY,
          include_project=False, copy_params=False)
      self.redirect(url, abort=True)

    except ratelimiter.RateLimitExceeded as e:
      logging.info('RateLimitExceeded Exception %s', e)
      self.response.status = httplib.BAD_REQUEST
      self.response.body = 'Slow your roll.'

    finally:
      # Always release request state and charge the ratelimiter for
      # expensive requests, even when a handler redirected or aborted.
      self.mr.CleanUp()
      self.ratelimiter.CheckEnd(self.request, time.time(), handler_start_time)

    total_processing_time = time.time() - handler_start_time
    logging.warn('Processed request in %d ms',
                 int(total_processing_time * 1000))
    if settings.enable_profiler_logging:
      self.profiler.LogStats()
+
+ def _AddHelpDebugPageData(self, page_data):
+ with self.profiler.Phase('help and debug data'):
+ page_data.update(self.GatherHelpData(self.mr, page_data))
+ page_data.update(self.GatherDebugData(self.mr, page_data))
+
  # pylint: disable=unused-argument
  def get(self, **kwargs):
    """Collect page-specific and generic info, then render the page.

    Args:
      Any path components parsed by webapp2 will be in kwargs, but we do
      our own parsing later anyway, so ignore them for now.
    """
    page_data = {}
    nonce = framework_helpers.MakeRandomKey(length=NONCE_LENGTH)
    try:
      csp_header = 'Content-Security-Policy'
      csp_scheme = 'https:'
      if settings.dev_mode:
        # Report-Only lets local development proceed while still logging
        # would-be CSP violations.
        csp_header = 'Content-Security-Policy-Report-Only'
        csp_scheme = 'http:'
      user_agent = self.mr.request.headers.get('User-Agent', '')
      # Browsers that honor the script nonce do not need 'self'.
      csp_supports_nonce = (
          ('Chrome' in user_agent or 'Firefox' in user_agent) and
          ('Edge' not in user_agent))
      # NOTE(review): CSP3 spells this keyword 'strict-dynamic';
      # 'unsafe-dynamic' was the draft name — confirm browsers accept it.
      self.response.headers.add(csp_header,
           ("default-src %(scheme)s ; "
            "script-src"
            " 'unsafe-inline'"  # Only counts in browsers that lack CSP2.
            " 'unsafe-dynamic'"  # Allows <script nonce> to load more.
            " https://www.gstatic.com/recaptcha/api2/"
            " %(csp_self)s 'nonce-%(nonce)s'; "
            "child-src https://www.google.com/recaptcha/; "
            "frame-src https://www.google.com/recaptcha/; "
            "img-src %(scheme)s data: blob: ; "
            "style-src %(scheme)s 'unsafe-inline'; "
            "object-src 'none'; "
            "report-uri /csp.do" % {
            'nonce': nonce,
            'scheme': csp_scheme,
            'csp_self': '' if csp_supports_nonce else "'self'",
            }))

      page_data.update(self._GatherFlagData(self.mr))

      # Page-specific work happens in this call.
      page_data.update(self._DoPageProcessing(self.mr, nonce))

      self._AddHelpDebugPageData(page_data)

      with self.profiler.Phase('rendering template'):
        self._RenderResponse(page_data)

    except (MethodNotSupportedError, NotImplementedError) as e:
      # Instead of these pages throwing 500s display the 404 message and log.
      # The motivation of this is to minimize 500s on the site to keep alerts
      # meaningful during fuzzing. For more context see
      # https://bugs.chromium.org/p/monorail/issues/detail?id=659
      logging.warning('Trapped NotImplementedError %s', e)
      self.abort(404, 'invalid page')
    except query2ast.InvalidQueryError as e:
      logging.warning('Trapped InvalidQueryError: %s', e)
      logging.exception(e)
      msg = e.message if e.message else 'invalid query'
      self.abort(400, msg)
    except permissions.PermissionException as e:
      logging.warning('Trapped PermissionException %s', e)
      if not self.mr.auth.user_id:
        # If not logged in, let them log in
        url = _SafeCreateLoginURL(self.mr)
        self.redirect(url, abort=True)
      else:
        # Display the missing permissions template.
        self.response.status = httplib.FORBIDDEN
        page_data = {'reason': e.message}
        with self.profiler.Phase('gather base data'):
          page_data.update(self.GatherBaseData(self.mr, nonce))
        self._AddHelpDebugPageData(page_data)
        self._missing_permissions_template.WriteResponse(
            self.response, page_data, content_type=self.content_type)
+
+ def SetCacheHeaders(self, response):
+ """Set headers to allow the response to be cached."""
+ headers = framework_helpers.StaticCacheHeaders()
+ for name, value in headers:
+ response.headers[name] = value
+
+ def GetTemplate(self, _page_data):
+ """Get the template to use for writing the http response.
+
+ Defaults to self.template. This method can be overwritten in subclasses
+ to allow dynamic template selection based on page_data.
+
+ Args:
+ _page_data: A dict of data for ezt rendering, containing base ezt
+ data, captcha data, page data, and debug data.
+
+ Returns:
+ The template to be used for writing the http response.
+ """
+ return self.template
+
+ def _GatherFlagData(self, mr):
+ page_data = {
+ 'recaptcha_public_key': secrets_svc.GetRecaptchaPublicKey(),
+ 'project_stars_enabled': ezt.boolean(
+ settings.enable_project_stars),
+ 'user_stars_enabled': ezt.boolean(settings.enable_user_stars),
+ 'can_create_project': ezt.boolean(
+ permissions.CanCreateProject(mr.perms)),
+ 'can_create_group': ezt.boolean(
+ permissions.CanCreateGroup(mr.perms)),
+ }
+
+ return page_data
+
+ def _RenderResponse(self, page_data):
+ logging.info('rendering response len(page_data) is %r', len(page_data))
+ self.GetTemplate(page_data).WriteResponse(
+ self.response, page_data, content_type=self.content_type)
+
  def ProcessFormData(self, mr, post_data):
    """Handle form data and redirect appropriately.

    Subclasses that accept POSTs override this method; this base
    implementation rejects the request.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.

    Raises:
      MethodNotSupportedError: always, in this base implementation.
    """
    raise MethodNotSupportedError()
+
  def post(self, **kwargs):
    """Parse the request, check base perms, and call form-specific code."""
    try:
      # Page-specific work happens in this call.
      self._DoFormProcessing(self.request, self.mr)

    except permissions.PermissionException as e:
      logging.warning('Trapped permission-related exception "%s".', e)
      # TODO(jrobbins): can we do better than an error page? not much.
      self.response.status = httplib.BAD_REQUEST

    except issue_svc.MidAirCollisionException as e:
      # Someone else changed the same artifact concurrently; send the
      # user to a page that explains the collision.
      logging.info('Mid-air collision detected.')
      collision_page_url = urls.ARTIFACT_COLLISION
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, collision_page_url, copy_params=False,
          name=e.name, continue_issue_id=e.continue_issue_id,
          ts=int(time.time()))
      self.redirect(url, abort=True)
+
  def _DoCommonRequestProcessing(self, request, mr):
    """Do common processing dependent on having the user and project pbs.

    Redirects if the project has moved, then checks this servlet's base
    permission; either step may abort the request.
    """
    with self.profiler.Phase('basic processing'):
      self._CheckForMovedProject(mr, request)
      self.AssertBasePermission(mr)
+
  def _DoPageProcessing(self, mr, nonce):
    """Do user lookups and gather page-specific ezt data.

    Args:
      mr: commonly used info parsed from the request.
      nonce: random string used in this page's Content-Security-Policy.

    Returns:
      Dict of EZT data: base data, captcha data, page-specific data, and
      any form overrides, merged in that order (later updates win).
    """
    with self.profiler.Phase('common request data'):
      self._DoCommonRequestProcessing(self.request, mr)
      page_data = self.GatherBaseData(mr, nonce)
      page_data.update(self.GatherCaptchaData(mr))

    with self.profiler.Phase('page processing'):
      page_data.update(self.GatherPageData(mr))
      page_data.update(mr.form_overrides)
      template_helpers.ExpandLabels(page_data)

    return page_data
+
  def _DoFormProcessing(self, request, mr):
    """Do user lookups and handle form data.

    Validates the XSRF token (unless the subclass opted out via
    CHECK_SECURITY_TOKEN), then delegates to ProcessFormData().
    """
    self._DoCommonRequestProcessing(request, mr)

    if self.CHECK_SECURITY_TOKEN:
      # Raises xsrf.TokenIncorrect on a missing or invalid token.
      xsrf.ValidateToken(
          request.POST.get('token'), mr.auth.user_id, request.path)

    redirect_url = self.ProcessFormData(mr, request.POST)

    # Most forms redirect the user to a new URL on success.  If no
    # redirect_url was returned, the form handler must have already
    # sent a response.  E.g., bounced the user back to the form with
    # invalid form fields highlighted.
    if redirect_url:
      self.redirect(redirect_url, abort=True)
    else:
      assert self.response.body
+
  def _CheckForMovedProject(self, mr, request):
    """If the project moved, redirect there or to an informational page."""
    if not mr.project:
      return  # We are on a site-wide or user page.
    if not mr.project.moved_to:
      return  # This project has not moved.
    admin_url = '/p/%s%s' % (mr.project_name, urls.ADMIN_META)
    if request.path.startswith(admin_url):
      return  # It moved, but we are near the page that can un-move it.

    logging.info('project %s has moved: %s', mr.project.project_name,
                 mr.project.moved_to)

    moved_to = mr.project.moved_to
    if framework_bizobj.RE_PROJECT_NAME.match(moved_to):
      # Use the redir query parameter to avoid redirect loops.
      if mr.redir is None:
        url = framework_helpers.FormatMovedProjectURL(mr, moved_to)
        if '?' in url:
          url += '&redir=1'
        else:
          url += '?redir=1'
        logging.info('trusted move to a new project on our site')
        # redirect(abort=True) raises, so processing stops here.
        self.redirect(url, abort=True)

    logging.info('not a trusted move, will display link to user to click')
    # Attach the project name as a url param instead of generating a /p/
    # link to the destination project.
    url = framework_helpers.FormatAbsoluteURL(
        mr, urls.PROJECT_MOVED,
        include_project=False, copy_params=False, project=mr.project_name)
    self.redirect(url, abort=True)
+
+ def CheckPerm(self, mr, perm, art=None, granted_perms=None):
+ """Return True if the user can use the requested permission."""
+ return servlet_helpers.CheckPerm(
+ mr, perm, art=art, granted_perms=granted_perms)
+
+ def MakePagePerms(self, mr, art, *perm_list, **kwargs):
+ """Make an EZTItem with a set of permissions needed in a given template.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ art: a project artifact, such as an issue.
+ *perm_list: any number of permission names that are referenced
+ in the EZT template.
+ **kwargs: dictionary that may include 'granted_perms' list of permissions
+ granted to the current user specifically on the current page.
+
+ Returns:
+ An EZTItem with one attribute for each permission and the value
+ of each attribute being an ezt.boolean(). True if the user
+ is permitted to do that action on the given artifact, or
+ False if not.
+ """
+ granted_perms = kwargs.get('granted_perms')
+ page_perms = template_helpers.EZTItem()
+ for perm in perm_list:
+ setattr(
+ page_perms, perm,
+ ezt.boolean(self.CheckPerm(
+ mr, perm, art=art, granted_perms=granted_perms)))
+
+ return page_perms
+
  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page.

    Subclasses should call super, then check additional permissions
    and raise a PermissionException if the user is not authorized to
    do something.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      PermissionException: If the user does not have permission to view
        the current page.
    """
    # Delegates to the shared helper so servlets and other code paths
    # enforce the same base rules (banned users, project visibility).
    servlet_helpers.AssertBasePermission(mr)
+
  def GatherBaseData(self, mr, nonce):
    """Return a dict of info used on almost all pages.

    Args:
      mr: commonly used info parsed from the request.
      nonce: opaque value passed through to the template as 'nonce'.

    Returns:
      A dict of template data used by the common page shell (header,
      search bar, banners, footers) on almost every page.
    """
    project = mr.project

    # Project-dependent values default to empty/None for site-wide pages.
    project_summary = ''
    project_alert = None
    project_read_only = False
    project_home_page = ''
    project_thumbnail_url = ''
    if project:
      project_summary = project.summary
      project_alert = _CalcProjectAlert(project)
      # A truthy reason string doubles as the read-only flag.
      project_read_only = project.read_only_reason
      project_home_page = project.home_page
      project_thumbnail_url = tracker_views.LogoView(project).thumbnail_url

    # If we have both a project and a logged in user, we need to check if the
    # user has starred that project.
    with self.profiler.Phase('project star'):
      is_project_starred = False
      if mr.project and mr.auth.user_id:
        is_project_starred = self.services.project_star.IsItemStarredBy(
            mr.cnxn, mr.project_id, mr.auth.user_id)

    project_view = None
    if mr.project:
      # TODO(jrobbins): should this be a ProjectView?
      project_view = template_helpers.PBProxy(mr.project)

    app_version = os.environ.get('CURRENT_VERSION_ID')

    viewed_username = None
    if mr.viewed_user_auth.user_view:
      viewed_username = mr.viewed_user_auth.user_view.username

    # Issue-grid and canned-query data only exist in a project context.
    grid_x_attr = None
    grid_y_attr = None
    canned_query_views = []
    issue_entry_url = 'entry'
    if mr.project_id and self.services.config:
      with self.profiler.Phase('getting config'):
        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
      canned_queries = self.services.features.GetCannedQueriesByProjectID(
          mr.cnxn, mr.project_id)
      grid_x_attr = (mr.x or config.default_x_attr).lower()
      grid_y_attr = (mr.y or config.default_y_attr).lower()
      canned_query_views = [
          savedqueries_helpers.SavedQueryView(sq, idx + 1, None, None)
          for idx, sq in enumerate(canned_queries)]
      issue_entry_url = _ComputeIssueEntryURL(mr, config)

    if mr.auth.user_id and self.services.features:
      with self.profiler.Phase('getting saved queries'):
        saved_queries = self.services.features.GetSavedQueriesByUserID(
            mr.cnxn, mr.me_user_id)
        # Keep only queries that execute in this project (or anywhere,
        # when we are not in a project context).
        saved_query_views = [
            savedqueries_helpers.SavedQueryView(sq, idx + 1, None, None)
            for idx, sq in enumerate(saved_queries)
            if (mr.project_id in sq.executes_in_project_ids or
                not mr.project_id)]
    else:
      saved_query_views = []

    viewing_self = mr.auth.user_id == mr.viewed_user_auth.user_id
    offer_saved_queries_subtab = (
        viewing_self or mr.auth.user_pb and mr.auth.user_pb.is_site_admin)

    login_url = _SafeCreateLoginURL(mr)
    logout_url = _SafeCreateLogoutURL(mr)
    logout_url_goto_home = users.create_logout_url('/')

    base_data = {
        # EZT does not have constants for True and False, so we pass them in.
        'True': ezt.boolean(True),
        'False': ezt.boolean(False),

        'site_name': settings.site_name,
        'show_search_metadata': ezt.boolean(False),
        'page_template': self._PAGE_TEMPLATE,
        'main_tab_mode': self._MAIN_TAB_MODE,
        'project_summary': project_summary,
        'project_home_page': project_home_page,
        'project_thumbnail_url': project_thumbnail_url,

        'hostport': mr.request.host,
        'absolute_base_url': '%s://%s' % (mr.request.scheme, mr.request.host),
        'project_home_url': None,
        'link_rel_canonical': None,  # For specifying <link rel="canonical">
        'projectname': mr.project_name,
        'project': project_view,
        'project_is_restricted': ezt.boolean(_ProjectIsRestricted(mr)),
        'offer_contributor_list': ezt.boolean(
            permissions.CanViewContributorList(mr)),
        'logged_in_user': mr.auth.user_view,
        'form_token': None,  # Set to a value below iff the user is logged in.
        'form_token_path': None,
        'token_expires_sec': None,
        'xhr_token': None,  # Set to a value below iff the user is logged in.
        'flag_spam_token': None,
        'nonce': nonce,
        'perms': mr.perms,
        'warnings': mr.warnings,
        'errors': mr.errors,

        'viewed_username': viewed_username,
        'viewed_user': mr.viewed_user_auth.user_view,
        'viewed_user_pb': template_helpers.PBProxy(
            mr.viewed_user_auth.user_pb),
        'viewing_self': ezt.boolean(viewing_self),
        'viewed_user_id': mr.viewed_user_auth.user_id,
        'offer_saved_queries_subtab': ezt.boolean(offer_saved_queries_subtab),

        'currentPageURL': mr.current_page_url,
        'currentPageURLEncoded': mr.current_page_url_encoded,
        'login_url': login_url,
        'logout_url': logout_url,
        'logout_url_goto_home': logout_url_goto_home,
        'continue_issue_id': mr.continue_issue_id,
        'feedback_email': settings.feedback_email,
        'category_css': None,  # Used to specify a category of stylesheet
        'page_css': None,  # Used to add a stylesheet to a specific page.

        'can': mr.can,
        'query': mr.query,
        'colspec': None,
        'sortspec': mr.sort_spec,

        'grid_x_attr': grid_x_attr,
        'grid_y_attr': grid_y_attr,
        'grid_cell_mode': mr.cells,
        'grid_mode': None,

        'issue_entry_url': issue_entry_url,
        'canned_queries': canned_query_views,
        'saved_queries': saved_query_views,
        'is_cross_project': ezt.boolean(False),

        # for project search (some also used in issue search)
        'start': mr.start,
        'num': mr.num,
        'groupby': mr.group_by_spec,
        'q_field_size': (
            min(framework_constants.MAX_ARTIFACT_SEARCH_FIELD_SIZE,
                max(framework_constants.MIN_ARTIFACT_SEARCH_FIELD_SIZE,
                    len(mr.query) + framework_constants.AUTOSIZE_STEP))),
        'mode': None,  # Display mode, e.g., grid mode.
        'ajah': mr.ajah,
        'table_title': mr.table_title,

        'alerts': alerts.AlertsView(mr),  # For alert.ezt
        'project_alert': project_alert,

        'title': None,  # First part of page title
        'title_summary': None,  # Appended to title on artifact detail pages

        # TODO(jrobbins): make sure that the templates use
        # project_read_only for project-mutative actions and if any
        # uses of read_only remain.
        'project_read_only': ezt.boolean(project_read_only),
        'site_read_only': ezt.boolean(settings.read_only),
        'banner_time': servlet_helpers.GetBannerTime(settings.banner_time),
        'read_only': ezt.boolean(settings.read_only or project_read_only),
        'site_banner_message': settings.banner_message,
        'robots_no_index': None,
        'analytics_id': settings.analytics_id,

        'is_project_starred': ezt.boolean(is_project_starred),

        'app_version': app_version,
        'viewing_user_page': ezt.boolean(False),
    }

    if mr.project:
      base_data['project_home_url'] = '/p/%s' % mr.project_name

    # Always add an anti-xsrf token when the user is logged in.
    if mr.auth.user_id:
      form_token_path = self._FormHandlerURL(mr.request.path)
      base_data['form_token'] = xsrf.GenerateToken(
          mr.auth.user_id, form_token_path)
      base_data['form_token_path'] = form_token_path
      base_data['token_expires_sec'] = xsrf.TokenExpiresSec()
      base_data['xhr_token'] = xsrf.GenerateToken(
          mr.auth.user_id, xsrf.XHR_SERVLET_PATH)
      base_data['flag_spam_token'] = xsrf.GenerateToken(
          mr.auth.user_id, '/p/%s%s.do' % (
              mr.project_name, urls.ISSUE_FLAGSPAM_JSON))

    return base_data
+
+ def _FormHandlerURL(self, path):
+ """Return the form handler for the main form on a page."""
+ if path.endswith('/'):
+ return path + 'edit.do'
+ elif path.endswith('.do'):
+ return path # This happens as part of PleaseCorrect().
+ else:
+ return path + '.do'
+
+ def GatherCaptchaData(self, mr):
+ """If this page needs a captcha, return captcha info for use in EZT."""
+ if (mr.project and
+ framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
+ # Don't show users CAPTCHAs within their own projects.
+ return {'show_captcha': ezt.boolean(False)}
+
+ show_captcha = any(actionlimit.NeedCaptcha(mr.auth.user_pb, action_type)
+ for action_type in self._CAPTCHA_ACTION_TYPES)
+ return {'show_captcha': ezt.boolean(show_captcha)}
+
  def GatherPageData(self, mr):
    """Return a dict of page-specific ezt data.

    Subclasses must override this to supply the template data for the
    specific page being served.

    Raises:
      MethodNotSupportedError: always, in this base implementation.
    """
    raise MethodNotSupportedError()
+
+ # pylint: disable=unused-argument
+ def GatherHelpData(self, mr, page_data):
+ """Return a dict of values to drive on-page user help.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+ page_data: Dictionary of base and page template data.
+
+ Returns:
+ A dict of values to drive on-page user help, to be added to page_data.
+ """
+ return {
+ 'cue': None, # for cues.ezt
+ }
+
+ def GatherDebugData(self, mr, page_data):
+ """Return debugging info for display at the very bottom of the page."""
+ if mr.debug_enabled:
+ debug = [_ContextDebugCollection('Page data', page_data)]
+ return {
+ 'dbg': 'on',
+ 'debug': debug,
+ 'profiler': self.profiler,
+ }
+ else:
+ if '?' in mr.current_page_url:
+ debug_url = mr.current_page_url + '&debug=1'
+ else:
+ debug_url = mr.current_page_url + '?debug=1'
+
+ return {
+ 'debug_uri': debug_url,
+ 'dbg': 'off',
+ 'debug': [('none', 'recorded')],
+ }
+
  def CheckCaptcha(self, mr, post_data):
    """Check the provided CAPTCHA solution and add an error if it is wrong.

    Args:
      mr: commonly used info parsed from the request; mr.errors.captcha
        is set on failure.
      post_data: POST form data containing the 'g-recaptcha-response'.
    """
    if (mr.project and
        framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
      return  # Don't check a user's actions within their own projects.

    if not any(actionlimit.NeedCaptcha(mr.auth.user_pb, action_type)
               for action_type in self._CAPTCHA_ACTION_TYPES):
      return  # no captcha was needed.

    remote_ip = mr.request.remote_addr
    captcha_response = post_data.get('g-recaptcha-response')
    correct, _msg = captcha.Verify(remote_ip, captcha_response)
    if not correct:
      logging.info('BZzzz! Bad captcha solution.')
      mr.errors.captcha = 'Captcha check failed.'
+
+ def CountRateLimitedActions(self, mr, action_counts):
+ """Count attempted actions against non-member's action limits.
+
+ Note that users can take any number of actions in their own projects.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ action_counts: {action_type: delta, ... }
+ a dictionary mapping action type constants to the number of times
+ that action was performed during the current request (usually 1).
+ """
+ if (mr.project and
+ framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
+ # Don't count a user's actions within their own projects...
+ return
+
+ for action_type in action_counts:
+ actionlimit.CountAction(
+ mr.auth.user_pb, action_type, delta=action_counts[action_type])
+
+ self.services.user.UpdateUser(mr.cnxn, mr.auth.user_id, mr.auth.user_pb)
+
  def PleaseCorrect(self, mr, **echo_data):
    """Show the same form again so that the user can correct their input.

    Args:
      mr: commonly used info parsed from the request.
      **echo_data: form field values to echo back so user input is not lost.
    """
    mr.PrepareForReentry(echo_data)
    # Re-render the GET view of this page with the echoed form data.
    self.get()
+
+
+def _CalcProjectAlert(project):
+ """Return a string to be shown as red text explaning the project state."""
+
+ project_alert = None
+
+ if project.read_only_reason:
+ project_alert = 'READ-ONLY: %s.' % project.read_only_reason
+ if project.moved_to:
+ project_alert = 'This project has moved to: %s.' % project.moved_to
+ elif project.delete_time:
+ delay_seconds = project.delete_time - time.time()
+ delay_days = delay_seconds // framework_constants.SECS_PER_DAY
+ if delay_days <= 0:
+ project_alert = 'Scheduled for deletion today.'
+ else:
+ days_word = 'day' if delay_days == 1 else 'days'
+ project_alert = (
+ 'Scheduled for deletion in %d %s.' % (delay_days, days_word))
+ elif project.state == project_pb2.ProjectState.ARCHIVED:
+ project_alert = 'Project is archived: read-only by members only.'
+
+ return project_alert
+
+
+class _ContextDebugItem(object):
+ """Wrapper class to generate on-screen debugging output."""
+
+ def __init__(self, key, val):
+ """Store the key and generate a string for the value."""
+ self.key = key
+ if isinstance(val, list):
+ nested_debug_strs = [self.StringRep(v) for v in val]
+ self.val = '[%s]' % ', '.join(nested_debug_strs)
+ else:
+ self.val = self.StringRep(val)
+
+ def StringRep(self, val):
+ """Make a useful string representation of the given value."""
+ try:
+ return val.DebugString()
+ except Exception:
+ try:
+ return str(val.__dict__)
+ except Exception:
+ return repr(val)
+
+
class _ContextDebugCollection(object):
  """A titled table of debug items, one row per dictionary entry."""

  def __init__(self, title, collection):
    self.title = title
    # Iterate keys in sorted order so the debug table is deterministic.
    self.collection = [
        _ContextDebugItem(key, collection[key])
        for key in sorted(collection)]
+
+
+def _ProjectIsRestricted(mr):
+ """Return True if the mr has a 'private' project."""
+ return (mr.project and
+ mr.project.access != project_pb2.ProjectAccess.ANYONE)
+
+
+def _ComputeIssueEntryURL(mr, config):
+ """Compute the URL to use for the "New issue" subtab.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ config: ProjectIssueConfig for the current project.
+
+ Returns:
+ A URL string to use. It will be simply "entry" in the non-customized
+ case. Otherewise it will be a fully qualified URL that includes some
+ query string parameters.
+ """
+ if not config.custom_issue_entry_url:
+ return 'entry'
+
+ base_url = config.custom_issue_entry_url
+ sep = '&' if '?' in base_url else '?'
+ token = xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s%s' % (mr.project_name, urls.ISSUE_ENTRY, '.do'))
+ role_name = framework_helpers.GetRoleName(mr.auth.effective_ids, mr.project)
+
+ continue_url = urllib.quote(framework_helpers.FormatAbsoluteURL(
+ mr, urls.ISSUE_ENTRY + '.do'))
+
+ return '%s%stoken=%s&role=%s&continue=%s' % (
+ base_url, sep, urllib.quote(token),
+ urllib.quote(role_name or ''), continue_url)
+
+
def _SafeCreateLoginURL(mr):
  """Make a login URL w/ a detailed continue URL, otherwise use a short one.

  Args:
    mr: commonly used info parsed from the request.

  Returns:
    A login URL string that continues back to the current page when
    possible, falling back to the project home page or site root when
    the continue URL would be too long for the users API.
  """
  try:
    return users.create_login_url(mr.current_page_url)
  except users.RedirectTooLongError:
    if mr.project_name:
      return users.create_login_url('/p/%s' % mr.project_name)
    else:
      return users.create_login_url('/')
+
+
def _SafeCreateLogoutURL(mr):
  """Make a logout URL w/ a detailed continue URL, otherwise use a short one.

  Args:
    mr: commonly used info parsed from the request.

  Returns:
    A logout URL string that continues back to the current page when
    possible, falling back to the project home page or site root when
    the continue URL would be too long for the users API.
  """
  try:
    return users.create_logout_url(mr.current_page_url)
  except users.RedirectTooLongError:
    if mr.project_name:
      return users.create_logout_url('/p/%s' % mr.project_name)
    else:
      return users.create_logout_url('/')
+
+
class Error(Exception):
  """Base class for errors from this module."""
+
+
class AlreadySentResponseException(Error):
  """The servlet already responded, no need to render a page template."""
diff --git a/appengine/monorail/framework/servlet_helpers.py b/appengine/monorail/framework/servlet_helpers.py
new file mode 100644
index 0000000..be79be3
--- /dev/null
+++ b/appengine/monorail/framework/servlet_helpers.py
@@ -0,0 +1,114 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions used by the Monorail servlet base class."""
+
+import datetime
+import logging
+import time
+
+from framework import permissions
+from framework import template_helpers
+
+
# Weekday names indexed by datetime.date.weekday() (0 = Monday).
_WEEKDAY = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday']
+
+
def GetBannerTime(timestamp):
  """Convert a timestamp into EZT-ready data so it can appear in the banner.

  Args:
    timestamp: timestamp expressed in the following format:
        [year, month, day, hour, minute, second],
        e.g. [2009, 3, 20, 21, 45, 50] is March 20 2009 9:45:50 PM.

  Returns:
    EZT-ready data used to display the time inside the banner message,
    or None when no timestamp was given.
  """
  if timestamp is None:
    return None

  ts = datetime.datetime(*[int(part) for part in timestamp])

  # Weekday name and 'hour:min AM/PM' are rendered server-side for
  # users with javascript disabled.
  weekday = _WEEKDAY[ts.weekday()]
  hour_min = datetime.datetime.strftime(ts, '%I:%M%p')

  # Milliseconds since the epoch let the client render the time with
  # javascript in the user's locale.
  ts_ms = time.mktime(ts.timetuple()) * 1000

  return template_helpers.EZTItem(
      ts=ts_ms, year=ts.year, month=ts.month, day=ts.day, hour=ts.hour,
      minute=ts.minute, second=ts.second, weekday=weekday, hour_min=hour_min)
+
+
def AssertBasePermissionForUser(user, user_view):
  """Verify user permissions and state.

  Args:
    user: user_pb2.User protocol buffer for the user.
    user_view: framework.views.UserView for the user.

  Raises:
    BannedUserException: If the user is banned from the site.
  """
  if permissions.IsBanned(user, user_view):
    raise permissions.BannedUserException(
        'You have been banned from using this site')
+
+
def AssertBasePermission(mr):
  """Make sure that the logged in user can view the requested page.

  Args:
    mr: common information parsed from the HTTP request.

  Returns:
    Nothing

  Raises:
    BannedUserException: If the user is banned.
    PermissionException: If the user does not have permission to view.
  """
  AssertBasePermissionForUser(mr.auth.user_pb, mr.auth.user_view)

  # Site-wide pages (no project) are viewable by anyone not banned.
  if mr.project_name and not CheckPerm(mr, permissions.VIEW):
    # Log the effective permissions to aid debugging of access denials.
    logging.info('your perms are %r', mr.perms)
    raise permissions.PermissionException(
        'User is not allowed to view this project')
+
+
def CheckPerm(mr, perm, art=None, granted_perms=None):
  """Convenience function that makes permission checks easier.

  Args:
    mr: common information parsed from the HTTP request.
    perm: A permission constant, defined in module framework.permissions.
    art: Optional artifact pb.
    granted_perms: optional set of perms granted specifically in that artifact.

  Returns:
    A boolean, whether the request can be satisfied, given the permission.
  """
  restrictions = permissions.GetRestrictions(art)
  return mr.perms.CanUsePerm(
      perm, mr.auth.effective_ids, mr.project, restrictions,
      granted_perms=granted_perms)
+
+
def CheckPermForProject(mr, perm, project, art=None):
  """Convenience function for permission checks against a specific project.

  Unlike CheckPerm(), this computes a fresh PermissionSet for the given
  project rather than using the one attached to mr.

  Args:
    mr: common information parsed from the HTTP request.
    perm: A permission constant, defined in module framework.permissions.
    project: The project to enforce permissions for.
    art: Optional artifact pb.

  Returns:
    A boolean, whether the request can be satisfied, given the permission.
  """
  project_perms = permissions.GetPermissions(
      mr.auth.user_pb, mr.auth.effective_ids, project)
  restrictions = permissions.GetRestrictions(art)
  return project_perms.CanUsePerm(
      perm, mr.auth.effective_ids, project, restrictions)
diff --git a/appengine/monorail/framework/sorting.py b/appengine/monorail/framework/sorting.py
new file mode 100644
index 0000000..282935c
--- /dev/null
+++ b/appengine/monorail/framework/sorting.py
@@ -0,0 +1,480 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for sorting lists of project artifacts.
+
+This module exports the SortArtifacts function that does sorting of
+Monorail business objects (e.g., an issue). The sorting is done by
+extracting relevant values from the PB using a dictionary of
+accessor functions.
+
+The desired sorting directives are specified in part of the user's
+HTTP request. This sort spec consists of the names of the columns
+with optional minus signs to indicate descending sort order.
+
+The tool configuration object also affects sorting. When sorting by
+key-value labels, the well-known labels are considered to come
+before any non-well-known labels, and those well-known labels sort in
+the order in which they are defined in the tool config PB.
+"""
+
+import logging
+
+import settings
+from framework import framework_constants
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
+class DescendingValue(object):
+ """A wrapper which reverses the sort order of values."""
+
+ @classmethod
+ def MakeDescendingValue(cls, obj):
+ """Make a value that sorts in the reverse order as obj."""
+ if isinstance(obj, int):
+ return -obj
+ if obj == MAX_STRING:
+ return MIN_STRING
+ if obj == MIN_STRING:
+ return MAX_STRING
+ if isinstance(obj, list):
+ return [cls.MakeDescendingValue(item) for item in reversed(obj)]
+ return DescendingValue(obj)
+
+ def __init__(self, val):
+ self.val = val
+
+ def __cmp__(self, other):
+ """Return -1, 0, or 1 base on the reverse of the normal sort order."""
+ if isinstance(other, DescendingValue):
+ return cmp(other.val, self.val)
+ else:
+ return cmp(other, self.val)
+
+ def __repr__(self):
+ return 'DescendingValue(%r)' % self.val
+
+
# A string that sorts after every other string, and one that sorts before them.
MAX_STRING = '~~~'
MIN_STRING = DescendingValue(MAX_STRING)


# RAMCache of already-computed sort key values, keyed by issue:
# {issue_id: {column_name: sort_key, ...}, ...}.
# Created by InitializeArtValues() and populated lazily by SortArtifacts().
art_values_cache = None
+
+
def InitializeArtValues(services):
  """Create the global RAM cache that holds per-issue sort key values."""
  global art_values_cache
  art_values_cache = services.cache_manager.MakeCache(
      'issue', max_size=settings.issue_cache_max_size)
+
+
def SortArtifacts(
    mr, artifacts, config, accessors, username_cols=None, users_by_id=None):
  """Return a list of artifacts sorted by the user's sort specification.

  In the following, an "accessor" is a function(art) -> [field_value, ...].

  Args:
    mr: commonly used info parsed from the request, including query.
    artifacts: an unsorted list of project artifact PBs.
    config: Project config PB instance that defines the sort order for
        labels and statuses in this project.
    accessors: dictionary of (column_name -> accessor) to get values
        from the artifacts.
    username_cols: optional list of lowercase column names that will show
        user names.
    users_by_id: optional dictionary {user_id: user_view,...} for all users
        who participate in the list of artifacts.

  Returns:
    A sorted list of artifacts.

  Note: if username_cols is supplied, then users_by_id should be too.
  Note: assumes InitializeArtValues() was already called, otherwise
  art_values_cache is still None.

  The approach to sorting is to construct a comprehensive sort key for
  each artifact.  To create the sort key, we (a) build lists with a
  variable number of fields to sort on, and (b) allow individual
  fields to be sorted in descending order.  Even with the time taken
  to build the sort keys, calling sorted() with the key seems to be
  faster overall than doing multiple stable-sorts or doing one sort
  using a multi-field comparison function.
  """
  sort_directives = ComputeSortDirectives(mr, config)

  # Build a list of accessors that will extract sort keys from the issues.
  accessor_pairs = [
      (sd, _MakeCombinedSortKeyAccessor(
          sd, config, accessors, username_cols, users_by_id))
      for sd in sort_directives]

  def SortKey(art):
    """Make a sort_key for the given artifact, used by sorted() below."""
    # Reuse cached per-directive sort key values for this issue, if any.
    if art_values_cache.HasItem(art.issue_id):
      art_values = art_values_cache.GetItem(art.issue_id)
    else:
      art_values = {}

    sort_key = []
    for sd, accessor in accessor_pairs:
      if sd not in art_values:
        art_values[sd] = accessor(art)
      sort_key.append(art_values[sd])

    # Write back so later sorts of the same issue can skip the accessors.
    art_values_cache.CacheItem(art.issue_id, art_values)
    return sort_key

  return sorted(artifacts, key=SortKey)
+
+
def ComputeSortDirectives(mr, config, tie_breaker='id'):
  """Return a list with sort directives to be used in sorting.

  Args:
    mr: commonly used info parsed from the request, including query.
    config: Project config PB instance that defines the sort order for
        labels and statuses in this project.
    tie_breaker: column name to add to the end of the sort spec if it is
        not already somewhere in the sort spec.

  Returns:
    A list of lower-case column names, each one may have a leading
    minus-sign.
  """
  # Prepend the end-user's sort spec to any project default sort spec.
  sort_spec = '%s %s %s' % (
      mr.group_by_spec, mr.sort_spec, config.default_sort_spec)

  # Sort specs can have interfering sort orders, so keep only the first
  # directive seen for each underlying field.
  seen_fields = set()
  directives = []
  for directive in sort_spec.lower().split():
    base_field = directive.lstrip('-')
    if base_field in seen_fields:
      continue
    seen_fields.add(base_field)
    directives.append(directive)

  # Add the project name so that the overall ordering is completely
  # defined in cross-project search; it is a no-op in single projects.
  # Then add the tie breaker for a fully deterministic order.
  for required in ('project', tie_breaker):
    if required not in directives:
      directives.append(required)

  return directives
+
+
def _MakeCombinedSortKeyAccessor(
    sort_directive, config, accessors, username_cols, users_by_id):
  """Return an accessor that extracts a sort key for a UI table column.

  Args:
    sort_directive: string with column name and optional leading minus sign,
        for combined columns, it may have slashes, e.g., "-priority/pri".
    config: ProjectIssueConfig instance that defines the sort order for
        labels and statuses in this project.
    accessors: dictionary of (column_name -> accessor) to get values
        from the artifacts.
    username_cols: list of lowercase names of columns that contain user names.
    users_by_id: dictionary {user_id: user_view,...} for all users
        who participate in the list of artifacts (e.g., owners, reporters, cc).

  Returns:
    An accessor function that can be applied to an issue to extract the
    relevant sort key value.

  The strings for status and labels are converted to lower case in
  this method so that they sort like case-insensitive enumerations.
  Any component-specific field of the artifact is sorted according to the
  value returned by the accessors defined in that component.  Those
  accessor functions should lower case string values for fields where
  case-insensitive sorting is desired.
  """
  if sort_directive.startswith('-'):
    combined_col_name = sort_directive[1:]
    descending = True
  else:
    combined_col_name = sort_directive
    descending = False

  wk_labels = [wkl.label for wkl in config.well_known_labels]
  # NOTE: this local list intentionally shadows the `accessors` dict
  # parameter from here on; the dict was already passed into each
  # single-column accessor above.
  accessors = [
      _MakeSingleSortKeyAccessor(
          col_name, config, accessors, username_cols, users_by_id, wk_labels)
      for col_name in combined_col_name.split('/')]

  # The most common case is that we sort on a single column, like "priority".
  if len(accessors) == 1:
    return _MaybeMakeDescending(accessors[0], descending)

  # Less commonly, we are sorting on a combined column like "priority/pri".
  def CombinedAccessor(art):
    """Flatten and sort the values for each column in a combined column."""
    key_part = []
    for single_accessor in accessors:
      value = single_accessor(art)
      if isinstance(value, list):
        key_part.extend(value)
      else:
        key_part.append(value)
    return sorted(key_part)

  return _MaybeMakeDescending(CombinedAccessor, descending)
+
+
+def _MaybeMakeDescending(accessor, descending):
+ """If descending is True, return a new function that reverses accessor."""
+ if not descending:
+ return accessor
+
+ def DescendingAccessor(art):
+ asc_value = accessor(art)
+ return DescendingValue.MakeDescendingValue(asc_value)
+
+ return DescendingAccessor
+
+
def _MakeSingleSortKeyAccessor(
    col_name, config, accessors, username_cols, users_by_id, wk_labels):
  """Return an accessor function for a single simple UI column.

  Args:
    col_name: lowercase name of the column to sort on.
    config: ProjectIssueConfig with well-known statuses, components,
        and field definitions.
    accessors: dictionary of (column_name -> accessor).
    username_cols: list of lowercase columns that contain user names.
    users_by_id: dictionary {user_id: user_view, ...}.
    wk_labels: list of well-known label strings from the config.
  """
  # Case 1. Handle built-in fields: status, component.
  if col_name == 'status':
    wk_statuses = [wks.status for wks in config.well_known_statuses]
    return _IndexOrLexical(wk_statuses, accessors[col_name])

  if col_name == 'component':
    # Components are ranked by the case-insensitive order of their paths.
    comp_defs = sorted(config.component_defs, key=lambda cd: cd.path.lower())
    comp_ids = [cd.component_id for cd in comp_defs]
    return _IndexListAccessor(comp_ids, accessors[col_name])

  # Case 2. Any other defined accessor functions.
  if col_name in accessors:
    if username_cols and col_name in username_cols:
      # Sort users by email address rather than user ids.
      return _UserEditNameAccessor(users_by_id, accessors[col_name])
    else:
      return accessors[col_name]

  # Case 3. Anything else is assumed to be a label prefix or custom field.
  # TODO(jrobbins): user-valued custom fields. Find them at top of loop.
  fd_list = [
      fd for fd in config.field_defs
      if (fd.field_name.lower() == col_name and
          fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE)]
  return _IndexOrLexicalList(
      wk_labels, fd_list, col_name, users_by_id)
+
+
# Index value stored by _PrecomputeSortIndexes for well-known values that
# do not belong to the column being sorted (wrong label prefix).
IGNORABLE_INDICATOR = -1
+
+
+def _PrecomputeSortIndexes(values, col_name):
+ """Precompute indexes of strings in the values list for fast lookup later."""
+ # Make a dictionary that immediately gives us the index of any value
+ # in the list, and also add the same values in all-lower letters. In
+ # the case where two values differ only by case, the later value wins,
+ # which is fine.
+ indexes = {}
+ if col_name:
+ prefix = col_name + '-'
+ else:
+ prefix = ''
+ for idx, val in enumerate(values):
+ if val.lower().startswith(prefix):
+ indexes[val] = idx
+ indexes[val.lower()] = idx
+ else:
+ indexes[val] = IGNORABLE_INDICATOR
+ indexes[val.lower()] = IGNORABLE_INDICATOR
+
+ return indexes
+
+
+def _UserEditNameAccessor(users_by_id, base_accessor):
+ """Make an accessor that returns a list of user edit names for sorting.
+
+ Args:
+ users_by_id: dictionary {user_id: user_view, ...} for all participants
+ in the entire list of artifacts.
+ base_accessor: an accessor function f(artifact) -> user_id.
+
+ Returns:
+ An accessor f(artifact) -> value that can be used in sorting
+ the decorated list.
+ """
+
+ def Accessor(art):
+ """Return a user edit name for the given artifact's base_accessor."""
+ id_or_id_list = base_accessor(art)
+ if isinstance(id_or_id_list, list):
+ emails = [users_by_id[user_id].email
+ for user_id in id_or_id_list]
+ else:
+ emails = [users_by_id[id_or_id_list].email]
+
+ return sorted(emails) or MAX_STRING
+
+ return Accessor
+
+
def _MakeColumnAccessor(col_name):
  """Make an accessor for an issue's labels that have col_name as a prefix.

  Args:
    col_name: string column name.

  Returns:
    An accessor that can be applied to an artifact to return a list of
    lowercased labels that have col_name as a prefix.

  For example, _MakeColumnAccessor('priority')(issue) could result in
  [], or ['priority-high'], or a longer list for multi-valued labels.
  """
  prefix = col_name + '-'

  def Accessor(art):
    """Return matching lowercased label values on the given artifact."""
    lowered = (label.lower() for label in tracker_bizobj.GetLabels(art))
    return [label for label in lowered if label.startswith(prefix)]

  return Accessor
+
+
def _IndexOrLexical(wk_values, base_accessor):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: a list of well-known status values from the config.
    base_accessor: function that gets a field from a given issue.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key: an integer rank for well-known values, a lowercased string
    for odd-ball values, and MAX_STRING when the field is unset.  That
    makes well-known values come first in their configured order,
    odd-ball values follow lexicographically, and issues with no value
    sort at the very end.
  """
  rank_by_value = _PrecomputeSortIndexes(wk_values, '')

  def Accessor(art):
    """Return the sort key for one issue's value of this field."""
    value = base_accessor(art)
    if not value:
      return MAX_STRING  # Undefined values sort last.

    if value in rank_by_value:
      # Well-known values sort by index.
      return rank_by_value[value]

    # Odd-ball values after well-known and lexicographically.
    return value.lower()

  return Accessor
+
+
def _IndexListAccessor(wk_values, base_accessor):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: a list of well-known values from the config.
    base_accessor: function that gets a field from a given issue.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key.
  """
  # Map each well-known value to its rank in the configured ordering.
  well_known_value_indexes = dict(zip(wk_values, range(len(wk_values))))

  def Accessor(art):
    """Return a sorted list of ranks (or MAX_STRING) for one issue."""
    values = base_accessor(art)
    # Issues with no values at all sort to the very end.
    if not values:
      return MAX_STRING
    # Unknown values rank after every well-known one via MAX_STRING.
    return sorted(
        well_known_value_indexes.get(val, MAX_STRING) for val in values)

  return Accessor
+
+
def _IndexOrLexicalList(wk_values, fd_list, col_name, users_by_id):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: A list of well-known labels from the config.
    fd_list: list of FieldDef PBs that match the column name. These might not
        all have the same field_type. Enum-type field are not included.
    col_name: lowercase string name of the column that will be sorted on.
    users_by_id: A dictionary {user_id: user_view}.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key.
  """
  well_known_value_indexes = _PrecomputeSortIndexes(wk_values, col_name)

  def Accessor(art):
    """Return a sorted list of field and label sort keys for one issue."""
    # Combine custom-field values with label-derived values for this column.
    field_keys = _SortableFieldValues(art, fd_list, users_by_id)
    label_keys = _SortableLabelValues(art, col_name, well_known_value_indexes)
    combined = field_keys + label_keys
    # Issues with no value at all sort to the end of the list.
    if not combined:
      return MAX_STRING
    return sorted(combined)

  return Accessor
+
+
def _SortableFieldValues(art, fd_list, users_by_id):
  """Return a list of field values relevant to one UI table column.

  Args:
    art: artifact whose field_values are scanned.
    fd_list: list of FieldDef PBs that match the column name.
    users_by_id: dictionary {user_id: user_view} used to resolve user fields.

  Returns:
    A list of sortable field values, grouped by field definition in fd_list
    order.
  """
  # Outer loop over fd_list preserves the original fd-major ordering.
  return [
      tracker_bizobj.GetFieldValue(fv, users_by_id)
      for fd in fd_list
      for fv in art.field_values
      if fv.field_id == fd.field_id]
+
+
def _SortableLabelValues(art, col_name, well_known_value_indexes):
  """Return a list of ints and strings for labels relevant to one UI column.

  Args:
    art: artifact (e.g., issue) whose labels are examined.
    col_name: lowercase string name of the column being sorted on.
    well_known_value_indexes: dict mapping labels to an int rank, a lowercase
        string, or IGNORABLE_INDICATOR.  It is precomputed by
        _PrecomputeSortIndexes() and is also mutated here as a memoization
        cache so that each odd-ball label is classified only once across the
        whole list of artifacts.

  Returns:
    A list of sort keys: int ranks for well-known label values, and lowercase
    strings for odd-ball values of key-value labels whose key is col_name.
  """
  sortable_value_list = []
  for label in tracker_bizobj.GetLabels(art):
    # Look up by the raw (original-case) label; the precomputed dict holds
    # both raw and lowercased keys, and cache entries below use raw keys.
    idx_or_lex = well_known_value_indexes.get(label)
    if idx_or_lex == IGNORABLE_INDICATOR:
      continue  # Label is known to not have the desired prefix.
    if idx_or_lex is None:
      # Label has not been classified yet; classify it and cache the result.
      if '-' not in label:
        # Skip an irrelevant OneWord label and remember to ignore it later.
        well_known_value_indexes[label] = IGNORABLE_INDICATOR
        continue
      key, value = label.lower().split('-', 1)
      if key == col_name:
        # Label is a key-value label with an odd-ball value; remember it.
        idx_or_lex = value
        well_known_value_indexes[label] = value
      else:
        # Label was a key-value label that is not relevant to this column.
        # Remember to ignore it later.
        well_known_value_indexes[label] = IGNORABLE_INDICATOR
        continue

    sortable_value_list.append(idx_or_lex)

  return sortable_value_list
diff --git a/appengine/monorail/framework/sql.py b/appengine/monorail/framework/sql.py
new file mode 100644
index 0000000..223912d
--- /dev/null
+++ b/appengine/monorail/framework/sql.py
@@ -0,0 +1,745 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of classes for interacting with tables in SQL."""
+
+import logging
+import random
+import re
+import sys
+import time
+
+import settings