Open Source Monorail
R=jrobbins@chromium.org
BUG=monorail:1066
Review URL: https://codereview.chromium.org/1868553004
diff --git a/.coveragerc b/.coveragerc
index 510fefa..bf59e70 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -7,6 +7,10 @@
# Add chromium_build, which wasn't test.py-enabled for a long time.
./appengine/chromium_build/*
+ # Add monorail's third-party packages and worst offenders
+ ./appengine/monorail/third_party/*
+ ./appengine/monorail/testing/*
+
# Appengine third_party packages, committed into the repo as is.
./appengine/third_party/*
@@ -32,3 +36,6 @@
# Don't complain if non-runnable code isn't run:
if 0:
if __name__ == ['"]__main__['"]:
+
+[expect_tests]
+expected_coverage_min = 90
diff --git a/appengine/monorail/.expect_tests.cfg b/appengine/monorail/.expect_tests.cfg
new file mode 100644
index 0000000..1eada43
--- /dev/null
+++ b/appengine/monorail/.expect_tests.cfg
@@ -0,0 +1,4 @@
+[expect_tests]
+skip=
+ gae_ts_mon
+ third_party
diff --git a/appengine/monorail/.expect_tests_pretest.py b/appengine/monorail/.expect_tests_pretest.py
new file mode 100644
index 0000000..b8ca7a4
--- /dev/null
+++ b/appengine/monorail/.expect_tests_pretest.py
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# pylint: disable=undefined-variable
+
+import os
+import sys
+
+# Using pretest_filename is magic, because it is available in the locals() of
+# the script which execfiles this file.
+# prefixing with 'pretest' to avoid name collisions in expect_tests.
+pretest_APPENGINE_ENV_PATH = os.path.join(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.dirname(
+ os.path.abspath(pretest_filename))))),
+ 'google_appengine')
+sys.path.append(pretest_APPENGINE_ENV_PATH)
+
+# Crazy hack, because of appengine.
+# Importing dev_appserver is probably not officially supported and fix_sys_path
+# may be an implementation detail subject to change.
+import dev_appserver as pretest_dev_appserver
+pretest_dev_appserver.fix_sys_path()
+
+# Remove google_appengine SDK from sys.path after use
+sys.path.remove(pretest_APPENGINE_ENV_PATH)
+
+SDK_LIBRARY_PATHS = [
+ # This is not added by fix_sys_path.
+ os.path.join(pretest_APPENGINE_ENV_PATH, 'lib', 'mox'),
+]
+sys.path.extend(SDK_LIBRARY_PATHS)
+
+os.environ['SERVER_SOFTWARE'] = 'test ' + os.environ.get('SERVER_SOFTWARE', '')
+os.environ['CURRENT_VERSION_ID'] = 'test.123'
diff --git a/appengine/monorail/.gitignore b/appengine/monorail/.gitignore
new file mode 100644
index 0000000..877a235
--- /dev/null
+++ b/appengine/monorail/.gitignore
@@ -0,0 +1,14 @@
+.*\.py[co]
+.*\.pyc-2.4
+.*~
+.*\.orig
+.*\.swp
+.*\#.*
+.*@.*
+index\.yaml
+REVISION
+.coverage
+htmlcov
+.DS_Store
+workspace.xml
+new_static/components/*\.vulcanized.html
diff --git a/appengine/monorail/Makefile b/appengine/monorail/Makefile
new file mode 100644
index 0000000..a770b80
--- /dev/null
+++ b/appengine/monorail/Makefile
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# Makefile to simplify some common AppEngine actions.
+# Use 'make help' for a list of commands.
+
+STAGEID= monorail-staging
+PRODID= monorail-prod
+
+GAE_PY?= python gae.py
+DEV_APPSERVER_FLAGS?=
+
+FRONTEND_MODULES?= default
+BACKEND_MODULES?= besearch
+
+default: help
+
+check:
+ifndef NPM_VERSION
+ $(error npm not found. Install from nodejs.org or see README)
+endif
+
+help:
+ @echo "Available commands:"
+ @sed -n '/^[a-zA-Z0-9_.]*:/s/:.*//p' <Makefile
+
+test:
+ ../../test.py test appengine/monorail
+
+# Commands for running locally using dev_appserver.
+serve:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver $(DEV_APPSERVER_FLAGS)
+
+serve_email:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver $(DEV_APPSERVER_FLAGS) --enable_sendmail=True
+
+# The _remote commands expose the app on 0.0.0.0, so that it is externally
+# accessible by hostname:port, rather than just localhost:port.
+serve_remote:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver -o $(DEV_APPSERVER_FLAGS)
+
+serve_remote_email:
+ @echo "---[Starting SDK AppEngine Server]---"
+ $(GAE_PY) devserver -o $(DEV_APPSERVER_FLAGS) --enable_sendmail=True
+
+run: serve
+
+
+# AppEngine apps can be tested locally and in non-default versions uploaded to
+# the main app-id, but it is still sometimes useful to have a completely
+# separate app-id. E.g., for testing inbound email, load testing, or using
+# throwaway databases.
+deploy_staging:
+ @echo "---[Staging $(STAGEID)]---"
+ $(GAE_PY) upload -A $(STAGEID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+
+
+# This is our production server that users actually use.
+deploy_prod:
+ @echo "---[Deploying prod instance $(PRODID)]---"
+ $(GAE_PY) upload -A $(PRODID) $(FRONTEND_MODULES) $(BACKEND_MODULES)
+
+
+# Note that we do not provide a command-line way to make the newly-uploaded
+# version the default version. This is for two reasons: a) You should be using
+# your browser to confirm that the new version works anyway, so just use the
+# console interface to make it the default; and b) If you really want to use
+# the command line you can use gae.py directly.
diff --git a/appengine/monorail/README.md b/appengine/monorail/README.md
index 7a4f4e6..2f7998a 100644
--- a/appengine/monorail/README.md
+++ b/appengine/monorail/README.md
@@ -1,8 +1,41 @@
-# Monorail
+# Monorail Issue Tracker
-[bugs.chromium.org](https://bugs.chromium.org)
+Monorail is the Issue Tracker used by the Chromium project and other related
+projects. It is hosted at [bugs.chromium.org](https://bugs.chromium.org).
-Monorail is the issue tracking tool for chromium-related projects, which is a port of the Google Code issue tracker to AppEngine.
+If you wish to file a bug against Monorail itself, please do so in our
+[self-hosting tracker](https://bugs.chromium.org/p/monorail/issues/entry).
+We also discuss development of Monorail at `infra-dev@chromium.org`.
+## Testing
-* [API](doc/api.md)
\ No newline at end of file
+In order to run all of the Monorail unit tests, run `make test` in this
+directory. If you wish to run just a subset of the tests, you can invoke the
+test runner directly and give it a subdirectory: `../../test.py
+appengine/monorail/tracker`.
+
+## Running Locally
+
+To run the app locally, you need to have a local MySQL database. Install MySQL
+according to the canonical instructions for your platform. Then create
+a new database and import our schema:
+
+ mysql> create database monorail;
+ mysql> source /path/to/infra/appengine/monorail/sql/framework.sql;
+ mysql> source /path/to/infra/appengine/monorail/sql/project.sql;
+ mysql> source /path/to/infra/appengine/monorail/sql/tracker.sql;
+ mysql> exit;
+
+Then you can run the development server locally with just `make serve`.
+
+## Deploying
+
+The `app.yaml` and `Makefile` files contained in this directory point at the
+official instances of Monorail maintained by the Chromium Infrastructure Team.
+If you wish (and have sufficient permissions) to deploy to one of those, simply
+run `make deploy_staging` or `make deploy_prod`. If you wish to set up your
+own instance, edit the first line of the `app.yaml` and use gae.py directly,
+or edit the `Makefile` to add an entry for your AppEngine app ID. It is likely
+that you'll also want to edit many of the values in `settings.py`, which
+specify debug email addresses, instance counts, and default Google Storage
+buckets.
diff --git a/appengine/monorail/app.yaml b/appengine/monorail/app.yaml
new file mode 100644
index 0000000..cfda623
--- /dev/null
+++ b/appengine/monorail/app.yaml
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+application: monorail-staging
+version: 2015-05-26
+runtime: python27
+api_version: 1
+threadsafe: no
+
+default_expiration: "3600d"
+
+instance_class: F4
+automatic_scaling:
+ min_idle_instances: 10
+ max_pending_latency: 0.2s
+
+handlers:
+- url: /_ah/spi/.*
+ script: monorailapp.endpoints
+
+- url: /robots.txt
+ static_files: static/robots.txt
+ upload: static/robots.txt
+
+- url: /database-maintenance
+ static_files: static/database-maintenance.html
+ upload: static/database-maintenance.html
+
+- url: /static
+ static_dir: static
+
+- url: /_ah/mail/.+
+ script: monorailapp.app
+ login: admin
+
+- url: /_task/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /_cron/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /_backend/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /.*
+ script: monorailapp.app
+ secure: always
+
+inbound_services:
+- mail
+
+libraries:
+- name: endpoints
+ version: 1.0
+- name: MySQLdb
+ version: "latest"
+- name: pycrypto
+ version: "2.6"
+- name: django
+ version: 1.4
+
+includes:
+ - gae_ts_mon
diff --git a/appengine/monorail/appengine_config.py b/appengine/monorail/appengine_config.py
new file mode 100644
index 0000000..e886e94
--- /dev/null
+++ b/appengine/monorail/appengine_config.py
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Configuration."""
+
+import os
+import sys
+
+# Enable third-party imports
+sys.path.append(os.path.join(os.path.dirname(__file__), 'third_party'))
+
+import httplib2
+import oauth2client
diff --git a/appengine/monorail/benchmark/search-urls.txt b/appengine/monorail/benchmark/search-urls.txt
new file mode 100644
index 0000000..00545cc
--- /dev/null
+++ b/appengine/monorail/benchmark/search-urls.txt
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This is a bunch of URLs that can be hit to measure performance.
+# Use 'make siege' to run it.
+# Or 'siege -c 4 -f benchmark/search-urls.txt'
+#
+# For a log of results of running performance tests see the
+# go/monorail-performance spreadsheet.
+
+
+# Use one of these lines to choose the server and project.
+ISSUE_LIST=https://monorail-staging.appspot.com/p/chromium/issues/list?disable_cache=1&
+#ISSUE_LIST=https://code.google.com/p/chromium/issues/list?
+
+# Log data shows that 68% of issue list page views have no query terms.
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+${ISSUE_LIST}
+
+${ISSUE_LIST}can=1
+${ISSUE_LIST}can=1
+${ISSUE_LIST}can=3
+${ISSUE_LIST}can=4
+${ISSUE_LIST}q=label:Type-Bug
+${ISSUE_LIST}q=label:Pri-2
+${ISSUE_LIST}q=label:OS-Windows
+${ISSUE_LIST}q=status:Unconfirmed
+${ISSUE_LIST}q=status=Unconfirmed
+${ISSUE_LIST}q=status=Started
+${ISSUE_LIST}q=status=Untriaged
+${ISSUE_LIST}q=status=Started,Untriaged
+${ISSUE_LIST}q=-status=Started,Untriaged
+${ISSUE_LIST}can=1&q=status=Fixed
+${ISSUE_LIST}can=1&q=status=Duplicate
+# Actually owner:me is pretty common, but specify the owner here since we are not signed in.
+${ISSUE_LIST}q=owner=agl@chromium.org
+${ISSUE_LIST}q=owner=rch@chromium.org
+${ISSUE_LIST}q=owner=jon@chromium.org
+${ISSUE_LIST}q=owner=amit@chromium.org
+${ISSUE_LIST}q=cc:amit@chromium.org
+${ISSUE_LIST}q=owner:google.com
+${ISSUE_LIST}q=feature=nacl status:started
+${ISSUE_LIST}q=Hotlist=GoodFirstBug
+${ISSUE_LIST}q=has:restrict
+${ISSUE_LIST}q=feature=Printing
+${ISSUE_LIST}q=feature=Printing status=Available
+${ISSUE_LIST}q=feature=Printing -status=Available
+
+
+
diff --git a/appengine/monorail/codereview.settings b/appengine/monorail/codereview.settings
new file mode 100644
index 0000000..6a720d7
--- /dev/null
+++ b/appengine/monorail/codereview.settings
@@ -0,0 +1,11 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: https://codereview.chromium.org
+VIEW_VC: https://chromium.googlesource.com/infra/infra/+/
+CC_LIST: chromium-reviews@chromium.org, infra-reviews+infra@chromium.org, jrobbins+catch-up@google.com
+PROJECT: infra
+BUG_PREFIX: monorail:
diff --git a/appengine/monorail/cron.yaml b/appengine/monorail/cron.yaml
new file mode 100644
index 0000000..97f6546
--- /dev/null
+++ b/appengine/monorail/cron.yaml
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+cron:
+- description: keep the databases loaded
+ url: /p/chromium/issues/list
+ schedule: every 30 minutes synchronized
+- description: consolidate old invalidation rows
+ url: /_cron/ramCacheConsolidate
+ schedule: every 6 hours synchronized
+- description: index issues that were modified in big batches
+ url: /_cron/reindexQueue
+ schedule: every 6 minutes synchronized
+- description: get rid of doomed and deletable projects
+ url: /_cron/reap
+ schedule: every 24 hours synchronized
+- description: send ts_mon metrics
+ url: /internal/cron/ts_mon/send
+ schedule: every 1 minutes
+- description: export spam model training examples
+ url: /_cron/spamDataExport
+ timezone: US/Pacific
+ schedule: every day 01:00
diff --git a/appengine/monorail/dos.yaml b/appengine/monorail/dos.yaml
new file mode 100644
index 0000000..97e4e4d
--- /dev/null
+++ b/appengine/monorail/dos.yaml
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+blacklist:
+# Edit this file to enable GAE's built-in DoS protection.
+# Run some aggregate queries in the Cloud Console BigQuery
+# interface to see if you can block larger subnets rather
+# than individual IP addresses. Also, this list is limited
+# to 100 entries, so if it's a DDoS you might run out
+# of subnets.
+# On-Call Playbook:
+# https://docs.google.com/document/d/1acGea37jlb5FEp1BGdqca6tY_hiH1QGXKxbt4iBfAug
+#
+# See the playbook for other measures you can take, such
+# as setting ratelimiting_enabled = True in settings.py.
+
+# Example entries:
+# - subnet: 192.0.2.1
+# description: a single IP address
+# - subnet: 192.0.2.0/24
+# description: an IPv4 subnet
+# - subnet: 2001:DB8::1
+# description: an IPv6 address
+# - subnet: 2001:DB8::/32
+# description: an IPv6 subnet
diff --git a/appengine/monorail/features/__init__.py b/appengine/monorail/features/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/features/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/features/activities.py b/appengine/monorail/features/activities.py
new file mode 100644
index 0000000..e36f97a
--- /dev/null
+++ b/appengine/monorail/features/activities.py
@@ -0,0 +1,310 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Code to support project and user activies pages."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import sql
+from framework import template_helpers
+from framework import timestr
+from project import project_views
+from proto import tracker_pb2
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+UPDATES_PER_PAGE = 50
+MAX_UPDATES_PER_PAGE = 200
+
+
+class ActivityView(template_helpers.PBProxy):
+ """EZT-friendly wrapper for Activities."""
+
+ _TITLE_TEMPLATE = template_helpers.MonorailTemplate(
+ framework_constants.TEMPLATE_PATH + 'features/activity-title.ezt',
+ compress_whitespace=True, base_format=ezt.FORMAT_HTML)
+
+ _BODY_TEMPLATE = template_helpers.MonorailTemplate(
+ framework_constants.TEMPLATE_PATH + 'features/activity-body.ezt',
+ compress_whitespace=True, base_format=ezt.FORMAT_HTML)
+
+ def __init__(
+ self, pb, services, mr, prefetched_issues, users_by_id,
+ autolink=None, all_ref_artifacts=None, ending=None, highlight=None):
+ """Constructs an ActivityView out of an Activity protocol buffer.
+
+ Args:
+ pb: an IssueComment or Activity protocol buffer.
+ services: connections to backend services.
+ mr: HTTP request info, used by the artifact autolink.
+ prefetched_issues: dictionary of the issues for the comments being shown.
+ users_by_id: dict {user_id: UserView} for all relevant users.
+ autolink: Autolink instance.
+ all_ref_artifacts: list of all artifacts in the activity stream.
+ ending: ending type for activity titles, 'in_project' or 'by_user'
+ highlight: what to highlight in the middle column on user updates pages
+ i.e. 'project', 'user', or None
+ """
+ template_helpers.PBProxy.__init__(self, pb)
+
+ activity_type = 'ProjectIssueUpdate' # TODO(jrobbins): more types
+
+ self.comment = None
+ self.issue = None
+ self.field_changed = None
+ self.multiple_fields_changed = ezt.boolean(False)
+ self.project = None
+ self.user = None
+ self.timestamp = time.time() # Bogus value makes bad ones highly visible.
+
+ if isinstance(pb, tracker_pb2.IssueComment):
+ self.timestamp = pb.timestamp
+ issue = prefetched_issues[pb.issue_id]
+ if self.timestamp == issue.opened_timestamp:
+ issue_change_id = None # This comment is the description.
+ else:
+ issue_change_id = pb.id # instead of seq num.
+
+ self.comment = tracker_views.IssueCommentView(
+ mr.project_name, pb, users_by_id, autolink,
+ all_ref_artifacts, mr, issue)
+
+ # TODO(jrobbins): pass effective_ids of the commenter so that he/she
+ # can be identified as a project member or not.
+ # TODO(jrobbins): Prefetch all needed projects and configs just like the
+ # way that we batch-prefetch issues.
+ config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
+ self.issue = tracker_views.IssueView(issue, users_by_id, config)
+ self.user = self.comment.creator
+ project = services.project.GetProject(mr.cnxn, issue.project_id)
+ self.project_name = project.project_name
+ self.project = project_views.ProjectView(project)
+
+ else:
+ logging.warn('unknown activity object %r', pb)
+
+ nested_page_data = {
+ 'activity_type': activity_type,
+ 'issue_change_id': issue_change_id,
+ 'comment': self.comment,
+ 'issue': self.issue,
+ 'project': self.project,
+ 'user': self.user,
+ 'timestamp': self.timestamp,
+ 'ending_type': ending,
+ }
+
+ self.escaped_title = self._TITLE_TEMPLATE.GetResponse(
+ nested_page_data).strip()
+ self.escaped_body = self._BODY_TEMPLATE.GetResponse(
+ nested_page_data).strip()
+
+ if autolink is not None and all_ref_artifacts is not None:
+ # TODO(jrobbins): actually parse the comment text. Actually render runs.
+ runs = autolink.MarkupAutolinks(
+ mr, [template_helpers.TextRun(self.escaped_body)], all_ref_artifacts)
+ self.escaped_body = ''.join(run.content for run in runs)
+
+ self.date_bucket, self.date_relative = timestr.GetHumanScaleDate(
+ self.timestamp)
+ time_tuple = time.localtime(self.timestamp)
+ self.date_tooltip = time.asctime(time_tuple)
+
+ # We always highlight the user for starring activities
+ if activity_type.startswith('UserStar'):
+ self.highlight = 'user'
+ else:
+ self.highlight = highlight
+
+
+def GatherUpdatesData(
+ services, mr, prof, project_ids=None, user_ids=None, ending=None,
+ updates_page_url=None, autolink=None, highlight=None):
+ """Gathers and returns updates data.
+
+ Args:
+ services: Connections to backend services.
+ mr: HTTP request info, used by the artifact autolink.
+ prof: The profiler to use.
+ project_ids: List of project IDs we want updates for.
+ user_ids: List of user IDs we want updates for.
+ ending: Ending type for activity titles, 'in_project' or 'by_user'.
+ updates_page_url: The URL that will be used to create pagination links from.
+ autolink: Autolink instance.
+ highlight: What to highlight in the middle column on user updates pages
+ i.e. 'project', 'user', or None.
+ """
+ ascending = bool(mr.after)
+
+ # num should be non-negative number
+ num = mr.GetPositiveIntParam('num', UPDATES_PER_PAGE)
+ num = min(num, MAX_UPDATES_PER_PAGE)
+
+ updates_data = {
+ 'no_stars': None,
+ 'no_activities': None,
+ 'pagination': None,
+ 'updates_data': None,
+ 'ending_type': ending,
+ }
+
+ if not user_ids and not project_ids:
+ updates_data['no_stars'] = ezt.boolean(True)
+ return updates_data
+
+ with prof.Phase('get activities'):
+ # TODO(jrobbins): make this into a persist method.
+ # TODO(jrobbins): this really needs permission checking in SQL, which will
+ # be slow.
+ where_conds = [('Issue.id = Comment.issue_id', [])]
+ if project_ids is not None:
+ cond_str = 'Comment.project_id IN (%s)' % sql.PlaceHolders(project_ids)
+ where_conds.append((cond_str, project_ids))
+ if user_ids is not None:
+ cond_str = 'Comment.commenter_id IN (%s)' % sql.PlaceHolders(user_ids)
+ where_conds.append((cond_str, user_ids))
+
+ if project_ids:
+ use_clause = 'USE INDEX (project_id) USE INDEX FOR ORDER BY (project_id)'
+ elif user_ids:
+ use_clause = (
+ 'USE INDEX (commenter_id) USE INDEX FOR ORDER BY (commenter_id)')
+ else:
+ use_clause = ''
+
+ if mr.before:
+ where_conds.append(('created < %s', [mr.before]))
+ if mr.after:
+ where_conds.append(('created > %s', [mr.after]))
+ if ascending:
+ order_by = [('created', [])]
+ else:
+ order_by = [('created DESC', [])]
+
+ comments = services.issue.GetComments(
+ mr.cnxn, joins=[('Issue', [])], deleted_by=None, where=where_conds,
+ use_clause=use_clause, order_by=order_by, limit=num + 1)
+
+ # TODO(jrobbins): it would be better if we could just get the dict directly.
+ prefetched_issues_list = services.issue.GetIssues(
+ mr.cnxn, {c.issue_id for c in comments})
+ prefetched_issues = {
+ issue.issue_id: issue for issue in prefetched_issues_list}
+ needed_project_ids = {issue.project_id for issue in prefetched_issues_list}
+ prefetched_projects = services.project.GetProjects(
+ mr.cnxn, needed_project_ids)
+ prefetched_configs = services.config.GetProjectConfigs(
+ mr.cnxn, needed_project_ids)
+ viewable_issues_list = tracker_helpers.FilterOutNonViewableIssues(
+ mr.auth.effective_ids, mr.auth.user_pb, prefetched_projects,
+ prefetched_configs, prefetched_issues_list)
+ viewable_iids = {issue.issue_id for issue in viewable_issues_list}
+
+ # Filter the comments based on permission to view the issue.
+ # TODO(jrobbins): push permission checking in the query so that pagination
+ # pages never become underfilled, or use backends to shard.
+ # TODO(jrobbins): come back to this when I implement private comments.
+ comments = [
+ c for c in comments if c.issue_id in viewable_iids]
+
+ if ascending:
+ comments.reverse()
+
+ amendment_user_ids = []
+ for comment in comments:
+ for amendment in comment.amendments:
+ amendment_user_ids.extend(amendment.added_user_ids)
+ amendment_user_ids.extend(amendment.removed_user_ids)
+
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, services.user, [c.user_id for c in comments],
+ amendment_user_ids)
+ framework_views.RevealAllEmailsToMembers(mr, users_by_id)
+
+ num_results_returned = len(comments)
+ displayed_activities = comments[:UPDATES_PER_PAGE]
+
+ if not num_results_returned:
+ updates_data['no_activities'] = ezt.boolean(True)
+ return updates_data
+
+ # Get all referenced artifacts first
+ all_ref_artifacts = None
+ if autolink is not None:
+ content_list = []
+ for activity in comments:
+ content_list.append(activity.content)
+
+ all_ref_artifacts = autolink.GetAllReferencedArtifacts(
+ mr, content_list)
+
+ # Now process content and gather activities
+ today = []
+ yesterday = []
+ pastweek = []
+ pastmonth = []
+ thisyear = []
+ older = []
+
+ with prof.Phase('rendering activities'):
+ for activity in displayed_activities:
+ entry = ActivityView(
+ activity, services, mr, prefetched_issues, users_by_id,
+ autolink=autolink, all_ref_artifacts=all_ref_artifacts, ending=ending,
+ highlight=highlight)
+
+ if entry.date_bucket == 'Today':
+ today.append(entry)
+ elif entry.date_bucket == 'Yesterday':
+ yesterday.append(entry)
+ elif entry.date_bucket == 'Last 7 days':
+ pastweek.append(entry)
+ elif entry.date_bucket == 'Last 30 days':
+ pastmonth.append(entry)
+ elif entry.date_bucket == 'Earlier this year':
+ thisyear.append(entry)
+ elif entry.date_bucket == 'Older':
+ older.append(entry)
+
+ new_after = None
+ new_before = None
+ if displayed_activities:
+ new_after = displayed_activities[0].timestamp
+ new_before = displayed_activities[-1].timestamp
+
+ prev_url = None
+ next_url = None
+ if updates_page_url:
+ list_servlet_rel_url = updates_page_url.split('/')[-1]
+ if displayed_activities and (mr.before or mr.after):
+ prev_url = framework_helpers.FormatURL(
+ mr, list_servlet_rel_url, after=new_after)
+ if mr.after or len(comments) > UPDATES_PER_PAGE:
+ next_url = framework_helpers.FormatURL(
+ mr, list_servlet_rel_url, before=new_before)
+
+ if prev_url or next_url:
+ pagination = template_helpers.EZTItem(
+ start=None, last=None, prev_url=prev_url, next_url=next_url,
+ reload_url=None, visible=ezt.boolean(True), total_count=None)
+ else:
+ pagination = None
+
+ updates_data.update({
+ 'no_activities': ezt.boolean(False),
+ 'pagination': pagination,
+ 'updates_data': template_helpers.EZTItem(
+ today=today, yesterday=yesterday, pastweek=pastweek,
+ pastmonth=pastmonth, thisyear=thisyear, older=older),
+ })
+
+ return updates_data
diff --git a/appengine/monorail/features/autolink.py b/appengine/monorail/features/autolink.py
new file mode 100644
index 0000000..a50b848
--- /dev/null
+++ b/appengine/monorail/features/autolink.py
@@ -0,0 +1,465 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Autolink helps auto-link references to artifacts in text.
+
+This class maintains a registry of artifact autolink syntax specs and
+callbacks. The structure of that registry is:
+ { component_name: (lookup_callback,
+ { regex: substitution_callback, ...}),
+ ...
+ }
+
+For example:
+ { 'tracker':
+ (GetReferencedIssues,
+ ExtractProjectAndIssueIds,
+ {_ISSUE_REF_RE: ReplaceIssueRef}),
+ 'versioncontrol':
+ (GetReferencedRevisions,
+ ExtractProjectAndRevNum,
+ {_GIT_HASH_RE: ReplaceRevisionRef}),
+ }
+
+The dictionary of regexes is used here because, in the future, we
+might add more regexes for each component rather than have one complex
+regex per component.
+"""
+
+import logging
+import re
+import urllib
+import urlparse
+
+import settings
+from framework import template_helpers
+from framework import validate
+from proto import project_pb2
+from tracker import tracker_helpers
+
+
+_CLOSING_TAG_RE = re.compile('</[a-z0-9]+>$', re.IGNORECASE)
+
+_LINKIFY_SCHEMES = r'(https?://|ftp://|mailto:)'
+# Also count a start-tag '<' as a url delimiter, since the autolinker
+# is sometimes run against html fragments.
+_IS_A_LINK_RE = re.compile(r'(%s)([^\s<]+)' % _LINKIFY_SCHEMES, re.UNICODE)
+
+# These are allowed in links, but if any of closing delimiters appear
+# at the end of the link, and the opening one is not part of the link,
+# then trim off the closing delimiters.
+_LINK_TRAILING_CHARS = [
+ (None, ':'),
+ (None, '.'),
+ (None, ','),
+ ('<', '>'),
+ ('"', '"'),
+ ('(', ')'),
+ ('[', ']'),
+ ('{', '}'),
+ ]
+
+
def Linkify(_mr, autolink_regex_match,
            _component_ref_artifacts):
  """Examine a textual reference and replace it with a hyperlink or not.

  This is a callback for use with the autolink feature.

  Args:
    _mr: common info parsed from the user HTTP request (unused).
    autolink_regex_match: regex match for the textual reference.
    _component_ref_artifacts: unused value.

  Returns:
    A list of TextRuns with tag=a for all matched ftp, http, https and mailto
    links converted into HTML hyperlinks.
  """
  hyperlink = autolink_regex_match.group(0)

  # In a single pass over _LINK_TRAILING_CHARS, strip trailing punctuation
  # that is probably not part of the URL, e.g. the ')' in
  # "(see http://example.com/x)".  A closing delimiter is kept only when
  # its matching opener appears earlier inside the URL itself.
  trailing = ''
  for begin, end in _LINK_TRAILING_CHARS:
    if hyperlink.endswith(end):
      if not begin or hyperlink[:-len(end)].find(begin) == -1:
        trailing = end + trailing
        hyperlink = hyperlink[:-len(end)]

  # The autolinker is sometimes run against HTML fragments, so a closing
  # tag such as '</b>' may have been captured as part of the URL; move it
  # (and everything after it) to the trailing plain text.
  tag_match = _CLOSING_TAG_RE.search(hyperlink)
  if tag_match:
    trailing = hyperlink[tag_match.start(0):] + trailing
    hyperlink = hyperlink[:tag_match.start(0)]

  # If what remains is neither a valid URL nor a valid email address,
  # emit the original text unlinked.
  if (not validate.IsValidURL(hyperlink) and
      not validate.IsValidEmail(hyperlink)):
    return [template_helpers.TextRun(hyperlink)]

  result = [template_helpers.TextRun(hyperlink, tag='a', href=hyperlink)]
  if trailing:
    result.append(template_helpers.TextRun(trailing))

  return result
+
+
# Regular expression to detect Git hashes.
# Used to auto-link to Git hashes on crrev.com when displaying issue details.
# Matches a bare 40-char hexadecimal hash, optionally preceded by "r",
# "r#", or "revision ", when it is not part of a larger word.
_GIT_HASH_RE = re.compile(
    r'\b(?P<prefix>r(evision\s+#?)?)?(?P<revnum>([a-f0-9]{40}))\b',
    re.IGNORECASE | re.MULTILINE)

# This is for SVN revisions and Git commit positions: a required "r",
# "r#", or "revision" prefix followed by 1-7 decimal digits.
_SVN_REF_RE = re.compile(
    r'\b(?P<prefix>r(evision\s+#?)?)(?P<revnum>([0-9]{1,7}))\b',
    re.IGNORECASE | re.MULTILINE)
+
+
def GetReferencedRevisions(_mr, _refs):
  """Load the revision objects for the given references.

  Currently a no-op: every hash-shaped string gets autolinked without
  verifying that such a revision actually exists.

  TODO(jrobbins): Hit crrev.com and check that the revision exists
  and show a rollover with revision info.
  """
  return None
+
+
def ExtractRevNums(_mr, autolink_regex_match):
  """Return the internal representation of one revision reference.

  Args:
    _mr: common info parsed from the user HTTP request (unused).
    autolink_regex_match: regex match with a 'revnum' named group.

  Returns:
    A single-element list holding the matched revision string.
  """
  revnum = autolink_regex_match.group('revnum')
  logging.debug('revision ref = %s', revnum)
  return [revnum]
+
+
def ReplaceRevisionRef(
    mr, autolink_regex_match, _component_ref_artifacts):
  """Return HTML markup (a single link run) for a revision reference.

  Args:
    mr: common info parsed from the user HTTP request.
    autolink_regex_match: regex match with 'prefix' and 'revnum' groups.
    _component_ref_artifacts: unused value.

  Returns:
    A single-element list holding a TextRun that links to the revision.
  """
  revnum = autolink_regex_match.group('revnum')
  prefix = autolink_regex_match.group('prefix')
  url = _GetRevisionURLFormat(mr.project).format(revnum=revnum)
  # Preserve whatever prefix text ("r", "revision ", ...) the user wrote.
  content = '%s%s' % (prefix, revnum) if prefix else revnum
  return [template_helpers.TextRun(content, tag='a', href=url)]
+
+
def _GetRevisionURLFormat(project):
  """Return the format string for revision URLs in the given project.

  Falls back to the site-wide setting when the project defines none.
  """
  # TODO(jrobbins): Expose a UI to customize it to point to whatever site
  # hosts the source code. Also, site-wide default.
  return (project.revision_url_format or settings.revision_url_format)
+
+
# Regular expression to detect issue references.
# Used to auto-link to other issues when displaying issue details.
# Matches "issue " when "issue" is not part of a larger word, or
# "issue #", or just a "#" when it is preceded by a space.
# One match may span several comma/"and"/"or"-separated references,
# e.g., "issues 1, 2, and 3".
_ISSUE_REF_RE = re.compile(r"""
  (?P<prefix>\b(issues?|bugs?)[ \t]*(:|=)?)
  ([ \t]*(?P<project_name>\b[-a-z0-9]+[:\#])?
  (?P<number_sign>\#?)
  (?P<local_id>\d+)\b
  (,?[ \t]*(and|or)?)?)+""", re.IGNORECASE | re.VERBOSE)

# Matches one single issue reference within the text matched by
# _ISSUE_REF_RE above; used to pick the references back out one at a time.
_SINGLE_ISSUE_REF_RE = re.compile(r"""
  (?P<prefix>\b(issue|bug)[ \t]*)?
  (?P<project_name>\b[-a-z0-9]+[:\#])?
  (?P<number_sign>\#?)
  (?P<local_id>\d+)\b""", re.IGNORECASE | re.VERBOSE)
+
+
def CurryGetReferencedIssues(services):
  """Return a function to get ref'd issues with these persist objects bound.

  Currying is a convenient way to give the callback access to the persist
  objects, but without requiring that all possible persist objects be passed
  through the autolink registry and functions.

  Args:
    services: connection to issue, config, and project persistence layers.

  Returns:
    A ready-to-use function that accepts the arguments that autolink
    expects to pass to it.
  """

  def GetReferencedIssues(mr, ref_tuples):
    """Return dicts of open and closed issues referenced by these comments.

    Args:
      mr: commonly used info parsed from the request.
      ref_tuples: list of (project_name, local_id) tuples for each issue
        that is mentioned in the comment text. The project_name may be None,
        in which case the issue is assumed to be in the current project.

    Returns:
      A pair of dicts ({key: open issue}, {key: closed issue}) keyed by
      _IssueProjectKey, containing only issues the requester may view.
    """
    # References with no explicit project default to the current project.
    ref_projects = services.project.GetProjectsByName(
        mr.cnxn,
        [(ref_pn or mr.project_name) for ref_pn, _ in ref_tuples])
    issue_ids = services.issue.ResolveIssueRefs(
        mr.cnxn, ref_projects, mr.project_name, ref_tuples)
    open_issues, closed_issues = (
        tracker_helpers.GetAllowedOpenedAndClosedIssues(
            mr, issue_ids, services))

    # Key each issue so substitution callbacks can find it in O(1).
    open_dict = {}
    for issue in open_issues:
      open_dict[_IssueProjectKey(issue.project_name, issue.local_id)] = issue

    closed_dict = {}
    for issue in closed_issues:
      closed_dict[_IssueProjectKey(issue.project_name, issue.local_id)] = issue

    logging.info('autolinking dicts %r and %r', open_dict, closed_dict)

    return open_dict, closed_dict

  return GetReferencedIssues
+
+
def _ParseProjectNameMatch(project_name):
  """Return a minimal form of a regex-matched project name, or None.

  Args:
    project_name: string project name captured by a regex, possibly with
        surrounding whitespace and a trailing ':' or '#' delimiter.

  Returns:
    The bare project name with whitespace and delimiters stripped, or
    None if there was no usable content.
  """
  if project_name:
    return project_name.lstrip().rstrip('#: \t\n')
  return None
+
+
def ExtractProjectAndIssueIds(_mr, autolink_regex_match):
  """Convert a regex match for a textual reference into our internal form.

  Args:
    _mr: common info parsed from the user HTTP request (unused).
    autolink_regex_match: match of _ISSUE_REF_RE, which may span several
        comma- or "and"-separated issue references.

  Returns:
    A list of (project_name, local_id) tuples, one per referenced issue,
    where project_name is None when no explicit project was named.
  """
  whole_str = autolink_regex_match.group(0)
  refs = []
  # The outer match may contain several refs, e.g., "issues 1, 2, and 3";
  # pick them out one at a time with the single-issue regex.
  for submatch in _SINGLE_ISSUE_REF_RE.finditer(whole_str):
    ref = (_ParseProjectNameMatch(submatch.group('project_name')),
           int(submatch.group('local_id')))
    refs.append(ref)
    logging.info('issue ref = %s', ref)

  return refs
+
+
# This uses project name to avoid a lookup on project ID in a function
# that has no services object.
def _IssueProjectKey(project_name, local_id):
  """Return a dictionary key that identifies one referenced issue."""
  return '{}:{:d}'.format(project_name, local_id)
+
+
class IssueRefRun(object):
  """A text run that renders as a hyperlink to a referenced issue."""

  def __init__(self, issue, is_closed, project_name, prefix):
    """Build the link attributes for one referenced issue.

    Args:
      issue: the referenced issue object (supplies summary and local_id).
      is_closed: True if the issue is closed.
      project_name: string name of the project containing the issue.
      prefix: the user's own leading text, e.g., "issue " or "#".
    """
    self.tag = 'a'
    self.css_class = 'closed_ref' if is_closed else None
    self.title = issue.summary
    self.href = '/p/%s/issues/detail?id=%d' % (project_name, issue.local_id)
    self.content = '%s%d' % (prefix, issue.local_id)
    if is_closed and not prefix:
      # Pad bare numeric references to closed issues with spaces.
      self.content = ' %s ' % self.content
+
+
def ReplaceIssueRef(mr, autolink_regex_match, component_ref_artifacts):
  """Examine a textual reference and replace it with an autolink or not.

  Args:
    mr: commonly used info parsed from the request
    autolink_regex_match: regex match for the textual reference.
    component_ref_artifacts: result of earlier call to GetReferencedIssues:
        a pair of {issue_key: issue} dicts for open and closed issues.

  Returns:
    A list of IssueRefRuns and TextRuns to replace the textual
    reference. If there is an issue to autolink to, we return an HTML
    hyperlink. Otherwise, the run will have the original plain text.
  """
  open_dict, closed_dict = component_ref_artifacts
  original = autolink_regex_match.group(0)
  logging.info('called ReplaceIssueRef on %r', original)
  result_runs = []
  pos = 0  # Index of the first character not yet copied to the output.
  for submatch in _SINGLE_ISSUE_REF_RE.finditer(original):
    if submatch.start() >= pos:
      # Keep any plain text between the previous ref and this one.
      if original[pos: submatch.start()]:
        result_runs.append(template_helpers.TextRun(
            original[pos: submatch.start()]))
      replacement_run = _ReplaceSingleIssueRef(
          mr, submatch, open_dict, closed_dict)
      result_runs.append(replacement_run)
      pos = submatch.end()

  # Keep any trailing text that came after the last reference.
  if original[pos:]:
    result_runs.append(template_helpers.TextRun(original[pos:]))

  return result_runs
+
+
def _ReplaceSingleIssueRef(mr, submatch, open_dict, closed_dict):
  """Replace one issue reference with a link, or the original text.

  Args:
    mr: commonly used info parsed from the request.
    submatch: match of _SINGLE_ISSUE_REF_RE for one issue reference.
    open_dict: {issue_key: issue} for referenced issues that are open.
    closed_dict: {issue_key: issue} for referenced issues that are closed.

  Returns:
    An IssueRefRun when the issue was found, otherwise a TextRun holding
    the original plain text (we don't link to non-existent issues).
  """
  prefix = submatch.group('prefix') or ''
  project_name = submatch.group('project_name')
  if project_name:
    prefix += project_name  # Keep the user's own text, e.g., "chromium:".
    project_name = project_name.lstrip().rstrip(':#')
  else:
    # We need project_name for the URL, even if it is not in the text.
    project_name = mr.project_name

  number_sign = submatch.group('number_sign')
  if number_sign:
    prefix += number_sign
  local_id = int(submatch.group('local_id'))
  issue_key = _IssueProjectKey(project_name or mr.project_name, local_id)

  if issue_key in open_dict:
    return IssueRefRun(open_dict[issue_key], False, project_name, prefix)
  elif issue_key in closed_dict:
    return IssueRefRun(closed_dict[issue_key], True, project_name, prefix)
  else:  # Don't link to non-existent issues.
    return template_helpers.TextRun('%s%d' % (prefix, local_id))
+
+
class Autolink(object):
  """Maintains a registry of autolink syntax and can apply it to comments."""

  def __init__(self):
    # {component_name: (lookup_callback, match_to_reference_callback,
    #                   {regex: substitution_callback, ...}), ...}
    self.registry = {}

  def RegisterComponent(self, component_name, artifact_lookup_function,
                        match_to_reference_function, autolink_re_subst_dict):
    """Register all the autolink info for a software component.

    Args:
      component_name: string name of software component, must be unique.
      artifact_lookup_function: function to batch lookup all artifacts that
        might have been referenced in a set of comments:
        function(all_matches) -> referenced_artifacts
        the referenced_artifacts will be passed to each subst function.
      match_to_reference_function: convert a regex match object to
        some internal representation of the artifact reference.
      autolink_re_subst_dict: dictionary of regular expressions and
        the substitution function that should be called for each match:
        function(match, referenced_artifacts) -> replacement_markup
    """
    self.registry[component_name] = (artifact_lookup_function,
                                     match_to_reference_function,
                                     autolink_re_subst_dict)

  def GetAllReferencedArtifacts(self, mr, comment_text_list):
    """Call callbacks to lookup all artifacts possibly referenced.

    Args:
      mr: information parsed out of the user HTTP request.
      comment_text_list: list of comment content strings.

    Returns:
      Opaque object that can be passed to MarkupAutolinks.  Its
      structure happens to be {component_name: artifact_list, ...}.
    """
    all_referenced_artifacts = {}
    for comp, (lookup, match_to_refs, re_dict) in self.registry.iteritems():
      refs = set()
      # Collect the union of all references made in any of the comments.
      for comment_text in comment_text_list:
        for regex in re_dict:
          for match in regex.finditer(comment_text):
            additional_refs = match_to_refs(mr, match)
            if additional_refs:
              refs.update(additional_refs)

      # One batched lookup per component, rather than one per reference.
      all_referenced_artifacts[comp] = lookup(mr, refs)

    return all_referenced_artifacts

  def MarkupAutolinks(self, mr, text_runs, all_referenced_artifacts):
    """Loop over components and regexes, applying all substitutions.

    Args:
      mr: info parsed from the user's HTTP request.
      text_runs: List of text runs for the user's comment.
      all_referenced_artifacts: result of previous call to
        GetAllReferencedArtifacts.

    Returns:
      List of text runs for the entire user comment, some of which may have
      attributes that cause them to render as links in render-rich-text.ezt.
    """
    # (Python 2 idiom: items() returns a list that we can sort in place.)
    items = self.registry.items()
    items.sort()  # Process components in determinate alphabetical order.
    for component, (_lookup, _match_ref, re_subst_dict) in items:
      component_ref_artifacts = all_referenced_artifacts[component]
      for regex, subst_fun in re_subst_dict.iteritems():
        text_runs = self._ApplySubstFunctionToRuns(
            text_runs, regex, subst_fun, mr, component_ref_artifacts)

    return text_runs

  def _ApplySubstFunctionToRuns(
      self, text_runs, regex, subst_fun, mr, component_ref_artifacts):
    """Apply autolink regex and substitution function to each text run.

    Args:
      text_runs: list of TextRun objects with parts of the original comment.
      regex: Regular expression for detecting textual references to artifacts.
      subst_fun: function to return autolink markup, or original text.
      mr: common info parsed from the user HTTP request.
      component_ref_artifacts: already-looked-up destination artifacts to use
        when computing substitution text.

    Returns:
      A new list with more and smaller runs, some of which may have tag
      and link attributes set.
    """
    result_runs = []
    for run in text_runs:
      content = run.content
      if run.tag:
        # This chunk has already been substituted, don't allow nested
        # autolinking to mess up our output.
        result_runs.append(run)
      else:
        pos = 0  # Index of the first character not yet copied out.
        for match in regex.finditer(content):
          if match.start() > pos:
            # Keep the plain text between the previous match and this one.
            result_runs.append(template_helpers.TextRun(
                content[pos: match.start()]))
          replacement_runs = subst_fun(mr, match, component_ref_artifacts)
          result_runs.extend(replacement_runs)
          pos = match.end()

        if run.content[pos:]:  # Keep any text that came after the last match
          result_runs.append(template_helpers.TextRun(run.content[pos:]))

    # TODO(jrobbins): ideally we would merge consecutive plain text runs
    # so that regexes can match across those run boundaries.

    return result_runs
+
+
def RegisterAutolink(services):
  """Register all the autolink hooks, in substitution-priority order."""
  components = [
      ('01-linkify',
       lambda request, mr: None,
       lambda mr, match: None,
       {_IS_A_LINK_RE: Linkify}),
      ('02-tracker',
       CurryGetReferencedIssues(services),
       ExtractProjectAndIssueIds,
       {_ISSUE_REF_RE: ReplaceIssueRef}),
      ('03-versioncontrol',
       GetReferencedRevisions,
       ExtractRevNums,
       {_GIT_HASH_RE: ReplaceRevisionRef,
        _SVN_REF_RE: ReplaceRevisionRef}),
  ]
  for name, lookup, extract, subst_dict in components:
    services.autolink.RegisterComponent(name, lookup, extract, subst_dict)
diff --git a/appengine/monorail/features/commands.py b/appengine/monorail/features/commands.py
new file mode 100644
index 0000000..5f5ea9b
--- /dev/null
+++ b/appengine/monorail/features/commands.py
@@ -0,0 +1,305 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions that implement command-line-like issue updates."""
+
+import logging
+import re
+
+from framework import framework_bizobj
+from framework import framework_constants
+from services import user_svc
+from tracker import tracker_constants
+
+
def ParseQuickEditCommand(
    cnxn, cmd, issue, config, logged_in_user_id, services):
  """Parse a quick edit command into assignments and labels.

  Args:
    cnxn: connection to SQL database.
    cmd: string command text entered by the user.
    issue: issue object being edited; supplies the current value for any
        field that the command does not mention.
    config: Project's issue tracker configuration PB.
    logged_in_user_id: int user ID of the user making the change.
    services: connections to backend services.

  Returns:
    A 5-tuple (summary, status, owner_id, cc_ids, labels) of the new
    values to apply to the issue.
  """
  parts = _BreakCommandIntoParts(cmd)
  parser = AssignmentParser(None, easier_kv_labels=True)

  for key, value in parts:
    if key:  # A key=value assignment.
      valid_assignment = parser.ParseAssignment(
          cnxn, key, value, config, services, logged_in_user_id)
      if not valid_assignment:
        logging.info('ignoring assignment: %r, %r', key, value)

    elif value.startswith('-'):  # Removing a label.
      parser.labels_remove.append(_StandardizeLabel(value[1:], config))

    else:  # Adding a label.
      value = value.strip('+')
      parser.labels_add.append(_StandardizeLabel(value, config))

  # Fall back to the issue's current values for anything left unset.
  new_summary = parser.summary or issue.summary

  if parser.status is None:
    new_status = issue.status
  else:
    new_status = parser.status

  if parser.owner_id is None:
    new_owner_id = issue.owner_id
  else:
    new_owner_id = parser.owner_id

  new_cc_ids = [cc for cc in list(issue.cc_ids) + list(parser.cc_add)
                if cc not in parser.cc_remove]
  (new_labels, _update_add,
   _update_remove) = framework_bizobj.MergeLabels(
      issue.labels, parser.labels_add, parser.labels_remove,
      config.exclusive_label_prefixes)

  return new_summary, new_status, new_owner_id, new_cc_ids, new_labels
+
+
# Matches one "key=value" or "key:value" assignment.  The value may be a
# bare token (word chars plus - + . % @ = ,) or a single- or double-quoted
# string.
ASSIGN_COMMAND_RE = re.compile(
    r'(?P<key>\w+(?:-|\w)*)(?:=|:)'
    r'(?:(?P<value1>(?:-|\+|\.|%|@|=|,|\w)+)|'
    r'"(?P<value2>[^"]+)"|'
    r"'(?P<value3>[^']+)')",
    re.UNICODE | re.IGNORECASE)

# Matches one bare one-word label, optionally prefixed with "+" (add)
# or "-" (remove).
LABEL_COMMAND_RE = re.compile(
    r'(?P<label>(?:\+|-)?\w(?:-|\w)*)',
    re.UNICODE | re.IGNORECASE)
+
+
def _BreakCommandIntoParts(cmd):
  """Break a quick edit command into assignment and label parts.

  Args:
    cmd: string command entered by the user.

  Returns:
    A list of (key, value) pairs where key is the name of the field
    being assigned or None for OneWord labels, and value is the value
    to assign to it, or the whole label. Value may begin with a "+"
    which is just ignored, or a "-" meaning that the label should be
    removed, or neither.
  """
  parts = []
  remainder = cmd.strip()

  # Repeatedly peel one assignment or one label off the front of the
  # command until neither pattern matches.
  while True:
    match = ASSIGN_COMMAND_RE.match(remainder)
    if match:
      value = (match.group('value1') or match.group('value2') or
               match.group('value3'))
      parts.append((match.group('key'), value))
    else:
      match = LABEL_COMMAND_RE.match(remainder)
      if not match:
        break
      parts.append((None, match.group('label')))
    remainder = remainder[match.end():].strip()

  return parts
+
+
def _ParsePlusMinusList(value):
  """Parse a string containing a series of plus/minus values.

  Values are separated by whitespace, commas, and/or semicolons.

  Example:
    value = "one +two -three"
    plus = ['one', 'two']
    minus = ['three']

  Args:
    value: string containing unparsed plus minus values.

  Returns:
    A tuple of (plus, minus) lists of string values.
  """
  plus = []
  minus = []
  # Treat ';' and ',' as separators (in addition to whitespace).
  normalized = value.replace(',', ' ').replace(';', ' ')
  for term in normalized.split():
    if term.startswith('-'):
      minus.append(term.lstrip('-'))
    else:
      plus.append(term.lstrip('+'))  # optional leading '+'

  return plus, minus
+
+
class AssignmentParser(object):
  """Class to parse assignment statements in quick edits or email replies."""

  def __init__(self, template, easier_kv_labels=False):
    """Initialize the parser, optionally seeded from an issue template.

    Args:
      template: optional issue template object used to seed owner_id,
          summary, labels, and status.
      easier_kv_labels: if True, treat any unrecognized "Key=Value" pair
          as a "Key-Value" label (quick-edit only).
    """
    self.cc_list = []
    self.cc_add = []
    self.cc_remove = []
    self.owner_id = None
    self.status = None
    self.summary = None
    self.labels_list = []
    self.labels_add = []
    self.labels_remove = []
    self.branch = None

    # Accept "Anything=Anything" for quick-edit, but not in commit-log-commands
    # because it would be too error-prone when mixed with plain text comment
    # text and without autocomplete to help users triggering it via typos.
    self.easier_kv_labels = easier_kv_labels

    if template:
      if template.owner_id:
        self.owner_id = template.owner_id
      if template.summary:
        self.summary = template.summary
      if template.labels:
        self.labels_list = template.labels
      # Do not have a similar check as above for status because it could be an
      # empty string.
      self.status = template.status

  def ParseAssignment(self, cnxn, key, value, config, services, user_id):
    """Parse command-style text entered by the user to update an issue.

    E.g., The user may want to set the issue status to "reviewed", or
    set the owner to "me".

    Args:
      cnxn: connection to SQL database.
      key: string name of the field to set.
      value: string value to be interpreted.
      config: Projects' issue tracker configuration PB.
      services: connections to backends.
      user_id: int user ID of the user making the change.

    Returns:
      True if the line could be parsed as an assignment, False otherwise.
      Also, as a side-effect, the assigned values are built up in the instance
      variables of the parser.
    """
    valid_line = True

    if key == 'owner':
      if framework_constants.NO_VALUE_RE.match(value):
        self.owner_id = framework_constants.NO_USER_SPECIFIED
      else:
        try:
          self.owner_id = _LookupMeOrUsername(cnxn, value, services, user_id)
        except user_svc.NoSuchUserException:
          logging.warning('bad owner: %r when committing to project_id %r',
                          value, config.project_id)
          valid_line = False

    elif key == 'cc':
      try:
        add, remove = _ParsePlusMinusList(value)
        self.cc_add = [_LookupMeOrUsername(cnxn, cc, services, user_id)
                       for cc in add]
        self.cc_remove = [_LookupMeOrUsername(cnxn, cc, services, user_id)
                          for cc in remove]
        # Use cc_id (not user_id) as the loop variable: reusing user_id
        # here would shadow and, in Python 2, clobber the user_id
        # parameter (the requester) for the rest of this call.
        for cc_id in self.cc_add:
          if cc_id not in self.cc_list:
            self.cc_list.append(cc_id)
        self.cc_list = [cc_id for cc_id in self.cc_list
                        if cc_id not in self.cc_remove]
      except user_svc.NoSuchUserException:
        logging.warning('bad cc: %r when committing to project_id %r',
                        value, config.project_id)
        valid_line = False

    elif key == 'summary':
      self.summary = value

    elif key == 'status':
      if framework_constants.NO_VALUE_RE.match(value):
        self.status = ''
      else:
        self.status = _StandardizeStatus(value, config)

    elif key == 'label' or key == 'labels':
      self.labels_add, self.labels_remove = _ParsePlusMinusList(value)
      self.labels_add = [_StandardizeLabel(lab, config)
                         for lab in self.labels_add]
      self.labels_remove = [_StandardizeLabel(lab, config)
                            for lab in self.labels_remove]
      (self.labels_list, _update_add,
       _update_remove) = framework_bizobj.MergeLabels(
           self.labels_list, self.labels_add, self.labels_remove,
           config.exclusive_label_prefixes)

    elif (self.easier_kv_labels and
          key not in tracker_constants.RESERVED_PREFIXES and
          key and value):
      if key.startswith('-'):
        self.labels_remove.append(_StandardizeLabel(
            '%s-%s' % (key[1:], value), config))
      else:
        self.labels_add.append(_StandardizeLabel(
            '%s-%s' % (key, value), config))

    else:
      valid_line = False

    return valid_line
+
+
def _StandardizeStatus(status, config):
  """Attempt to match a user-supplied status with standard status values.

  Args:
    status: User-supplied status string.
    config: Project's issue tracker configuration PB.

  Returns:
    A canonicalized status string, that matches a standard project
    value, if found.
  """
  return _StandardizeArtifact(
      status, [wks.status for wks in config.well_known_statuses])
+
+
def _StandardizeLabel(label, config):
  """Attempt to match a user-supplied label with standard label values.

  Args:
    label: User-supplied label string.
    config: Project's issue tracker configuration PB.

  Returns:
    A canonicalized label string, that matches a standard project
    value, if found.
  """
  return _StandardizeArtifact(
      label, [wkl.label for wkl in config.well_known_labels])
+
+
def _StandardizeArtifact(artifact, well_known_artifacts):
  """Attempt to match a user-supplied artifact with standard artifact values.

  Args:
    artifact: User-supplied artifact string.
    well_known_artifacts: List of well known values of the artifact.

  Returns:
    The well-known value whose lowercase form matches the canonicalized
    user input, otherwise the canonicalized user input itself.
  """
  canonical = framework_bizobj.CanonicalizeLabel(artifact)
  lowered = canonical.lower()
  for known in well_known_artifacts:
    if lowered == known.lower():
      return known
  # No match - use user-supplied artifact.
  return canonical
+
+
def _LookupMeOrUsername(cnxn, username, services, user_id):
  """Resolve the 'me' keyword to the requester, or look up a username.

  Args:
    cnxn: connection to SQL database.
    username: string username entered by the user, or the keyword 'me'.
    services: connections to backends.
    user_id: int user ID of the requester, returned for 'me'.

  Returns:
    The int user ID of the named user.
  """
  if username.lower() != 'me':
    return services.user.LookupUserID(cnxn, username)
  return user_id
diff --git a/appengine/monorail/features/commitlogcommands.py b/appengine/monorail/features/commitlogcommands.py
new file mode 100644
index 0000000..16c6e3d
--- /dev/null
+++ b/appengine/monorail/features/commitlogcommands.py
@@ -0,0 +1,155 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implements processing of issue update command lines.
+
+This currently processes the leading command-lines that appear
+at the top of inbound email messages to update existing issues.
+
+It could also be expanded to allow new issues to be created. Or, to
+handle commands in commit-log messages if the version control system
+invokes a webhook.
+"""
+
+import logging
+import re
+
+from features import commands
+from features import notify
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_helpers
+from services import issue_svc
+
+
# Actions have separate 'Parse' and 'Run' implementations to allow better
# testing coverage.
class IssueAction(object):
  """Base class for all issue commands."""

  def __init__(self):
    self.parser = commands.AssignmentParser(None)  # Accumulates assignments.
    self.description = ''        # Free-form comment text after the commands.
    self.inbound_message = None  # Original message text, when kept.
    self.commenter_id = None     # int user ID of the message author.
    self.project = None          # Project PB, set by Parse().
    self.config = None           # Project config PB, set by Parse().
    self.hostport = framework_helpers.GetHostPort()  # Default; see Parse().

  def Parse(
      self, cnxn, project_name, commenter_id, lines, services,
      strip_quoted_lines=False, hostport=None):
    """Populate object from raw user input.

    Args:
      cnxn: connection to SQL database.
      project_name: string name of the project containing the issue.
      commenter_id: int user ID of the message author.
      lines: list of message body lines.  NOTE: trailing blank lines may
          be popped off this list in place.
      services: connections to backends.
      strip_quoted_lines: if True, remove quoted text from the description.
      hostport: optional host:port to use in links; overrides the default.
    """
    self.project = services.project.GetProjectByName(cnxn, project_name)
    self.config = services.config.GetProjectConfig(
        cnxn, self.project.project_id)
    self.commenter_id = commenter_id

    # Process all valid key-value lines. Once we find a non key-value line,
    # treat the rest as the 'description'.
    for idx, line in enumerate(lines):
      valid_line = False
      m = re.match(r'^\s*(\w+)\s*\:\s*(.*?)\s*$', line)
      if m:
        # Process Key-Value
        key = m.group(1).lower()
        value = m.group(2)
        valid_line = self.parser.ParseAssignment(
            cnxn, key, value, self.config, services, self.commenter_id)

      if not valid_line:
        # Not Key-Value. Treat this line and remaining as 'description'.
        # First strip off any trailing blank lines.
        while lines and not lines[-1].strip():
          lines.pop()
        if lines:
          self.description = '\n'.join(lines[idx:])
        break

    if strip_quoted_lines:
      self.inbound_message = '\n'.join(lines)
      self.description = emailfmt.StripQuotedText(self.description)

    if hostport:
      self.hostport = hostport

    # Log everything that was parsed, to help debug inbound email handling.
    for key in ['owner_id', 'cc_add', 'cc_remove', 'summary',
                'status', 'labels_add', 'labels_remove', 'branch']:
      logging.info('\t%s: %s', key, self.parser.__dict__[key])

    for key in ['commenter_id', 'description', 'hostport']:
      logging.info('\t%s: %s', key, self.__dict__[key])

  def Run(self, cnxn, services, allow_edit=True):
    """Execute this action.  Subclasses must override this."""
    raise NotImplementedError()
+
+
class UpdateIssueAction(IssueAction):
  """Implements processing email replies or the "update issue" command."""

  def __init__(self, local_id):
    """Remember which issue within the project to update.

    Args:
      local_id: int issue ID within the project named in Parse().
    """
    super(UpdateIssueAction, self).__init__()
    self.local_id = local_id

  def Run(self, cnxn, services, allow_edit=True):
    """Updates an issue based on the parsed commands.

    Args:
      cnxn: connection to SQL database.
      services: connections to backends.
      allow_edit: if False, only the plain-text comment is applied; all
          parsed field assignments are discarded.
    """
    try:
      issue = services.issue.GetIssueByLocalID(
          cnxn, self.project.project_id, self.local_id)
    except issue_svc.NoSuchIssueException:
      return  # Issue does not exist, so do nothing

    old_owner_id = issue.owner_id
    # Fall back to the issue's current values for anything left unset.
    new_summary = self.parser.summary or issue.summary

    if self.parser.status is None:
      new_status = issue.status
    else:
      new_status = self.parser.status

    if self.parser.owner_id is None:
      new_owner_id = issue.owner_id
    else:
      new_owner_id = self.parser.owner_id

    new_cc_ids = [cc for cc in list(issue.cc_ids) + list(self.parser.cc_add)
                  if cc not in self.parser.cc_remove]
    (new_labels, _update_add,
     _update_remove) = framework_bizobj.MergeLabels(
         issue.labels, self.parser.labels_add,
         self.parser.labels_remove,
         self.config.exclusive_label_prefixes)

    new_field_values = issue.field_values  # TODO(jrobbins): edit custom ones

    if not allow_edit:
      # If user can't edit, then only consider the plain-text comment,
      # and set all other fields back to their original values.
      logging.info('Processed reply from user who can not edit issue')
      new_summary = issue.summary
      new_status = issue.status
      new_owner_id = issue.owner_id
      new_cc_ids = issue.cc_ids
      new_labels = issue.labels
      new_field_values = issue.field_values

    amendments, _comment_pb = services.issue.ApplyIssueComment(
        cnxn, services, self.commenter_id,
        self.project.project_id, issue.local_id, new_summary, new_status,
        new_owner_id, new_cc_ids, new_labels, new_field_values,
        issue.component_ids, issue.blocked_on_iids, issue.blocking_iids,
        issue.dangling_blocked_on_refs, issue.dangling_blocking_refs,
        issue.merged_into, comment=self.description,
        inbound_message=self.inbound_message)

    logging.info('Updated issue %s:%s w/ amendments %r',
                 self.project.project_name, issue.local_id, amendments)

    if amendments or self.description:  # Avoid completely empty comments.
      cmnts = services.issue.GetCommentsForIssue(cnxn, issue.issue_id)
      notify.PrepareAndSendIssueChangeNotification(
          self.project.project_id, self.local_id, self.hostport,
          self.commenter_id, len(cmnts) - 1, old_owner_id=old_owner_id)
diff --git a/appengine/monorail/features/cues.py b/appengine/monorail/features/cues.py
new file mode 100644
index 0000000..f56821d
--- /dev/null
+++ b/appengine/monorail/features/cues.py
@@ -0,0 +1,50 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Simple servlet to store the fact that a user has dismissed a cue card.
+
+Cue cards are small on-page help items that appear when the user has
+done a certain action or is viewing a project that is in a certain
+state. The cue card give the user a suggestion of what he/she should
+do next. Cue cards can be dismissed to reduce visual clutter on the
+page once the user has learned the content of the suggestion. That
+preference is recorded in the User PB, and the same cue card will not
+be presented again to the same user.
+
+Example: The logged in user has dismissed the cue card that tells him/her how
+to search for numbers in the issue tracker:
+
+ POST /hosting/cues.do
+ cue_id=search_for_numbers&token=12344354534
+"""
+
+
+import logging
+
+from framework import jsonfeed
+from framework import monorailrequest
+
+
class SetCuesFeed(jsonfeed.JsonFeed):
  """A class to process an AJAX request to dismiss a cue card."""

  def HandleRequest(self, mr):
    """Processes a user's POST request to dismiss a cue card.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      InputException: if no cue_id was supplied in the request.
    """

    cue_id = mr.GetParam('cue_id')
    if not cue_id:
      raise monorailrequest.InputException('no cue_id specified')

    logging.info('Handling user set cue request: %r', cue_id)
    # NOTE(review): this appends to the user PB's list in place before
    # passing that same list to UpdateUserSettings; presumably the
    # settings layer tolerates the aliasing — confirm.
    new_dismissed_cues = mr.auth.user_pb.dismissed_cues
    new_dismissed_cues.append(cue_id)
    self.services.user.UpdateUserSettings(
        mr.cnxn, mr.auth.user_id, mr.auth.user_pb,
        dismissed_cues=new_dismissed_cues)
+
diff --git a/appengine/monorail/features/filterrules.py b/appengine/monorail/features/filterrules.py
new file mode 100644
index 0000000..fb5f84a
--- /dev/null
+++ b/appengine/monorail/features/filterrules.py
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the filter rules feature."""
+
+import logging
+
+from features import filterrules_helpers
+from framework import jsonfeed
+from tracker import tracker_constants
+
+
+class RecomputeDerivedFieldsTask(jsonfeed.InternalTask):
+ """JSON servlet that recomputes derived fields on a batch of issues."""
+
+ def HandleRequest(self, mr):
+ """Recompute derived field values on one range of issues in a shard."""
+ logging.info(
+ 'params are %r %r %r %r', mr.specified_project_id, mr.lower_bound,
+ mr.upper_bound, mr.shard_id)
+ project = self.services.project.GetProject(
+ mr.cnxn, mr.specified_project_id)
+ config = self.services.config.GetProjectConfig(
+ mr.cnxn, mr.specified_project_id)
+ filterrules_helpers.RecomputeAllDerivedFieldsNow(
+ mr.cnxn, self.services, project, config, lower_bound=mr.lower_bound,
+ upper_bound=mr.upper_bound, shard_id=mr.shard_id)
+
+ return {
+ 'success': True,
+ }
+
+
+class ReindexQueueCron(jsonfeed.InternalTask):
+ """JSON servlet that reindexes some issues each minute, as needed."""
+
+ def HandleRequest(self, mr):
+    """Reindex issues that are listed in the reindex table."""
+ num_reindexed = self.services.issue.ReindexIssues(
+ mr.cnxn, tracker_constants.MAX_ISSUES_TO_REINDEX_PER_MINUTE,
+ self.services.user)
+
+ return {
+ 'num_reindexed': num_reindexed,
+ }
diff --git a/appengine/monorail/features/filterrules_helpers.py b/appengine/monorail/features/filterrules_helpers.py
new file mode 100644
index 0000000..695d2ce
--- /dev/null
+++ b/appengine/monorail/features/filterrules_helpers.py
@@ -0,0 +1,699 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the filter rules helper functions."""
+
+import logging
+import re
+
+from google.appengine.api import taskqueue
+
+import settings
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import monorailrequest
+from framework import urls
+from framework import validate
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import query2ast
+from search import searchpipeline
+from services import user_svc
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
+# Maximum number of filter rules that can be specified in a given
+# project. This helps us bound the amount of time needed to
+# (re)compute derived fields.
+MAX_RULES = 200
+
+BLOCK = tracker_constants.RECOMPUTE_DERIVED_FIELDS_BLOCK_SIZE
+
+
+# TODO(jrobbins): implement a more efficient way to update just those
+# issues affected by a specific component change.
+def RecomputeAllDerivedFields(cnxn, services, project, config):
+ """Create work items to update all issues after filter rule changes.
+
+ Args:
+ cnxn: connection to SQL database.
+ services: connections to backend services.
+ project: Project PB for the project that was edited.
+ config: ProjectIssueConfig PB for the project that was edited,
+ including the edits made.
+ """
+ if not settings.recompute_derived_fields_in_worker:
+ # Background tasks are not enabled, just do everything in the servlet.
+ RecomputeAllDerivedFieldsNow(cnxn, services, project, config)
+ return
+
+ highest_id = services.issue.GetHighestLocalID(cnxn, project.project_id)
+ if highest_id == 0:
+ return # No work to do.
+
+ # Enqueue work items for blocks of issues to recompute.
+ steps = range(1, highest_id + 1, BLOCK)
+ steps.reverse() # Update higher numbered issues sooner, old issues last.
+ # Cycle through shard_ids just to load-balance among the replicas. Each
+ # block includes all issues in that local_id range, not just 1/10 of them.
+ shard_id = 0
+ for step in steps:
+ params = {
+ 'project_id': project.project_id,
+ 'lower_bound': step,
+ 'upper_bound': min(step + BLOCK, highest_id + 1),
+ 'shard_id': shard_id,
+ }
+ logging.info('adding task with params %r', params)
+ taskqueue.add(
+ url=urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', params=params)
+ shard_id = (shard_id + 1) % settings.num_logical_shards
+
+
+def RecomputeAllDerivedFieldsNow(
+ cnxn, services, project, config, lower_bound=None, upper_bound=None,
+ shard_id=None):
+ """Re-apply all filter rules to all issues in a project.
+
+ Args:
+ cnxn: connection to SQL database.
+ services: connections to persistence layer.
+ project: Project PB for the project that was changed.
+ config: ProjectIssueConfig for that project.
+ lower_bound: optional int lowest issue ID to consider, inclusive.
+ upper_bound: optional int highest issue ID to consider, exclusive.
+ shard_id: optional int shard_id to read from one replica.
+
+ SIDE-EFFECT: updates all issues in the project. Stores and re-indexes
+ all those that were changed.
+ """
+ if lower_bound is not None and upper_bound is not None:
+ issues = services.issue.GetIssuesByLocalIDs(
+ cnxn, project.project_id, range(lower_bound, upper_bound),
+ shard_id=shard_id)
+ else:
+ issues = services.issue.GetAllIssuesInProject(cnxn, project.project_id)
+
+ rules = services.features.GetFilterRules(cnxn, project.project_id)
+ predicate_asts = ParsePredicateASTs(rules, config, None)
+ modified_issues = []
+ for issue in issues:
+ if ApplyGivenRules(cnxn, services, issue, config, rules, predicate_asts):
+ modified_issues.append(issue)
+
+ services.issue.UpdateIssues(cnxn, modified_issues, just_derived=True)
+
+ # Doing the FTS indexing can be too slow, so queue up the issues
+ # that need to be re-indexed by a cron-job later.
+ services.issue.EnqueueIssuesForIndexing(
+ cnxn, [issue.issue_id for issue in modified_issues])
+
+
+def ParsePredicateASTs(rules, config, me_user_id):
+ """Parse the given rules in QueryAST PBs."""
+ predicates = [rule.predicate for rule in rules]
+ if me_user_id:
+ predicates = [searchpipeline.ReplaceKeywordsWithUserID(me_user_id, pred)
+ for pred in predicates]
+ predicate_asts = [
+ query2ast.ParseUserQuery(pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ for pred in predicates]
+ return predicate_asts
+
+
+def ApplyFilterRules(cnxn, services, issue, config):
+ """Apply the filter rules for this project to the given issue.
+
+ Args:
+ cnxn: database connection, used to look up user IDs.
+ services: persistence layer for users, issues, and projects.
+ issue: An Issue PB that has just been updated with new explicit values.
+ config: The project's issue tracker config PB.
+
+ Returns:
+ True if any derived_* field of the issue was changed.
+
+ SIDE-EFFECT: update the derived_* fields of the Issue PB.
+ """
+ rules = services.features.GetFilterRules(cnxn, issue.project_id)
+ predicate_asts = ParsePredicateASTs(rules, config, None)
+ return ApplyGivenRules(cnxn, services, issue, config, rules, predicate_asts)
+
+
+def ApplyGivenRules(cnxn, services, issue, config, rules, predicate_asts):
+ """Apply the filter rules for this project to the given issue.
+
+ Args:
+ cnxn: database connection, used to look up user IDs.
+ services: persistence layer for users, issues, and projects.
+ issue: An Issue PB that has just been updated with new explicit values.
+ config: The project's issue tracker config PB.
+ rules: list of FilterRule PBs.
+
+ Returns:
+ True if any derived_* field of the issue was changed.
+
+ SIDE-EFFECT: update the derived_* fields of the Issue PB.
+ """
+ (derived_owner_id, derived_status, derived_cc_ids,
+ derived_labels, derived_notify_addrs) = _ComputeDerivedFields(
+ cnxn, services, issue, config, rules, predicate_asts)
+
+ any_change = (derived_owner_id != issue.derived_owner_id or
+ derived_status != issue.derived_status or
+ derived_cc_ids != issue.derived_cc_ids or
+ derived_labels != issue.derived_labels or
+ derived_notify_addrs != issue.derived_notify_addrs)
+
+ # Remember any derived values.
+ issue.derived_owner_id = derived_owner_id
+ issue.derived_status = derived_status
+ issue.derived_cc_ids = derived_cc_ids
+ issue.derived_labels = derived_labels
+ issue.derived_notify_addrs = derived_notify_addrs
+
+ return any_change
+
+
+def _ComputeDerivedFields(cnxn, services, issue, config, rules, predicate_asts):
+ """Compute derived field values for an issue based on filter rules.
+
+ Args:
+ cnxn: database connection, used to look up user IDs.
+ services: persistence layer for users, issues, and projects.
+ issue: the issue to examine.
+ config: ProjectIssueConfig for the project containing the issue.
+ rules: list of FilterRule PBs.
+ predicate_asts: QueryAST PB for each rule.
+
+ Returns:
+ A 5-tuple of derived values for owner_id, status, cc_ids, labels, and
+ notify_addrs. These values are the result of applying all rules in order.
+ Filter rules only produce derived values that do not conflict with the
+ explicit field values of the issue.
+ """
+ excl_prefixes = config.exclusive_label_prefixes
+ # Examine the explicit labels and Cc's on the issue.
+ lower_labels = [lab.lower() for lab in issue.labels]
+ label_set = set(lower_labels)
+ cc_set = set(issue.cc_ids)
+ excl_prefixes_used = set()
+ for lab in lower_labels:
+ prefix = lab.split('-')[0]
+ if prefix in excl_prefixes:
+ excl_prefixes_used.add(prefix)
+ prefix_values_added = {}
+
+ # Start with the assumption that rules don't change anything, then
+ # accumulate changes.
+ derived_owner_id = framework_constants.NO_USER_SPECIFIED
+ derived_status = ''
+ # Get the component auto-cc's before even starting the rules.
+ # TODO(jrobbins): take this out and instead get component CC IDs
+ # on each access and search, but that will be a pretty big change.
+ derived_cc_ids = [
+ auto_cc_id
+ for auto_cc_id in component_helpers.GetComponentCcIDs(issue, config)
+ if auto_cc_id not in cc_set]
+ derived_labels = []
+ derived_notify_addrs = []
+
+ # Apply each rule in order. Later rules see the results of earlier rules.
+ # Later rules can overwrite or add to results of earlier rules.
+ # TODO(jrobbins): also pass in in-progress values for owner and CCs so
+ # that early rules that set those can affect later rules that check them.
+ for rule, predicate_ast in zip(rules, predicate_asts):
+ (rule_owner_id, rule_status, rule_add_cc_ids,
+ rule_add_labels, rule_add_notify) = _ApplyRule(
+ cnxn, services, rule, predicate_ast, issue, label_set, config)
+
+ # logging.info(
+ # 'rule "%s" gave %r, %r, %r, %r, %r',
+ # rule.predicate, rule_owner_id, rule_status, rule_add_cc_ids,
+ # rule_add_labels, rule_add_notify)
+
+ if rule_owner_id and not issue.owner_id:
+ derived_owner_id = rule_owner_id
+
+ if rule_status and not issue.status:
+ derived_status = rule_status
+
+ for cc_id in rule_add_cc_ids:
+ if cc_id not in cc_set:
+ derived_cc_ids.append(cc_id)
+ cc_set.add(cc_id)
+
+ for lab in rule_add_labels:
+ lab_lower = lab.lower()
+ if lab_lower in label_set:
+ continue # We already have that label.
+ prefix = lab_lower.split('-')[0]
+ if '-' in lab_lower and prefix in excl_prefixes:
+ if prefix in excl_prefixes_used:
+ continue # Issue already has that prefix.
+      # Replace any earlier-added label that had the same exclusive prefix.
+ if prefix in prefix_values_added:
+ label_set.remove(prefix_values_added[prefix].lower())
+ derived_labels = [dl for dl in derived_labels
+ if dl != prefix_values_added[prefix]]
+ prefix_values_added[prefix] = lab
+
+ derived_labels.append(lab)
+ label_set.add(lab_lower)
+
+ for addr in rule_add_notify:
+ if addr not in derived_notify_addrs:
+ derived_notify_addrs.append(addr)
+
+ return (derived_owner_id, derived_status, derived_cc_ids, derived_labels,
+ derived_notify_addrs)
+
+
+def EvalPredicate(
+ cnxn, services, predicate_ast, issue, label_set, config, owner_id, cc_ids,
+ status):
+ """Return True if the given issue satisfies the given predicate.
+
+ Args:
+ cnxn: Connection to SQL database.
+ services: persistence layer for users and issues.
+ predicate_ast: QueryAST for rule or saved query string.
+ issue: Issue PB of the issue to evaluate.
+ label_set: set of lower-cased labels on the issue.
+ config: ProjectIssueConfig for the project that contains the issue.
+ owner_id: int user ID of the issue owner.
+ cc_ids: list of int user IDs of the users Cc'd on the issue.
+ status: string status value of the issue.
+
+ Returns:
+ True if the issue satisfies the predicate.
+
+ Note: filter rule evaluation passes in only the explicit owner_id,
+ cc_ids, and status whereas subscription evaluation passes in the
+ combination of explicit values and derived values.
+ """
+ # TODO(jrobbins): Call ast2ast to simplify the predicate and do
+ # most lookups. Refactor to allow that to be done once.
+ project = services.project.GetProject(cnxn, config.project_id)
+ for conj in predicate_ast.conjunctions:
+ if all(_ApplyCond(cnxn, services, project, cond, issue, label_set, config,
+ owner_id, cc_ids, status)
+ for cond in conj.conds):
+ return True
+
+ # All OR-clauses were evaluated, but none of them was matched.
+ return False
+
+
+def _ApplyRule(
+ cnxn, services, rule_pb, predicate_ast, issue, label_set, config):
+ """Test if the given rule should fire and return its result.
+
+ Args:
+ cnxn: database connection, used to look up user IDs.
+ services: persistence layer for users and issues.
+ rule_pb: FilterRule PB instance with a predicate and various actions.
+ predicate_ast: QueryAST for the rule predicate.
+ issue: The Issue PB to be considered.
+ label_set: set of lowercased labels from an issue's explicit
+      label_list plus any labels that have accumulated from previous rules.
+ config: ProjectIssueConfig for the project containing the issue.
+
+ Returns:
+ A 5-tuple of the results from this rule: derived owner id, status,
+ cc_ids to add, labels to add, and notify addresses to add.
+ """
+ if EvalPredicate(
+ cnxn, services, predicate_ast, issue, label_set, config,
+ issue.owner_id, issue.cc_ids, issue.status):
+ logging.info('rule adds: %r', rule_pb.add_labels)
+ return (rule_pb.default_owner_id, rule_pb.default_status,
+ rule_pb.add_cc_ids, rule_pb.add_labels,
+ rule_pb.add_notify_addrs)
+ else:
+ return None, None, [], [], []
+
+
+def _ApplyCond(
+ cnxn, services, project, term, issue, label_set, config, owner_id, cc_ids,
+ status):
+  """Return True if the given issue satisfies the given predicate term."""
+ op = term.op
+ vals = term.str_values or term.int_values
+ # Since rules are per-project, there'll be exactly 1 field
+ fd = term.field_defs[0]
+ field = fd.field_name
+
+ if field == 'label':
+ return _Compare(op, vals, label_set)
+ if field == 'component':
+ return _CompareComponents(config, op, vals, issue.component_ids)
+ if field == 'any_field':
+ return _Compare(op, vals, label_set) or _Compare(op, vals, [issue.summary])
+ if field == 'attachments':
+ return _Compare(op, vals, [issue.attachment_count])
+ if field == 'blocked':
+ return _Compare(op, vals, issue.blocked_on_iids)
+ if field == 'blockedon':
+ return _CompareIssueRefs(
+ cnxn, services, project, op, term.str_values, issue.blocked_on_iids)
+ if field == 'blocking':
+ return _CompareIssueRefs(
+ cnxn, services, project, op, term.str_values, issue.blocking_iids)
+ if field == 'cc':
+ return _CompareUsers(cnxn, services.user, op, vals, cc_ids)
+ if field == 'closed':
+ return (issue.closed_timestamp and
+ _Compare(op, vals, [issue.closed_timestamp]))
+ if field == 'id':
+ return _Compare(op, vals, [issue.local_id])
+ if field == 'mergedinto':
+ return _CompareIssueRefs(
+ cnxn, services, project, op, term.str_values, [issue.merged_into or 0])
+ if field == 'modified':
+ return (issue.modified_timestamp and
+ _Compare(op, vals, [issue.modified_timestamp]))
+ if field == 'open':
+ # TODO(jrobbins): this just checks the explicit status, not the result
+ # of any previous rules.
+ return tracker_helpers.MeansOpenInProject(status, config)
+ if field == 'opened':
+ return (issue.opened_timestamp and
+ _Compare(op, vals, [issue.opened_timestamp]))
+ if field == 'owner':
+ return _CompareUsers(cnxn, services.user, op, vals, [owner_id])
+ if field == 'reporter':
+ return _CompareUsers(cnxn, services.user, op, vals, [issue.reporter_id])
+ if field == 'stars':
+ return _Compare(op, vals, [issue.star_count])
+ if field == 'status':
+ return _Compare(op, vals, [status.lower()])
+ if field == 'summary':
+ return _Compare(op, vals, [issue.summary])
+
+ # Since rules are per-project, it makes no sense to support field project.
+ # We would need to load comments to support fields comment, commentby,
+ # description, attachment.
+ # Supporting starredby is probably not worth the complexity.
+
+ logging.info('Rule with unsupported field %r was False', field)
+ return False
+
+
+def _CheckTrivialCases(op, issue_values):
+ """Check has:x and -has:x terms and no values. Otherwise, return None."""
+ # We can do these operators without looking up anything or even knowing
+ # which field is being checked.
+ issue_values_exist = bool(
+ issue_values and issue_values != [''] and issue_values != [0])
+ if op == ast_pb2.QueryOp.IS_DEFINED:
+ return issue_values_exist
+ elif op == ast_pb2.QueryOp.IS_NOT_DEFINED:
+ return not issue_values_exist
+ elif not issue_values_exist:
+ # No other operator can match empty values.
+ return op in (ast_pb2.QueryOp.NE, ast_pb2.QueryOp.NOT_TEXT_HAS)
+
+ return None # Caller should continue processing the term.
+
+def _CompareComponents(config, op, rule_values, issue_values):
+ """Compare the components specified in the rule vs those in the issue."""
+ trivial_result = _CheckTrivialCases(op, issue_values)
+ if trivial_result is not None:
+ return trivial_result
+
+ exact = op in (ast_pb2.QueryOp.EQ, ast_pb2.QueryOp.NE)
+ rule_component_ids = set()
+ for path in rule_values:
+ rule_component_ids.update(tracker_bizobj.FindMatchingComponentIDs(
+ path, config, exact=exact))
+
+ if op == ast_pb2.QueryOp.TEXT_HAS or op == ast_pb2.QueryOp.EQ:
+ return any(rv in issue_values for rv in rule_component_ids)
+ elif op == ast_pb2.QueryOp.NOT_TEXT_HAS or op == ast_pb2.QueryOp.NE:
+ return all(rv not in issue_values for rv in rule_component_ids)
+
+ return False
+
+
+def _CompareIssueRefs(
+ cnxn, services, project, op, rule_str_values, issue_values):
+ """Compare the issues specified in the rule vs referenced in the issue."""
+ trivial_result = _CheckTrivialCases(op, issue_values)
+ if trivial_result is not None:
+ return trivial_result
+
+ rule_refs = []
+ for str_val in rule_str_values:
+ ref = tracker_bizobj.ParseIssueRef(str_val)
+ if ref:
+ rule_refs.append(ref)
+ rule_ref_project_names = set(
+ pn for pn, local_id in rule_refs if pn)
+ rule_ref_projects_dict = services.project.GetProjectsByName(
+ cnxn, rule_ref_project_names)
+ rule_ref_projects_dict[project.project_name] = project
+ rule_iids = services.issue.ResolveIssueRefs(
+ cnxn, rule_ref_projects_dict, project.project_name, rule_refs)
+
+ if op == ast_pb2.QueryOp.TEXT_HAS:
+ op = ast_pb2.QueryOp.EQ
+ if op == ast_pb2.QueryOp.NOT_TEXT_HAS:
+ op = ast_pb2.QueryOp.NE
+
+ return _Compare(op, rule_iids, issue_values)
+
+
+def _CompareUsers(cnxn, user_service, op, rule_values, issue_values):
+ """Compare the user(s) specified in the rule and the issue."""
+  # Note that all occurrences of "me" in rule_values should have already
+ # been resolved to str(user_id) of the subscribing user.
+ # TODO(jrobbins): Project filter rules should not be allowed to have "me".
+
+ trivial_result = _CheckTrivialCases(op, issue_values)
+ if trivial_result is not None:
+ return trivial_result
+
+ try:
+ return _CompareUserIDs(op, rule_values, issue_values)
+ except ValueError:
+ return _CompareEmails(cnxn, user_service, op, rule_values, issue_values)
+
+
+def _CompareUserIDs(op, rule_values, issue_values):
+ """Compare users according to specified user ID integer strings."""
+ rule_user_ids = [int(uid_str) for uid_str in rule_values]
+
+ if op == ast_pb2.QueryOp.TEXT_HAS or op == ast_pb2.QueryOp.EQ:
+ return any(rv in issue_values for rv in rule_user_ids)
+ elif op == ast_pb2.QueryOp.NOT_TEXT_HAS or op == ast_pb2.QueryOp.NE:
+ return all(rv not in issue_values for rv in rule_user_ids)
+
+ logging.info('unexpected numeric user operator %r %r %r',
+ op, rule_values, issue_values)
+ return False
+
+
+def _CompareEmails(cnxn, user_service, op, rule_values, issue_values):
+ """Compare users based on email addresses."""
+ issue_emails = user_service.LookupUserEmails(cnxn, issue_values).values()
+
+ if op == ast_pb2.QueryOp.TEXT_HAS:
+ return any(_HasText(rv, issue_emails) for rv in rule_values)
+ elif op == ast_pb2.QueryOp.NOT_TEXT_HAS:
+ return all(not _HasText(rv, issue_emails) for rv in rule_values)
+ elif op == ast_pb2.QueryOp.EQ:
+ return any(rv in issue_emails for rv in rule_values)
+ elif op == ast_pb2.QueryOp.NE:
+ return all(rv not in issue_emails for rv in rule_values)
+
+ logging.info('unexpected user operator %r %r %r',
+ op, rule_values, issue_values)
+ return False
+
+
+def _Compare(op, rule_values, issue_values):
+ """Compare the values specified in the rule and the issue."""
+ trivial_result = _CheckTrivialCases(op, issue_values)
+ if trivial_result is not None:
+ return trivial_result
+
+ if (op in [ast_pb2.QueryOp.TEXT_HAS, ast_pb2.QueryOp.NOT_TEXT_HAS] and
+ issue_values and not isinstance(min(issue_values), basestring)):
+ return False # Empty or numeric fields cannot match substrings
+ elif op == ast_pb2.QueryOp.TEXT_HAS:
+ return any(_HasText(rv, issue_values) for rv in rule_values)
+ elif op == ast_pb2.QueryOp.NOT_TEXT_HAS:
+ return all(not _HasText(rv, issue_values) for rv in rule_values)
+
+ val_type = type(min(issue_values))
+ if val_type == int or val_type == long:
+ try:
+ rule_values = [int(rv) for rv in rule_values]
+ except ValueError:
+ logging.info('rule value conversion to int failed: %r', rule_values)
+ return False
+
+ if op == ast_pb2.QueryOp.EQ:
+ return any(rv in issue_values for rv in rule_values)
+ elif op == ast_pb2.QueryOp.NE:
+ return all(rv not in issue_values for rv in rule_values)
+
+ if val_type != int and val_type != long:
+ return False # Inequalities only work on numeric fields
+
+ if op == ast_pb2.QueryOp.GT:
+ return min(issue_values) > min(rule_values)
+ elif op == ast_pb2.QueryOp.GE:
+ return min(issue_values) >= min(rule_values)
+ elif op == ast_pb2.QueryOp.LT:
+ return max(issue_values) < max(rule_values)
+ elif op == ast_pb2.QueryOp.LE:
+ return max(issue_values) <= max(rule_values)
+
+ logging.info('unexpected operator %r %r %r', op, rule_values, issue_values)
+ return False
+
+
+def _HasText(rule_text, issue_values):
+ """Return True if the issue contains the rule text, case insensitive."""
+ rule_lower = rule_text.lower()
+ for iv in issue_values:
+ if iv is not None and rule_lower in iv.lower():
+ return True
+
+ return False
+
+
+def MakeRule(predicate, default_status=None, default_owner_id=None,
+ add_cc_ids=None, add_labels=None, add_notify=None):
+ """Make a FilterRule PB with the supplied information.
+
+ Args:
+ predicate: string query that will trigger the rule if satisfied.
+ default_status: optional default status to set if rule fires.
+ default_owner_id: optional default owner_id to set if rule fires.
+ add_cc_ids: optional cc ids to set if rule fires.
+ add_labels: optional label strings to set if rule fires.
+ add_notify: optional notify email addresses to set if rule fires.
+
+ Returns:
+ A new FilterRule PB.
+ """
+ rule_pb = tracker_pb2.FilterRule()
+ rule_pb.predicate = predicate
+
+ if add_labels:
+ rule_pb.add_labels = add_labels
+ if default_status:
+ rule_pb.default_status = default_status
+ if default_owner_id:
+ rule_pb.default_owner_id = default_owner_id
+ if add_cc_ids:
+ rule_pb.add_cc_ids = add_cc_ids
+ if add_notify:
+ rule_pb.add_notify_addrs = add_notify
+
+ return rule_pb
+
+
+def ParseRules(cnxn, post_data, user_service, errors, prefix=''):
+ """Parse rules from the user and return a list of FilterRule PBs.
+
+ Args:
+ cnxn: connection to database.
+ post_data: dictionary of html form data.
+ user_service: connection to user backend services.
+ errors: EZTErrors message used to display field validation errors.
+ prefix: optional string prefix used to differentiate the form fields
+ for existing rules from the form fields for new rules.
+
+ Returns:
+ A list of FilterRule PBs
+ """
+ rules = []
+
+ # The best we can do for now is show all validation errors at the bottom of
+ # the filter rules section, not directly on the rule that had the error :(.
+ error_list = []
+
+ for i in xrange(1, MAX_RULES + 1):
+ if ('%spredicate%s' % (prefix, i)) not in post_data:
+ continue # skip any entries that are blank or have no predicate.
+ predicate = post_data['%spredicate%s' % (prefix, i)].strip()
+ action_type = post_data.get('%saction_type%s' % (prefix, i),
+ 'add_labels').strip()
+ action_value = post_data.get('%saction_value%s' % (prefix, i),
+ '').strip()
+ if predicate:
+ # Note: action_value may be '', meaning no-op.
+ rules.append(_ParseOneRule(
+ cnxn, predicate, action_type, action_value, user_service, i,
+ error_list))
+
+ if error_list:
+ errors.rules = error_list
+
+ return rules
+
+
+def _ParseOneRule(
+ cnxn, predicate, action_type, action_value, user_service,
+ rule_num, error_list):
+ """Parse one FilterRule based on the action type."""
+ if action_type == 'default_status':
+ status = framework_bizobj.CanonicalizeLabel(action_value)
+ rule = MakeRule(predicate, default_status=status)
+
+ elif action_type == 'default_owner':
+ if action_value:
+ try:
+ user_id = user_service.LookupUserID(cnxn, action_value)
+ except user_svc.NoSuchUserException:
+ user_id = framework_constants.NO_USER_SPECIFIED
+ error_list.append(
+ 'Rule %d: No such user: %s' % (rule_num, action_value))
+ else:
+ user_id = framework_constants.NO_USER_SPECIFIED
+ rule = MakeRule(predicate, default_owner_id=user_id)
+
+ elif action_type == 'add_ccs':
+ cc_ids = []
+ for email in re.split('[,;\s]+', action_value):
+ if not email.strip():
+ continue
+ try:
+ user_id = user_service.LookupUserID(
+ cnxn, email.strip(), autocreate=True)
+ cc_ids.append(user_id)
+ except user_svc.NoSuchUserException:
+ error_list.append(
+ 'Rule %d: No such user: %s' % (rule_num, email.strip()))
+
+ rule = MakeRule(predicate, add_cc_ids=cc_ids)
+
+ elif action_type == 'add_labels':
+ add_labels = framework_constants.IDENTIFIER_RE.findall(action_value)
+ rule = MakeRule(predicate, add_labels=add_labels)
+
+ elif action_type == 'also_notify':
+ add_notify = []
+ for addr in re.split('[,;\s]+', action_value):
+ if validate.IsValidEmail(addr.strip()):
+ add_notify.append(addr.strip())
+ else:
+ error_list.append(
+ 'Rule %d: Invalid email address: %s' % (rule_num, addr.strip()))
+
+ rule = MakeRule(predicate, add_notify=add_notify)
+
+ else:
+ logging.info('unexpected action type, probably tampering:%r', action_type)
+ raise monorailrequest.InputException()
+
+ return rule
diff --git a/appengine/monorail/features/filterrules_views.py b/appengine/monorail/features/filterrules_views.py
new file mode 100644
index 0000000..ca21b2d
--- /dev/null
+++ b/appengine/monorail/features/filterrules_views.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to display filter rules in templates."""
+
+import logging
+
+from framework import template_helpers
+
+
+class RuleView(template_helpers.PBProxy):
+ """Wrapper class that makes it easier to display a Rule via EZT."""
+
+ def __init__(self, rule_pb, users_by_id):
+ super(RuleView, self).__init__(rule_pb)
+
+ self.action_type = ''
+ self.action_value = ''
+
+ if rule_pb is None:
+ return # Just leave everything as ''
+
+ # self.predicate is automatically available.
+
+ # For the current UI, we assume that each rule has exactly
+ # one action, so we can determine the text value for it here.
+ if rule_pb.default_status:
+ self.action_type = 'default_status'
+ self.action_value = rule_pb.default_status
+ elif rule_pb.default_owner_id:
+ self.action_type = 'default_owner'
+ self.action_value = users_by_id[rule_pb.default_owner_id].email
+ elif rule_pb.add_cc_ids:
+ self.action_type = 'add_ccs'
+ usernames = [users_by_id[cc_id].email for cc_id in rule_pb.add_cc_ids]
+ self.action_value = ', '.join(usernames)
+ elif rule_pb.add_labels:
+ self.action_type = 'add_labels'
+ self.action_value = ', '.join(rule_pb.add_labels)
+ elif rule_pb.add_notify_addrs:
+ self.action_type = 'also_notify'
+ self.action_value = ', '.join(rule_pb.add_notify_addrs)
diff --git a/appengine/monorail/features/inboundemail.py b/appengine/monorail/features/inboundemail.py
new file mode 100644
index 0000000..b25b312
--- /dev/null
+++ b/appengine/monorail/features/inboundemail.py
@@ -0,0 +1,258 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Handler to process inbound email with issue comments and commands."""
+
+import logging
+import os
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import mail
+
+import webapp2
+
+from features import commitlogcommands
+from features import notify
+from framework import emailfmt
+from framework import framework_constants
+from framework import monorailrequest
+from framework import permissions
+from framework import sql
+from framework import template_helpers
+from proto import project_pb2
+from services import issue_svc
+from services import user_svc
+
+
+TEMPLATE_PATH_BASE = framework_constants.TEMPLATE_PATH
+
+MSG_TEMPLATES = {
+ 'banned': 'features/inboundemail-banned.ezt',
+ 'body_too_long': 'features/inboundemail-body-too-long.ezt',
+ 'project_not_found': 'features/inboundemail-project-not-found.ezt',
+ 'not_a_reply': 'features/inboundemail-not-a-reply.ezt',
+ 'no_account': 'features/inboundemail-no-account.ezt',
+ 'no_artifact': 'features/inboundemail-no-artifact.ezt',
+ 'no_perms': 'features/inboundemail-no-perms.ezt',
+ 'replies_disabled': 'features/inboundemail-replies-disabled.ezt',
+ }
+
+
+class InboundEmail(webapp2.RequestHandler):
+ """Servlet to handle inbound email messages."""
+
+ def __init__(self, request, response, services=None, *args, **kwargs):
+ super(InboundEmail, self).__init__(request, response, *args, **kwargs)
+ self.services = services or self.app.config.get('services')
+ self._templates = {}
+ for name, template_path in MSG_TEMPLATES.iteritems():
+ self._templates[name] = template_helpers.MonorailTemplate(
+ TEMPLATE_PATH_BASE + template_path,
+ compress_whitespace=False, base_format=ezt.FORMAT_RAW)
+
+ def get(self, project_addr=None):
+ logging.info('\n\n\nGET for InboundEmail and project_addr is %r',
+ project_addr)
+ self.Handler(mail.InboundEmailMessage(self.request.body),
+ urllib.unquote(project_addr))
+
+ def post(self, project_addr=None):
+ logging.info('\n\n\nPOST for InboundEmail and project_addr is %r',
+ project_addr)
+ self.Handler(mail.InboundEmailMessage(self.request.body),
+ urllib.unquote(project_addr))
+
+ def Handler(self, inbound_email_message, project_addr):
+ """Process an inbound email message."""
+ msg = inbound_email_message.original
+ email_tasks = self.ProcessMail(msg, project_addr)
+
+ if email_tasks:
+ notify.AddAllEmailTasks(email_tasks)
+
+ def ProcessMail(self, msg, project_addr):
+ """Process an inbound email message."""
+ # TODO(jrobbins): If the message is HUGE, don't even try to parse
+ # it. Silently give up.
+
+ (from_addr, to_addrs, cc_addrs, references, subject,
+ body) = emailfmt.ParseEmailMessage(msg)
+
+ logging.info('Proj addr: %r', project_addr)
+ logging.info('From addr: %r', from_addr)
+ logging.info('Subject: %r', subject)
+ logging.info('To: %r', to_addrs)
+ logging.info('Cc: %r', cc_addrs)
+ logging.info('References: %r', references)
+ logging.info('Body: %r', body)
+
+ # If message body is very large, reject it and send an error email.
+ if emailfmt.IsBodyTooBigToParse(body):
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['body_too_long'])
+
+ # Make sure that the project reply-to address is in the To: line.
+ if not emailfmt.IsProjectAddressOnToLine(project_addr, to_addrs):
+ return None
+
+ # Identify the project and artifact to update.
+ project_name, local_id = emailfmt.IdentifyProjectAndIssue(
+ project_addr, subject)
+ if not project_addr or not local_id:
+ logging.info('Could not identify issue: %s %s', project_addr, subject)
+ # No error message, because message was probably not intended for us.
+ return None
+
+ cnxn = sql.MonorailConnection()
+ if self.services.cache_manager:
+ self.services.cache_manager.DoDistributedInvalidation(cnxn)
+
+ project = self.services.project.GetProjectByName(cnxn, project_name)
+
+ if not project or project.state != project_pb2.ProjectState.LIVE:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['project_not_found'])
+
+ if not project.process_inbound_email:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['replies_disabled'],
+ project_name=project_name)
+
+ # Verify that this is a reply to a notification that we could have sent.
+ if not os.environ['SERVER_SOFTWARE'].startswith('Development'):
+ for ref in references:
+ if emailfmt.ValidateReferencesHeader(ref, project, from_addr, subject):
+ break # Found a message ID that we could have sent.
+ else:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['not_a_reply'])
+
+ # Authenticate the from-addr and perm check.
+ # Note: If the issue summary line is changed, a new thread is created,
+ # and replies to the old thread will no longer work because the subject
+ # line hash will not match, which seems reasonable.
+ try:
+ auth = monorailrequest.AuthData.FromEmail(cnxn, from_addr, self.services)
+ from_user_id = auth.user_id
+ except user_svc.NoSuchUserException:
+ from_user_id = None
+ if not from_user_id:
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_account'])
+
+ if auth.user_pb.banned:
+ logging.info('Banned user %s tried to post to %s',
+ from_addr, project_addr)
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['banned'])
+
+ perms = permissions.GetPermissions(
+ auth.user_pb, auth.effective_ids, project)
+
+ self.ProcessIssueReply(
+ cnxn, project, local_id, project_addr, from_addr, from_user_id,
+ auth.effective_ids, perms, body)
+
+ return None
+
+ def ProcessIssueReply(
+ self, cnxn, project, local_id, project_addr, from_addr, from_user_id,
+ effective_ids, perms, body):
+ """Examine an issue reply email body and add a comment to the issue.
+
+ Args:
+ cnxn: connection to SQL database.
+ project: Project PB for the project containing the issue.
+ local_id: int ID of the issue being replied to.
+ project_addr: string email address used for outbound emails from
+ that project.
+ from_addr: string email address of the user who sent the email
+ reply to our server.
+ from_user_id: int user ID of user who sent the reply email.
+ effective_ids: set of int user IDs for the user (including any groups),
+ or an empty set if user is not signed in.
+ perms: PermissionSet for the user who sent the reply email.
+ body: string email body text of the reply email.
+
+ Returns:
+ A list of follow-up work items, e.g., to notify other users of
+ the new comment, or to notify the user that their reply was not
+ processed.
+
+ Side-effect:
+ Adds a new comment to the issue, if no error is reported.
+ """
+ try:
+ issue = self.services.issue.GetIssueByLocalID(
+ cnxn, project.project_id, local_id)
+ except issue_svc.NoSuchIssueException:
+ issue = None
+
+ if not issue or issue.deleted:
+ # The referenced issue was not found, e.g., it might have been
+ # deleted, or someone messed with the subject line. Reject it.
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_artifact'],
+ artifact_phrase='issue %d' % local_id,
+ project_name=project.project_name)
+
+ if not perms.CanUsePerm(
+ permissions.ADD_ISSUE_COMMENT, effective_ids, project,
+ permissions.GetRestrictions(issue)):
+ return _MakeErrorMessageReplyTask(
+ project_addr, from_addr, self._templates['no_perms'],
+ artifact_phrase='issue %d' % local_id,
+ project_name=project.project_name)
+ allow_edit = permissions.CanEditIssue(
+ effective_ids, perms, project, issue)
+ # TODO(jrobbins): if the user does not have EDIT_ISSUE and the inbound
+ # email tries to make an edit, send back an error message.
+
+ lines = body.strip().split('\n')
+ uia = commitlogcommands.UpdateIssueAction(local_id)
+ uia.Parse(cnxn, project.project_name, from_user_id, lines, self.services,
+ strip_quoted_lines=True)
+ uia.Run(cnxn, self.services, allow_edit=allow_edit)
+
+
+def _MakeErrorMessageReplyTask(
+ project_addr, sender_addr, template, **callers_page_data):
+ """Return a new task to send an error message email.
+
+ Args:
+ project_addr: string email address that the inbound email was delivered to.
+ sender_addr: string email address of user who sent the email that we could
+ not process.
+ template: EZT template used to generate the email error message. The
+ first line of this generated text will be used as the subject line.
+ callers_page_data: template data dict for body of the message.
+
+ Returns:
+ A list with a single Email task that can be enqueued to
+ actually send the email.
+
+ Raises:
+    ValueError: if the template does not begin with a "Subject:" line.
+ """
+ email_data = {
+ 'project_addr': project_addr,
+ 'sender_addr': sender_addr
+ }
+ email_data.update(callers_page_data)
+
+ generated_lines = template.GetResponse(email_data)
+ subject, body = generated_lines.split('\n', 1)
+ if subject.startswith('Subject: '):
+ subject = subject[len('Subject: '):]
+ else:
+ raise ValueError('Email template does not begin with "Subject:" line.')
+
+ email_task = dict(to=sender_addr, subject=subject, body=body,
+ from_addr=emailfmt.NoReplyAddress())
+ logging.info('sending email error reply: %r', email_task)
+
+ return [email_task]
diff --git a/appengine/monorail/features/notify.py b/appengine/monorail/features/notify.py
new file mode 100644
index 0000000..b6382b6
--- /dev/null
+++ b/appengine/monorail/features/notify.py
@@ -0,0 +1,928 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Task handlers for email notifications of issue changes.
+
+Email notifications are sent when an issue changes, an issue that is blocking
+another issue changes, or a bulk edit is done. The users notified include
+the project-wide mailing list, issue owners, cc'd users, starrers,
+also-notify addresses, and users who have saved queries with email notification
+set.
+"""
+
+import collections
+import logging
+
+from third_party import ezt
+
+from google.appengine.api import mail
+from google.appengine.api import taskqueue
+
+import settings
+from features import autolink
+from features import notify_helpers
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import monorailrequest
+from framework import permissions
+from framework import template_helpers
+from framework import urls
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+TEMPLATE_PATH = framework_constants.TEMPLATE_PATH
+
+
+def PrepareAndSendIssueChangeNotification(
+ project_id, local_id, hostport, commenter_id, seq_num, send_email=True,
+ old_owner_id=framework_constants.NO_USER_SPECIFIED):
+ """Create a task to notify users that an issue has changed.
+
+ Args:
+ project_id: int ID of the project containing the changed issue.
+ local_id: Issue number for the issue that was updated and saved.
+ hostport: string domain name and port number from the HTTP request.
+ commenter_id: int user ID of the user who made the comment.
+ seq_num: int index into the comments of the new comment.
+ send_email: True if email notifications should be sent.
+ old_owner_id: optional user ID of owner before the current change took
+ effect. He/she will also be notified.
+
+ Returns nothing.
+ """
+ params = dict(
+ project_id=project_id, id=local_id, commenter_id=commenter_id,
+ seq=seq_num, hostport=hostport,
+ old_owner_id=old_owner_id, send_email=int(send_email))
+ logging.info('adding notify task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_ISSUE_CHANGE_TASK + '.do', params=params)
+
+
+def PrepareAndSendIssueBlockingNotification(
+ project_id, hostport, local_id, delta_blocker_iids,
+ commenter_id, send_email=True):
+ """Create a task to follow up on an issue blocked_on change."""
+ if not delta_blocker_iids:
+ return # No notification is needed
+
+ params = dict(
+ project_id=project_id, id=local_id, commenter_id=commenter_id,
+ hostport=hostport, send_email=int(send_email),
+ delta_blocker_iids=','.join(str(iid) for iid in delta_blocker_iids))
+
+ logging.info('adding blocking task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do', params=params)
+
+
+def SendIssueBulkChangeNotification(
+ hostport, project_id, local_ids, old_owner_ids,
+ comment_text, commenter_id, amendments, send_email, users_by_id):
+ """Create a task to follow up on an issue blocked_on change."""
+ amendment_lines = []
+ for up in amendments:
+ line = ' %s: %s' % (
+ tracker_bizobj.GetAmendmentFieldName(up),
+ tracker_bizobj.AmendmentString(up, users_by_id))
+ if line not in amendment_lines:
+ amendment_lines.append(line)
+
+ params = dict(
+ project_id=project_id, commenter_id=commenter_id,
+ hostport=hostport, send_email=int(send_email),
+ ids=','.join(str(lid) for lid in local_ids),
+ old_owner_ids=','.join(str(uid) for uid in old_owner_ids),
+ comment_text=comment_text, amendments='\n'.join(amendment_lines))
+
+ logging.info('adding bulk task with params %r', params)
+ taskqueue.add(url=urls.NOTIFY_BULK_CHANGE_TASK + '.do', params=params)
+
+
+def _EnqueueOutboundEmail(message_dict):
+ """Create a task to send one email message, all fields are in the dict.
+
+ We use a separate task for each outbound email to isolate errors.
+
+ Args:
+ message_dict: dict with all needed info for the task.
+ """
+ logging.info('Queuing an email task with params %r', message_dict)
+ taskqueue.add(
+ url=urls.OUTBOUND_EMAIL_TASK + '.do', params=message_dict,
+ queue_name='outboundemail')
+
+
+def AddAllEmailTasks(tasks):
+ """Add one GAE task for each email to be sent."""
+ notified = []
+ for task in tasks:
+ _EnqueueOutboundEmail(task)
+ notified.append(task['to'])
+
+ return notified
+
+
+class NotifyTaskBase(jsonfeed.InternalTask):
+ """Abstract base class for notification task handler."""
+
+ _EMAIL_TEMPLATE = None # Subclasses must override this.
+
+ CHECK_SECURITY_TOKEN = False
+
+ def __init__(self, *args, **kwargs):
+ super(NotifyTaskBase, self).__init__(*args, **kwargs)
+
+ if not self._EMAIL_TEMPLATE:
+ raise Exception('Subclasses must override _EMAIL_TEMPLATE.'
+ ' This class must not be called directly.')
+ # We use FORMAT_RAW for emails because they are plain text, not HTML.
+ # TODO(jrobbins): consider sending HTML formatted emails someday.
+ self.email_template = template_helpers.MonorailTemplate(
+ TEMPLATE_PATH + self._EMAIL_TEMPLATE,
+ compress_whitespace=False, base_format=ezt.FORMAT_RAW)
+
+
+class NotifyIssueChangeTask(NotifyTaskBase):
+ """JSON servlet that notifies appropriate users after an issue change."""
+
+ _EMAIL_TEMPLATE = 'tracker/issue-change-notification-email.ezt'
+
+ def HandleRequest(self, mr):
+ """Process the task to notify users after an issue change.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format which is useful just for debugging.
+ The main goal is the side-effect of sending emails.
+ """
+ project_id = mr.specified_project_id
+ if project_id is None:
+ return {
+ 'params': {},
+ 'notified': [],
+ 'message': 'Cannot proceed without a valid project ID.',
+ }
+ commenter_id = mr.GetPositiveIntParam('commenter_id')
+ seq_num = mr.seq
+ omit_ids = [commenter_id]
+ hostport = mr.GetParam('hostport')
+ old_owner_id = mr.GetPositiveIntParam('old_owner_id')
+ send_email = bool(mr.GetIntParam('send_email'))
+ params = dict(
+ project_id=project_id, local_id=mr.local_id, commenter_id=commenter_id,
+ seq_num=seq_num, hostport=hostport, old_owner_id=old_owner_id,
+ omit_ids=omit_ids, send_email=send_email)
+
+ logging.info('issue change params are %r', params)
+ project = self.services.project.GetProject(mr.cnxn, project_id)
+ config = self.services.config.GetProjectConfig(mr.cnxn, project_id)
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, project_id, mr.local_id)
+
+ if issue.is_spam:
+ # Don't send email for spam issues.
+ return {
+ 'params': params,
+ 'notified': [],
+ }
+
+ all_comments = self.services.issue.GetCommentsForIssue(
+ mr.cnxn, issue.issue_id)
+ comment = all_comments[seq_num]
+
+    # Only issues that any contributor could view are sent to mailing lists.
+ contributor_could_view = permissions.CanViewIssue(
+ set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ project, issue)
+ starrer_ids = self.services.issue_star.LookupItemStarrers(
+ mr.cnxn, issue.issue_id)
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([issue]), [old_owner_id],
+ tracker_bizobj.UsersInvolvedInComment(comment),
+ issue.cc_ids, issue.derived_cc_ids, starrer_ids, omit_ids)
+
+ # Make followup tasks to send emails
+ tasks = []
+ if send_email:
+ tasks = self._MakeEmailTasks(
+ mr.cnxn, project, issue, config, old_owner_id, users_by_id,
+ all_comments, comment, starrer_ids, contributor_could_view,
+ hostport, omit_ids)
+
+ notified = AddAllEmailTasks(tasks)
+
+ return {
+ 'params': params,
+ 'notified': notified,
+ }
+
+ def _MakeEmailTasks(
+ self, cnxn, project, issue, config, old_owner_id,
+ users_by_id, all_comments, comment, starrer_ids,
+ contributor_could_view, hostport, omit_ids):
+ """Formulate emails to be sent."""
+ detail_url = framework_helpers.IssueCommentURL(
+ hostport, project, issue.local_id, seq_num=comment.sequence)
+
+ # TODO(jrobbins): avoid the need to make a MonorailRequest object.
+ mr = monorailrequest.MonorailRequest()
+ mr.project_name = project.project_name
+ mr.project = project
+
+ # We do not autolink in the emails, so just use an empty
+ # registry of autolink rules.
+ # TODO(jrobbins): offer users an HTML email option w/ autolinks.
+ autolinker = autolink.Autolink()
+
+ email_data = {
+ # Pass open_related and closed_related into this method and to
+ # the issue view so that we can show it on new issue email.
+ 'issue': tracker_views.IssueView(issue, users_by_id, config),
+ 'summary': issue.summary,
+ 'comment': tracker_views.IssueCommentView(
+ project.project_name, comment, users_by_id,
+ autolinker, {}, mr, issue),
+ 'comment_text': comment.content,
+ 'detail_url': detail_url,
+ }
+
+ # Generate two versions of email body: members version has all
+ # full email addresses exposed.
+ body_for_non_members = self.email_template.GetResponse(email_data)
+ framework_views.RevealAllEmails(users_by_id)
+ email_data['comment'] = tracker_views.IssueCommentView(
+ project.project_name, comment, users_by_id,
+ autolinker, {}, mr, issue)
+ body_for_members = self.email_template.GetResponse(email_data)
+
+ subject = 'Issue %d in %s: %s' % (
+ issue.local_id, project.project_name, issue.summary)
+
+ commenter_email = users_by_id[comment.user_id].email
+ omit_addrs = set([commenter_email] +
+ [users_by_id[omit_id].email for omit_id in omit_ids])
+
+ auth = monorailrequest.AuthData.FromUserID(
+ cnxn, comment.user_id, self.services)
+ commenter_in_project = framework_bizobj.UserIsInProject(
+ project, auth.effective_ids)
+ noisy = tracker_helpers.IsNoisy(len(all_comments) - 1, len(starrer_ids))
+
+ # Get the transitive set of owners and Cc'd users, and their proxies.
+ reporter = [issue.reporter_id] if issue.reporter_id in starrer_ids else []
+ old_direct_owners, old_transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(cnxn, [old_owner_id]))
+ direct_owners, transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(cnxn, [issue.owner_id]))
+ der_direct_owners, der_transitive_owners = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, [issue.derived_owner_id]))
+ direct_comp, trans_comp = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, component_helpers.GetComponentCcIDs(issue, config))
+ direct_ccs, transitive_ccs = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(issue.cc_ids))
+ # TODO(jrobbins): This will say that the user was cc'd by a rule when it
+ # was really added to the derived_cc_ids by a component.
+ der_direct_ccs, der_transitive_ccs = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(issue.derived_cc_ids)))
+ users_by_id.update(framework_views.MakeAllUserViews(
+ cnxn, self.services.user, transitive_owners, der_transitive_owners,
+ direct_comp, trans_comp, transitive_ccs, der_transitive_ccs))
+
+ # Notify interested people according to the reason for their interest:
+ # owners, component auto-cc'd users, cc'd users, starrers, and
+ # other notification addresses.
+ reporter_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, reporter, project, issue, self.services, omit_addrs, users_by_id)
+ owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_owners + transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ old_owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, old_direct_owners + old_transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ owner_addr_perm_set = set(owner_addr_perm_list)
+ old_owner_addr_perm_list = [ap for ap in old_owner_addr_perm_list
+ if ap not in owner_addr_perm_set]
+ der_owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_owners + der_transitive_owners, project, issue,
+ self.services, omit_addrs, users_by_id)
+ cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_ccs + transitive_ccs, project, issue,
+ self.services, omit_addrs, users_by_id)
+ der_cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_ccs + der_transitive_ccs, project, issue,
+ self.services, omit_addrs, users_by_id)
+
+ starrer_addr_perm_list = []
+ sub_addr_perm_list = []
+ if not noisy or commenter_in_project:
+ # Avoid an OOM by only notifying a number of starrers that we can handle.
+ # And, we really should limit the number of emails that we send anyway.
+ max_starrers = settings.max_starrers_to_notify
+ starrer_ids = starrer_ids[-max_starrers:]
+ # Note: starrers can never be user groups.
+ starrer_addr_perm_list = (
+ notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, starrer_ids, project, issue,
+ self.services, omit_addrs, users_by_id,
+ pref_check_function=lambda u: u.notify_starred_issue_change))
+
+ sub_addr_perm_list = _GetSubscribersAddrPermList(
+ cnxn, self.services, issue, project, config, omit_addrs,
+ users_by_id)
+
+ # Get the list of addresses to notify based on filter rules.
+ issue_notify_addr_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, omit_addrs)
+ # Get the list of addresses to notify based on project settings.
+ proj_notify_addr_list = notify_helpers.ComputeProjectNotificationAddrList(
+ project, contributor_could_view, omit_addrs)
+
+ # Give each user a bullet-list of all the reasons that apply for that user.
+ group_reason_list = [
+ (reporter_addr_perm_list, 'You reported this issue'),
+ (owner_addr_perm_list, 'You are the owner of the issue'),
+ (old_owner_addr_perm_list,
+ 'You were the issue owner before this change'),
+ (der_owner_addr_perm_list, 'A rule made you owner of the issue'),
+ (cc_addr_perm_list, 'You were specifically CC\'d on the issue'),
+ (der_cc_addr_perm_list, 'A rule CC\'d you on the issue'),
+ ]
+ group_reason_list.extend(notify_helpers.ComputeComponentFieldAddrPerms(
+ cnxn, config, issue, project, self.services, omit_addrs,
+ users_by_id))
+ group_reason_list.extend(notify_helpers.ComputeCustomFieldAddrPerms(
+ cnxn, config, issue, project, self.services, omit_addrs,
+ users_by_id))
+ group_reason_list.extend([
+ (starrer_addr_perm_list, 'You starred the issue'),
+ (sub_addr_perm_list, 'Your saved query matched the issue'),
+ (issue_notify_addr_list,
+ 'A rule was set up to notify you'),
+ (proj_notify_addr_list,
+ 'The project was configured to send all issue notifications '
+ 'to this address'),
+ ])
+ commenter_view = users_by_id[comment.user_id]
+ detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, issue.project_name, urls.ISSUE_DETAIL,
+ id=issue.local_id)
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ group_reason_list, subject, body_for_non_members, body_for_members,
+ project, hostport, commenter_view, seq_num=comment.sequence,
+ detail_url=detail_url)
+
+ return email_tasks
+
+
+class NotifyBlockingChangeTask(NotifyTaskBase):
+ """JSON servlet that notifies appropriate users after a blocking change."""
+
+ _EMAIL_TEMPLATE = 'tracker/issue-blocking-change-notification-email.ezt'
+
+ def HandleRequest(self, mr):
+ """Process the task to notify users after an issue blocking change.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format which is useful just for debugging.
+ The main goal is the side-effect of sending emails.
+ """
+ project_id = mr.specified_project_id
+ if project_id is None:
+ return {
+ 'params': {},
+ 'notified': [],
+ 'message': 'Cannot proceed without a valid project ID.',
+ }
+ commenter_id = mr.GetPositiveIntParam('commenter_id')
+ omit_ids = [commenter_id]
+ hostport = mr.GetParam('hostport')
+ delta_blocker_iids = mr.GetIntListParam('delta_blocker_iids')
+ send_email = bool(mr.GetIntParam('send_email'))
+ params = dict(
+ project_id=project_id, local_id=mr.local_id, commenter_id=commenter_id,
+ hostport=hostport, delta_blocker_iids=delta_blocker_iids,
+ omit_ids=omit_ids, send_email=send_email)
+
+ logging.info('blocking change params are %r', params)
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, project_id, mr.local_id)
+ if issue.is_spam:
+ return {
+ 'params': params,
+ 'notified': [],
+ }
+
+ upstream_issues = self.services.issue.GetIssues(
+ mr.cnxn, delta_blocker_iids)
+ logging.info('updating ids %r', [up.local_id for up in upstream_issues])
+ upstream_projects = tracker_helpers.GetAllIssueProjects(
+ mr.cnxn, upstream_issues, self.services.project)
+ upstream_configs = self.services.config.GetProjectConfigs(
+ mr.cnxn, upstream_projects.keys())
+
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user, [commenter_id])
+ commenter_view = users_by_id[commenter_id]
+
+ tasks = []
+ if send_email:
+ for upstream_issue in upstream_issues:
+ one_issue_email_tasks = self._ProcessUpstreamIssue(
+ mr.cnxn, upstream_issue,
+ upstream_projects[upstream_issue.project_id],
+ upstream_configs[upstream_issue.project_id],
+ issue, omit_ids, hostport, commenter_view)
+ tasks.extend(one_issue_email_tasks)
+
+ notified = AddAllEmailTasks(tasks)
+
+ return {
+ 'params': params,
+ 'notified': notified,
+ }
+
+ def _ProcessUpstreamIssue(
+ self, cnxn, upstream_issue, upstream_project, upstream_config,
+ issue, omit_ids, hostport, commenter_view):
+ """Compute notifications for one upstream issue that is now blocking."""
+ upstream_detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, upstream_issue.project_name, urls.ISSUE_DETAIL,
+ id=upstream_issue.local_id)
+ logging.info('upstream_detail_url = %r', upstream_detail_url)
+ detail_url = framework_helpers.FormatAbsoluteURLForDomain(
+ hostport, issue.project_name, urls.ISSUE_DETAIL,
+ id=issue.local_id)
+
+ # Only issues that any contributor could view are sent to mailing lists.
+ contributor_could_view = permissions.CanViewIssue(
+ set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ upstream_project, upstream_issue)
+
+ # Now construct the e-mail to send
+
+ # Note: we purposely do not notify users who starred an issue
+ # about changes in blocking.
+ users_by_id = framework_views.MakeAllUserViews(
+ cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([upstream_issue]), omit_ids)
+
+ is_blocking = upstream_issue.issue_id in issue.blocked_on_iids
+
+ email_data = {
+ 'issue': tracker_views.IssueView(
+ upstream_issue, users_by_id, upstream_config),
+ 'summary': upstream_issue.summary,
+ 'detail_url': upstream_detail_url,
+ 'is_blocking': ezt.boolean(is_blocking),
+ 'downstream_issue_ref': tracker_bizobj.FormatIssueRef(
+ (None, issue.local_id)),
+ 'downstream_issue_url': detail_url,
+ }
+
+ # TODO(jrobbins): Generate two versions of email body: members
+    # version has other member full email addresses exposed.  But, don't
+ # expose too many as we iterate through upstream projects.
+ body = self.email_template.GetResponse(email_data)
+
+ # Just use "Re:", not Message-Id and References because a blocking
+ # notification is not a comment on the issue.
+ subject = 'Re: Issue %d in %s: %s' % (
+ upstream_issue.local_id, upstream_issue.project_name,
+ upstream_issue.summary)
+
+ omit_addrs = {users_by_id[omit_id].email for omit_id in omit_ids}
+
+ # Get the transitive set of owners and Cc'd users, and their UserView's.
+ direct_owners, trans_owners = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, [tracker_bizobj.GetOwnerId(upstream_issue)])
+ direct_ccs, trans_ccs = self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(upstream_issue.cc_ids))
+ # TODO(jrobbins): This will say that the user was cc'd by a rule when it
+ # was really added to the derived_cc_ids by a component.
+ der_direct_ccs, der_transitive_ccs = (
+ self.services.usergroup.ExpandAnyUserGroups(
+ cnxn, list(upstream_issue.derived_cc_ids)))
+ # direct owners and Ccs are already in users_by_id
+ users_by_id.update(framework_views.MakeAllUserViews(
+ cnxn, self.services.user, trans_owners, trans_ccs, der_transitive_ccs))
+
+ owner_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_owners + trans_owners, upstream_project, upstream_issue,
+ self.services, omit_addrs, users_by_id)
+ cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, direct_ccs + trans_ccs, upstream_project, upstream_issue,
+ self.services, omit_addrs, users_by_id)
+ der_cc_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, der_direct_ccs + der_transitive_ccs, upstream_project,
+ upstream_issue, self.services, omit_addrs, users_by_id)
+ sub_addr_perm_list = _GetSubscribersAddrPermList(
+ cnxn, self.services, upstream_issue, upstream_project, upstream_config,
+ omit_addrs, users_by_id)
+
+ issue_notify_addr_list = notify_helpers.ComputeIssueNotificationAddrList(
+ upstream_issue, omit_addrs)
+ proj_notify_addr_list = notify_helpers.ComputeProjectNotificationAddrList(
+ upstream_project, contributor_could_view, omit_addrs)
+
+ # Give each user a bullet-list of all the reasons that apply for that user.
+ group_reason_list = [
+ (owner_addr_perm_list, 'You are the owner of the issue'),
+ (cc_addr_perm_list, 'You were specifically CC\'d on the issue'),
+ (der_cc_addr_perm_list, 'A rule CC\'d you on the issue'),
+ ]
+ group_reason_list.extend(notify_helpers.ComputeComponentFieldAddrPerms(
+ cnxn, upstream_config, upstream_issue, upstream_project, self.services,
+ omit_addrs, users_by_id))
+ group_reason_list.extend(notify_helpers.ComputeCustomFieldAddrPerms(
+ cnxn, upstream_config, upstream_issue, upstream_project, self.services,
+ omit_addrs, users_by_id))
+ group_reason_list.extend([
+ # Starrers are not notified of blocking changes to reduce noise.
+ (sub_addr_perm_list, 'Your saved query matched the issue'),
+ (issue_notify_addr_list,
+ 'Project filter rules were setup to notify you'),
+ (proj_notify_addr_list,
+ 'The project was configured to send all issue notifications '
+ 'to this address'),
+ ])
+
+ one_issue_email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ group_reason_list, subject, body, body, upstream_project, hostport,
+ commenter_view, detail_url=detail_url)
+
+ return one_issue_email_tasks
+
+
+class NotifyBulkChangeTask(NotifyTaskBase):
+  """JSON servlet that notifies appropriate users after a bulk edit."""
+
+  _EMAIL_TEMPLATE = 'tracker/issue-bulk-change-notification-email.ezt'
+
+  def HandleRequest(self, mr):
+    """Process the task to notify users after a bulk issue edit.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format which is useful just for debugging.
+      The main goal is the side-effect of sending emails.
+    """
+    hostport = mr.GetParam('hostport')
+    project_id = mr.specified_project_id
+    if project_id is None:
+      return {
+        'params': {},
+        'notified': [],
+        'message': 'Cannot proceed without a valid project ID.',
+      }
+
+    local_ids = mr.local_id_list
+    old_owner_ids = mr.GetIntListParam('old_owner_ids')
+    comment_text = mr.GetParam('comment_text')
+    commenter_id = mr.GetPositiveIntParam('commenter_id')
+    amendments = mr.GetParam('amendments')
+    send_email = bool(mr.GetIntParam('send_email'))
+    params = dict(
+        project_id=project_id, local_ids=mr.local_id_list,
+        commenter_id=commenter_id, hostport=hostport,
+        old_owner_ids=old_owner_ids, comment_text=comment_text,
+        send_email=send_email, amendments=amendments)
+
+    logging.info('bulk edit params are %r', params)
+    # TODO(jrobbins): For cross-project bulk edits, prefetch all relevant
+    # projects and configs and pass a dict of them to subroutines.
+    project = self.services.project.GetProject(mr.cnxn, project_id)
+    config = self.services.config.GetProjectConfig(mr.cnxn, project_id)
+    issues = self.services.issue.GetIssuesByLocalIDs(
+        mr.cnxn, project_id, local_ids)
+    issues = [issue for issue in issues if not issue.is_spam]  # NOTE(review): dropping spam here may misalign zip(issues, old_owner_ids) below -- confirm.
+    anon_perms = permissions.GetPermissions(None, set(), project)
+
+    users_by_id = framework_views.MakeAllUserViews(
+        mr.cnxn, self.services.user, [commenter_id])
+    ids_in_issues = {}
+    starrers = {}
+
+    non_private_issues = []
+    for issue, old_owner_id in zip(issues, old_owner_ids):
+      # TODO(jrobbins): use issue_id consistently rather than local_id.
+      starrers[issue.local_id] = self.services.issue_star.LookupItemStarrers(
+          mr.cnxn, issue.issue_id)
+      named_ids = set() # users named in user-value fields that notify.
+      for fd in config.field_defs:
+        named_ids.update(notify_helpers.ComputeNamedUserIDsToNotify(issue, fd))
+      direct, indirect = self.services.usergroup.ExpandAnyUserGroups(
+          mr.cnxn, list(issue.cc_ids) + list(issue.derived_cc_ids) +
+          [issue.owner_id, old_owner_id, issue.derived_owner_id] +
+          list(named_ids))
+      ids_in_issues[issue.local_id] = set(starrers[issue.local_id])
+      ids_in_issues[issue.local_id].update(direct)
+      ids_in_issues[issue.local_id].update(indirect)
+      ids_in_issue_needing_views = (
+          ids_in_issues[issue.local_id] |
+          tracker_bizobj.UsersInvolvedInIssues([issue]))
+      new_ids_in_issue = [user_id for user_id in ids_in_issue_needing_views
+                          if user_id not in users_by_id]
+      users_by_id.update(
+          framework_views.MakeAllUserViews(
+              mr.cnxn, self.services.user, new_ids_in_issue))
+
+      anon_can_view = permissions.CanViewIssue(
+          set(), anon_perms, project, issue)
+      if anon_can_view:
+        non_private_issues.append(issue)
+
+    commenter_view = users_by_id[commenter_id]
+    omit_addrs = {commenter_view.email}  # Never notify the user who made the edit.
+
+    tasks = []
+    if send_email:
+      email_tasks = self._BulkEditEmailTasks(
+          mr.cnxn, issues, old_owner_ids, omit_addrs, project,
+          non_private_issues, users_by_id, ids_in_issues, starrers,
+          commenter_view, hostport, comment_text, amendments, config)
+      tasks = email_tasks
+
+    notified = AddAllEmailTasks(tasks)
+    return {
+        'params': params,
+        'notified': notified,
+    }
+
+  def _BulkEditEmailTasks(
+      self, cnxn, issues, old_owner_ids, omit_addrs, project,
+      non_private_issues, users_by_id, ids_in_issues, starrers,
+      commenter_view, hostport, comment_text, amendments, config):
+    """Generate Email PBs to notify interested users after a bulk edit."""
+    # 1. Get the user IDs of everyone who could be notified,
+    # and make all their user proxies. Also, build a dictionary
+    # of all the users to notify and the issues that they are
+    # interested in. Also, build a dictionary of additional email
+    # addresses to notify and the issues to notify them of.
+    users_by_id = {}  # NOTE(review): rebinding discards the users_by_id argument -- confirm intended.
+    ids_to_notify_of_issue = {}
+    additional_addrs_to_notify_of_issue = collections.defaultdict(list)
+
+    users_to_queries = notify_helpers.GetNonOmittedSubscriptions(
+        cnxn, self.services, [project.project_id], {})
+    config = self.services.config.GetProjectConfig(
+        cnxn, project.project_id)
+    for issue, old_owner_id in zip(issues, old_owner_ids):
+      issue_participants = set(
+          [tracker_bizobj.GetOwnerId(issue), old_owner_id] +
+          tracker_bizobj.GetCcIds(issue))
+      # users named in user-value fields that notify.
+      for fd in config.field_defs:
+        issue_participants.update(
+            notify_helpers.ComputeNamedUserIDsToNotify(issue, fd))
+      for user_id in ids_in_issues[issue.local_id]:
+        # TODO(jrobbins): implement batch GetUser() for speed.
+        if not user_id:
+          continue
+        auth = monorailrequest.AuthData.FromUserID(
+            cnxn, user_id, self.services)
+        if (auth.user_pb.notify_issue_change and
+            not auth.effective_ids.isdisjoint(issue_participants)):
+          ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
+        elif (auth.user_pb.notify_starred_issue_change and
+              user_id in starrers[issue.local_id]):
+          # Skip users who have starred issues that they can no longer view.
+          starrer_perms = permissions.GetPermissions(
+              auth.user_pb, auth.effective_ids, project)
+          granted_perms = tracker_bizobj.GetGrantedPerms(
+              issue, auth.effective_ids, config)
+          starrer_can_view = permissions.CanViewIssue(
+              auth.effective_ids, starrer_perms, project, issue,
+              granted_perms=granted_perms)
+          if starrer_can_view:
+            ids_to_notify_of_issue.setdefault(user_id, []).append(issue)
+        logging.info(
+            'ids_to_notify_of_issue[%s] = %s',
+            user_id,
+            [i.local_id for i in ids_to_notify_of_issue.get(user_id, [])])
+
+      # Find all subscribers that should be notified.
+      subscribers_to_consider = notify_helpers.EvaluateSubscriptions(
+          cnxn, issue, users_to_queries, self.services, config)
+      for sub_id in subscribers_to_consider:
+        auth = monorailrequest.AuthData.FromUserID(cnxn, sub_id, self.services)
+        sub_perms = permissions.GetPermissions(
+            auth.user_pb, auth.effective_ids, project)
+        granted_perms = tracker_bizobj.GetGrantedPerms(
+            issue, auth.effective_ids, config)
+        sub_can_view = permissions.CanViewIssue(
+            auth.effective_ids, sub_perms, project, issue,
+            granted_perms=granted_perms)
+        if sub_can_view:
+          ids_to_notify_of_issue.setdefault(sub_id, []).append(issue)
+
+      if issue in non_private_issues:
+        for notify_addr in issue.derived_notify_addrs:
+          additional_addrs_to_notify_of_issue[notify_addr].append(issue)
+
+    # 2. Compose an email specifically for each user.
+    email_tasks = []
+    needed_user_view_ids = [uid for uid in ids_to_notify_of_issue
+                            if uid not in users_by_id]
+    users_by_id.update(framework_views.MakeAllUserViews(
+        cnxn, self.services.user, needed_user_view_ids))
+    for user_id in ids_to_notify_of_issue:
+      if not user_id:
+        continue  # Don't try to notify NO_USER_SPECIFIED
+      if users_by_id[user_id].email in omit_addrs:
+        logging.info('Omitting %s', user_id)
+        continue
+      user_issues = ids_to_notify_of_issue[user_id]
+      if not user_issues:
+        continue  # user's prefs indicate they don't want these notifications
+      email = self._FormatBulkIssuesEmail(
+          users_by_id[user_id].email, user_issues, users_by_id,
+          commenter_view, hostport, comment_text, amendments, config, project)
+      email_tasks.append(email)
+      omit_addrs.add(users_by_id[user_id].email)
+      logging.info('about to bulk notify %s (%s) of %s',
+                   users_by_id[user_id].email, user_id,
+                   [issue.local_id for issue in user_issues])
+
+    # 3. Compose one email to each notify_addr with all the issues that it
+    # is supposed to be notified about.
+    for addr, addr_issues in additional_addrs_to_notify_of_issue.iteritems():
+      email = self._FormatBulkIssuesEmail(
+          addr, addr_issues, users_by_id, commenter_view, hostport,
+          comment_text, amendments, config, project)
+      email_tasks.append(email)
+      omit_addrs.add(addr)
+      logging.info('about to bulk notify additional addr %s of %s',
+                   addr, [addr_issue.local_id for addr_issue in addr_issues])
+
+    # 4. Add in the project's issue_notify_address. This happens even if it
+    # is the same as the commenter's email address (which would be an unusual
+    # but valid project configuration). Only issues that any contributor could
+    # view are included in emails to the all-issue-activity mailing lists.
+    if (project.issue_notify_address
+        and project.issue_notify_address not in omit_addrs):
+      non_private_issues_live = []
+      for issue in issues:
+        contributor_could_view = permissions.CanViewIssue(
+            set(), permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+            project, issue)
+        if contributor_could_view:
+          non_private_issues_live.append(issue)
+
+      if non_private_issues_live:
+        email = self._FormatBulkIssuesEmail(
+            project.issue_notify_address, non_private_issues_live,
+            users_by_id, commenter_view, hostport, comment_text, amendments,
+            config, project)
+        email_tasks.append(email)
+        omit_addrs.add(project.issue_notify_address)
+        logging.info('about to bulk notify all-issues %s of %s',
+                     project.issue_notify_address,
+                     [issue.local_id for issue in non_private_issues])  # NOTE(review): logs non_private_issues, not non_private_issues_live.
+
+    return email_tasks
+
+  def _FormatBulkIssuesEmail(
+      self, dest_email, issues, users_by_id, commenter_view,
+      hostport, comment_text, amendments, config, _project):
+    """Format an email to one user listing many issues."""
+    # TODO(jrobbins): Generate two versions of email body: members
+    # version has full email addresses exposed. And, use the full
+    # commenter email address in the From: line when sending to
+    # a member.
+    subject, body = self._FormatBulkIssues(
+        issues, users_by_id, commenter_view, hostport, comment_text,
+        amendments, config)
+
+    from_addr = emailfmt.NoReplyAddress(commenter_view=commenter_view)
+    return dict(from_addr=from_addr, to=dest_email, subject=subject, body=body)
+
+  def _FormatBulkIssues(
+      self, issues, users_by_id, commenter_view, hostport, comment_text,
+      amendments, config, body_type='email'):
+    """Format a subject and body for a bulk issue edit."""
+    assert body_type in ('email', 'feed')
+    project_name = issues[0].project_name  # NOTE(review): assumes issues is non-empty.
+
+    issue_views = []
+    for issue in issues:
+      # TODO(jrobbins): choose config from dict of prefetched configs.
+      issue_views.append(tracker_views.IssueView(issue, users_by_id, config))
+
+    email_data = {
+        'hostport': hostport,
+        'num_issues': len(issues),
+        'issues': issue_views,
+        'comment_text': comment_text,
+        'commenter': commenter_view,
+        'amendments': amendments,
+        'body_type': body_type,
+    }
+
+    if len(issues) == 1:
+      subject = 'issue %s in %s: %s' % (
+          issues[0].local_id, project_name, issues[0].summary)
+      # TODO(jrobbins): Look up the sequence number instead and treat this
+      # more like an individual change for email threading. For now, just
+      # add "Re:" because bulk edits are always replies.
+      subject = 'Re: ' + subject
+    else:
+      subject = '%d issues changed in %s' % (len(issues), project_name)
+
+    body = self.email_template.GetResponse(email_data)
+
+    return subject, body
+
+
+class OutboundEmailTask(jsonfeed.InternalTask):
+  """JSON servlet that sends one email."""
+
+  def HandleRequest(self, mr):
+    """Process the task to send one email message.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format which is useful just for debugging.
+      The main goal is the side-effect of sending emails.
+    """
+    # If running on a GAFYD domain, you must define an app alias on the
+    # Application Settings admin web page.
+    sender = mr.GetParam('from_addr')
+    reply_to = mr.GetParam('reply_to')
+    to = mr.GetParam('to')
+    if not to:
+      # Cannot proceed if we cannot create a valid EmailMessage.
+      return
+    references = mr.GetParam('references')
+    subject = mr.GetParam('subject')
+    body = mr.GetParam('body')
+    html_body = mr.GetParam('html_body')
+
+    if settings.dev_mode:
+      to_format = settings.send_dev_email_to
+    else:
+      to_format = settings.send_all_email_to
+
+    if to_format:  # Redirect outbound mail to a configured address pattern (dev/staging).
+      to_user, to_domain = to.split('@')
+      to = to_format % {'user': to_user, 'domain': to_domain}
+
+    logging.info(
+        'Email:\n sender: %s\n reply_to: %s\n to: %s\n references: %s\n '
+        'subject: %s\n body: %s\n html body: %s',
+        sender, reply_to, to, references, subject, body, html_body)
+    message = mail.EmailMessage(
+        sender=sender, to=to, subject=subject, body=body)
+    if html_body:
+      message.html = html_body
+    if reply_to:
+      message.reply_to = reply_to
+    if references:
+      message.headers = {'References': references}
+    if settings.unit_test_mode:
+      logging.info('Sending message "%s" in test mode.', message.subject)
+    else:
+      message.send()
+
+    return dict(
+        sender=sender, to=to, subject=subject, body=body, html_body=html_body,
+        reply_to=reply_to, references=references)
+
+
+def _GetSubscribersAddrPermList(
+    cnxn, services, issue, project, config, omit_addrs, users_by_id):
+  """Lookup subscribers, evaluate their saved queries, and decide to notify."""
+  users_to_queries = notify_helpers.GetNonOmittedSubscriptions(
+      cnxn, services, [project.project_id], omit_addrs)
+  # TODO(jrobbins): need to pass through the user_id to use for "me".
+  subscribers_to_notify = notify_helpers.EvaluateSubscriptions(
+      cnxn, issue, users_to_queries, services, config)
+  # TODO(jrobbins): expand any subscribers that are user groups.
+  subs_needing_user_views = [
+      uid for uid in subscribers_to_notify if uid not in users_by_id]
+  users_by_id.update(framework_views.MakeAllUserViews(
+      cnxn, services.user, subs_needing_user_views))
+  sub_addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+      cnxn, subscribers_to_notify, project, issue, services, omit_addrs,
+      users_by_id, pref_check_function=lambda *args: True)  # Subscribers opted in via a saved query, so skip the pref check.
+
+  return sub_addr_perm_list
diff --git a/appengine/monorail/features/notify_helpers.py b/appengine/monorail/features/notify_helpers.py
new file mode 100644
index 0000000..ff37190
--- /dev/null
+++ b/appengine/monorail/features/notify_helpers.py
@@ -0,0 +1,414 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for email notifications of issue changes."""
+
+import cgi
+import logging
+import re
+
+from django.utils.html import urlize
+
+from features import filterrules_helpers
+from features import savedqueries_helpers
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import permissions
+from framework import urls
+from proto import tracker_pb2
+from search import query2ast
+from search import searchpipeline
+from tracker import component_helpers
+from tracker import tracker_bizobj
+
+
+# When sending change notification emails, choose the reply-to header and
+# footer message based on three levels of the recipient's permissions
+# for that issue.
+REPLY_NOT_ALLOWED = 'REPLY_NOT_ALLOWED'
+REPLY_MAY_COMMENT = 'REPLY_MAY_COMMENT'
+REPLY_MAY_UPDATE = 'REPLY_MAY_UPDATE'
+
+# This HTML template adds mark up which enables Gmail/Inbox to display a
+# convenient link that takes users to the CL directly from the inbox without
+# having to click on the email.
+# Documentation for this schema.org markup is here:
+# https://developers.google.com/gmail/markup/reference/go-to-action
+HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE = """
+<html>
+<body>
+<script type="application/ld+json">
+{
+ "@context": "http://schema.org",
+ "@type": "EmailMessage",
+ "potentialAction": {
+ "@type": "ViewAction",
+ "name": "View Issue",
+ "url": "%s"
+ },
+ "description": ""
+}
+</script>
+
+<div style="font-family: arial, sans-serif">%s</div>
+</body>
+</html>
+"""
+
+
+def ComputeIssueChangeAddressPermList(
+    cnxn, ids_to_consider, project, issue, services, omit_addrs,
+    users_by_id, pref_check_function=lambda u: u.notify_issue_change):
+  """Return a list of user email addresses to notify of an issue change.
+
+  User email addresses are determined by looking up the given user IDs
+  in the given users_by_id dict.
+
+  Args:
+    cnxn: connection to SQL database.
+    ids_to_consider: list of user IDs for users interested in this issue.
+    project: Project PB for the project containing this issue.
+    issue: Issue PB for the issue that was updated.
+    services: Services.
+    omit_addrs: set of strings for email addresses to not notify because
+        they already know.
+    users_by_id: dict {user_id: user_view} user info.
+    pref_check_function: optional function to use to check if a certain
+        User PB has a preference set to receive the email being sent. It
+        defaults to "If I am in the issue's owner or cc field", but it
+        can be set to check "If I starred the issue."
+
+  Returns:
+    A list of tuples: [(recipient_is_member, address, reply_perm), ...] where
+    reply_perm is one of REPLY_NOT_ALLOWED, REPLY_MAY_COMMENT,
+    REPLY_MAY_UPDATE.
+  """
+  memb_addr_perm_list = []
+  for user_id in ids_to_consider:
+    if user_id == framework_constants.NO_USER_SPECIFIED:
+      continue
+    user = services.user.GetUser(cnxn, user_id)
+    # Notify people who have a pref set, or if they have no User PB
+    # because the pref defaults to True.
+    if user and not pref_check_function(user):
+      continue
+    # TODO(jrobbins): doing a bulk operation would reduce DB load.
+    auth = monorailrequest.AuthData.FromUserID(cnxn, user_id, services)
+    perms = permissions.GetPermissions(user, auth.effective_ids, project)
+    config = services.config.GetProjectConfig(cnxn, project.project_id)  # NOTE(review): loop-invariant; could be hoisted out of the loop.
+    granted_perms = tracker_bizobj.GetGrantedPerms(
+        issue, auth.effective_ids, config)
+
+    if not permissions.CanViewIssue(
+        auth.effective_ids, perms, project, issue,
+        granted_perms=granted_perms):
+      continue
+
+    addr = users_by_id[user_id].email
+    if addr in omit_addrs:
+      continue
+
+    recipient_is_member = bool(framework_bizobj.UserIsInProject(
+        project, auth.effective_ids))
+
+    reply_perm = REPLY_NOT_ALLOWED
+    if project.process_inbound_email:
+      if permissions.CanEditIssue(auth.effective_ids, perms, project, issue):
+        reply_perm = REPLY_MAY_UPDATE
+      elif permissions.CanCommentIssue(
+          auth.effective_ids, perms, project, issue):
+        reply_perm = REPLY_MAY_COMMENT
+
+    memb_addr_perm_list.append((recipient_is_member, addr, reply_perm))
+
+  logging.info('For %s %s, will notify: %r',
+               project.project_name, issue.local_id, memb_addr_perm_list)
+
+  return memb_addr_perm_list
+
+
+def ComputeProjectNotificationAddrList(
+    project, contributor_could_view, omit_addrs):
+  """Return a list of non-user addresses to notify of an issue change.
+
+  The non-user addresses are specified by email address strings, not
+  user IDs. One such address can be specified in the project PB.
+  It is not assumed to have permission to see all issues.
+
+  Args:
+    project: Project PB containing the issue that was updated.
+    contributor_could_view: True if any project contributor should be able to
+        see the notification email, e.g., in a mailing list archive or feed.
+    omit_addrs: set of strings for email addresses to not notify because
+        they already know.
+
+  Returns:
+    A list of tuples: [(False, email_address, reply_permission_level), ...],
+    where reply_permission_level is always REPLY_NOT_ALLOWED for now.
+  """
+  memb_addr_perm_list = []
+  if contributor_could_view:
+    ml_addr = project.issue_notify_address  # ml = project's mailing-list address.
+    if ml_addr and ml_addr not in omit_addrs:
+      memb_addr_perm_list.append((False, ml_addr, REPLY_NOT_ALLOWED))
+
+  return memb_addr_perm_list
+
+
+def ComputeIssueNotificationAddrList(issue, omit_addrs):
+  """Return a list of non-user addresses to notify of an issue change.
+
+  The non-user addresses are specified by email address strings, not
+  user IDs. They can be set by filter rules with the "Also notify" action.
+  "Also notify" addresses are assumed to have permission to see any issue,
+  even a restricted one.
+
+  Args:
+    issue: Issue PB for the issue that was updated.
+    omit_addrs: set of strings for email addresses to not notify because
+        they already know.
+
+  Returns:
+    A list of tuples: [(False, email_address, reply_permission_level), ...],
+    where reply_permission_level is always REPLY_NOT_ALLOWED for now.
+  """
+  addr_perm_list = []  # [(False, addr, REPLY_NOT_ALLOWED), ...]
+  for addr in issue.derived_notify_addrs:
+    if addr not in omit_addrs:
+      addr_perm_list.append((False, addr, REPLY_NOT_ALLOWED))
+
+  return addr_perm_list
+
+
+def MakeBulletedEmailWorkItems(
+    group_reason_list, subject, body_for_non_members, body_for_members,
+    project, hostport, commenter_view, seq_num=None, detail_url=None):
+  """Make a list of dicts describing email-sending tasks to notify users.
+
+  Args:
+    group_reason_list: list of (memb_addr_perm_list, reason) tuples.
+    subject: string email subject line.
+    body_for_non_members: string body of email to send to non-members.
+    body_for_members: string body of email to send to members.
+    project: Project that contains the issue.
+    hostport: string hostname and port number for links to the site.
+    commenter_view: UserView for the user who made the comment.
+    seq_num: optional int sequence number of the comment.
+    detail_url: optional str direct link to the issue.
+
+  Returns:
+    A list of dictionaries, each with all needed info to send an individual
+    email to one user. Each email contains a footer that lists all the
+    reasons why that user received the email.
+  """
+  logging.info('group_reason_list is %r', group_reason_list)
+  addr_reasons_dict = {}  # {(is_member, addr, perm): [reason, ...]}
+  for group, reason in group_reason_list:
+    for memb_addr_perm in group:
+      addr_reasons_dict.setdefault(memb_addr_perm, []).append(reason)
+
+  email_tasks = []
+  for memb_addr_perm, reasons in addr_reasons_dict.iteritems():
+    email_tasks.append(_MakeEmailWorkItem(
+        memb_addr_perm, reasons, subject, body_for_non_members,
+        body_for_members, project, hostport, commenter_view, seq_num=seq_num,
+        detail_url=detail_url))
+
+  return email_tasks
+
+
+def _MakeEmailWorkItem(
+ (recipient_is_member, to_addr, reply_perm), reasons, subject,
+ body_for_non_members, body_for_members, project, hostport, commenter_view,
+ seq_num=None, detail_url=None):
+ """Make one email task dict for one user, includes a detailed reason."""
+ footer = _MakeNotificationFooter(reasons, reply_perm, hostport)
+ if isinstance(footer, unicode):
+ footer = footer.encode('utf-8')
+ if recipient_is_member:
+ logging.info('got member %r', to_addr)
+ body = body_for_members
+ else:
+ logging.info('got non-member %r', to_addr)
+ body = body_for_non_members
+
+ logging.info('sending body + footer: %r', body + footer)
+ can_reply_to = (
+ reply_perm != REPLY_NOT_ALLOWED and project.process_inbound_email)
+ from_addr = emailfmt.FormatFromAddr(
+ project, commenter_view=commenter_view, reveal_addr=recipient_is_member,
+ can_reply_to=can_reply_to)
+ if can_reply_to:
+ reply_to = '%s@%s' % (project.project_name, emailfmt.MailDomain())
+ else:
+ reply_to = emailfmt.NoReplyAddress()
+ refs = emailfmt.GetReferences(
+ to_addr, subject, seq_num,
+ '%s@%s' % (project.project_name, emailfmt.MailDomain()))
+ # If detail_url is specified then we can use markup to display a convenient
+ # link that takes users directly to the issue without clicking on the email.
+ html_body = None
+ if detail_url:
+ # cgi.escape the body and additionally escape single quotes which are
+ # occassionally used to contain HTML attributes and event handler
+ # definitions.
+ html_escaped_body = cgi.escape(body + footer, quote=1).replace("'", ''')
+ html_body = HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ _AddHTMLTags(html_escaped_body.decode('utf-8')))
+ return dict(to=to_addr, subject=subject, body=body + footer,
+ html_body=html_body, from_addr=from_addr, reply_to=reply_to,
+ references=refs)
+
+
+def _AddHTMLTags(body):
+ """Adds HMTL tags in the specified email body.
+
+ Specifically does the following:
+ * Detects links and adds <a href>s around the links.
+ * Substitutes <br/> for all occurrences of "\n".
+
+ See crbug.com/582463 for context.
+ """
+ # Convert all URLs into clickable links.
+ body = urlize(body)
+ # The above step converts
+ # '<link.com>' into '<<a href="link.com>">link.com></a>;' and
+ # '<x@y.com>' into '<<a href="mailto:x@y.com>">x@y.com></a>;'
+ # The below regex fixes this specific problem. See
+ # https://bugs.chromium.org/p/monorail/issues/detail?id=1007 for more details.
+ body = re.sub(r'<<a href="(|mailto:)(.*?)>">(.*?)></a>;',
+ r'<a href="\1\2"><\3></a>', body)
+
+ # Convert all "\n"s into "<br/>"s.
+ body = body.replace("\n", "<br/>")
+ return body
+
+
+def _MakeNotificationFooter(reasons, reply_perm, hostport):
+  """Make an informative footer for a notification email.
+
+  Args:
+    reasons: a list of strings to be used as the explanation. Empty if no
+        reason is to be given.
+    reply_perm: string which is one of REPLY_NOT_ALLOWED, REPLY_MAY_COMMENT,
+        REPLY_MAY_UPDATE.
+    hostport: string with domain_name:port_number to be used in linking to
+        the user preferences page.
+
+  Returns:
+    A string to be used as the email footer.
+  """
+  if not reasons:
+    return ''
+
+  domain_port = hostport.split(':')
+  domain_port[0] = framework_helpers.GetPreferredDomain(domain_port[0])
+  hostport = ':'.join(domain_port)  # Rebuilt with the preferred domain name.
+
+  prefs_url = 'https://%s%s' % (hostport, urls.USER_SETTINGS)
+  lines = ['-- ']
+  lines.append('You received this message because:')
+  lines.extend('  %d. %s' % (idx + 1, reason)
+               for idx, reason in enumerate(reasons))
+
+  lines.extend(['', 'You may adjust your notification preferences at:',
+                prefs_url])
+
+  if reply_perm == REPLY_MAY_COMMENT:
+    lines.extend(['', 'Reply to this email to add a comment.'])
+  elif reply_perm == REPLY_MAY_UPDATE:
+    lines.extend(['', 'Reply to this email to add a comment or make updates.'])
+
+  return '\n'.join(lines)
+
+
+def GetNonOmittedSubscriptions(cnxn, services, project_ids, omit_addrs):
+  """Get a dict of users w/ subscriptions in those projects."""
+  users_to_queries = services.features.GetSubscriptionsInProjects(
+      cnxn, project_ids)
+  user_emails = services.user.LookupUserEmails(cnxn, users_to_queries.keys())
+  for user_id, email in user_emails.iteritems():
+    if email in omit_addrs:
+      del users_to_queries[user_id]  # Drop subscribers who are already being emailed.
+
+  return users_to_queries
+
+
+def EvaluateSubscriptions(
+    cnxn, issue, users_to_queries, services, config):
+  """Determine subscribers who have subs that match the given issue."""
+  # Note: unlike filter rule, subscriptions see explicit & derived values.
+  lower_labels = [lab.lower() for lab in tracker_bizobj.GetLabels(issue)]
+  label_set = set(lower_labels)
+
+  subscribers_to_notify = []
+  for uid, saved_queries in users_to_queries.iteritems():
+    for sq in saved_queries:
+      if sq.subscription_mode != 'immediate':
+        continue
+      if issue.project_id not in sq.executes_in_project_ids:
+        continue
+      cond = savedqueries_helpers.SavedQueryToCond(sq)
+      logging.info('evaluating query %s: %r', sq.name, cond)
+      cond = searchpipeline.ReplaceKeywordsWithUserID(uid, cond)
+      cond_ast = query2ast.ParseUserQuery(
+          cond, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+
+      if filterrules_helpers.EvalPredicate(
+          cnxn, services, cond_ast, issue, label_set, config,
+          tracker_bizobj.GetOwnerId(issue), tracker_bizobj.GetCcIds(issue),
+          tracker_bizobj.GetStatus(issue)):
+        subscribers_to_notify.append(uid)
+        break  # Don't bother looking at the user's other saved queries.
+
+  return subscribers_to_notify
+
+
+def ComputeCustomFieldAddrPerms(
+    cnxn, config, issue, project, services, omit_addrs, users_by_id):
+  """Check the reasons to notify users named in custom fields."""
+  group_reason_list = []
+  for fd in config.field_defs:
+    named_user_ids = ComputeNamedUserIDsToNotify(issue, fd)
+    if named_user_ids:
+      named_addr_perms = ComputeIssueChangeAddressPermList(
+          cnxn, named_user_ids, project, issue, services, omit_addrs,
+          users_by_id, pref_check_function=lambda u: True)  # Field mention implies notification; skip the pref check.
+      group_reason_list.append(
+          (named_addr_perms, 'You are named in the %s field' % fd.field_name))
+
+  return group_reason_list
+
+
+def ComputeNamedUserIDsToNotify(issue, fd):
+  """Give a list of user IDs to notify because they're in a field."""
+  # Only user-valued fields configured to notify on any comment apply.
+  if (fd.field_type == tracker_pb2.FieldTypes.USER_TYPE and
+      fd.notify_on == tracker_pb2.NotifyTriggers.ANY_COMMENT):
+    return [fv.user_id for fv in issue.field_values
+            if fv.field_id == fd.field_id]
+
+  return []
+
+
+def ComputeComponentFieldAddrPerms(
+    cnxn, config, issue, project, services, omit_addrs, users_by_id):
+  """Return [(addr_perm, reason), ...] for users auto-cc'd by components."""
+  component_ids = set(issue.component_ids)
+  group_reason_list = []
+  for cd in config.component_defs:
+    if cd.component_id in component_ids:
+      cc_ids = component_helpers.GetCcIDsForComponentAndAncestors(config, cd)
+      comp_addr_perms = ComputeIssueChangeAddressPermList(
+          cnxn, cc_ids, project, issue, services, omit_addrs,
+          users_by_id, pref_check_function=lambda u: True)  # Auto-cc implies notification; skip the pref check.
+      group_reason_list.append(
+          (comp_addr_perms,
+           'You are auto-CC\'d on all issues in component %s' % cd.path))
+
+  return group_reason_list
diff --git a/appengine/monorail/features/prettify.py b/appengine/monorail/features/prettify.py
new file mode 100644
index 0000000..5b12b63
--- /dev/null
+++ b/appengine/monorail/features/prettify.py
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for source code syntax highlighting."""
+
+from third_party import ezt
+
+from framework import framework_constants
+
+
+# We only attempt to do client-side syntax highlighting on files that we
+# expect to be source code in languages that we support, and that are
+# reasonably sized.
+MAX_PRETTIFY_LINES = 3000
+
+
+def PrepareSourceLinesForHighlighting(file_contents):
+  """Parse a file into lines for highlighting.
+
+  Args:
+    file_contents: string contents of the source code file.
+
+  Returns:
+    A list of _SourceLine objects, one for each line in the source file.
+  """
+  return [_SourceLine(num + 1, line) for num, line  # line numbers are 1-based
+          in enumerate(file_contents.splitlines())]
+
+
+class _SourceLine(object):
+  """Convenience class to represent one line of the source code display.
+
+  Attributes:
+    num: The line's location in the source file.
+    line: String source code line to display.
+  """
+
+  def __init__(self, num, line):
+    self.num = num
+    self.line = line
+
+  def __str__(self):
+    return '%d: %s' % (self.num, self.line)  # for logging/debugging output
+
+
+def BuildPrettifyData(num_lines, path):
+  """Return page data to help configure google-code-prettify.
+
+  Args:
+    num_lines: int number of lines of source code in the file.
+    path: string path to the file, or just the filename.
+
+  Returns:
+    Dictionary that can be passed to EZT to render a page.
+  """
+  reasonable_size = num_lines < MAX_PRETTIFY_LINES
+
+  filename_lower = path[path.rfind('/') + 1:].lower()
+  ext = filename_lower[filename_lower.rfind('.') + 1:]  # whole name if no '.' (rfind returns -1)
+
+  # Note that '' might be a valid entry in these maps.
+  prettify_class = framework_constants.PRETTIFY_CLASS_MAP.get(ext)
+  if prettify_class is None:
+    prettify_class = framework_constants.PRETTIFY_FILENAME_CLASS_MAP.get(
+        filename_lower)
+  supported_lang = prettify_class is not None
+
+  return {
+      'should_prettify': ezt.boolean(supported_lang and reasonable_size),
+      'prettify_class': prettify_class,
+      }
diff --git a/appengine/monorail/features/savedqueries.py b/appengine/monorail/features/savedqueries.py
new file mode 100644
index 0000000..624312d
--- /dev/null
+++ b/appengine/monorail/features/savedqueries.py
@@ -0,0 +1,70 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Page for showing a user's saved queries and subscription options."""
+
+import logging
+import time
+
+from features import savedqueries_helpers
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+
+
+class SavedQueries(servlet.Servlet):
+ """A page class that shows the user's saved queries."""
+
+ _PAGE_TEMPLATE = 'features/saved-queries-page.ezt'
+
+ def AssertBasePermission(self, mr):
+ super(SavedQueries, self).AssertBasePermission(mr)
+ viewing_self = mr.viewed_user_auth.user_id == mr.auth.user_id
+ if not mr.auth.user_pb.is_site_admin and not viewing_self:
+ raise permissions.PermissionException(
+ 'User not allowed to edit this user\'s saved queries')
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page."""
+ saved_queries = self.services.features.GetSavedQueriesByUserID(
+ mr.cnxn, mr.viewed_user_auth.user_id)
+ saved_query_views = [
+ savedqueries_helpers.SavedQueryView(
+ sq, idx + 1, mr.cnxn, self.services.project)
+ for idx, sq in enumerate(saved_queries)]
+
+ page_data = {
+ 'canned_queries': saved_query_views,
+ 'new_query_indexes': (
+ range(len(saved_queries) + 1,
+ savedqueries_helpers.MAX_QUERIES + 1)),
+ 'max_queries': savedqueries_helpers.MAX_QUERIES,
+ 'user_tab_mode': 'st4',
+ }
+ return page_data
+
+ def ProcessFormData(self, mr, post_data):
+ """Validate and store the contents of the issues tracker admin page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to, or None if response was already sent.
+ """
+ existing_queries = savedqueries_helpers.ParseSavedQueries(
+ mr.cnxn, post_data, self.services.project)
+ added_queries = savedqueries_helpers.ParseSavedQueries(
+ mr.cnxn, post_data, self.services.project, prefix='new_')
+ saved_queries = existing_queries + added_queries
+
+ self.services.features.UpdateUserSavedQueries(
+ mr.cnxn, mr.viewed_user_auth.user_id, saved_queries)
+
+ return framework_helpers.FormatAbsoluteURL(
+ mr, '/u/%s%s' % (mr.viewed_username, urls.SAVED_QUERIES),
+ include_project=False, saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/features/savedqueries_helpers.py b/appengine/monorail/features/savedqueries_helpers.py
new file mode 100644
index 0000000..036b43e
--- /dev/null
+++ b/appengine/monorail/features/savedqueries_helpers.py
@@ -0,0 +1,110 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Utility functions and classes for dealing with saved queries.
+
+Saved queries can be part of the project issue config, where they are
+called "canned queries". Or, they can be personal saved queries that
+may appear in the search scope drop-down, on the user's dashboard, or
+in the user's subscription.
+"""
+
+import logging
+import re
+
+from framework import template_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+MAX_QUERIES = 100
+
+
+def ParseSavedQueries(cnxn, post_data, project_service, prefix=''):
+ """Parse form data for the Saved Queries part of an admin form."""
+ saved_queries = []
+ for i in xrange(1, MAX_QUERIES + 1):
+ if ('%ssavedquery_name_%s' % (prefix, i)) not in post_data:
+ continue # skip any entries that are blank or have no predicate.
+
+ name = post_data['%ssavedquery_name_%s' % (prefix, i)].strip()
+ if not name:
+ continue # skip any blank entries
+
+ if '%ssavedquery_id_%s' % (prefix, i) in post_data:
+ query_id = int(post_data['%ssavedquery_id_%s' % (prefix, i)])
+ else:
+ query_id = None # a new query_id will be generated by the DB.
+
+ project_names_str = post_data.get(
+ '%ssavedquery_projects_%s' % (prefix, i), '')
+ project_names = [pn.strip().lower()
+ for pn in re.split('[],;\s]+', project_names_str)
+ if pn.strip()]
+ project_ids = project_service.LookupProjectIDs(
+ cnxn, project_names).values()
+
+ base_id = int(post_data['%ssavedquery_base_%s' % (prefix, i)])
+ query = post_data['%ssavedquery_query_%s' % (prefix, i)].strip()
+
+ subscription_mode_field = '%ssavedquery_sub_mode_%s' % (prefix, i)
+ if subscription_mode_field in post_data:
+ subscription_mode = post_data[subscription_mode_field].strip()
+ else:
+ subscription_mode = None
+
+ saved_queries.append(tracker_bizobj.MakeSavedQuery(
+ query_id, name, base_id, query, subscription_mode=subscription_mode,
+ executes_in_project_ids=project_ids))
+
+ return saved_queries
+
+
+class SavedQueryView(template_helpers.PBProxy):
+ """Wrapper class that makes it easier to display SavedQuery via EZT."""
+
+ def __init__(self, sq, idx, cnxn, project_service):
+ """Store relevant values for later display by EZT.
+
+ Args:
+ sq: A SavedQuery protocol buffer.
+ idx: Int index of this saved query in the list.
+ cnxn: connection to SQL database.
+ project_service: persistence layer for project data.
+ """
+ super(SavedQueryView, self).__init__(sq)
+
+ self.idx = idx
+ base_query_name = 'All issues'
+ for canned in tracker_constants.DEFAULT_CANNED_QUERIES:
+ qid, name, _base_id, _query = canned
+ if qid == sq.base_query_id:
+ base_query_name = name
+
+ if cnxn:
+ project_names = sorted(project_service.LookupProjectNames(
+ cnxn, sq.executes_in_project_ids).values())
+ self.projects = ', '.join(project_names)
+ else:
+ self.projects = ''
+
+ self.docstring = '[%s] %s' % (base_query_name, sq.query)
+
+
+def SavedQueryToCond(saved_query):
+ """Convert a SavedQuery PB to a user query condition string."""
+ base_cond = tracker_bizobj.GetBuiltInQuery(saved_query.base_query_id)
+ cond = '%s %s' % (base_cond, saved_query.query)
+ return cond.strip()
+
+
+def SavedQueryIDToCond(cnxn, features_service, query_id):
+ """Convert a can/query ID to a user query condition string."""
+ built_in = tracker_bizobj.GetBuiltInQuery(query_id)
+ if built_in:
+ return built_in
+
+ saved_query = features_service.GetSavedQuery(cnxn, query_id)
+ return SavedQueryToCond(saved_query)
diff --git a/appengine/monorail/features/spammodel.py b/appengine/monorail/features/spammodel.py
new file mode 100644
index 0000000..3f292a9
--- /dev/null
+++ b/appengine/monorail/features/spammodel.py
@@ -0,0 +1,74 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+""" Tasks and handlers for maintaining the spam classifier model. These
+ should be run via cron and task queue rather than manually.
+"""
+
+import cgi
+import csv
+import logging
+import webapp2
+import cloudstorage
+import json
+
+from datetime import date
+from datetime import datetime
+from datetime import timedelta
+
+from framework import servlet
+from framework import urls
+from google.appengine.api import taskqueue
+from google.appengine.api import app_identity
+from framework import gcs_helpers
+
+class TrainingDataExport(webapp2.RequestHandler):
+ """Trigger a training data export task"""
+ def get(self):
+ logging.info("Training data export requested.")
+ taskqueue.add(url=urls.SPAM_DATA_EXPORT_TASK + '.do')
+
+BATCH_SIZE = 100
+
+class TrainingDataExportTask(servlet.Servlet):
+ """Export any human-labeled ham or spam from the previous day. These
+ records will be used by a subsequent task to create an updated model.
+ """
+ CHECK_SECURITY_TOKEN = False
+
+ def ProcessFormData(self, mr, post_data):
+ logging.info("Training data export initiated.")
+
+ bucket_name = app_identity.get_default_gcs_bucket_name()
+ date_str = date.today().isoformat()
+ export_target_path = '/' + bucket_name + '/spam_training_data/' + date_str
+ total_issues = 0
+
+ with cloudstorage.open(export_target_path, mode='w',
+ content_type=None, options=None, retry_params=None) as gcs_file:
+
+ csv_writer = csv.writer(gcs_file, delimiter=',', quotechar='"',
+ quoting=csv.QUOTE_ALL, lineterminator='\n')
+
+ since = datetime.now() - timedelta(days=1)
+
+ # TODO: Comments, and further pagination
+ issues, first_comments, _count = (
+ self.services.spam.GetTrainingIssues(
+ mr.cnxn, self.services.issue, since, offset=0, limit=BATCH_SIZE))
+ total_issues += len(issues)
+ for issue in issues:
+ # Cloud Prediction API doesn't allow newlines in the training data.
+ fixed_summary = issue.summary.replace('\r\n', ' ')
+ fixed_comment = first_comments[issue.issue_id].replace('\r\n', ' ')
+
+ csv_writer.writerow([
+ 'spam' if issue.is_spam else 'ham',
+ fixed_summary, fixed_comment,
+ ])
+
+ self.response.body = json.dumps({
+ "exported_issue_count": total_issues,
+ })
+
diff --git a/appengine/monorail/features/stars.py b/appengine/monorail/features/stars.py
new file mode 100644
index 0000000..83d7e1c
--- /dev/null
+++ b/appengine/monorail/features/stars.py
@@ -0,0 +1,43 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This is a starring servlet for users and projects."""
+
+import logging
+
+from framework import jsonfeed
+from framework import monorailrequest
+
+USER_STARS_SCOPE = 'users'
+PROJECT_STARS_SCOPE = 'projects'
+
+
+class SetStarsFeed(jsonfeed.JsonFeed):
+ """Process an AJAX request to (un)set a star on a project or user."""
+
+ def HandleRequest(self, mr):
+ """Retrieves the star persistence object and sets a star."""
+ starrer_id = mr.auth.user_id
+ item = mr.GetParam('item') # a project name or a user ID number
+ scope = mr.GetParam('scope')
+ starred = bool(mr.GetIntParam('starred'))
+ logging.info('Handling user set star request: %r %r %r %r',
+ starrer_id, item, scope, starred)
+
+ if scope == PROJECT_STARS_SCOPE:
+ project = self.services.project.GetProjectByName(mr.cnxn, item)
+ self.services.project_star.SetStar(
+ mr.cnxn, project.project_id, starrer_id, starred)
+
+ elif scope == USER_STARS_SCOPE:
+ user_id = int(item)
+ self.services.user_star.SetStar(mr.cnxn, user_id, starrer_id, starred)
+
+ else:
+ raise monorailrequest.InputException('unexpected star scope: %s' % scope)
+
+ return {
+ 'starred': starred,
+ }
diff --git a/appengine/monorail/features/test/__init__.py b/appengine/monorail/features/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/features/test/__init__.py
diff --git a/appengine/monorail/features/test/activities_test.py b/appengine/monorail/features/test/activities_test.py
new file mode 100644
index 0000000..2d738a4
--- /dev/null
+++ b/appengine/monorail/features/test/activities_test.py
@@ -0,0 +1,154 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.feature.activities."""
+
+import unittest
+
+import mox
+
+from features import activities
+from framework import framework_views
+from framework import profiler
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ActivitiesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ )
+
+ self.project_name = 'proj'
+ self.project_id = 987
+ self.project = self.services.project.TestAddProject(
+ self.project_name, project_id=self.project_id,
+ process_inbound_email=True)
+
+ self.issue_id = 11
+ self.issue_local_id = 100
+ self.issue = tracker_pb2.Issue()
+ self.issue.issue_id = self.issue_id
+ self.issue.project_id = self.project_id
+ self.issue.local_id = self.issue_local_id
+ self.services.issue.TestAddIssue(self.issue)
+
+ self.comment_id = 123
+ self.comment_timestamp = 120
+ self.user_id = 2
+ self.mr_after = 1234
+
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testActivities_NoUpdates(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ updates_data = activities.GatherUpdatesData(
+ self.services, mr, profiler.Profiler(), project_ids=[self.project_id],
+ user_ids=None, ending=None, updates_page_url=None, autolink=None,
+ highlight=None)
+
+ self.assertIsNone(updates_data['pagination'])
+ self.assertIsNone(updates_data['no_stars'])
+ self.assertIsNone(updates_data['updates_data'])
+ self.assertEqual('yes', updates_data['no_activities'])
+ self.assertIsNone(updates_data['ending_type'])
+
+ def createAndAssertUpdates(self, project_ids=None, user_ids=None,
+ ascending=True):
+ user = user_pb2.MakeUser()
+ comment_1 = tracker_pb2.IssueComment(
+ id=self.comment_id, issue_id=self.issue_id,
+ project_id=self.project_id, user_id=self.user_id,
+ content='this is the 1st comment',
+ timestamp=self.comment_timestamp)
+ self.mox.StubOutWithMock(self.services.issue, 'GetComments')
+
+ created_order = 'created'
+ field = 'project_id' if project_ids else 'commenter_id'
+ where_clauses = [('Issue.id = Comment.issue_id', [])]
+ if project_ids:
+ where_clauses.append(('Comment.project_id IN (%s)', project_ids))
+ if user_ids:
+ where_clauses.append(('Comment.commenter_id IN (%s)', user_ids))
+ if ascending:
+ where_clauses.append(('created > %s', [self.mr_after]))
+ else:
+ created_order += ' DESC'
+ self.services.issue.GetComments(
+ mox.IgnoreArg(), deleted_by=None,
+ joins=[('Issue', [])], limit=activities.UPDATES_PER_PAGE + 1,
+ order_by=[(created_order, [])],
+ use_clause='USE INDEX (%s) USE INDEX FOR ORDER BY (%s)' % (field,
+ field),
+ where=where_clauses).AndReturn([comment_1])
+
+ self.mox.StubOutWithMock(framework_views, 'MakeAllUserViews')
+ framework_views.MakeAllUserViews(
+ mox.IgnoreArg(), self.services.user, [self.user_id], []).AndReturn(
+ {self.user_id: user})
+
+ self.mox.ReplayAll()
+
+ mr = testing_helpers.MakeMonorailRequest()
+ if ascending:
+ mr.after = self.mr_after
+
+ updates_page_url='testing/testing'
+ updates_data = activities.GatherUpdatesData(
+ self.services, mr, profiler.Profiler(), project_ids=project_ids,
+ user_ids=user_ids, ending=None, autolink=None,
+ highlight='highlightme', updates_page_url=updates_page_url)
+ self.mox.VerifyAll()
+
+ if mr.after:
+ pagination = updates_data['pagination']
+ self.assertIsNone(pagination.last)
+ self.assertEquals('%s?before=%d' % (updates_page_url.split('/')[-1],
+ self.comment_timestamp),
+ pagination.next_url)
+ self.assertEquals('%s?after=%d' % (updates_page_url.split('/')[-1],
+ self.comment_timestamp),
+ pagination.prev_url)
+
+ activity_view = updates_data['updates_data'].older[0]
+ self.assertEqual(
+ '<a class="ot-issue-link"\n href="/p//issues/detail?id=%s#c_id%s"\n >'
+ 'issue %s</a>\n\n()\n\n\n\n\n \n commented on' % (
+ self.issue_local_id, self.comment_id, self.issue_local_id),
+ activity_view.escaped_title)
+ self.assertEqual(
+ '<span class="ot-issue-comment">\n this is the 1st comment\n</span>',
+ activity_view.escaped_body)
+ self.assertEqual('highlightme', activity_view.highlight)
+ self.assertEqual(self.project_name, activity_view.project_name)
+
+ def testActivities_AscendingProjectUpdates(self):
+ self.createAndAssertUpdates(project_ids=[self.project_id], ascending=True)
+
+ def testActivities_DescendingProjectUpdates(self):
+ self.createAndAssertUpdates(project_ids=[self.project_id], ascending=False)
+
+ def testActivities_AscendingUserUpdates(self):
+ self.createAndAssertUpdates(user_ids=[self.user_id], ascending=True)
+
+ def testActivities_DescendingUserUpdates(self):
+ self.createAndAssertUpdates(user_ids=[self.user_id], ascending=False)
+
+ def testActivities_SpecifyProjectAndUser(self):
+ self.createAndAssertUpdates(
+ project_ids=[self.project_id], user_ids=[self.user_id], ascending=False)
+
diff --git a/appengine/monorail/features/test/autolink_test.py b/appengine/monorail/features/test/autolink_test.py
new file mode 100644
index 0000000..b4facd6
--- /dev/null
+++ b/appengine/monorail/features/test/autolink_test.py
@@ -0,0 +1,588 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the autolink feature."""
+
+import re
+import unittest
+
+from features import autolink
+from framework import template_helpers
+from proto import tracker_pb2
+from testing import fake
+from testing import testing_helpers
+
+
+SIMPLE_EMAIL_RE = re.compile(r'([a-z]+)@([a-z]+)\.com')
+OVER_AMBITIOUS_DOMAIN_RE = re.compile(r'([a-z]+)\.(com|net|org)')
+
+
+class AutolinkTest(unittest.TestCase):
+
+ def RegisterEmailCallbacks(self, aa):
+
+ def LookupUsers(_mr, all_addresses):
+ """Return user objects for only users who are at trusted domains."""
+ return [addr for addr in all_addresses
+ if addr.endswith('@example.com')]
+
+ def Match2Addresses(_mr, match):
+ return [match.group(0)]
+
+ def MakeMailtoLink(_mr, match, comp_ref_artifacts):
+ email = match.group(0)
+ if email in comp_ref_artifacts:
+ return [template_helpers.TextRun(
+ tag='a', href='mailto:%s' % email, content=email)]
+ else:
+ return [template_helpers.TextRun('%s AT %s.com' % match.group(1, 2))]
+
+ aa.RegisterComponent('testcomp',
+ LookupUsers,
+ Match2Addresses,
+ {SIMPLE_EMAIL_RE: MakeMailtoLink})
+
+ def RegisterDomainCallbacks(self, aa):
+
+ def LookupDomains(_mr, _all_refs):
+ """Return business objects for only real domains. Always just True."""
+ return True # We don't have domain business objects, accept anything.
+
+ def Match2Domains(_mr, match):
+ return [match.group(0)]
+
+ def MakeHyperLink(_mr, match, _comp_ref_artifacts):
+ domain = match.group(0)
+ return [template_helpers.TextRun(tag='a', href=domain, content=domain)]
+
+ aa.RegisterComponent('testcomp2',
+ LookupDomains,
+ Match2Domains,
+ {OVER_AMBITIOUS_DOMAIN_RE: MakeHyperLink})
+
+ def setUp(self):
+ self.aa = autolink.Autolink()
+ self.RegisterEmailCallbacks(self.aa)
+ self.comment1 = ('Feel free to contact me at a@other.com, '
+ 'or b@example.com, or c@example.org.')
+ self.comment2 = 'no matches in this comment'
+ self.comment3 = 'just matches with no ref: a@other.com, c@example.org'
+ self.comments = [self.comment1, self.comment2, self.comment3]
+
+ def testRegisterComponent(self):
+ self.assertIn('testcomp', self.aa.registry)
+
+ def testGetAllReferencedArtifacts(self):
+ all_ref_artifacts = self.aa.GetAllReferencedArtifacts(
+ None, self.comments)
+
+ self.assertIn('testcomp', all_ref_artifacts)
+ comp_refs = all_ref_artifacts['testcomp']
+ self.assertIn('b@example.com', comp_refs)
+ self.assertTrue(len(comp_refs) == 1)
+
+ def testMarkupAutolinks(self):
+ all_ref_artifacts = self.aa.GetAllReferencedArtifacts(None, self.comments)
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment1)], all_ref_artifacts)
+ self.assertEqual('Feel free to contact me at ', result[0].content)
+ self.assertEqual('a AT other.com', result[1].content)
+ self.assertEqual(', or ', result[2].content)
+ self.assertEqual('b@example.com', result[3].content)
+ self.assertEqual('mailto:b@example.com', result[3].href)
+ self.assertEqual(', or c@example.org.', result[4].content)
+
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment2)], all_ref_artifacts)
+ self.assertEqual('no matches in this comment', result[0].content)
+
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment3)], all_ref_artifacts)
+ self.assertEqual('just matches with no ref: ', result[0].content)
+ self.assertEqual('a AT other.com', result[1].content)
+ self.assertEqual(', c@example.org', result[2].content)
+
+ def testNonnestedAutolinks(self):
+ """Test that when a substitution yields plain text, others are applied."""
+ self.RegisterDomainCallbacks(self.aa)
+ all_ref_artifacts = self.aa.GetAllReferencedArtifacts(None, self.comments)
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment1)], all_ref_artifacts)
+ self.assertEqual('Feel free to contact me at ', result[0].content)
+ self.assertEqual('a AT ', result[1].content)
+ self.assertEqual('other.com', result[2].content)
+ self.assertEqual('other.com', result[2].href)
+ self.assertEqual(', or ', result[3].content)
+ self.assertEqual('b@example.com', result[4].content)
+ self.assertEqual('mailto:b@example.com', result[4].href)
+ self.assertEqual(', or c@', result[5].content)
+ self.assertEqual('example.org', result[6].content)
+ self.assertEqual('example.org', result[6].href)
+ self.assertEqual('.', result[7].content)
+
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment2)], all_ref_artifacts)
+ self.assertEqual('no matches in this comment', result[0].content)
+ result = self.aa.MarkupAutolinks(
+ None, [template_helpers.TextRun(self.comment3)], all_ref_artifacts)
+ self.assertEqual('just matches with no ref: ', result[0].content)
+ self.assertEqual('a AT ', result[1].content)
+ self.assertEqual('other.com', result[2].content)
+ self.assertEqual('other.com', result[2].href)
+ self.assertEqual(', c@', result[3].content)
+ self.assertEqual('example.org', result[4].content)
+ self.assertEqual('example.org', result[4].href)
+
+
+class URLAutolinkTest(unittest.TestCase):
+
+ def DoLinkify(self, content):
+ """Calls the linkify method and returns the result.
+
+ Args:
+ content: string with a hyperlink.
+
+ Returns:
+ A list of TextRuns with some runs will have the embedded URL hyperlinked.
+ Or, None if no link was detected.
+ """
+ match = autolink._IS_A_LINK_RE.search(content)
+ if not match:
+ return None
+
+ replacement_runs = autolink.Linkify(None, match, None)
+ return replacement_runs
+
+ def testLinkify(self):
+ """Test that given url is autolinked when put in the given context."""
+ # Disallow the linking of URLs with user names and passwords.
+ test = 'http://user:pass@www.yahoo.com'
+ result = self.DoLinkify('What about %s' % test)
+ self.assertEqual(None, result[0].tag)
+ self.assertEqual(None, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ # Disallow the linking of non-HTTP(S) links
+ test = 'nntp://news.google.com'
+ result = self.DoLinkify('%s' % test)
+ self.assertEqual(None, result)
+
+ # Disallow the linking of file links
+ test = 'file://C:/Windows/System32/cmd.exe'
+ result = self.DoLinkify('%s' % test)
+ self.assertEqual(None, result)
+
+ # Test some known URLs
+ test = 'http://www.example.com'
+ result = self.DoLinkify('What about %s' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ def testLinkify_FTP(self):
+ """Test that FTP urls are linked."""
+ # Check for a standard ftp link
+ test = 'ftp://ftp.example.com'
+ result = self.DoLinkify('%s' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ def testLinkify_Context(self):
+ """Test that surrounding syntax is not considered part of the url."""
+ test = 'http://www.example.com'
+
+ # Check for a link followed by a comma at end of English phrase.
+ result = self.DoLinkify('The URL %s, points to a great website.' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(',', result[1].content)
+
+ # Check for a link followed by a period at end of English sentence.
+ result = self.DoLinkify('The best site ever, %s.' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('.', result[1].content)
+
+ # Check for a link in parentheses (), [], or {}
+ result = self.DoLinkify('My fav site (%s).' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(').', result[1].content)
+
+ result = self.DoLinkify('My fav site [%s].' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('].', result[1].content)
+
+ result = self.DoLinkify('My fav site {%s}.' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('}.', result[1].content)
+
+ # Check for a link with trailing colon
+ result = self.DoLinkify('Hit %s: you will love it.' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(':', result[1].content)
+
+ # Check link with commas in query string, but don't include trailing comma.
+ test = 'http://www.example.com/?v=1,2,3'
+ result = self.DoLinkify('Try %s, ok?' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ # Check link surrounded by angle-brackets, or quotes.
+ result = self.DoLinkify('<%s>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('>', result[1].content)
+
+ result = self.DoLinkify('"%s"' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('"', result[1].content)
+
+ # Check link with embedded quotes.
+ test = 'http://www.example.com/?q="a+b+c"'
+ result = self.DoLinkify('Try %s, ok?' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(',', result[1].content)
+
+ # Check link with embedded parens.
+ test = 'http://www.example.com/funky(foo)and(bar).asp'
+ result = self.DoLinkify('Try %s, ok?' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(',', result[1].content)
+
+ test = 'http://www.example.com/funky(foo)and(bar).asp'
+ result = self.DoLinkify('My fav site <%s>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('>', result[1].content)
+
+ # Check link with embedded brackets and braces.
+ test = 'http://www.example.com/funky[foo]and{bar}.asp'
+ result = self.DoLinkify('My fav site <%s>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('>', result[1].content)
+
+ # Check link with mismatched delimiters inside it or outside it.
+ test = 'http://www.example.com/funky"(foo]and>bar}.asp'
+ result = self.DoLinkify('My fav site <%s>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('>', result[1].content)
+
+ test = 'http://www.example.com/funky"(foo]and>bar}.asp'
+ result = self.DoLinkify('My fav site {%s' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ test = 'http://www.example.com/funky"(foo]and>bar}.asp'
+ result = self.DoLinkify('My fav site %s}' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('}', result[1].content)
+
+ # Link as part of an HTML example.
+ test = 'http://www.example.com/'
+ result = self.DoLinkify('<a href="%s">' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual('">', result[1].content)
+
+ # Link nested in an HTML tag.
+ result = self.DoLinkify('<span>%s</span>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ # Link followed by HTML tag - same bug as above.
+ result = self.DoLinkify('%s<span>foo</span>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ # Link followed by unescaped HTML tag.
+ result = self.DoLinkify('%s<span>foo</span>' % test)
+ self.assertEqual(test, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+ def testLinkify_UnicodeContext(self):
+ """Test that unicode context does not mess up the link."""
+ test = 'http://www.example.com'
+
+ # This string has a non-breaking space \xa0.
+ result = self.DoLinkify(u'The correct RFC link is\xa0%s' % test)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(test, result[0].href)
+
+ def testLinkify_UnicodeLink(self):
+ """Test that unicode in a link is OK."""
+ test = u'http://www.example.com?q=division\xc3\xb7sign'
+
+ # This string has a non-breaking space \xa0.
+ result = self.DoLinkify(u'The unicode link is %s' % test)
+ self.assertEqual(test, result[0].content)
+ self.assertEqual(test, result[0].href)
+
+ def testLinkify_LinkTextEscapingDisabled(self):
+ """Test that url-like things that miss validation aren't linked."""
+ # Link matched by the regex but not accepted by the validator.
+ test = 'http://crash/reportdetail?reportid=35aa03e04772358b'
+ result = self.DoLinkify('<span>%s</span>' % test)
+ self.assertEqual(None, result[0].href)
+ self.assertEqual(test, result[0].content)
+
+
+def _Issue(project_name, local_id, summary, status):
+ issue = tracker_pb2.Issue()
+ issue.project_name = project_name
+ issue.local_id = local_id
+ issue.summary = summary
+ issue.status = status
+ return issue
+
+
+class TrackerAutolinkTest(unittest.TestCase):
+
+ COMMENT_TEXT = (
+ 'This relates to issue 1, issue #2, and issue3 \n'
+ 'as well as bug 4, bug #5, and bug6 \n'
+ 'with issue other-project:12 and issue other-project#13. \n'
+ 'Watch out for issues 21, 22, and 23 with oxford comma. \n'
+ 'And also bugs 31, 32 and 33 with no oxford comma\n'
+ 'We do not match when an issue\n'
+ '999. Is split across lines.'
+ )
+
+ def testExtractProjectAndIssueId(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=1')
+ ref_batches = []
+ for match in autolink._ISSUE_REF_RE.finditer(self.COMMENT_TEXT):
+ new_refs = autolink.ExtractProjectAndIssueIds(mr, match)
+ ref_batches.append(new_refs)
+
+ self.assertEquals(
+ ref_batches,
+ [[(None, 1)],
+ [(None, 2)],
+ [(None, 3)],
+ [(None, 4)],
+ [(None, 5)],
+ [(None, 6)],
+ [('other-project', 12)],
+ [('other-project', 13)],
+ [(None, 21), (None, 22), (None, 23)],
+ [(None, 31), (None, 32), (None, 33)],
+ ])
+
+ def DoReplaceIssueRef(self, content):
+ """Calls the ReplaceIssueRef method and returns the result.
+
+ Args:
+ content: string that may have a textual reference to an issue.
+
+ Returns:
+ A list of TextRuns with some runs will have the reference hyperlinked.
+ Or, None if no reference detected.
+ """
+ match = autolink._ISSUE_REF_RE.search(content)
+ if not match:
+ return None
+
+ open_dict = {'proj:1': _Issue('proj', 1, 'summary-PROJ-1', 'New'),
+ # Assume there is no issue 3 in PROJ
+ 'proj:4': _Issue('proj', 4, 'summary-PROJ-4', 'New'),
+ 'proj:6': _Issue('proj', 6, 'summary-PROJ-6', 'New'),
+ 'other-project:12': _Issue('other-project', 12,
+ 'summary-OP-12', 'Accepted'),
+ }
+ closed_dict = {'proj:2': _Issue('proj', 2, 'summary-PROJ-2', 'Fixed'),
+ 'proj:5': _Issue('proj', 5, 'summary-PROJ-5', 'Fixed'),
+ 'other-project:13': _Issue('other-project', 13,
+ 'summary-OP-12', 'Invalid'),
+ }
+ comp_ref_artifacts = (open_dict, closed_dict,)
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/detail?r=1')
+ replacement_runs = autolink.ReplaceIssueRef(mr, match, comp_ref_artifacts)
+ return replacement_runs
+
+ def testReplaceIssueRef(self):
+
+ result = self.DoReplaceIssueRef('This relates to issue 1')
+ self.assertEquals('/p/proj/issues/detail?id=1', result[0].href)
+ self.assertEquals('issue 1', result[0].content)
+ self.assertEquals(None, result[0].css_class)
+ self.assertEquals('summary-PROJ-1', result[0].title)
+ self.assertEquals('a', result[0].tag)
+
+ result = self.DoReplaceIssueRef(', issue #2')
+ self.assertEquals('/p/proj/issues/detail?id=2', result[0].href)
+ self.assertEquals('issue #2', result[0].content)
+ self.assertEquals('closed_ref', result[0].css_class)
+ self.assertEquals('summary-PROJ-2', result[0].title)
+ self.assertEquals('a', result[0].tag)
+
+ result = self.DoReplaceIssueRef(', and issue3 ')
+ self.assertEquals(None, result[0].href) # There is no issue 3
+ self.assertEquals('issue3', result[0].content)
+
+ result = self.DoReplaceIssueRef('as well as bug 4')
+ self.assertEquals('/p/proj/issues/detail?id=4', result[0].href)
+ self.assertEquals('bug 4', result[0].content)
+
+ result = self.DoReplaceIssueRef(', bug #5, ')
+ self.assertEquals('/p/proj/issues/detail?id=5', result[0].href)
+ self.assertEquals('bug #5', result[0].content)
+
+ result = self.DoReplaceIssueRef('and bug6')
+ self.assertEquals('/p/proj/issues/detail?id=6', result[0].href)
+ self.assertEquals('bug6', result[0].content)
+
+ result = self.DoReplaceIssueRef('with issue other-project:12')
+ self.assertEquals('/p/other-project/issues/detail?id=12', result[0].href)
+ self.assertEquals('issue other-project:12', result[0].content)
+
+ result = self.DoReplaceIssueRef('and issue other-project#13')
+ self.assertEquals('/p/other-project/issues/detail?id=13', result[0].href)
+ self.assertEquals('issue other-project#13', result[0].content)
+
+ def testParseProjectNameMatch(self):
+ golden = 'project-name'
+ variations = ['%s', ' %s', '%s ', '%s:', '%s#', '%s#:', '%s:#', '%s :#',
+ '\t%s', '%s\t', '\t%s\t', '\t\t%s\t\t', '\n%s', '%s\n',
+ '\n%s\n', '\n\n%s\n\n', '\t\n%s', '\n\t%s', '%s\t\n',
+ '%s\n\t', '\t\n%s#', '\n\t%s#', '%s\t\n#', '%s\n\t#',
+ '\t\n%s:', '\n\t%s:', '%s\t\n:', '%s\n\t:'
+ ]
+
+ # First pass checks all valid project name results
+ for pattern in variations:
+ self.assertEquals(
+ golden, autolink._ParseProjectNameMatch(pattern % golden))
+
+ # Second pass tests all inputs that should result in None
+ for pattern in variations:
+ self.assert_(
+ autolink._ParseProjectNameMatch(pattern % '') in [None, ''])
+
+
+class VCAutolinkTest(unittest.TestCase):
+
+ GIT_HASH_1 = '1' * 40
+ GIT_HASH_2 = '2' * 40
+ GIT_HASH_3 = 'a1' * 20
+ GIT_COMMENT_TEXT = (
+ 'This is a fix for r%s and R%s, by r2d2, who also authored revision %s, '
+ 'revision #%s, revision %s, and revision %s' % (
+ GIT_HASH_1, GIT_HASH_2, GIT_HASH_3,
+ GIT_HASH_1.upper(), GIT_HASH_2.upper(), GIT_HASH_3.upper()))
+ SVN_COMMENT_TEXT = (
+ 'This is a fix for r12 and R34, by r2d2, who also authored revision r4, '
+ 'revision #1234567, revision 789, and revision 9025. If you have '
+ 'questions, call me at 18005551212')
+
+ def testGetReferencedRevisions(self):
+ refs = ['1', '2', '3']
+ # For now, we do not look up revision objects, result is always None
+ self.assertIsNone(autolink.GetReferencedRevisions(None, refs))
+
+ def testExtractGitHashes(self):
+ refs = []
+ for match in autolink._GIT_HASH_RE.finditer(self.GIT_COMMENT_TEXT):
+ new_refs = autolink.ExtractRevNums(None, match)
+ refs.extend(new_refs)
+
+ self.assertEquals(
+ refs, [self.GIT_HASH_1, self.GIT_HASH_2, self.GIT_HASH_3,
+ self.GIT_HASH_1.upper(), self.GIT_HASH_2.upper(),
+ self.GIT_HASH_3.upper()])
+
+ def testExtractRevNums(self):
+ refs = []
+ for match in autolink._SVN_REF_RE.finditer(self.SVN_COMMENT_TEXT):
+ new_refs = autolink.ExtractRevNums(None, match)
+ refs.extend(new_refs)
+
+ self.assertEquals(
+ refs, ['12', '34', '4', '1234567', '789', '9025'])
+
+
+ def DoReplaceRevisionRef(self, content, project=None):
+ """Calls the ReplaceRevisionRef method and returns the result.
+
+ Args:
+ content: string with a hyperlink.
+ project: optional project.
+
+ Returns:
+      A list of TextRuns; the runs that contain revision references are hyperlinked.
+ Or, None if no link was detected.
+ """
+ match = autolink._GIT_HASH_RE.search(content)
+ if not match:
+ return None
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/source/detail?r=1', project=project)
+ replacement_runs = autolink.ReplaceRevisionRef(mr, match, None)
+ return replacement_runs
+
+ def testReplaceRevisionRef(self):
+ result = self.DoReplaceRevisionRef(
+ 'This is a fix for r%s' % self.GIT_HASH_1)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_1, result[0].href)
+ self.assertEquals('r%s' % self.GIT_HASH_1, result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'and R%s, by r2d2, who ' % self.GIT_HASH_2)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_2, result[0].href)
+ self.assertEquals('R%s' % self.GIT_HASH_2, result[0].content)
+
+ result = self.DoReplaceRevisionRef('by r2d2, who ')
+ self.assertEquals(None, result)
+
+ result = self.DoReplaceRevisionRef(
+ 'also authored revision %s, ' % self.GIT_HASH_3)
+ self.assertEquals('https://crrev.com/%s' % self.GIT_HASH_3, result[0].href)
+ self.assertEquals('revision %s' % self.GIT_HASH_3, result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'revision #%s, ' % self.GIT_HASH_1.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_1.upper(), result[0].href)
+ self.assertEquals(
+ 'revision #%s' % self.GIT_HASH_1.upper(), result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'revision %s, ' % self.GIT_HASH_2.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_2.upper(), result[0].href)
+ self.assertEquals(
+ 'revision %s' % self.GIT_HASH_2.upper(), result[0].content)
+
+ result = self.DoReplaceRevisionRef(
+ 'and revision %s' % self.GIT_HASH_3.upper())
+ self.assertEquals(
+ 'https://crrev.com/%s' % self.GIT_HASH_3.upper(), result[0].href)
+ self.assertEquals(
+ 'revision %s' % self.GIT_HASH_3.upper(), result[0].content)
+
+ def testReplaceRevisionRef_CustomURL(self):
+ """A project can override the URL used for revision links."""
+ project = fake.Project()
+ project.revision_url_format = 'http://example.com/+/{revnum}'
+ result = self.DoReplaceRevisionRef(
+ 'This is a fix for r%s' % self.GIT_HASH_1, project=project)
+ self.assertEquals(
+ 'http://example.com/+/%s' % self.GIT_HASH_1, result[0].href)
+ self.assertEquals('r%s' % self.GIT_HASH_1, result[0].content)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/commands_test.py b/appengine/monorail/features/test/commands_test.py
new file mode 100644
index 0000000..0c61a53
--- /dev/null
+++ b/appengine/monorail/features/test/commands_test.py
@@ -0,0 +1,231 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.features.commands."""
+
+import logging
+import unittest
+
+from features import commands
+from framework import framework_constants
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+class CommandsTest(unittest.TestCase):
+
+ def VerifyParseQuickEditCommmand(
+ self, cmd, exp_summary='sum', exp_status='New', exp_owner_id=111L,
+ exp_cc_ids=None, exp_labels=None):
+
+ issue = tracker_pb2.Issue()
+ issue.project_name = 'proj'
+ issue.local_id = 1
+ issue.summary = 'sum'
+ issue.status = 'New'
+ issue.owner_id = 111L
+ issue.cc_ids.extend([222L, 333L])
+ issue.labels.extend(['Type-Defect', 'Priority-Medium', 'Hot'])
+
+ if exp_cc_ids is None:
+ exp_cc_ids = [222L, 333L]
+ if exp_labels is None:
+ exp_labels = ['Type-Defect', 'Priority-Medium', 'Hot']
+
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ logged_in_user_id = 999L
+ services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ services.user.TestAddUser('jrobbins', 333L)
+ services.user.TestAddUser('jrobbins@jrobbins.org', 888L)
+
+ cnxn = 'fake cnxn'
+ (summary, status, owner_id, cc_ids,
+ labels) = commands.ParseQuickEditCommand(
+ cnxn, cmd, issue, config, logged_in_user_id, services)
+ self.assertEqual(exp_summary, summary)
+ self.assertEqual(exp_status, status)
+ self.assertEqual(exp_owner_id, owner_id)
+ self.assertListEqual(exp_cc_ids, cc_ids)
+ self.assertListEqual(exp_labels, labels)
+
+ def testParseQuickEditCommmand_Empty(self):
+ self.VerifyParseQuickEditCommmand('') # Nothing should change.
+
+ def testParseQuickEditCommmand_BuiltInFields(self):
+ self.VerifyParseQuickEditCommmand(
+ 'status=Fixed', exp_status='Fixed')
+ self.VerifyParseQuickEditCommmand( # Normalized capitalization.
+ 'status=fixed', exp_status='Fixed')
+ self.VerifyParseQuickEditCommmand(
+ 'status=limbo', exp_status='limbo')
+
+ self.VerifyParseQuickEditCommmand(
+ 'owner=me', exp_owner_id=999L)
+ self.VerifyParseQuickEditCommmand(
+ 'owner=jrobbins@jrobbins.org', exp_owner_id=888L)
+ self.VerifyParseQuickEditCommmand(
+ 'owner=----', exp_owner_id=framework_constants.NO_USER_SPECIFIED)
+
+ self.VerifyParseQuickEditCommmand(
+ 'summary=JustOneWord', exp_summary='JustOneWord')
+ self.VerifyParseQuickEditCommmand(
+ 'summary="quoted sentence"', exp_summary='quoted sentence')
+ self.VerifyParseQuickEditCommmand(
+ "summary='quoted sentence'", exp_summary='quoted sentence')
+
+ self.VerifyParseQuickEditCommmand(
+ 'cc=me', exp_cc_ids=[222L, 333L, 999L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=jrobbins@jrobbins.org', exp_cc_ids=[222L, 333L, 888L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=me,jrobbins@jrobbins.org',
+ exp_cc_ids=[222L, 333L, 999L, 888L])
+ self.VerifyParseQuickEditCommmand(
+ 'cc=-jrobbins,jrobbins@jrobbins.org',
+ exp_cc_ids=[222L, 888L])
+
+ def testParseQuickEditCommmand_Labels(self):
+ self.VerifyParseQuickEditCommmand(
+ 'Priority=Low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ 'priority=low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ 'priority-low', exp_labels=['Type-Defect', 'Hot', 'Priority-Low'])
+ self.VerifyParseQuickEditCommmand(
+ '-priority-low', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot'])
+ self.VerifyParseQuickEditCommmand(
+ '-priority-medium', exp_labels=['Type-Defect', 'Hot'])
+
+ self.VerifyParseQuickEditCommmand(
+ 'Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '+Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Hot', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '-Hot Cold', exp_labels=['Type-Defect', 'Priority-Medium', 'Cold'])
+ self.VerifyParseQuickEditCommmand(
+ '-Hot', exp_labels=['Type-Defect', 'Priority-Medium'])
+
+ def testParseQuickEditCommmand_Multiple(self):
+ self.VerifyParseQuickEditCommmand(
+ 'Priority=Low -hot owner:me cc:-jrobbins summary="other summary"',
+ exp_summary='other summary', exp_owner_id=999L,
+ exp_cc_ids=[222L], exp_labels=['Type-Defect', 'Priority-Low'])
+
+ def testBreakCommandIntoParts_Empty(self):
+ self.assertListEqual(
+ [],
+ commands._BreakCommandIntoParts(''))
+
+ def testBreakCommandIntoParts_Single(self):
+ self.assertListEqual(
+ [('summary', 'new summary')],
+ commands._BreakCommandIntoParts('summary="new summary"'))
+ self.assertListEqual(
+ [('summary', 'OneWordSummary')],
+ commands._BreakCommandIntoParts('summary=OneWordSummary'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key=value'))
+ self.assertListEqual(
+ [('key', 'value-with-dashes')],
+ commands._BreakCommandIntoParts('key=value-with-dashes'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key:value'))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts(' key:value '))
+ self.assertListEqual(
+ [('key', 'value')],
+ commands._BreakCommandIntoParts('key:"value"'))
+ self.assertListEqual(
+ [('key', 'user@dom.com')],
+ commands._BreakCommandIntoParts('key:user@dom.com'))
+ self.assertListEqual(
+ [('key', 'a@dom.com,-b@dom.com')],
+ commands._BreakCommandIntoParts('key:a@dom.com,-b@dom.com'))
+ self.assertListEqual(
+ [(None, 'label')],
+ commands._BreakCommandIntoParts('label'))
+ self.assertListEqual(
+ [(None, '-label')],
+ commands._BreakCommandIntoParts('-label'))
+ self.assertListEqual(
+ [(None, '+label')],
+ commands._BreakCommandIntoParts('+label'))
+
+ def testBreakCommandIntoParts_Multiple(self):
+ self.assertListEqual(
+ [('summary', 'new summary'), (None, 'Hot'), (None, '-Cold'),
+ ('owner', 'me'), ('cc', '+a,-b')],
+ commands._BreakCommandIntoParts(
+ 'summary="new summary" Hot -Cold owner:me cc:+a,-b'))
+
+
+class CommandSyntaxParsingTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ user=fake.UserService())
+
+ self.services.project.TestAddProject('proj', owner_ids=[111L])
+ self.services.user.TestAddUser('a@example.com', 222L)
+
+ cnxn = 'fake connection'
+ config = self.services.config.GetProjectConfig(cnxn, 789)
+
+ for status in ['New', 'ReadyForReview']:
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status=status))
+
+ for label in ['Prioity-Low', 'Priority-High']:
+ config.well_known_labels.append(tracker_pb2.LabelDef(
+ label=label))
+
+ config.exclusive_label_prefixes.extend(
+ tracker_constants.DEFAULT_EXCL_LABEL_PREFIXES)
+
+ self.services.config.StoreConfig(cnxn, config)
+
+ def testStandardizeStatus(self):
+ config = self.services.config.GetProjectConfig('fake cnxn', 789)
+ self.assertEqual('New',
+ commands._StandardizeStatus('NEW', config))
+ self.assertEqual('New',
+ commands._StandardizeStatus('n$Ew ', config))
+ self.assertEqual(
+ 'custom-label',
+ commands._StandardizeLabel('custom=label ', config))
+
+ def testStandardizeLabel(self):
+ config = self.services.config.GetProjectConfig('fake cnxn', 789)
+ self.assertEqual(
+ 'Priority-High',
+ commands._StandardizeLabel('priority-high', config))
+ self.assertEqual(
+ 'Priority-High',
+ commands._StandardizeLabel('PRIORITY=HIGH', config))
+
+ def testLookupMeOrUsername(self):
+ self.assertEqual(
+ 123L,
+ commands._LookupMeOrUsername('fake cnxn', 'me', self.services, 123L))
+
+ self.assertEqual(
+ 222L,
+ commands._LookupMeOrUsername(
+ 'fake cnxn', 'a@example.com', self.services, 0))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/commitlogcommands_test.py b/appengine/monorail/features/test/commitlogcommands_test.py
new file mode 100644
index 0000000..4c43cd4
--- /dev/null
+++ b/appengine/monorail/features/test/commitlogcommands_test.py
@@ -0,0 +1,96 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.features.commitlogcommands."""
+
+import unittest
+
+import mox
+
+from features import commitlogcommands
+from features import notify
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class InboundEmailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ issue=fake.IssueService(),
+ project=fake.ProjectService(),
+ config=fake.ConfigService())
+
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=987, process_inbound_email=True)
+ self.issue = tracker_pb2.Issue()
+ self.issue.project_id = 987
+ self.issue.summary = 'summary'
+ self.issue.status = 'Assigned'
+ self.services.issue.TestAddIssue(self.issue)
+
+ self.uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testParse_StripQuotedLines(self):
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', '> line 1', 'line 2'], self.services,
+ hostport=80, strip_quoted_lines=True)
+ self.assertEquals('line 2', self.uia.description)
+ self.assertEquals('summary:something\n> line 1\nline 2',
+ self.uia.inbound_message)
+
+ def testParse_NoStripQuotedLines(self):
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', '> line 1', 'line 2'], self.services,
+ hostport=80)
+ self.assertEquals('> line 1\nline 2', self.uia.description)
+ self.assertIsNone(self.uia.inbound_message)
+
+ def setupAndCallRun(self, allow_edit):
+ comments = ['comment 1', 'comment 2', 'comment 3']
+
+ self.mox.StubOutWithMock(self.services.issue, 'GetCommentsForIssue')
+ self.services.issue.GetCommentsForIssue(
+ self.cnxn, self.issue.issue_id).AndReturn(comments)
+ self.mox.StubOutWithMock(notify, 'PrepareAndSendIssueChangeNotification')
+ notify.PrepareAndSendIssueChangeNotification(
+ self.project.project_id, self.issue.local_id, 80, 101,
+ len(comments) - 1, old_owner_id=self.issue.owner_id)
+ self.mox.ReplayAll()
+
+ self.uia.Parse(self.cnxn, self.project.project_name, 101,
+ ['summary:something', 'status:New', '> line 1', '> line 2'],
+ self.services, hostport=80)
+ self.uia.Run(self.cnxn, self.services, allow_edit=allow_edit)
+ self.mox.VerifyAll()
+
+ def testRun_AllowEdit(self):
+ self.setupAndCallRun(allow_edit=True)
+
+ self.assertEquals('> line 1\n> line 2', self.uia.description)
+    # Assert that amendments were made to the issue.
+ self.assertEquals('something', self.issue.summary)
+ self.assertEquals('New', self.issue.status)
+
+
+ def testRun_NoAllowEdit(self):
+ self.setupAndCallRun(allow_edit=False)
+
+ self.assertEquals('> line 1\n> line 2', self.uia.description)
+    # Assert that amendments were *not* made to the issue.
+ self.assertEquals('summary', self.issue.summary)
+ self.assertEquals('Assigned', self.issue.status)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/cues_test.py b/appengine/monorail/features/test/cues_test.py
new file mode 100644
index 0000000..2c11b47
--- /dev/null
+++ b/appengine/monorail/features/test/cues_test.py
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the cues module."""
+
+import unittest
+
+from features import cues
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class CuesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService())
+ self.servlet = cues.SetCuesFeed('req', 'res', services=self.services)
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testHandleRequest(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/hosting/cues?cue_id=42', user_info={'user_id': 111L})
+
+ self.servlet.HandleRequest(mr)
+ user = self.services.user.test_users[111L]
+ self.assertTrue(user is not None)
+ dismissed_cues = user.dismissed_cues
+ self.assertTrue(dismissed_cues is not None)
+ self.assertIn('42', dismissed_cues)
+ self.assertNotIn('1492', dismissed_cues)
+
+
diff --git a/appengine/monorail/features/test/filterrules_helpers_test.py b/appengine/monorail/features/test/filterrules_helpers_test.py
new file mode 100644
index 0000000..8c48107
--- /dev/null
+++ b/appengine/monorail/features/test/filterrules_helpers_test.py
@@ -0,0 +1,633 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for filterrules_helpers feature."""
+
+import unittest
+
+import mox
+
+from google.appengine.api import taskqueue
+
+import settings
+from features import filterrules_helpers
+from framework import template_helpers
+from framework import urls
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import query2ast
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
+ORIG_SUMMARY = 'this is the orginal summary'
+ORIG_LABELS = ['one', 'two']
+
+# Fake user id mapping
+TEST_ID_MAP = {
+ 'mike.j.parent': 1,
+ 'jrobbins': 2,
+ 'ningerso': 3,
+ }
+
+
+class MockTaskQueue(object):
+ def __init__(self):
+ self.work_items = []
+
+ def add(self, **kwargs):
+ self.work_items.append(kwargs)
+
+
+class RecomputeAllDerivedFieldsTest(unittest.TestCase):
+
+ BLOCK = filterrules_helpers.BLOCK
+
+ def setUp(self):
+ self.features = fake.FeaturesService()
+ self.user = fake.UserService()
+ self.services = service_manager.Services(
+ features=self.features,
+ user=self.user,
+ issue=fake.IssueService())
+ self.project = fake.Project(project_name='proj')
+ self.config = 'fake config'
+ self.cnxn = 'fake cnxn'
+ self.mox = mox.Mox()
+ self.mock_task_queue = MockTaskQueue()
+ self.mox.StubOutWithMock(taskqueue, 'add')
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testRecomputeDerivedFields_Disabled(self):
+ """Servlet should just call RecomputeAllDerivedFieldsNow with no bounds."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = False
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_DisabledNextIDSet(self):
+    """Servlet should just call RecomputeAllDerivedFieldsNow with no bounds."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = False
+ self.services.issue.next_id = 1234
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.services.issue.UpdateIssues('no', ['1'])
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_NoIssues(self):
+ """Servlet should not call because there is no work to do."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_SomeIssues(self):
+ """Servlet should enqueue one work item rather than call directly."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.services.issue.next_id = 1234
+ num_calls = (self.services.issue.next_id // self.BLOCK + 1)
+ for _ in range(num_calls):
+ taskqueue.add(
+ params=mox.IsA(dict),
+ url='/_task/recomputeDerivedFields.do').WithSideEffects(
+ self.mock_task_queue.add)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+ work_items = self.mock_task_queue.work_items
+ self.assertEqual(num_calls, len(work_items))
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeDerivedFields_LotsOfIssues(self):
+ """Servlet should enqueue multiple work items."""
+ saved_flag = settings.recompute_derived_fields_in_worker
+ settings.recompute_derived_fields_in_worker = True
+ self.services.issue.next_id = 12345
+ num_calls = (self.services.issue.next_id // self.BLOCK + 1)
+ for _ in range(num_calls):
+ taskqueue.add(
+ params=mox.IsA(dict),
+ url='/_task/recomputeDerivedFields.do').WithSideEffects(
+ self.mock_task_queue.add)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.cnxn, self.services, self.project, self.config)
+ self.assertFalse(self.services.issue.get_all_issues_in_project_called)
+ self.assertFalse(self.services.issue.update_issues_called)
+ self.assertFalse(self.services.issue.enqueue_issues_called)
+
+ work_items = self.mock_task_queue.work_items
+ self.assertEqual(num_calls, len(work_items))
+ url, params = work_items[0]['url'], work_items[0]['params']
+ self.assertEqual(urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', url)
+ self.assertEqual(self.project.project_id, params['project_id'])
+ self.assertEqual(12345 // self.BLOCK * self.BLOCK + 1,
+ params['lower_bound'])
+ self.assertEqual(12345, params['upper_bound'])
+
+ url, params = work_items[-1]['url'], work_items[-1]['params']
+ self.assertEqual(urls.RECOMPUTE_DERIVED_FIELDS_TASK + '.do', url)
+ self.assertEqual(self.project.project_id, params['project_id'])
+ self.assertEqual(1, params['lower_bound'])
+ self.assertEqual(self.BLOCK + 1, params['upper_bound'])
+
+ self.mox.VerifyAll()
+ settings.recompute_derived_fields_in_worker = saved_flag
+
+ def testRecomputeAllDerivedFieldsNow(self):
+ """Servlet should reapply all filter rules to project's issues."""
+ self.services.issue.next_id = 12345
+ test_issue_1 = fake.MakeTestIssue(
+ project_id=self.project.project_id, local_id=1, issue_id=1001,
+ summary='sum1', owner_id=100, status='New')
+ test_issue_2 = fake.MakeTestIssue(
+ project_id=self.project.project_id, local_id=2, issue_id=1002,
+ summary='sum2', owner_id=100, status='New')
+ test_issues = [test_issue_1, test_issue_2]
+ self.services.issue.TestAddIssue(test_issue_1)
+ self.services.issue.TestAddIssue(test_issue_2)
+
+ self.mox.StubOutWithMock(filterrules_helpers, 'ApplyGivenRules')
+ for test_issue in test_issues:
+ filterrules_helpers.ApplyGivenRules(
+ self.cnxn, self.services, test_issue, self.config,
+ [], []).AndReturn(True)
+ self.mox.ReplayAll()
+
+ filterrules_helpers.RecomputeAllDerivedFieldsNow(
+ self.cnxn, self.services, self.project, self.config)
+
+ self.assertTrue(self.services.issue.get_all_issues_in_project_called)
+ self.assertTrue(self.services.issue.update_issues_called)
+ self.assertTrue(self.services.issue.enqueue_issues_called)
+ self.assertEqual(test_issues, self.services.issue.updated_issues)
+ self.assertEqual([issue.issue_id for issue in test_issues],
+ self.services.issue.enqueued_issues)
+ self.mox.VerifyAll()
+
+
+class FilterRulesHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services()
+ self.services.user = fake.UserService()
+ self.services.project = fake.ProjectService()
+ self.services.issue = fake.IssueService()
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.other_project = self.services.project.TestAddProject(
+ 'otherproj', project_id=890)
+ for email, user_id in TEST_ID_MAP.iteritems():
+ self.services.user.TestAddUser(email, user_id)
+
+ def testApplyRule(self):
+ cnxn = 'fake sql connection'
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 111L, labels=ORIG_LABELS)
+ config = tracker_pb2.ProjectIssueConfig()
+ # Empty label set cannot satisfy rule looking for labels.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ pred = 'label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ # Empty label set will satisfy rule looking for missing labels.
+ pred = '-label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, set(), config))
+
+ # Label set has the needed labels.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ # Label set has the needed labels with test for unicode.
+ pred = 'label:a label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {u'a', u'b'},
+ config))
+
+ # Label set has the needed labels, capitalization irrelevant.
+ pred = 'label:A label:B'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (1, 'S', [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ # Label set has a label, the rule negates.
+ pred = 'label:a -label:b'
+ rule = filterrules_helpers.MakeRule(
+ pred, default_owner_id=1, default_status='S')
+ predicate_ast = query2ast.ParseUserQuery(
+ pred, '', query2ast.BUILTIN_ISSUE_FIELDS, config)
+ self.assertEquals(
+ (None, None, [], [], []),
+ filterrules_helpers._ApplyRule(
+ cnxn, self.services, rule, predicate_ast, issue, {'a', 'b'},
+ config))
+
+ def testComputeDerivedFields(self):
+ cnxn = 'fake sql connection'
+ rules = [
+ filterrules_helpers.MakeRule(
+ 'label:HasWorkaround', add_labels=['Priority-Low']),
+ filterrules_helpers.MakeRule(
+ 'label:Security', add_labels=['Private']),
+ filterrules_helpers.MakeRule(
+ 'label:Security', add_labels=['Priority-High'],
+ add_notify=['jrobbins@chromium.org']),
+ filterrules_helpers.MakeRule(
+ 'Priority=High label:Regression', add_labels=['Urgent']),
+ filterrules_helpers.MakeRule(
+ 'Size=L', default_owner_id=444L),
+ ]
+ excl_prefixes = ['priority', 'type', 'milestone']
+ config = tracker_pb2.ProjectIssueConfig(
+ exclusive_label_prefixes=excl_prefixes)
+ predicate_asts = filterrules_helpers.ParsePredicateASTs(rules, config, None)
+
+ # No rules fire.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=ORIG_LABELS)
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['foo', 'bar'])
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['Size-L'])
+ self.assertEquals(
+ (444L, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires, but no effect because of explicit fields.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L,
+ labels=['HasWorkaround', 'Priority-Critical'])
+ self.assertEquals(
+ (0, '', [], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # One rule fires, but limited effect because of explicit exclusive label.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L,
+ labels=['Security', 'Priority-Critical'])
+ self.assertEquals(
+ (0, '', [], ['Private'], ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Multiple rules have cumulative effect.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['HasWorkaround', 'Size-L'])
+ self.assertEquals(
+ (444L, '', [], ['Priority-Low'], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, second overwrites the first.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['HasWorkaround', 'Security'])
+ self.assertEquals(
+ (0, '', [], ['Private', 'Priority-High'], ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, second triggered by the first.
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 0L, labels=['Security', 'Regression'])
+ self.assertEquals(
+ (0, '', [], ['Private', 'Priority-High', 'Urgent'],
+ ['jrobbins@chromium.org']),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ # Two rules fire, each one wants to add the same CC: only add once.
+ rules.append(filterrules_helpers.MakeRule('Watch', add_cc_ids=[111L]))
+ rules.append(filterrules_helpers.MakeRule('Monitor', add_cc_ids=[111L]))
+ config = tracker_pb2.ProjectIssueConfig(
+ exclusive_label_prefixes=excl_prefixes)
+ predicate_asts = filterrules_helpers.ParsePredicateASTs(rules, config, None)
+ issue = fake.MakeTestIssue(
+ 789, 1, ORIG_SUMMARY, 'New', 111L, labels=['Watch', 'Monitor'])
+ self.assertEquals(
+ (0, '', [111L], [], []),
+ filterrules_helpers._ComputeDerivedFields(
+ cnxn, self.services, issue, config, rules, predicate_asts))
+
+ def testCompareComponents_Trivial(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_NOT_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_DEFINED, [], []))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.IS_NOT_DEFINED, [], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, [123], []))
+
+ def testCompareComponents_Normal(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 100, 789, 'UI', 'doc', False, [], [], 0, 0))
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 110, 789, 'UI>Help', 'doc', False, [], [], 0, 0))
+ config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 200, 789, 'Networking', 'doc', False, [], [], 0, 0))
+
+ # Check if the issue is in a specified component or subcomponent.
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [100]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [100, 110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI'], [200]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help'], [100]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [100]))
+
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['UI'], []))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['UI'], [100]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NE, ['Networking'], [100]))
+
+ # Exact vs non-exact.
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Help'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['UI'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['Help'], [110]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NOT_TEXT_HAS, ['UI'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.NOT_TEXT_HAS, ['Help'], [110]))
+
+ # Multivalued issues and Quick-OR notation
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [200]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['Networking'], [100, 110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [100]))
+ self.assertFalse(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [110]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI', 'Networking'], [110, 200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.TEXT_HAS, ['UI', 'Networking'], [110, 200]))
+ self.assertTrue(filterrules_helpers._CompareComponents(
+ config, ast_pb2.QueryOp.EQ, ['UI>Help', 'Networking'], [110, 200]))
+
+ def testCompareIssueRefs_Trivial(self):
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_NOT_DEFINED, [], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_DEFINED, [], []))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.IS_NOT_DEFINED, [], []))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['1'], []))
+
+ def testCompareIssueRefs_Normal(self):
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 789, 1, 'summary', 'New', 0L, issue_id=123))
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 789, 2, 'summary', 'New', 0L, issue_id=124))
+ self.services.issue.TestAddIssue(fake.MakeTestIssue(
+ 890, 1, 'other summary', 'New', 0L, issue_id=125))
+
+  # EQ and NE, implicit references to the current project.
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['1'], [123]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.NE, ['1'], [123]))
+
+ # EQ and NE, explicit project references.
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['proj:1'], [123]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.EQ, ['otherproj:1'], [125]))
+
+ # Inequalities
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['1'], [123]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['1'], [124]))
+ self.assertTrue(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GE, ['2'], [124]))
+ self.assertFalse(filterrules_helpers._CompareIssueRefs(
+ self.cnxn, self.services, self.project,
+ ast_pb2.QueryOp.GT, ['2'], [124]))
+
+ def testCompareUsers(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompareUserIDs(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompareEmails(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testCompare(self):
+ pass # TODO(jrobbins): Add this test.
+
+ def testParseOneRuleAddLabels(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1 label:lab2', 'add_labels', 'hot cOld, ', None, 1,
+ error_list)
+ self.assertEquals('label:lab1 label:lab2', rule_pb.predicate)
+ self.assertEquals(error_list, [])
+ self.assertEquals(len(rule_pb.add_labels), 2)
+ self.assertEquals(rule_pb.add_labels[0], 'hot')
+ self.assertEquals(rule_pb.add_labels[1], 'cOld')
+
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, '', 'default_status', 'hot cold', None, 1, error_list)
+ self.assertEquals(len(rule_pb.predicate), 0)
+ self.assertEquals(error_list, [])
+
+ def testParseOneRuleDefaultOwner(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1, label:lab2 ', 'default_owner', 'jrobbins',
+ self.services.user, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.default_owner_id, TEST_ID_MAP['jrobbins'])
+
+ def testParseOneRuleDefaultStatus(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1', 'default_status', 'InReview',
+ None, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.default_status, 'InReview')
+
+ def testParseOneRuleAddCcs(self):
+ cnxn = 'fake SQL connection'
+ error_list = []
+ rule_pb = filterrules_helpers._ParseOneRule(
+ cnxn, 'label:lab1', 'add_ccs', 'jrobbins, mike.j.parent',
+ self.services.user, 1, error_list)
+ self.assertEquals(error_list, [])
+ self.assertEquals(rule_pb.add_cc_ids[0], TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rule_pb.add_cc_ids[1], TEST_ID_MAP['mike.j.parent'])
+ self.assertEquals(len(rule_pb.add_cc_ids), 2)
+
+ def testParseRulesNone(self):
+ cnxn = 'fake SQL connection'
+ post_data = {}
+ rules = filterrules_helpers.ParseRules(
+ cnxn, post_data, None, template_helpers.EZTError())
+ self.assertEquals(rules, [])
+
+ def testParseRules(self):
+ cnxn = 'fake SQL connection'
+ post_data = {
+ 'predicate1': 'a, b c',
+ 'action_type1': 'default_status',
+ 'action_value1': 'Reviewed',
+ 'predicate2': 'a, b c',
+ 'action_type2': 'default_owner',
+ 'action_value2': 'jrobbins',
+ 'predicate3': 'a, b c',
+ 'action_type3': 'add_ccs',
+ 'action_value3': 'jrobbins, mike.j.parent',
+ 'predicate4': 'a, b c',
+ 'action_type4': 'add_labels',
+ 'action_value4': 'hot, cold',
+ }
+ errors = template_helpers.EZTError()
+ rules = filterrules_helpers.ParseRules(
+ cnxn, post_data, self.services.user, errors)
+ self.assertEquals(rules[0].predicate, 'a, b c')
+ self.assertEquals(rules[0].default_status, 'Reviewed')
+ self.assertEquals(rules[1].default_owner_id, TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rules[2].add_cc_ids[0], TEST_ID_MAP['jrobbins'])
+ self.assertEquals(rules[2].add_cc_ids[1], TEST_ID_MAP['mike.j.parent'])
+ self.assertEquals(rules[3].add_labels[0], 'hot')
+ self.assertEquals(rules[3].add_labels[1], 'cold')
+ self.assertEquals(len(rules), 4)
+ self.assertFalse(errors.AnyErrors())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/filterrules_views_test.py b/appengine/monorail/features/test/filterrules_views_test.py
new file mode 100644
index 0000000..124db4f
--- /dev/null
+++ b/appengine/monorail/features/test/filterrules_views_test.py
@@ -0,0 +1,71 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for issue tracker views."""
+
+import unittest
+
+from features import filterrules_views
+from proto import tracker_pb2
+from testing import testing_helpers
+
+
+class RuleViewTest(unittest.TestCase):
+
+ def setUp(self):
+ self.rule = tracker_pb2.FilterRule()
+ self.rule.predicate = 'label:a label:b'
+
+ def testEmpty(self):
+ view = filterrules_views.RuleView(self.rule, {})
+ self.rule.predicate = ''
+ self.assertEquals('', view.predicate)
+ self.assertEquals('', view.action_type)
+ self.assertEquals('', view.action_value)
+
+ def testDefaultStatus(self):
+ self.rule.default_status = 'Unknown'
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('default_status', view.action_type)
+ self.assertEquals('Unknown', view.action_value)
+
+ def testDefaultOwner(self):
+ self.rule.default_owner_id = 111L
+ view = filterrules_views.RuleView(
+ self.rule, {
+ 111L: testing_helpers.Blank(email='jrobbins@chromium.org')})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('default_owner', view.action_type)
+ self.assertEquals('jrobbins@chromium.org', view.action_value)
+
+ def testAddCCs(self):
+ self.rule.add_cc_ids.extend([111L, 222L])
+ view = filterrules_views.RuleView(
+ self.rule, {
+ 111L: testing_helpers.Blank(email='jrobbins@chromium.org'),
+ 222L: testing_helpers.Blank(email='jrobbins@gmail.com')})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('add_ccs', view.action_type)
+ self.assertEquals('jrobbins@chromium.org, jrobbins@gmail.com',
+ view.action_value)
+
+ def testAddLabels(self):
+ self.rule.add_labels.extend(['Hot', 'Cool'])
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('add_labels', view.action_type)
+ self.assertEquals('Hot, Cool', view.action_value)
+
+ def testAlsoNotify(self):
+ self.rule.add_notify_addrs.extend(['a@dom.com', 'b@dom.com'])
+ view = filterrules_views.RuleView(self.rule, {})
+ self.assertEquals('label:a label:b', view.predicate)
+ self.assertEquals('also_notify', view.action_type)
+ self.assertEquals('a@dom.com, b@dom.com', view.action_value)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/inboundemail_test.py b/appengine/monorail/features/test/inboundemail_test.py
new file mode 100644
index 0000000..60b4e52
--- /dev/null
+++ b/appengine/monorail/features/test/inboundemail_test.py
@@ -0,0 +1,280 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.feature.inboundemail."""
+
+import unittest
+
+import mox
+
+from features import commitlogcommands
+from features import inboundemail
+from framework import emailfmt
+from framework import monorailrequest
+from framework import permissions
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class InboundEmailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService())
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=987, process_inbound_email=True)
+ self.project_addr = 'proj@monorail.example.com'
+
+ self.issue = tracker_pb2.Issue()
+ self.issue.project_id = 987
+ self.issue.local_id = 100
+ self.services.issue.TestAddIssue(self.issue)
+
+ self.msg = testing_helpers.MakeMessage(
+ testing_helpers.HEADER_LINES, 'awesome!')
+
+ request, _ = testing_helpers.GetRequestObjects()
+ self.inbound = inboundemail.InboundEmail(request, None, self.services)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testTemplates(self):
+ for name, template_path in self.inbound._templates.iteritems():
+ assert(name in inboundemail.MSG_TEMPLATES)
+ assert(
+ template_path.GetTemplatePath().endswith(
+ inboundemail.MSG_TEMPLATES[name]))
+
+ def testProcessMail_MsgTooBig(self):
+ self.mox.StubOutWithMock(emailfmt, 'IsBodyTooBigToParse')
+ emailfmt.IsBodyTooBigToParse(mox.IgnoreArg()).AndReturn(True)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Email body too long', email_task['subject'])
+
+ def testProcessMail_NoProjectOnToLine(self):
+ self.mox.StubOutWithMock(emailfmt, 'IsProjectAddressOnToLine')
+ emailfmt.IsProjectAddressOnToLine(
+ self.project_addr, [self.project_addr]).AndReturn(False)
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessMail_ProjectUnidentified(self):
+ self.mox.StubOutWithMock(emailfmt, 'IdentifyProjectAndIssue')
+ emailfmt.IdentifyProjectAndIssue(
+ self.project_addr, mox.IgnoreArg()).AndReturn((None, None))
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessMail_ProjectNotLive(self):
+ self.project.state = project_pb2.ProjectState.DELETABLE
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Project not found', email_task['subject'])
+
+ def testProcessMail_ProjectInboundEmailDisabled(self):
+ self.project.process_inbound_email = False
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Email replies are not enabled in project proj',
+ email_task['subject'])
+
+ def testProcessMail_NoRefHeader(self):
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(False)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Your message is not a reply to a notification email',
+ email_task['subject'])
+
+ def testProcessMail_NoAccount(self):
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not determine account of sender',
+ email_task['subject'])
+
+ def testProcessMail_BannedAccount(self):
+ self.services.user.TestAddUser('user@example.com', 111L)
+ class MockAuthData:
+ def __init__(self):
+ self.user_pb = user_pb2.MakeUser()
+ self.effective_ids = set([1, 2, 3])
+ self.user_id = 111L
+ mock_auth_data = MockAuthData()
+ mock_auth_data.user_pb.banned = 'banned'
+
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+ self.mox.StubOutWithMock(monorailrequest.AuthData, 'FromEmail')
+ monorailrequest.AuthData.FromEmail(
+ mox.IgnoreArg(), 'user@example.com', self.services).AndReturn(
+ mock_auth_data)
+ self.mox.ReplayAll()
+
+ email_tasks = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('You are banned from using this issue tracker',
+ email_task['subject'])
+
+ def testProcessMail_Success(self):
+ self.services.user.TestAddUser('user@example.com', 111L)
+ class MockAuthData:
+ def __init__(self):
+ self.user_pb = user_pb2.MakeUser()
+ self.effective_ids = set([1, 2, 3])
+ self.user_id = 111L
+ mock_auth_data = MockAuthData()
+
+ self.mox.StubOutWithMock(emailfmt, 'ValidateReferencesHeader')
+ emailfmt.ValidateReferencesHeader(
+ mox.IgnoreArg(), self.project, mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(True)
+
+ self.mox.StubOutWithMock(monorailrequest.AuthData, 'FromEmail')
+ monorailrequest.AuthData.FromEmail(
+ mox.IgnoreArg(), 'user@example.com', self.services).AndReturn(
+ mock_auth_data)
+
+ self.mox.StubOutWithMock(permissions, 'GetPermissions')
+ permissions.GetPermissions(
+ mock_auth_data.user_pb, mock_auth_data.effective_ids,
+ self.project).AndReturn('test permissions')
+
+ self.mox.StubOutWithMock(self.inbound, 'ProcessIssueReply')
+ self.inbound.ProcessIssueReply(
+ mox.IgnoreArg(), self.project, 123, self.project_addr,
+ 'user@example.com', 111L, mock_auth_data.effective_ids,
+ 'test permissions', 'awesome!')
+
+ self.mox.ReplayAll()
+
+ ret = self.inbound.ProcessMail(self.msg, self.project_addr)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessIssueReply_NoIssue(self):
+ nonexistant_local_id = 200
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, nonexistant_local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], permissions.USER_PERMISSIONSET,
+ 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not find issue %d in project %s' % (
+ nonexistant_local_id, self.project.project_name),
+ email_task['subject'])
+
+ def testProcessIssueReply_DeletedIssue(self):
+ self.issue.deleted = True
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], permissions.USER_PERMISSIONSET,
+ 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('Could not find issue %d in project %s' % (
+ self.issue.local_id, self.project.project_name),
+ email_task['subject'])
+
+ def testProcessIssueReply_NoAddIssuePerm(self):
+ perms = permissions.READ_ONLY_PERMISSIONSET
+ email_tasks = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'user@example.com', 111L, [1, 2, 3], perms, 'awesome!')
+ self.assertEquals(1, len(email_tasks))
+ email_task = email_tasks[0]
+ self.assertEquals('user@example.com', email_task['to'])
+ self.assertEquals('User does not have permission to add a comment',
+ email_task['subject'])
+
+ def testProcessIssueReply_NoEditIssuePerm(self):
+ perms = permissions.USER_PERMISSIONSET
+ mock_uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+
+ self.mox.StubOutWithMock(commitlogcommands, 'UpdateIssueAction')
+ commitlogcommands.UpdateIssueAction(self.issue.local_id).AndReturn(mock_uia)
+
+ self.mox.StubOutWithMock(mock_uia, 'Parse')
+ mock_uia.Parse(
+ self.cnxn, self.project.project_name, 111L, ['awesome!'], self.services,
+ strip_quoted_lines=True)
+ self.mox.StubOutWithMock(mock_uia, 'Run')
+ # Allow edit is false here because the permission set does not contain
+ # EDIT_ISSUE.
+ mock_uia.Run(self.cnxn, self.services, allow_edit=False)
+
+ self.mox.ReplayAll()
+ ret = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'from_addr', 111L, [1, 2, 3], perms, 'awesome!')
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testProcessIssueReply_Success(self):
+ perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ mock_uia = commitlogcommands.UpdateIssueAction(self.issue.local_id)
+
+ self.mox.StubOutWithMock(commitlogcommands, 'UpdateIssueAction')
+ commitlogcommands.UpdateIssueAction(self.issue.local_id).AndReturn(mock_uia)
+
+ self.mox.StubOutWithMock(mock_uia, 'Parse')
+ mock_uia.Parse(
+ self.cnxn, self.project.project_name, 111L, ['awesome!'], self.services,
+ strip_quoted_lines=True)
+ self.mox.StubOutWithMock(mock_uia, 'Run')
+ mock_uia.Run(self.cnxn, self.services, allow_edit=True)
+
+ self.mox.ReplayAll()
+ ret = self.inbound.ProcessIssueReply(
+ self.cnxn, self.project, self.issue.local_id, self.project_addr,
+ 'from_addr', 111L, [1, 2, 3], perms, 'awesome!')
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
diff --git a/appengine/monorail/features/test/notify_helpers_test.py b/appengine/monorail/features/test/notify_helpers_test.py
new file mode 100644
index 0000000..7267269
--- /dev/null
+++ b/appengine/monorail/features/test/notify_helpers_test.py
@@ -0,0 +1,384 @@
+# -*- coding: utf8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for notify_helpers.py."""
+
+import unittest
+
+from features import notify_helpers
+from framework import emailfmt
+from framework import framework_views
+from services import service_manager
+from testing import fake
+
+
+REPLY_NOT_ALLOWED = notify_helpers.REPLY_NOT_ALLOWED
+REPLY_MAY_COMMENT = notify_helpers.REPLY_MAY_COMMENT
+REPLY_MAY_UPDATE = notify_helpers.REPLY_MAY_UPDATE
+
+
+class ComputeIssueChangeAddressPermListTest(unittest.TestCase):
+
+ def setUp(self):
+ self.users_by_id = {
+ 111L: framework_views.UserView(111L, 'owner@example.com', True),
+ 222L: framework_views.UserView(222L, 'member@example.com', True),
+ 999L: framework_views.UserView(999L, 'visitor@example.com', True),
+ }
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.services.user.TestAddUser('owner@example.com', 111L)
+ self.services.user.TestAddUser('member@example.com', 222L)
+ self.services.user.TestAddUser('visitor@example.com', 999L)
+ self.project = self.services.project.TestAddProject(
+ 'proj', owner_ids=[111L], committer_ids=[222L])
+ self.project.process_inbound_email = True
+ self.issue = fake.MakeTestIssue(
+ self.project.project_id, 1, 'summary', 'New', 111L)
+
+ def testEmptyIDs(self):
+ cnxn = 'fake cnxn'
+ addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, [], self.project, self.issue, self.services, [], {},
+ pref_check_function=lambda *args: True)
+ self.assertEqual([], addr_perm_list)
+
+ def testRecipientIsMember(self):
+ cnxn = 'fake cnxn'
+ ids_to_consider = [111L, 222L, 999L]
+ addr_perm_list = notify_helpers.ComputeIssueChangeAddressPermList(
+ cnxn, ids_to_consider, self.project, self.issue, self.services, set(),
+ self.users_by_id, pref_check_function=lambda *args: True)
+ self.assertEqual(
+ [(True, 'owner@example.com', REPLY_MAY_UPDATE),
+ (True, 'member@example.com', REPLY_MAY_UPDATE),
+ (False, 'visitor@example.com', REPLY_MAY_COMMENT)],
+ addr_perm_list)
+
+
+class ComputeProjectAndIssueNotificationAddrListTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.project = self.services.project.TestAddProject('project')
+ self.services.user.TestAddUser('alice@gmail.com', 111L)
+ self.services.user.TestAddUser('bob@gmail.com', 222L)
+ self.services.user.TestAddUser('fred@gmail.com', 555L)
+
+ def testNotifyAddress(self):
+ # No mailing list or filter rules are defined
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, True, set())
+ self.assertListEqual([], addr_perm_list)
+
+ # Only mailing list is notified.
+ self.project.issue_notify_address = 'mailing-list@domain.com'
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, True, set())
+ self.assertListEqual(
+ [(False, 'mailing-list@domain.com', REPLY_NOT_ALLOWED)],
+ addr_perm_list)
+
+ # No one is notified because mailing list was already notified.
+ omit_addrs = {'mailing-list@domain.com'}
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, False, omit_addrs)
+ self.assertListEqual([], addr_perm_list)
+
+ # No one is notified because anon users cannot view.
+ addr_perm_list = notify_helpers.ComputeProjectNotificationAddrList(
+ self.project, False, set())
+ self.assertListEqual([], addr_perm_list)
+
+ def testFilterRuleNotifyAddresses(self):
+ issue = fake.MakeTestIssue(
+ self.project.project_id, 1, 'summary', 'New', 555L)
+ issue.derived_notify_addrs.extend(['notify@domain.com'])
+
+ addr_perm_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, set())
+ self.assertListEqual(
+ [(False, 'notify@domain.com', REPLY_NOT_ALLOWED)],
+ addr_perm_list)
+
+ # Also-notify addresses can be omitted (e.g., if it is the same as
+ # the email address of the user who made the change).
+ addr_perm_list = notify_helpers.ComputeIssueNotificationAddrList(
+ issue, {'notify@domain.com'})
+ self.assertListEqual([], addr_perm_list)
+
+
+class MakeBulletedEmailWorkItemsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project = fake.Project(project_name='proj1')
+ self.commenter_view = framework_views.UserView(
+ 111L, 'test@example.com', True)
+
+ def testEmptyAddrs(self):
+ """Test the case where we found zero users to notify."""
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ [], 'subject', 'body', 'body', self.project, 'example.com',
+ self.commenter_view)
+ self.assertEqual([], email_tasks)
+ email_tasks = notify_helpers.MakeBulletedEmailWorkItems(
+ [([], 'reason')], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual([], email_tasks)
+
+
+class MakeEmailWorkItemTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project = fake.Project(project_name='proj1')
+ self.project.process_inbound_email = True
+ self.commenter_view = framework_views.UserView(
+ 111L, 'test@example.com', True)
+ self.expected_html_footer = (
+ 'You received this message because:<br/> 1. reason<br/><br/>You may '
+ 'adjust your notification preferences at:<br/><a href="https://'
+ 'example.com/hosting/settings">https://example.com/hosting/settings'
+ '</a>')
+
+ def testBodySelection(self):
+    """We send non-members the email body that is intended for non-members."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+
+ self.assertEqual('a@a.com', email_task['to'])
+ self.assertEqual('subject', email_task['subject'])
+ self.assertIn('body non', email_task['body'])
+ self.assertEqual(
+ emailfmt.FormatFromAddr(self.project, commenter_view=self.commenter_view,
+ can_reply_to=False),
+ email_task['from_addr'])
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body mem', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertIn('body mem', email_task['body'])
+
+ def testHtmlBody_NoDetailUrl(self):
+    """An html body is not sent if detail_url is not specified."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view, detail_url=None)
+
+ self.assertIsNone(email_task['html_body'])
+
+ def testHtmlBody_WithDetailUrl(self):
+    """An html body is sent if a detail_url is specified."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'body non-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithUnicodeChars(self):
+    """An html body is sent even when the body contains unicode characters."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ unicode_content = '\xe2\x9d\xa4 â â'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', unicode_content, 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ '%s-- <br/>%s' % (unicode_content.decode('utf-8'),
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithLinks(self):
+    """An html body is sent with plain-text links converted to <a href>s."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test google.com test', 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ 'test <a href="http://google.com">google.com</a> test-- <br/>%s' % (
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_LinkWithinTags(self):
+    """An html body is sent with correct <a href>s."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test <http://google.com> test', 'unused body',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+            'test <a href="http://google.com">&lt;http://google.com&gt;</a> '
+ 'test-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_EmailWithinTags(self):
+    """An html body is sent with correct <a href>s."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'test <t@chromium.org> <a@chromium.org> test',
+ 'unused body mem', self.project, 'example.com', self.commenter_view,
+ detail_url=detail_url)
+
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+            'test <a href="mailto:t@chromium.org">&lt;t@chromium.org&gt;</a> '
+            '<a href="mailto:a@chromium.org">&lt;a@chromium.org&gt;</a> '
+ 'test-- <br/>%s' % self.expected_html_footer))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testHtmlBody_WithEscapedHtml(self):
+    """An html body is sent with html content escaped."""
+ detail_url = 'http://test-detail-url.com/id=1234'
+ body_with_html_content = (
+ '<a href="http://www.google.com">test</a> \'something\'')
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (False, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', body_with_html_content, 'unused body mem',
+ self.project, 'example.com', self.commenter_view, detail_url=detail_url)
+
+ escaped_body_with_html_content = (
+        '&lt;a href=&quot;http://www.google.com&quot;&gt;test&lt;/a&gt; '
+        '&#39;something&#39;')
+ notify_helpers._MakeNotificationFooter(
+ ['reason'], REPLY_NOT_ALLOWED, 'example.com')
+ expected_html_body = (
+ notify_helpers.HTML_BODY_WITH_GMAIL_ACTION_TEMPLATE % (
+ detail_url,
+ '%s-- <br/>%s' % (escaped_body_with_html_content,
+ self.expected_html_footer)))
+ self.assertEquals(expected_html_body, email_task['html_body'])
+
+ def testReplyInvitation(self):
+ """We include a footer about replying that is appropriate for that user."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_NOT_ALLOWED),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+ self.assertNotIn('Reply to this email', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_COMMENT),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(
+ '%s@%s' % (self.project.project_name, emailfmt.MailDomain()),
+ email_task['reply_to'])
+ self.assertIn('Reply to this email to add a comment', email_task['body'])
+ self.assertNotIn('make changes', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(
+ '%s@%s' % (self.project.project_name, emailfmt.MailDomain()),
+ email_task['reply_to'])
+ self.assertIn('Reply to this email to add a comment', email_task['body'])
+ self.assertIn('make updates', email_task['body'])
+
+ def testInboundEmailDisabled(self):
+ """We don't invite replies if they are disabled for this project."""
+ self.project.process_inbound_email = False
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['reason'], 'subject', 'body non', 'body mem', self.project,
+ 'example.com', self.commenter_view)
+ self.assertEqual(emailfmt.NoReplyAddress(), email_task['reply_to'])
+
+ def testReasons(self):
+ """The footer lists reasons why that email was sent to that user."""
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ ['Funny', 'Caring', 'Near'], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertIn('because:', email_task['body'])
+ self.assertIn('1. Funny', email_task['body'])
+ self.assertIn('2. Caring', email_task['body'])
+ self.assertIn('3. Near', email_task['body'])
+
+ email_task = notify_helpers._MakeEmailWorkItem(
+ (True, 'a@a.com', REPLY_MAY_UPDATE),
+ [], 'subject', 'body', 'body', self.project,
+ 'example.com', self.commenter_view)
+ self.assertNotIn('because', email_task['body'])
+
+
+class MakeNotificationFooterTest(unittest.TestCase):
+
+ def testMakeNotificationFooter_NoReason(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ [], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertEqual('', footer)
+
+ def testMakeNotificationFooter_WithReason(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertIn('REASON', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertIn('REASON', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ def testMakeNotificationFooter_ManyReasons(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['Funny', 'Caring', 'Warmblooded'], REPLY_NOT_ALLOWED,
+ 'example.com')
+ self.assertIn('Funny', footer)
+ self.assertIn('Caring', footer)
+ self.assertIn('Warmblooded', footer)
+
+ def testMakeNotificationFooter_WithReplyInstructions(self):
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_NOT_ALLOWED, 'example.com')
+ self.assertNotIn('Reply', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_MAY_COMMENT, 'example.com')
+ self.assertIn('add a comment', footer)
+ self.assertNotIn('make updates', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+ footer = notify_helpers._MakeNotificationFooter(
+ ['REASON'], REPLY_MAY_UPDATE, 'example.com')
+ self.assertIn('add a comment', footer)
+ self.assertIn('make updates', footer)
+ self.assertIn('https://example.com/hosting/settings', footer)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/notify_test.py b/appengine/monorail/features/test/notify_test.py
new file mode 100644
index 0000000..f6a0a1a
--- /dev/null
+++ b/appengine/monorail/features/test/notify_test.py
@@ -0,0 +1,291 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for notify.py."""
+
+import os
+import unittest
+import urllib
+import webapp2
+import webtest
+
+from google.appengine.api import taskqueue
+from google.appengine.ext import testbed
+
+from features import notify
+from framework import urls
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+def MakeTestIssue(project_id, local_id, owner_id, reporter_id, is_spam=False):
+ issue = tracker_pb2.Issue()
+ issue.project_id = project_id
+ issue.local_id = local_id
+ issue.owner_id = owner_id
+ issue.reporter_id = reporter_id
+ issue.is_spam = is_spam
+ return issue
+
+
+class SendNotificationTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_taskqueue_stub()
+ self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
+ self.taskqueue_stub._root_path = os.path.dirname(
+ os.path.dirname(os.path.dirname( __file__ )))
+
+ def tearDown(self):
+ self.testbed.deactivate()
+
+ def testPrepareAndSendIssueChangeNotification(self):
+ notify.PrepareAndSendIssueChangeNotification(
+ project_id=789,
+ local_id=1,
+ hostport='testbed-test.appspotmail.com',
+ commenter_id=1,
+ seq_num=0,
+ old_owner_id=2,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_ISSUE_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+
+ def testPrepareAndSendIssueBlockingNotification(self):
+ notify.PrepareAndSendIssueBlockingNotification(
+ project_id=789,
+ hostport='testbed-test.appspotmail.com',
+ local_id=1,
+ delta_blocker_iids=[],
+ commenter_id=1,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do')
+ self.assertEqual(0, len(tasks))
+
+ notify.PrepareAndSendIssueBlockingNotification(
+ project_id=789,
+ hostport='testbed-test.appspotmail.com',
+ local_id=1,
+ delta_blocker_iids=[2],
+ commenter_id=1,
+ send_email=True)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BLOCKING_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+
+ def testSendIssueBulkChangeNotification_CommentOnly(self):
+ notify.SendIssueBulkChangeNotification(
+ hostport='testbed-test.appspotmail.com',
+ project_id=789,
+ local_ids=[1],
+ old_owner_ids=[2],
+ comment_text='comment',
+ commenter_id=1,
+ amendments=[],
+ send_email=True,
+ users_by_id=2)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BULK_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+ params = dict(urllib.unquote_plus(item).split('=')
+ for item in tasks[0].payload.split('&'))
+ self.assertEqual('comment', params['comment_text'])
+ self.assertEqual('', params['amendments'])
+
+ def testSendIssueBulkChangeNotification_Normal(self):
+ notify.SendIssueBulkChangeNotification(
+ hostport='testbed-test.appspotmail.com',
+ project_id=789,
+ local_ids=[1],
+ old_owner_ids=[2],
+ comment_text='comment',
+ commenter_id=1,
+ amendments=[
+ tracker_bizobj.MakeStatusAmendment('New', 'Old'),
+ tracker_bizobj.MakeLabelsAmendment(['Added'], ['Removed']),
+ tracker_bizobj.MakeStatusAmendment('New', 'Old'),
+ ],
+ send_email=True,
+ users_by_id=2)
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.NOTIFY_BULK_CHANGE_TASK + '.do')
+ self.assertEqual(1, len(tasks))
+ params = dict(urllib.unquote_plus(item).split('=')
+ for item in tasks[0].payload.split('&'))
+ self.assertEqual('comment', params['comment_text'])
+ self.assertEqual(
+ [' Status: New',
+ ' Labels: -Removed Added'],
+ params['amendments'].split('\n'))
+
+ def testAddAllEmailTasks(self):
+ notify.AddAllEmailTasks(
+ tasks=[{'to': 'user'}, {'to': 'user2'}])
+
+ tasks = self.taskqueue_stub.get_filtered_tasks(
+ url=urls.OUTBOUND_EMAIL_TASK + '.do')
+ self.assertEqual(2, len(tasks))
+
+
+class NotifyTaskHandleRequestTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_taskqueue_stub()
+ self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
+ self.taskqueue_stub._root_path = os.path.dirname(
+ os.path.dirname(os.path.dirname( __file__ )))
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ issue_star=fake.IssueStarService(),
+ features=fake.FeaturesService())
+ self.services.user.TestAddUser('requester@example.com', 1)
+ self.services.user.TestAddUser('user@example.com', 2)
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ issue1 = MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue1)
+
+ def VerifyParams(self, result, params):
+ self.assertEqual(
+ bool(params['send_email']), result['params']['send_email'])
+ if 'id' in params:
+ self.assertEqual(params['id'], result['params']['local_id'])
+ if 'ids' in params:
+ self.assertEqual([int(p) for p in params['ids'].split(',')],
+ result['params']['local_ids'])
+ self.assertEqual(params['project_id'], result['params']['project_id'])
+
+ def testNotifyIssueChangeTask(self):
+ task = notify.NotifyIssueChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'commenter_id': 2}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyIssueChangeTask_spam(self):
+ issue = MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=1, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue)
+ task = notify.NotifyIssueChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 0, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'commenter_id': 2}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(0, len(result['notified']))
+
+ def testNotifyBlockingChangeTask(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBlockingChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'delta_blocker_iids': 2, 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyBlockingChangeTask_spam(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBlockingChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'id': 1, 'seq': 0,
+ 'delta_blocker_iids': 2, 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(0, len(result['notified']))
+
+ def testNotifyBulkChangeTask(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBulkChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'ids': '1,2', 'seq': 0,
+ 'old_owner_ids': '1,1', 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.VerifyParams(result, params)
+
+ def testNotifyBulkChangeTask_spam(self):
+ issue2 = MakeTestIssue(
+ project_id=12345, local_id=2, owner_id=2, reporter_id=1,
+ is_spam=True)
+ self.services.issue.TestAddIssue(issue2)
+ task = notify.NotifyBulkChangeTask(
+ request=None, response=None, services=self.services)
+ params = {'send_email': 1, 'project_id': 12345, 'ids': '1,2', 'seq': 0,
+ 'old_owner_ids': '1,1', 'commenter_id': 1}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEquals(1, len(result['notified']))
+
+ def testOutboundEmailTask(self):
+ task = notify.OutboundEmailTask(
+ request=None, response=None, services=self.services)
+ params = {
+ 'from_addr': 'requester@example.com',
+ 'reply_to': 'user@example.com',
+ 'to': 'user@example.com',
+ 'subject': 'Test subject'}
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1},
+ params=params,
+ method='POST',
+ services=self.services)
+ result = task.HandleRequest(mr)
+ self.assertEqual(params['from_addr'], result['sender'])
+ self.assertEqual(params['subject'], result['subject'])
diff --git a/appengine/monorail/features/test/prettify_test.py b/appengine/monorail/features/test/prettify_test.py
new file mode 100644
index 0000000..09fb403
--- /dev/null
+++ b/appengine/monorail/features/test/prettify_test.py
@@ -0,0 +1,93 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the prettify module."""
+
+import unittest
+
+from third_party import ezt
+
+from features import prettify
+
+
+class SourceBrowseTest(unittest.TestCase):
+
+ def testPrepareSourceLinesForHighlighting(self):
+ # String representing an empty source file
+ src = ''
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 0)
+
+ def testPrepareSourceLinesForHighlightingNoBreaks(self):
+ # seven lines of text with no blank lines
+ src = ' 1\n 2\n 3\n 4\n 5\n 6\n 7'
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+ out_lines = [fl.line for fl in file_lines]
+ self.assertEqual('\n'.join(out_lines), src)
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+
+ def testPrepareSourceLinesForHighlightingWithBreaks(self):
+ # seven lines of text with line 5 being blank
+ src = ' 1\n 2\n 3\n 4\n\n 6\n 7'
+
+ file_lines = prettify.PrepareSourceLinesForHighlighting(src)
+ self.assertEqual(len(file_lines), 7)
+
+
+class BuildPrettifyDataTest(unittest.TestCase):
+
+ def testNonSourceFile(self):
+ prettify_data = prettify.BuildPrettifyData(0, '/dev/null')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(False),
+ prettify_class=None),
+ prettify_data)
+
+ prettify_data = prettify.BuildPrettifyData(10, 'readme.txt')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(False),
+ prettify_class=None),
+ prettify_data)
+
+ def testGenericLanguage(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/hello.php')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class=''),
+ prettify_data)
+
+ def testSpecificLanguage(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/hello.java')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-java'),
+ prettify_data)
+
+ def testThirdPartyExtensionLanguages(self):
+ for ext in ['apollo', 'agc', 'aea', 'el', 'scm', 'cl', 'lisp',
+ 'go', 'hs', 'lua', 'fs', 'ml', 'proto', 'scala',
+ 'sql', 'vb', 'vbs', 'vhdl', 'vhd', 'wiki', 'yaml',
+ 'yml', 'clj']:
+ prettify_data = prettify.BuildPrettifyData(123, '/trunk/src/hello.' + ext)
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-' + ext),
+ prettify_data)
+
+ def testExactFilename(self):
+ prettify_data = prettify.BuildPrettifyData(123, 'trunk/src/Makefile')
+ self.assertDictEqual(
+ dict(should_prettify=ezt.boolean(True),
+ prettify_class='lang-sh'),
+ prettify_data)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/features/test/savedqueries_helpers_test.py b/appengine/monorail/features/test/savedqueries_helpers_test.py
new file mode 100644
index 0000000..d231294
--- /dev/null
+++ b/appengine/monorail/features/test/savedqueries_helpers_test.py
@@ -0,0 +1,106 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for savedqueries_helpers feature."""
+
+import unittest
+
+import mox
+
+from features import savedqueries_helpers
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class SavedQueriesHelperTest(unittest.TestCase):
+
+ def setUp(self):
+ self.features = fake.FeaturesService()
+ self.project = fake.ProjectService()
+ self.cnxn = 'fake cnxn'
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testParseSavedQueries(self):
+ post_data = {
+ 'xyz_savedquery_name_1': '',
+ 'xyz_savedquery_name_2': 'name2',
+ 'xyz_savedquery_name_3': 'name3',
+ 'xyz_savedquery_id_1': 1,
+ 'xyz_savedquery_id_2': 2,
+ 'xyz_savedquery_id_3': 3,
+ 'xyz_savedquery_projects_1': '123',
+ 'xyz_savedquery_projects_2': 'abc',
+ 'xyz_savedquery_projects_3': 'def',
+ 'xyz_savedquery_base_1': 4,
+ 'xyz_savedquery_base_2': 5,
+ 'xyz_savedquery_base_3': 6,
+ 'xyz_savedquery_query_1': 'query1',
+ 'xyz_savedquery_query_2': 'query2',
+ 'xyz_savedquery_query_3': 'query3',
+ 'xyz_savedquery_sub_mode_1': 'sub_mode1',
+ 'xyz_savedquery_sub_mode_2': 'sub_mode2',
+ 'xyz_savedquery_sub_mode_3': 'sub_mode3',
+ }
+ self.project.TestAddProject(name='abc', project_id=1001)
+ self.project.TestAddProject(name='def', project_id=1002)
+
+ saved_queries = savedqueries_helpers.ParseSavedQueries(
+ self.cnxn, post_data, self.project, prefix='xyz_')
+ self.assertEqual(2, len(saved_queries))
+
+ # pylint: disable=unbalanced-tuple-unpacking
+ saved_query1, saved_query2 = saved_queries
+ # Assert contents of saved_query1.
+ self.assertEqual(2, saved_query1.query_id)
+ self.assertEqual('name2', saved_query1.name)
+ self.assertEqual(5, saved_query1.base_query_id)
+ self.assertEqual('query2', saved_query1.query)
+ self.assertEqual([1001], saved_query1.executes_in_project_ids)
+ self.assertEqual('sub_mode2', saved_query1.subscription_mode)
+ # Assert contents of saved_query2.
+ self.assertEqual(3, saved_query2.query_id)
+ self.assertEqual('name3', saved_query2.name)
+ self.assertEqual(6, saved_query2.base_query_id)
+ self.assertEqual('query3', saved_query2.query)
+ self.assertEqual([1002], saved_query2.executes_in_project_ids)
+ self.assertEqual('sub_mode3', saved_query2.subscription_mode)
+
+ def testSavedQueryToCond(self):
+ class MockSavedQuery:
+ def __init__(self):
+ self.base_query_id = 1
+ self.query = 'query'
+ saved_query = MockSavedQuery()
+
+ cond_with_no_base = savedqueries_helpers.SavedQueryToCond(saved_query)
+ self.assertEquals('query', cond_with_no_base)
+
+ self.mox.StubOutWithMock(tracker_bizobj, 'GetBuiltInQuery')
+ tracker_bizobj.GetBuiltInQuery(1).AndReturn('base')
+ self.mox.ReplayAll()
+ cond_with_base = savedqueries_helpers.SavedQueryToCond(saved_query)
+ self.assertEquals('base query', cond_with_base)
+ self.mox.VerifyAll()
+
+ def testSavedQueryIDToCond(self):
+ self.mox.StubOutWithMock(savedqueries_helpers, 'SavedQueryToCond')
+ savedqueries_helpers.SavedQueryToCond(mox.IgnoreArg()).AndReturn('ret')
+ self.mox.ReplayAll()
+ query_cond = savedqueries_helpers.SavedQueryIDToCond(
+ self.cnxn, self.features, 1)
+ self.assertEquals('ret', query_cond)
+ self.mox.VerifyAll()
+
+ self.mox.StubOutWithMock(tracker_bizobj, 'GetBuiltInQuery')
+ tracker_bizobj.GetBuiltInQuery(1).AndReturn('built_in_query')
+ self.mox.ReplayAll()
+ query_cond = savedqueries_helpers.SavedQueryIDToCond(
+ self.cnxn, self.features, 1)
+ self.assertEquals('built_in_query', query_cond)
+ self.mox.VerifyAll()
diff --git a/appengine/monorail/features/test/savedqueries_test.py b/appengine/monorail/features/test/savedqueries_test.py
new file mode 100644
index 0000000..8475041
--- /dev/null
+++ b/appengine/monorail/features/test/savedqueries_test.py
@@ -0,0 +1,40 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for savedqueries feature."""
+
+import unittest
+
+from features import savedqueries
+from framework import monorailrequest
+from framework import permissions
+from services import service_manager
+from testing import fake
+
+
+class SavedQueriesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService())
+ self.servlet = savedqueries.SavedQueries(
+ 'req', 'res', services=self.services)
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testAssertBasePermission(self):
+ """Only permit site admins and users viewing themselves."""
+ mr = monorailrequest.MonorailRequest()
+ mr.viewed_user_auth.user_id = 111L
+ mr.auth.user_id = 222L
+
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr.auth.user_id = 111L
+ self.servlet.AssertBasePermission(mr)
+
+ mr.auth.user_id = 222L
+ mr.auth.user_pb.is_site_admin = True
+ self.servlet.AssertBasePermission(mr)
\ No newline at end of file
diff --git a/appengine/monorail/features/test/stars_test.py b/appengine/monorail/features/test/stars_test.py
new file mode 100644
index 0000000..e96d765
--- /dev/null
+++ b/appengine/monorail/features/test/stars_test.py
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the project and user stars feature."""
+
+import unittest
+
+from features import stars
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class StarsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ project_star=fake.ProjectStarService(),
+ user_star=fake.UserStarService())
+ self.services.project.TestAddProject('proj', project_id=123)
+ self.services.user.TestAddUser('testuser', 111L)
+ self.set_stars_feed = stars.SetStarsFeed(
+ 'req', 'res', services=self.services)
+
+ def SetAndVerifyStarredItems(self, scope, item, item_id, get_star_count):
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 111L}, services=self.services, method='POST',
+ params={'scope': scope, 'item': item, 'starred': 1, 'token': 'x'})
+ result = self.set_stars_feed.HandleRequest(mr)
+
+ self.assertEqual({'starred': True}, result)
+ self.assertEqual(1, get_star_count('fake cnxn', item_id))
+
+ # The same starrer doing it again does not drive up the count more.
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': True}, result)
+ self.assertEqual(1, get_star_count('fake cnxn', item_id))
+
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 111L}, services=self.services, method='POST',
+ params={'scope': scope, 'item': item, 'starred': 0, 'token': 'x'})
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': False}, result)
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ # The same starrer doing it again does not drive down the count more.
+ result = self.set_stars_feed.HandleRequest(mr)
+ self.assertEqual({'starred': False}, result)
+ self.assertEqual(0, get_star_count('fake cnxn', item_id))
+
+ def testSetAndGetStarredItems_User(self):
+ """Tests SetStarsFeed.HandleRequest method."""
+ self.SetAndVerifyStarredItems(
+ 'users', '111', 111L, self.services.user_star.CountItemStars)
+
+ def testSetAndGetStarredItems_Project(self):
+ self.SetAndVerifyStarredItems(
+ 'projects', 'proj', 123, self.services.project_star.CountItemStars)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/__init__.py b/appengine/monorail/framework/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/framework/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/framework/actionlimit.py b/appengine/monorail/framework/actionlimit.py
new file mode 100644
index 0000000..b994c1f
--- /dev/null
+++ b/appengine/monorail/framework/actionlimit.py
@@ -0,0 +1,227 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions to test action limits.
+
+Action limits help prevent an individual user from abusing the system
+by performing an excessive number of operations. E.g., creating
+thousands of projects.
+
+If the user reaches a soft limit within a given time period, the
+servlets will start demanding that the user solve a CAPTCHA.
+
+If the user reaches a hard limit within a given time period, any further
+requests to perform that type of action will fail.
+
+When the user reaches a lifetime limit, they are shown an error page.
+We can increase the lifetime limit for individual users who contact us.
+"""
+
+import logging
+import time
+
+from framework import framework_constants
+from proto import user_pb2
+
+
+# Action types
+PROJECT_CREATION = 1
+ISSUE_COMMENT = 2
+ISSUE_ATTACHMENT = 3
+ISSUE_BULK_EDIT = 4
+FLAG_SPAM = 5
+API_REQUEST = 6
+
+ACTION_TYPE_NAMES = {
+ 'project_creation': PROJECT_CREATION,
+ 'issue_comment': ISSUE_COMMENT,
+ 'issue_attachment': ISSUE_ATTACHMENT,
+ 'issue_bulk_edit': ISSUE_BULK_EDIT,
+ 'flag_spam': FLAG_SPAM,
+ 'api_request': API_REQUEST,
+ }
+
+# Action Limit definitions
+# {action_type: (period, soft_limit, hard_limit, life_max),...}
+ACTION_LIMITS = {
+ PROJECT_CREATION: (framework_constants.SECS_PER_DAY, 2, 5, 25),
+ ISSUE_COMMENT: (framework_constants.SECS_PER_DAY / 4, 5, 100, 10000),
+ ISSUE_ATTACHMENT: (framework_constants.SECS_PER_DAY, 25, 100, 1000),
+ ISSUE_BULK_EDIT: (framework_constants.SECS_PER_DAY, 100, 500, 10000),
+ FLAG_SPAM: (framework_constants.SECS_PER_DAY, 100, 100, 10000),
+ API_REQUEST: (framework_constants.SECS_PER_DAY, 100000, 100000, 10000000),
+ }
+
+
+# Determine scaling of CAPTCHA frequency.
+MAX_SOFT_LIMITS = max([ACTION_LIMITS[key][2] - ACTION_LIMITS[key][1]
+ for key in ACTION_LIMITS])
+SQUARES = {i**2 for i in range(1, MAX_SOFT_LIMITS)}
+SQUARES.add(1)
+
+
+def NeedCaptcha(user, action_type, now=None, skip_lifetime_check=False):
+ """Check that the user is under the limit on a given action.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+    now: int time in seconds. Defaults to int(time.time()). Used for testing.
+ skip_lifetime_check: No limit for lifetime actions.
+
+ Raises:
+ ExcessiveActivityException: when user is over hard or lifetime limits.
+
+ Returns:
+ False if user is under the soft-limit. True if user is over the
+ soft-limit, but under the hard and lifetime limits.
+ """
+ if not user: # Anything that can be done by anon users (which is not
+ return False # much) can be done any number of times w/o CAPTCHA.
+ if not now:
+ now = int(time.time())
+
+ period, soft, hard, life_max = ACTION_LIMITS[action_type]
+ actionlimit_pb = GetLimitPB(user, action_type)
+
+ # First, users with no action limits recorded must be below limits.
+ # And, users that we explicitly trust as non-abusers are allowed to take
+  # an unlimited number of actions. And, site admins are trusted non-abusers.
+ if (not actionlimit_pb or user.ignore_action_limits or
+ user.is_site_admin):
+ return False
+
+ # Second, check if user has reached lifetime limit.
+ if actionlimit_pb.lifetime_limit:
+ life_max = actionlimit_pb.lifetime_limit
+ if actionlimit_pb.period_soft_limit:
+ soft = actionlimit_pb.period_soft_limit
+ if actionlimit_pb.period_hard_limit:
+ hard = actionlimit_pb.period_hard_limit
+ if (not skip_lifetime_check and life_max is not None
+ and actionlimit_pb.lifetime_count >= life_max):
+ raise ExcessiveActivityException()
+
+ # Third, if user can begin a new time period, they are free to go ahead.
+ if now - actionlimit_pb.reset_timestamp > period:
+ return False
+
+ # Fourth, check for hard rate limits.
+ if hard is not None and actionlimit_pb.recent_count >= hard:
+ raise ExcessiveActivityException()
+
+ # Finally, check the soft limit in this time period.
+ action_limit = False
+ if soft is not None:
+ recent_count = actionlimit_pb.recent_count
+ if recent_count == soft:
+ action_limit = True
+ elif recent_count > soft:
+ remaining_soft = hard - recent_count
+ if remaining_soft in SQUARES:
+ action_limit = True
+
+ if action_limit:
+ logging.info('soft limit captcha: %d', recent_count)
+ return action_limit
+
+
+def GetLimitPB(user, action_type):
+  """Return the appropriate action limit PB part of the given User PB."""
+ if action_type == PROJECT_CREATION:
+ if not user.project_creation_limit:
+ user.project_creation_limit = user_pb2.ActionLimit()
+ return user.project_creation_limit
+ elif action_type == ISSUE_COMMENT:
+ if not user.issue_comment_limit:
+ user.issue_comment_limit = user_pb2.ActionLimit()
+ return user.issue_comment_limit
+ elif action_type == ISSUE_ATTACHMENT:
+ if not user.issue_attachment_limit:
+ user.issue_attachment_limit = user_pb2.ActionLimit()
+ return user.issue_attachment_limit
+ elif action_type == ISSUE_BULK_EDIT:
+ if not user.issue_bulk_edit_limit:
+ user.issue_bulk_edit_limit = user_pb2.ActionLimit()
+ return user.issue_bulk_edit_limit
+ elif action_type == FLAG_SPAM:
+ if not user.flag_spam_limit:
+ user.flag_spam_limit = user_pb2.ActionLimit()
+ return user.flag_spam_limit
+ elif action_type == API_REQUEST:
+ if not user.api_request_limit:
+ user.api_request_limit = user_pb2.ActionLimit()
+ return user.api_request_limit
+ raise Exception('unexpected action type %r' % action_type)
+
+
+def ResetRecentActions(user, action_type):
+ """Reset the recent counter for an action.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ """
+ al = GetLimitPB(user, action_type)
+ al.recent_count = 0
+ al.reset_timestamp = 0
+
+
+def CountAction(user, action_type, delta=1, now=int(time.time())):
+ """Reset recent counter if eligible, then increment recent and lifetime.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ delta: int number to increment count by.
+    now: int time in seconds. Defaults to int(time.time()). Used for testing.
+ """
+ al = GetLimitPB(user, action_type)
+ period = ACTION_LIMITS[action_type][0]
+
+ if now - al.reset_timestamp > period:
+ al.reset_timestamp = now
+ al.recent_count = 0
+
+ al.recent_count = al.recent_count + delta
+ al.lifetime_count = al.lifetime_count + delta
+
+
+def CustomizeLimit(user, action_type, soft_limit, hard_limit, lifetime_limit):
+ """Set custom action limits for a user.
+
+ The recent counters are reset to zero, so the user will not run into
+ a hard limit.
+
+ Args:
+ user: instance of user_pb2.User.
+ action_type: int action type.
+ soft_limit: soft limit of period.
+ hard_limit: hard limit of period.
+ lifetime_limit: lifetime limit.
+ """
+ al = GetLimitPB(user, action_type)
+ al.lifetime_limit = lifetime_limit
+ al.period_soft_limit = soft_limit
+ al.period_hard_limit = hard_limit
+
+ # The mutator will mark the ActionLimit as present, but does not
+ # necessarily *initialize* the protobuf. We need to ensure that the
+ # lifetime_count is set (a required field). Additional required
+ # fields will be set below.
+ if not al.lifetime_count:
+ al.lifetime_count = 0
+
+ # Clear the recent counters so the user will not hit the period limit.
+ al.recent_count = 0
+ al.reset_timestamp = 0
+
+
+class Error(Exception):
+ """Base exception class for this package."""
+
+
+class ExcessiveActivityException(Error):
+  """The user has performed a given type of action too many times."""
diff --git a/appengine/monorail/framework/alerts.py b/appengine/monorail/framework/alerts.py
new file mode 100644
index 0000000..ef939b1
--- /dev/null
+++ b/appengine/monorail/framework/alerts.py
@@ -0,0 +1,54 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helpers for showing alerts at the top of the page.
+
+These alerts are then displayed by alerts.ezt.
+"""
+
+import time
+
+from third_party import ezt
+
+# Expiration time for special features of timestamped links.
+# This is not for security, just for informational messages that
+# make sense in the context of a user session, but that should
+# not appear days later if the user follows a bookmarked link.
+_LINK_EXPIRATION_SEC = 8
+
+
+class AlertsView(object):
+ """EZT object for showing alerts at the top of the page."""
+
+ def __init__(self, mr):
+ # Used to show message confirming item was updated
+ self.updated = mr.GetIntParam('updated')
+
+ # Used to show message confirming item was moved and the location of the new
+ # item.
+ self.moved_to_project = mr.GetParam('moved_to_project')
+ self.moved_to_id = mr.GetIntParam('moved_to_id')
+ self.moved = self.moved_to_project and self.moved_to_id
+
+ # Used to show message confirming item was copied and the location of the
+ # new item.
+ self.copied_from_id = mr.GetIntParam('copied_from_id')
+ self.copied_to_project = mr.GetParam('copied_to_project')
+ self.copied_to_id = mr.GetIntParam('copied_to_id')
+ self.copied = self.copied_to_project and self.copied_to_id
+
+ # Used to show message confirming items deleted
+ self.deleted = mr.GetParam('deleted')
+
+ # If present, we will show message confirming that data was saved
+ self.saved = mr.GetParam('saved')
+
+ link_generation_timestamp = mr.GetIntParam('ts', default_value=0)
+ now = int(time.time())
+ ts_links_are_valid = now - link_generation_timestamp < _LINK_EXPIRATION_SEC
+
+ show_alert = ts_links_are_valid and (
+ self.updated or self.moved or self.copied or self.deleted or self.saved)
+ self.show = ezt.boolean(show_alert)
diff --git a/appengine/monorail/framework/artifactcollision.py b/appengine/monorail/framework/artifactcollision.py
new file mode 100644
index 0000000..2cd0651
--- /dev/null
+++ b/appengine/monorail/framework/artifactcollision.py
@@ -0,0 +1,49 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Class that implements the artifact update collision page.
+
+This page is displayed only when one user views and edits an issue,
+but another user has already submitted an issue update before the
+first user submits his/her update.
+
+TODO(jrobbins): give the user better options on how to proceed.
+
+Summary of classes:
+ ArtifactCollision: Show an error message explaining the mid-air collision.
+"""
+
+import re
+
+from framework import monorailrequest
+from framework import servlet
+
+
+class ArtifactCollision(servlet.Servlet):
+  """ArtifactCollision page explains that a mid-air collision has occurred."""
+
+ _PAGE_TEMPLATE = 'framework/artifact-collision-page.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_NONE
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ A dict of values used by EZT for rendering the page.
+ """
+ artifact_name = mr.GetParam('name')
+ if not artifact_name:
+ raise monorailrequest.InputException() # someone forged a link
+
+ artifact_detail_url = '/p/%s/issues/detail?id=%s' % (
+ mr.project_name, mr.continue_issue_id)
+
+ return {
+ 'artifact_name': artifact_name,
+ 'artifact_detail_url': artifact_detail_url,
+ }
diff --git a/appengine/monorail/framework/banned.py b/appengine/monorail/framework/banned.py
new file mode 100644
index 0000000..45dd326
--- /dev/null
+++ b/appengine/monorail/framework/banned.py
@@ -0,0 +1,46 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display a message explaining that the user has been banned.
+
+We can ban a user for anti-social behavior. We indicate that the user is
+banned by adding a 'banned' field to his/her User PB in the DB. Whenever
+a user with a banned indicator visits any page, AssertBasePermission()
+checks has_banned and redirects to this page.
+"""
+
+import logging
+
+from framework import permissions
+from framework import servlet
+
+
+class Banned(servlet.Servlet):
+ """The Banned page shows a message explaining that the user is banned."""
+
+ _PAGE_TEMPLATE = 'framework/banned-page.ezt'
+
+ def AssertBasePermission(self, mr):
+ """Allow banned users to see this page, and prevent non-banned users."""
+ # Note, we do not call Servlet.AssertBasePermission because
+ # that would redirect banned users here again in an endless loop.
+
+ # We only show this page to users who are banned. If a non-banned user
+ # follows a link to this URL, don't show the banned message, because that
+ # would lead to a big misunderstanding.
+ if not permissions.IsBanned(mr.auth.user_pb, mr.auth.user_view):
+ logging.info('non-banned user: %s', mr.auth.user_pb)
+ self.abort(404)
+
+ def GatherPageData(self, _mr):
+ """Build up a dictionary of data values to use when rendering the page."""
+ return {
+ # We do not actually display the specific reason for banning.
+      # That info is available via command-line tools.
+
+ # Make the "Sign Out" link just sign out, don't try to bring the
+ # user back to this page after they sign out.
+ 'currentPageURLEncoded': None,
+ }
diff --git a/appengine/monorail/framework/captcha.py b/appengine/monorail/framework/captcha.py
new file mode 100644
index 0000000..2aa9c19
--- /dev/null
+++ b/appengine/monorail/framework/captcha.py
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A simple python interface to ReCAPTCHA."""
+
+import json
+import logging
+import urllib
+import urllib2
+
+from services import secrets_svc
+
+def Verify(remote_ip, response):
+ """Check the user's guess at a captcha solution.
+
+ Args:
+ remote_ip: user's IP address.
+    response: The user's response to the captcha challenge, as returned
+      by the reCAPTCHA widget.
+
+ Returns:
+ A pair (correct, error_msg) where error_msg will indicate
+ why a response was deemed incorrect. It is logged so that
+ you can see, e.g., if you have the wrong private key.
+ """
+  # If the user did not enter anything, that is always incorrect
+ if not response:
+ logging.info('response was blank')
+ return False, 'incorrect-captcha-sol'
+
+ resp = _AskRecaptcha(remote_ip, response)
+ if not resp['success']:
+ if 'error-codes' in resp:
+ return False, resp['error-codes']
+ else:
+ return False, 'incorrect-captcha-sol'
+
+ return True, ''
+
+def _AskRecaptcha(remote_ip, response):
+ """Ask the ReCAPTCHA backend to verify the user's guess."""
+ recaptcha_server_request = urllib2.Request(
+ url='https://www.google.com/recaptcha/api/siteverify',
+ data=urllib.urlencode({
+ 'secret': secrets_svc.GetRecaptchaPrivateKey(),
+ 'remoteip': remote_ip,
+ 'response': response}),
+ headers={
+ 'Content-type': 'application/x-www-form-urlencoded',
+ 'User-agent': 'reCAPTCHA Python'})
+ recaptcha_server_response = urllib2.urlopen(recaptcha_server_request)
+ resp = json.loads(recaptcha_server_response.read())
+ recaptcha_server_response.close()
+
+ return resp
+
diff --git a/appengine/monorail/framework/csp_report.py b/appengine/monorail/framework/csp_report.py
new file mode 100644
index 0000000..b4dd0a4
--- /dev/null
+++ b/appengine/monorail/framework/csp_report.py
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet for Content Security Policy violation reporting.
+See http://www.html5rocks.com/en/tutorials/security/content-security-policy/
+for more information on how this mechanism works.
+"""
+
+import webapp2
+import logging
+
+
+class CSPReportPage(webapp2.RequestHandler):
+ """CSPReportPage serves CSP violation reports."""
+
+ def post(self):
+ logging.error('CSP Violation: %s' % self.request.body)
diff --git a/appengine/monorail/framework/emailfmt.py b/appengine/monorail/framework/emailfmt.py
new file mode 100644
index 0000000..d4aa955
--- /dev/null
+++ b/appengine/monorail/framework/emailfmt.py
@@ -0,0 +1,359 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Functions that format or parse email messages in Monorail.
+
+Specifically, this module has the logic for generating various email
+header lines that help match inbound and outbound email to the project
+and artifact that generated it.
+"""
+
+import hmac
+import logging
+import re
+import rfc822
+
+from google.appengine.api import app_identity
+
+import settings
+from framework import framework_constants
+from services import client_config_svc
+from services import secrets_svc
+
+# TODO(jrobbins): Parsing very large messages is slow, and we are not going
+# to handle attachments at first, so there is no reason to consider large
+# emails.
+MAX_BODY_SIZE = 100 * 1024
+MAX_HEADER_CHARS_CONSIDERED = 255
+
+
+
+def IsBodyTooBigToParse(body):
+ """Return True if the email message body is too big to process."""
+ return len(body) > MAX_BODY_SIZE
+
+
+def IsProjectAddressOnToLine(project_addr, to_addrs):
+ """Return True if an email was explicitly sent directly to us."""
+ return project_addr in to_addrs
+
+
+def ParseEmailMessage(msg):
+ """Parse the given MessageRouterMessage and return relevant fields.
+
+ Args:
+ msg: email.message.Message object for the email message sent to us.
+
+ Returns:
+ A tuple: from_addr, to_addrs, cc_addrs, references, subject, body.
+ """
+ # Ignore messages that are probably not from humans, see:
+ # http://google.com/search?q=precedence+bulk+junk
+ precedence = msg.get('precedence', '')
+ if precedence.lower() in ['bulk', 'junk']:
+ logging.info('Precedence: %r indicates an autoresponder', precedence)
+ return '', [], [], '', '', ''
+
+ from_addrs = _ExtractAddrs(msg.get('from', ''))
+ if from_addrs:
+ from_addr = from_addrs[0]
+ else:
+ from_addr = ''
+
+ to_addrs = _ExtractAddrs(msg.get('to', ''))
+ cc_addrs = _ExtractAddrs(msg.get('cc', ''))
+
+ in_reply_to = msg.get('in-reply-to', '')
+ references = msg.get('references', '').split()
+ references = list({ref for ref in [in_reply_to] + references if ref})
+ subject = _StripSubjectPrefixes(msg.get('subject', ''))
+
+ body = ''
+ for part in msg.walk():
+ # We only process plain text emails.
+ if part.get_content_type() == 'text/plain':
+ body = part.get_payload(decode=True)
+ break # Only consider the first text part.
+
+ return from_addr, to_addrs, cc_addrs, references, subject, body
+
+
+def _ExtractAddrs(header_value):
+ """Given a message header value, return email address found there."""
+ friendly_addr_pairs = list(rfc822.AddressList(header_value))
+ return [addr for _friendly, addr in friendly_addr_pairs]
+
+
+def _StripSubjectPrefixes(subject):
+ """Strip off any 'Re:', 'Fwd:', etc. subject line prefixes."""
+ prefix = _FindSubjectPrefix(subject)
+ while prefix:
+ subject = subject[len(prefix):].strip()
+ prefix = _FindSubjectPrefix(subject)
+
+ return subject
+
+
+def _FindSubjectPrefix(subject):
+ """If the given subject starts with a prefix, return that prefix."""
+ for prefix in ['re:', 'aw:', 'fwd:', 'fw:']:
+ if subject.lower().startswith(prefix):
+ return prefix
+
+ return None
+
+
+def MailDomain():
+  """Return the domain name where this app can receive email."""
+ if settings.unit_test_mode:
+ return 'testbed-test.appspotmail.com'
+
+ # If running on a GAFYD domain, you must define an app alias on the
+ # Application Settings admin web page. If you cannot reserve the matching
+ # APP_ID for the alias, then specify it in settings.mail_domain.
+ if settings.mail_domain:
+ return settings.mail_domain
+
+ app_id = app_identity.get_application_id()
+ if ':' in app_id:
+ app_id = app_id.split(':')[-1]
+
+ return '%s.appspotmail.com' % app_id
+
+
+def FormatFriendly(commenter_view, sender, reveal_addr):
+ """Format the From: line to include the commenter's friendly name if given."""
+ if commenter_view:
+ site_name = settings.site_name
+ if commenter_view.email in client_config_svc.GetServiceAccountMap():
+ friendly = commenter_view.display_name
+ elif reveal_addr:
+ friendly = commenter_view.email
+ else:
+ friendly = commenter_view.display_name
+ return '%s via %s <%s>' % (friendly, site_name, sender)
+ else:
+ return sender
+
+
+def NoReplyAddress(commenter_view=None, reveal_addr=False):
+ """Return an address that ignores all messages sent to it."""
+ # Note: We use "no_reply" with an underscore to avoid potential conflict
+ # with any project name. Project names cannot have underscores.
+ sender = 'no_reply@%s' % MailDomain()
+ return FormatFriendly(commenter_view, sender, reveal_addr)
+
+
+def FormatFromAddr(_project, commenter_view=None, reveal_addr=False,
+ can_reply_to=True):
+ """Return a string to be used on the email From: line.
+
+ Args:
+ project: Project PB for the project that the email is sent from.
+ commenter_view: Optional UserView of the user who made a comment. We use
+ the user's (potentially obscured) email address as their friendly name.
+ reveal_addr: Optional bool. If False then the address is obscured.
+ can_reply_to: Optional bool. If True then settings.send_email_as is used,
+ otherwise settings.send_noreply_email_as is used.
+
+ Returns:
+ A string that should be used in the From: line of outbound email
+ notifications for the given project.
+ """
+ addr = (settings.send_email_as if can_reply_to
+ else settings.send_noreply_email_as)
+ return FormatFriendly(commenter_view, addr, reveal_addr)
+
+
+def NormalizeHeader(s):
+ """Make our message-ids robust against mail client spacing and truncation."""
+ words = _StripSubjectPrefixes(s).split() # Split on any runs of whitespace.
+ normalized = ' '.join(words)
+ truncated = normalized[:MAX_HEADER_CHARS_CONSIDERED]
+ return truncated
+
+
+def MakeMessageID(to_addr, subject, from_addr):
+ """Make a unique (but deterministic) email Message-Id: value."""
+ normalized_subject = NormalizeHeader(subject)
+ if isinstance(normalized_subject, unicode):
+ normalized_subject = normalized_subject.encode('utf-8')
+ mail_hmac_key = secrets_svc.GetEmailKey()
+ return '<0=%s=%s=%s@%s>' % (
+ hmac.new(mail_hmac_key, to_addr).hexdigest(),
+ hmac.new(mail_hmac_key, normalized_subject).hexdigest(),
+ from_addr.split('@')[0],
+ MailDomain())
+
+
+def GetReferences(to_addr, subject, seq_num, project_from_addr):
+ """Make a References: header to make this message thread properly.
+
+ Args:
+ to_addr: address that email message will be sent to.
+ subject: subject line of email message.
+ seq_num: sequence number of message in thread, e.g., 0, 1, 2, ...,
+ or None if the message is not part of a thread.
+ project_from_addr: address that the message will be sent from.
+
+ Returns:
+ A string Message-ID that does not correspond to any actual email
+ message that was ever sent, but it does serve to unite all the
+    messages that belong together in a thread.
+ """
+ if seq_num is not None:
+ return MakeMessageID(to_addr, subject, project_from_addr)
+ else:
+ return ''
+
+
+def ValidateReferencesHeader(message_ref, project, from_addr, subject):
+ """Check that the References header is one that we could have sent.
+
+ Args:
+ message_ref: one of the References header values from the inbound email.
+ project: Project PB for the affected project.
+ from_addr: string email address that inbound email was sent from.
+ subject: string base subject line of inbound email.
+
+ Returns:
+ True if it looks like this is a reply to a message that we sent
+ to the same address that replied. Otherwise, False.
+ """
+ sender = '%s@%s' % (project.project_name, MailDomain())
+ expected_ref = MakeMessageID(from_addr, subject, sender)
+
+ # TODO(jrobbins): project option to not check from_addr.
+ # TODO(jrobbins): project inbound auth token.
+ return expected_ref == message_ref
+
+
+PROJECT_EMAIL_RE = re.compile(
+ r'(?P<project>[-a-z0-9]+)'
+ r'@(?P<domain>[-a-z0-9.]+)')
+
+ISSUE_CHANGE_SUMMARY_RE = re.compile(
+ r'Issue (?P<local_id>[0-9]+) in '
+ r'(?P<project>[-a-z0-9]+): '
+ r'(?P<summary>.+)')
+
+
+def IdentifyProjectAndIssue(project_addr, subject):
+ """Parse the domain name, project name, and artifact id from a reply.
+
+ Args:
+ project_addr: string email address that the email was delivered to,
+ it must match the Reply-To: header sent in the notification message.
+ subject: string email subject line received, it must match the one
+ sent. Leading prefixes like "Re:" should already have been stripped.
+
+ Returns:
+ A 2-tuple: (project_name, local_id). If either or both are
+ None, they could not be determined.
+ """
+ # Ignore any inbound email sent to a "no_reply@" address.
+ if project_addr.startswith('no_reply@'):
+ return None, None
+
+ project_name = None
+
+ m = PROJECT_EMAIL_RE.match(project_addr.lower())
+ if m:
+ project_name = m.group('project')
+
+ issue_project_name, local_id_str = _MatchSubject(subject)
+
+ if project_name != issue_project_name:
+ # Something is wrong with the project name.
+ project_name = None
+
+ logging.info('project_name = %r', project_name)
+ logging.info('local_id_str = %r', local_id_str)
+
+ try:
+ local_id = int(local_id_str)
+ except ValueError:
+ local_id = None
+
+ return project_name, local_id
+
+
+def _MatchSubject(subject):
+ """Parse the project, artifact type, and artifact id from a subject line."""
+ m = ISSUE_CHANGE_SUMMARY_RE.match(subject)
+ if m:
+ return m.group('project'), m.group('local_id')
+
+ return None, None
+
+
+# TODO(jrobbins): For now, we strip out lines that look like quoted
+# text and then will give the user the option to see the whole email.
+# For 2.0 of this feature, we should change the Comment PB to have
+# runs of text with different properties so that the UI can present
+# "- Show quoted text -" and expand it in-line.
+
+# TODO(jrobbins): For now, we look for lines that indicate quoted
+# text (e.g., they start with ">"). But, we should also collapse
+# multiple lines that are identical to other lines in previous
+# non-deleted comments on the same issue, regardless of quote markers.
+
+
+# We cut off the message if we see something that looks like a signature and
+# it is near the bottom of the message.
+SIGNATURE_BOUNDARY_RE = re.compile(
+ r'^(([-_=]+ ?)+|'
+ r'cheers|(best |warm |kind )?regards|thx|thanks|thank you|'
+ r'Sent from my i?Phone|Sent from my iPod)'
+ r',? *$', re.I)
+
+MAX_SIGNATURE_LINES = 8
+
+FORWARD_OR_EXPLICIT_SIG_PATS = [
+ r'[^0-9a-z]+(forwarded|original) message[^0-9a-z]+\s*$',
+ r'Updates:\s*$',
+ r'Comment #\d+ on issue \d+ by \S+:',
+ # If we see this anywhere in the message, treat the rest as a signature.
+ r'--\s*$',
+ ]
+FORWARD_OR_EXPLICIT_SIG_PATS_AND_REST_RE = re.compile(
+ r'^(%s)(.|\n)*' % '|'.join(FORWARD_OR_EXPLICIT_SIG_PATS),
+ flags=re.MULTILINE | re.IGNORECASE)
+
+# This handles gmail well, and it's pretty broad without seeming like
+# it would cause false positives.
+QUOTE_PATS = [
+ r'^On .*\s+<\s*\S+?@[-a-z0-9.]+>\s*wrote:\s*$',
+ r'^On .* \S+?@[-a-z0-9.]+\s*wrote:\s*$',
+ r'^\S+?@[-a-z0-9.]+ \(\S+?@[-a-z0-9.]+\)\s*wrote:\s*$',
+ r'\S+?@[-a-z0-9]+.appspotmail.com\s.*wrote:\s*$',
+ r'\S+?@[-a-z0-9]+.appspotmail.com\s+.*a\s+\xc3\xa9crit\s*:\s*$',
+ r'^\d+/\d+/\d+ +<\S+@[-a-z0-9.]+>:?\s*$',
+ r'^>.*$',
+ ]
+QUOTED_BLOCKS_RE = re.compile(
+ r'(^\s*\n)*((%s)\n?)+(^\s*\n)*' % '|'.join(QUOTE_PATS),
+ flags=re.MULTILINE | re.IGNORECASE)
+
+
+def StripQuotedText(description):
+ """Strip all quoted text lines out of the given comment text."""
+  # If the rest of the message is forwarded text, we're done.
+ description = FORWARD_OR_EXPLICIT_SIG_PATS_AND_REST_RE.sub('', description)
+ # Replace each quoted block of lines and surrounding blank lines with at
+ # most one blank line.
+ description = QUOTED_BLOCKS_RE.sub('\n', description)
+
+ new_lines = description.strip().split('\n')
+ # Make another pass over the last few lines to strip out signatures.
+ sig_zone_start = max(0, len(new_lines) - MAX_SIGNATURE_LINES)
+ for idx in range(sig_zone_start, len(new_lines)):
+ line = new_lines[idx]
+ if SIGNATURE_BOUNDARY_RE.match(line):
+ # We found the likely start of a signature, just keep the lines above it.
+ new_lines = new_lines[:idx]
+ break
+
+ return '\n'.join(new_lines).strip()
diff --git a/appengine/monorail/framework/excessiveactivity.py b/appengine/monorail/framework/excessiveactivity.py
new file mode 100644
index 0000000..fddb7e5
--- /dev/null
+++ b/appengine/monorail/framework/excessiveactivity.py
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display the an error page for excessive activity.
+
+This page is shown when the user performs a given type of action
+too many times in a 24-hour period or exceeds a lifetime limit.
+"""
+
+from framework import servlet
+
+
class ExcessiveActivity(servlet.Servlet):
  """ExcessiveActivity page shows an error message.

  Rendering is handled by the servlet.Servlet base class; this subclass
  only names the template and supplies (empty) page data.
  """

  _PAGE_TEMPLATE = 'framework/excessive-activity-page.ezt'

  def GatherPageData(self, _mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      _mr: commonly used info parsed from the request (unused here).

    Returns:
      An empty dict; this template needs no extra data values.
    """
    return {}
diff --git a/appengine/monorail/framework/filecontent.py b/appengine/monorail/framework/filecontent.py
new file mode 100644
index 0000000..ec3c171
--- /dev/null
+++ b/appengine/monorail/framework/filecontent.py
@@ -0,0 +1,171 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Utility routines for dealing with MIME types and decoding text files."""
+
+import itertools
+import logging
+
+from framework import framework_constants
+
+
# Maps lowercased filename extension to the content-type served for it.
_EXTENSION_TO_CTYPE_TABLE = {
    # These are images/PDFs that we trust the browser to display.
    'gif': 'image/gif',
    'jpg': 'image/jpeg',
    'jpeg': 'image/jpeg',
    'png': 'image/png',
    'ico': 'image/x-icon',
    'svg': 'image/svg+xml',
    'pdf': 'application/pdf',

    # We do not serve mimetypes that cause the browser to launch a local
    # app because that is not required for issue tracking and it is a
    # potential security risk.
}
+
+
def GuessContentTypeFromFilename(filename):
  """Guess a file's content type based on the filename extension.

  Args:
    filename: String name of a file.

  Returns:
    MIME type string to use when serving this file. We only use text/plain for
    text files, appropriate image content-types, or application/octet-stream
    for virtually all binary files. This limits the richness of the user's
    experience, e.g., the user cannot open an MS Office application directly
    by clicking on an attachment, but it is safer.
  """
  # Lowercase once here; the original code redundantly re-lowered ext in
  # the table lookup below.
  ext = filename.rsplit('.', 1)[-1].lower() if ('.' in filename) else ''
  if ext in COMMON_TEXT_FILE_EXTENSIONS:
    return 'text/plain'
  return _EXTENSION_TO_CTYPE_TABLE.get(ext, 'application/octet-stream')
+
+
+# Constants used in detecting if a file has binary content.
+# All line lengths must be below the upper limit, and there must be a spefic
+# ratio below the lower limit.
+_MAX_SOURCE_LINE_LEN_LOWER = 350
+_MAX_SOURCE_LINE_LEN_UPPER = 800
+_SOURCE_LINE_LEN_LOWER_RATIO = 0.9
+
+# Message to display for undecodable commit log or author values.
+UNDECODABLE_LOG_CONTENT = '[Cannot be displayed]'
+
+# How large a repository file is in bytes before we don't try to display it
+SOURCE_FILE_MAX_SIZE = 1000 * 1024
+SOURCE_FILE_MAX_LINES = 50000
+
+# The source code browser will not attempt to display any filename ending
+# with one of these extensions.
+COMMON_BINARY_FILE_EXTENSIONS = {
+ 'gif', 'jpg', 'jpeg', 'psd', 'ico', 'icon', 'xbm', 'xpm', 'xwd', 'pcx',
+ 'bmp', 'png', 'vsd,' 'mpg', 'mpeg', 'wmv', 'wmf', 'avi', 'flv', 'snd',
+ 'mp3', 'wma', 'exe', 'dll', 'bin', 'class', 'o', 'so', 'lib', 'dylib',
+ 'jar', 'ear', 'war', 'par', 'msi', 'tar', 'zip', 'rar', 'cab', 'z', 'gz',
+ 'bz2', 'dmg', 'iso', 'rpm', 'pdf', 'eps', 'tif', 'tiff', 'xls', 'ppt',
+ 'graffie', 'violet',
+ }
+
# The source code browser will display file contents as text data for files
# with the following extensions or exact filenames (assuming they decode
# correctly).
COMMON_TEXT_FILE_EXTENSIONS = (
    set(framework_constants.PRETTIFY_CLASS_MAP) |
    {'', 'ada', 'asm', 'asp', 'bat', 'cgi', 'csv', 'el', 'emacs',
     'jsp', 'log', 'markdown', 'md', 'mf', 'plist', 'properties', 'r',
     'rc', 'txt', 'vim', 'wiki', 'xemacs', 'yacc'})
COMMON_TEXT_FILENAMES = (
    set(framework_constants.PRETTIFY_FILENAME_CLASS_MAP) |
    {'authors', 'install', 'readme'})
+
+
def DecodeFileContents(file_contents, path=None):
  """Try converting file contents to unicode using utf-8 or latin-1.

  This is applicable to untrusted maybe-text from vcs files or inbound emails.

  We try decoding the file as utf-8, then fall back on latin-1. In the former
  case, we call the file a text file; in the latter case, we guess whether
  the file is text or binary based on line length.

  If we guess text when the file is binary, the user sees safely encoded
  gibberish. If the other way around, the user sees a message that we will
  not display the file.

  TODO(jrobbins): we could try the user-supplied encoding, iff it
  is one of the encodings that we know that we can handle.

  Args:
    file_contents: byte string from svn file. It could be text in almost
        any encoding, or binary. We cannot trust the user-supplied encoding
        in the mime-type property.
    path: string pathname of file.

  Returns:
    The tuple (unicode_string, is_binary, is_long):
      - The unicode version of the string.
      - is_binary is true if the string could not be decoded as text.
      - is_long is true if the file has more than SOURCE_FILE_MAX_LINES lines.
  """
  # If the filename is one that typically identifies a binary file, then
  # just treat it as binary without any further analysis.
  ext = None
  if path and '.' in path:
    ext = path.split('.')[-1]
    if ext.lower() in COMMON_BINARY_FILE_EXTENSIONS:
      # If the file is binary, we don't care about the length, since we don't
      # show or diff it.
      return u'', True, False

  # If the string can be decoded as utf-8, we treat it as textual.
  try:
    u_str = file_contents.decode('utf-8', 'strict')
    is_long = len(u_str.split('\n')) > SOURCE_FILE_MAX_LINES
    return u_str, False, is_long
  except UnicodeDecodeError:
    logging.info('not a utf-8 file: %s bytes', len(file_contents))

  # Fall back on latin-1. This will always succeed, since every byte maps to
  # something in latin-1, even if that something is gibberish.
  u_str = file_contents.decode('latin-1', 'strict')

  lines = u_str.split('\n')
  is_long = len(lines) > SOURCE_FILE_MAX_LINES
  # Treat decodable files with certain filenames and/or extensions as text
  # files. This avoids problems with common file types using our text/binary
  # heuristic rules below.
  if path:
    name = path.split('/')[-1]
    if (name.lower() in COMMON_TEXT_FILENAMES or
        (ext and ext.lower() in COMMON_TEXT_FILE_EXTENSIONS)):
      return u_str, False, is_long

  # HEURISTIC: Binary files can qualify as latin-1, so we need to
  # check further. Any real source code is going to be divided into
  # reasonably sized lines. All lines must be below an upper character limit,
  # and most lines must be below a lower limit. This allows some exceptions
  # to the lower limit, but is more restrictive than just using a single
  # large character limit.
  # Note: str.split('\n') always yields at least one element, so
  # num_examined >= 1 and the division below is safe.
  num_examined = min(len(lines), SOURCE_FILE_MAX_LINES)
  is_binary = False
  lower_count = 0
  for line in itertools.islice(lines, SOURCE_FILE_MAX_LINES):
    size = len(line)
    if size <= _MAX_SOURCE_LINE_LEN_LOWER:
      lower_count += 1
    elif size > _MAX_SOURCE_LINE_LEN_UPPER:
      is_binary = True
      break

  if not is_binary:
    # Bug fix: divide by the number of lines actually examined. The original
    # divided by len(lines) even though only SOURCE_FILE_MAX_LINES lines were
    # counted, which wrongly flagged very long all-text files as binary.
    ratio = lower_count / float(num_examined)
    if ratio < _SOURCE_LINE_LEN_LOWER_RATIO:
      is_binary = True

  return u_str, is_binary, is_long
diff --git a/appengine/monorail/framework/framework_bizobj.py b/appengine/monorail/framework/framework_bizobj.py
new file mode 100644
index 0000000..b1478fc
--- /dev/null
+++ b/appengine/monorail/framework/framework_bizobj.py
@@ -0,0 +1,156 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Business objects for Monorail's framework.
+
+These are classes and functions that operate on the objects that
+users care about in Monorail but that are not part of just one specific
+component: e.g., projects, users, and labels.
+"""
+
+import logging
+import re
+import string
+
+import settings
+from framework import framework_constants
+
+
# Pattern to match a valid project name. Users of this pattern MUST use
# the re.VERBOSE flag or the whitespace and comments will be considered
# significant and the pattern will not work. See "re" module documentation.
_RE_PROJECT_NAME_PATTERN_VERBOSE = r"""
  (?=[-a-z0-9]*[a-z][-a-z0-9]*)   # Lookahead to make sure there is at least
                                  # one letter in the whole name.
  [a-z0-9]                        # Start with a letter or digit.
  [-a-z0-9]*                      # Follow with any number of valid characters.
  [a-z0-9]                        # End with a letter or digit.
"""


# Compiled regexp to match the project name and nothing more before or after.
# NOTE(review): the pattern requires at least two characters, so a
# single-character project name never matches -- confirm that is intended.
RE_PROJECT_NAME = re.compile(
    '^%s$' % _RE_PROJECT_NAME_PATTERN_VERBOSE, re.VERBOSE)
+
+
def IsValidProjectName(s):
  """Return True if the given string is a valid project name.

  Args:
    s: string to validate.

  Returns:
    True if s matches the project-name pattern and is not too long.
    (The original returned a match object or None; callers only relied on
    truthiness, so returning a real bool is backward-compatible.)
  """
  return bool(
      RE_PROJECT_NAME.match(s) and
      len(s) <= framework_constants.MAX_PROJECT_NAME_LENGTH)
+
+
def UserOwnsProject(project, effective_ids):
  """Return True if any of the effective_ids is a project owner."""
  owner_ids = project.owner_ids or set()
  return any(user_id in effective_ids for user_id in owner_ids)
+
+
def UserIsInProject(project, effective_ids):
  """Return True if any of the effective_ids is a project member.

  Args:
    project: Project PB for the current project.
    effective_ids: set of int user IDs for the current user (including all
        user groups). This will be an empty set for anonymous users.

  Returns:
    True if the user has any direct or indirect role in the project
    (owner, committer, or contributor), otherwise False.
  """
  if UserOwnsProject(project, effective_ids):
    return True
  member_id_lists = [project.committer_ids, project.contributor_ids]
  return any(
      not effective_ids.isdisjoint(id_list or set())
      for id_list in member_id_lists)
+
+
def AllProjectMembers(project):
  """Return a list of user IDs of all members in the given project."""
  member_ids = []
  for role_ids in (project.owner_ids, project.committer_ids,
                   project.contributor_ids):
    member_ids.extend(role_ids)
  return member_ids
+
+
def IsPriviledgedDomainUser(email):
  """Return True if the user's account is from a priviledged domain.

  (The "priviledged" spelling is kept because callers use this name.)
  """
  if not email or '@' not in email:
    return False
  user_domain = email.split('@', 1)[1]
  return user_domain in settings.priviledged_user_domains
+
+
+
# String translation table to catch common typos in label names.
# Each of these punctuation/whitespace characters is simply deleted.
_CANONICALIZATION_TRANSLATION_TABLE = {
    ord(delete_u_char): None
    for delete_u_char in u'!"#$%&\'()*+,/:;<>?@[\\]^`{|}~\t\n\x0b\x0c\r '
    }
# '=' is not deleted; it is mapped to '-' (e.g., "Pri=1" -> "Pri-1").
_CANONICALIZATION_TRANSLATION_TABLE.update({ord(u'='): ord(u'-')})
+
+
def CanonicalizeLabel(user_input):
  """Canonicalize a given label or status value.

  When the user enters a string that represents a label or an enum,
  convert it a canonical form that makes it more likely to match
  existing values.

  Args:
    user_input: string that the user typed for a label.

  Returns:
    Canonical form of that label as a unicode string, or None if the
    input was None.
  """
  if user_input is None:
    return None

  if not isinstance(user_input, unicode):
    user_input = user_input.decode('utf-8')

  # Delete junk characters and map '=' to '-' in a single pass.
  return user_input.translate(_CANONICALIZATION_TRANSLATION_TABLE)
+
+
def MergeLabels(labels_list, labels_add, labels_remove, excl_prefixes):
  """Update a list of labels with the given add and remove label lists.

  Args:
    labels_list: list of current labels.
    labels_add: labels that the user wants to add.
    labels_remove: labels that the user wants to remove.
    excl_prefixes: prefixes that can have only one value, e.g., Priority.

  Returns:
    (merged_labels, update_labels_add, update_labels_remove):
      A new list of labels with the given labels added and removed, and
      any exclusive label prefixes taken into account. Then two
      lists of update strings to explain the changes that were actually
      made.
  """
  existing_lower = {lab.lower() for lab in labels_list}
  # Keep only the adds that are not already present, and the removes that
  # actually exist, so the returned lists describe real changes.
  labels_add = [lab for lab in labels_add
                if lab.lower() not in existing_lower]
  labels_remove = [lab for lab in labels_remove
                   if lab.lower() in existing_lower]
  remove_lower = {lab.lower() for lab in labels_remove}
  excl_lower = {prefix.lower() for prefix in excl_prefixes}

  # Each newly added exclusive-prefix label implicitly evicts old labels
  # sharing that prefix, e.g., adding Priority-Low evicts Priority-High.
  evicting_prefixes = []
  for lab in labels_add:
    prefix = lab.split('-')[0].lower()
    if prefix in excl_lower:
      evicting_prefixes.append(prefix + '-')

  # Note: evicted labels are not added to labels_remove; it is implicit.
  surviving = [
      lab for lab in labels_list
      if not any(lab.lower().startswith(prefix_dash)
                 for prefix_dash in evicting_prefixes)]

  merged_labels = [lab for lab in surviving + labels_add
                   if lab.lower() not in remove_lower]

  return merged_labels, labels_add, labels_remove
diff --git a/appengine/monorail/framework/framework_constants.py b/appengine/monorail/framework/framework_constants.py
new file mode 100644
index 0000000..42d3e04
--- /dev/null
+++ b/appengine/monorail/framework/framework_constants.py
@@ -0,0 +1,158 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Some constants used throughout Monorail."""
+
+import os
+import re
+
+
# Number of seconds in various periods.
SECS_PER_MINUTE = 60
SECS_PER_HOUR = SECS_PER_MINUTE * 60
SECS_PER_DAY = SECS_PER_HOUR * 24
SECS_PER_MONTH = SECS_PER_DAY * 30  # Approximation: a fixed 30-day month.
SECS_PER_YEAR = SECS_PER_DAY * 365  # Approximation: ignores leap years.

# When we write to memcache, let the values expire so that we don't
# get any unexpected super-old values as we make code changes over the
# years. Also, searches can contain date terms like [opened<today-1]
# that would become wrong if cached for a long time.
MEMCACHE_EXPIRATION = 6 * SECS_PER_HOUR

# Fulltext indexing happens asynchronously and we get no notification
# when the indexing operation has completed. So, when we cache searches
# that use fulltext terms, the results might be stale. We still do
# cache them and use the cached values, but we expire them so that the
# results cannot be stale for a long period of time.
FULLTEXT_MEMCACHE_EXPIRATION = 3 * SECS_PER_MINUTE
+
# Size in bytes of the largest form submission that we will accept.
MAX_POST_BODY_SIZE = 10 * 1024 * 1024  # = 10 MB

# Special user ID and name to use when no user was specified.
NO_USER_SPECIFIED = 0
NO_SESSION_SPECIFIED = 0
NO_USER_NAME = '----'

# String to display when some field has no value.
NO_VALUES = '----'

# If the user enters one or more dashes, that means "no value". This is useful
# in bulk edit, inbound email, and commit log command where a blank field
# means "keep what was there" or is ignored.
NO_VALUE_RE = re.compile(r'^-+$')

# Used to loosely validate column spec. Mainly guards against malicious input.
COLSPEC_RE = re.compile(r'^[-.\w\s/]*$', re.UNICODE)
COLSPEC_COL_RE = re.compile(r'[-.\w/]+', re.UNICODE)

# Used to loosely validate sort spec. Mainly guards against malicious input.
SORTSPEC_RE = re.compile(r'^[-.\w\s/]*$', re.UNICODE)

# For the artifact search box autosizing when the user types a long query.
# The field grows from MIN to MAX characters in AUTOSIZE_STEP increments.
MIN_ARTIFACT_SEARCH_FIELD_SIZE = 38
MAX_ARTIFACT_SEARCH_FIELD_SIZE = 75
AUTOSIZE_STEP = 3
+
# Regular expressions used in parsing label and status configuration text.
IDENTIFIER_REGEX = r'[-.\w]+'
IDENTIFIER_RE = re.compile(IDENTIFIER_REGEX, re.UNICODE)
# Labels and status values that are prefixed by a pound-sign are not displayed
# in autocomplete menus. The second group captures an optional docstring
# following the identifier (with an optional '=' separator).
IDENTIFIER_DOCSTRING_RE = re.compile(
    r'^(#?%s)[ \t]*=?[ \t]*(.*)$' % IDENTIFIER_REGEX,
    re.MULTILINE | re.UNICODE)

# Number of label text fields that we can display on a web form for issues.
MAX_LABELS = 24

# Default number of comments to display on an artifact detail page at one time.
# Other comments will be paginated. This happens to be the same as the max
# set by the --max_comments_per_page flag.
DEFAULT_COMMENTS_PER_PAGE = 500

# Content type to use when serving JSON.
# NOTE(review): 'application/x-javascript' is nonstandard for JSON
# ('application/json' is the registered type); presumably kept for legacy
# browser behavior -- confirm before changing.
CONTENT_TYPE_JSON = 'application/x-javascript; charset=UTF-8'
+
# Maximum comments to index to keep the search index from choking. E.g., if an
# artifact had 1200 comments, only 0..99 and 701..1200 would be indexed.
# This mainly affects advocacy issues which are highly redundant anyway.
INITIAL_COMMENTS_TO_INDEX = 100
FINAL_COMMENTS_TO_INDEX = 500

# This is the longest string that GAE search will accept in one field.
# The entire search document is also limited to 1M, so our limit is 800K
# so that the comments leave room for metadata.
MAX_FTS_FIELD_SIZE = 800 * 1024

# Base path to EZT templates.
# NOTE(review): rindex('/') assumes a POSIX-style path separator; fine on
# App Engine, but os.path.dirname would be more portable -- confirm.
this_dir = os.path.dirname(__file__)
TEMPLATE_PATH = this_dir[:this_dir.rindex('/')] + '/templates/'

# Defaults for dooming a project.
DEFAULT_DOOM_REASON = 'No longer needed'
DEFAULT_DOOM_PERIOD = SECS_PER_DAY * 90

# Maximum number of members a project may have.
MAX_PROJECT_PEOPLE = 1000
# Maximum length of a project name (presumably chosen to match the 63-char
# DNS label limit).
MAX_PROJECT_NAME_LENGTH = 63

# When logging potentially long debugging strings, only show this many chars.
LOGGING_MAX_LENGTH = 2000
+
# Maps languages supported by google-code-prettify
# to the class name that should be added to code blocks in that language.
# This list should be kept in sync with the handlers registered
# in lang-*.js and prettify.js from the prettify project.
PRETTIFY_CLASS_MAP = {
    ext: 'lang-' + ext
    for ext in [
        # Supported in lang-*.js
        'apollo', 'agc', 'aea', 'lisp', 'el', 'cl', 'scm',
        'css', 'go', 'hs', 'lua', 'fs', 'ml', 'proto', 'scala', 'sql', 'vb',
        'vbs', 'vhdl', 'vhd', 'wiki', 'yaml', 'yml', 'clj',
        # Supported in prettify.js
        'htm', 'html', 'mxml', 'xhtml', 'xml', 'xsl',
        'c', 'cc', 'cpp', 'cxx', 'cyc', 'm',
        'json', 'cs', 'java', 'bsh', 'csh', 'sh', 'cv', 'py', 'perl', 'pl',
        'pm', 'rb', 'js', 'coffee',
    ]}

# Languages which are not specifically mentioned in prettify.js
# but which render intelligibly with the default handler.
# (The empty-string class means: no lang- hint, let prettify auto-detect.)
PRETTIFY_CLASS_MAP.update(
    (ext, '') for ext in [
        'hpp', 'hxx', 'hh', 'h', 'inl', 'idl', 'swig', 'd',
        'php', 'tcl', 'aspx', 'cfc', 'cfm',
        'ent', 'mod', 'as',
        'y', 'lex', 'awk', 'n', 'pde',
    ])

# Languages which are not specifically mentioned in prettify.js
# but which should be rendered using a certain prettify module.
PRETTIFY_CLASS_MAP.update({
    'docbook': 'lang-xml',
    'dtd': 'lang-xml',
    'duby': 'lang-rb',
    'mk': 'lang-sh',
    'mak': 'lang-sh',
    'make': 'lang-sh',
    'mirah': 'lang-rb',
    'ss': 'lang-lisp',
    'vcproj': 'lang-xml',
    'xsd': 'lang-xml',
    'xslt': 'lang-xml',
})

# Maps exact (lowercased) filenames to a prettify class, for files whose
# type cannot be identified from an extension.
PRETTIFY_FILENAME_CLASS_MAP = {
    'makefile': 'lang-sh',
    'makefile.in': 'lang-sh',
    'doxyfile': 'lang-sh',  # Key-value pairs with hash comments
    '.checkstyle': 'lang-xml',
    '.classpath': 'lang-xml',
    '.project': 'lang-xml',
}

# OAuth scope used to read the signed-in user's email address.
OAUTH_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
diff --git a/appengine/monorail/framework/framework_helpers.py b/appengine/monorail/framework/framework_helpers.py
new file mode 100644
index 0000000..2b30a63
--- /dev/null
+++ b/appengine/monorail/framework/framework_helpers.py
@@ -0,0 +1,671 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used throughout Monorail."""
+
+import logging
+import random
+import string
+import textwrap
+import threading
+import time
+import traceback
+import urllib
+import urlparse
+
+from google.appengine.api import app_identity
+
+from third_party import ezt
+
+import settings
+from framework import actionlimit
+from framework import framework_constants
+from framework import template_helpers
+from framework import timestr
+from framework import urls
+from services import client_config_svc
+
+
# For random key generation: keys are RANDOM_KEY_LENGTH characters drawn
# from the alphanumeric alphabet below.
RANDOM_KEY_LENGTH = 128
RANDOM_KEY_CHARACTERS = string.ascii_letters + string.digits

# Params recognized by FormatURL, in the order they will appear in the url.
RECOGNIZED_PARAMS = ['can', 'start', 'num', 'q', 'colspec', 'groupby', 'sort',
                     'show', 'format', 'me', 'table_title', 'projects']
+
+
def retry(tries, delay=1, backoff=2):
  """A retry decorator with exponential backoff.

  Functions are retried when Exceptions occur.

  Args:
    tries: int Number of times to retry, set to 0 to disable retry.
    delay: float Initial sleep time in seconds.
    backoff: float Must be greater than 1, further failures would sleep
        delay*=backoff seconds.
  """
  if backoff <= 1:
    raise ValueError("backoff must be greater than 1")
  if tries < 0:
    raise ValueError("tries must be 0 or greater")
  if delay <= 0:
    raise ValueError("delay must be greater than 0")

  def decorator(func):
    def wrapper(*args, **kwargs):
      remaining_attempts = tries + 1  # Always call func at least once.
      sleep_secs = delay
      while True:
        try:
          return func(*args, **kwargs)
        except Exception:
          remaining_attempts -= 1
          if not remaining_attempts:
            logging.error('Exceeded maximum number of retries for %s.',
                          func.__name__)
            raise
          logging.warning('Retrying %s due to Exception: %s',
                          func.__name__, traceback.format_exc())
          time.sleep(sleep_secs)
          sleep_secs *= backoff  # Wait longer the next time we fail.
    return wrapper
  return decorator
+
+
class PromiseCallback(object):
  """Executes the work of a Promise and then dereferences everything."""

  def __init__(self, promise, callback, *args, **kwargs):
    self.promise = promise
    self.callback = callback
    self.args = args
    self.kwargs = kwargs

  def __call__(self):
    try:
      self.promise._WorkOnPromise(self.callback, *self.args, **self.kwargs)
    finally:
      # Drop every reference so the Promise and its inputs can be garbage
      # collected even if something keeps this callback object alive.
      self.promise = None
      self.callback = None
      self.args = None
      self.kwargs = None
+
+
class Promise(object):
  """Class for promises to deliver a value in the future.

  A thread is started to run callback(args), that thread
  should return the value that it generates, or raise an exception.
  p.WaitAndGetValue() will block until a value is available.
  If an exception was raised, p.WaitAndGetValue() will re-raise the
  same exception.
  """

  def __init__(self, callback, *args, **kwargs):
    """Initialize the promise and immediately call the supplied function.

    Args:
      callback: Function that takes the args and returns the promise value.
      *args: Any arguments to the target function.
      **kwargs: Any keyword args for the target function.
    """

    # has_value/value/exception are written by the worker thread and read
    # by the waiting thread; event.set() publishes them.
    self.has_value = False
    self.value = None
    self.event = threading.Event()
    self.exception = None

    promise_callback = PromiseCallback(self, callback, *args, **kwargs)

    # Execute the callback in another thread.
    promise_thread = threading.Thread(target=promise_callback)
    promise_thread.start()

  def _WorkOnPromise(self, callback, *args, **kwargs):
    """Run callback to compute the promised value. Save any exceptions."""
    try:
      self.value = callback(*args, **kwargs)
    except Exception as e:
      trace_str = traceback.format_exc()
      logging.info('Exception while working on promise: %s\n', trace_str)
      # Add the stack trace at this point to the exception. That way, in the
      # logs, we can see what happened further up in the call stack
      # than WaitAndGetValue(), which re-raises exceptions.
      e.pre_promise_trace = trace_str
      self.exception = e
    finally:
      self.has_value = True
      self.event.set()

  def WaitAndGetValue(self):
    """Block until my value is available, then return it or raise exception."""
    self.event.wait()
    if self.exception:
      raise self.exception  # pylint: disable=raising-bad-type
    return self.value
+
+
def FormatAbsoluteURLForDomain(
    host, project_name, servlet_name, scheme='https', **kwargs):
  """A variant of FormatAbsoluteURL for when request objects are not available.

  Args:
    host: string with hostname and optional port, e.g. 'localhost:8080'.
    project_name: the destination project name, if any.
    servlet_name: site or project-local url fragment of dest page.
    scheme: url scheme, e.g., 'http' or 'https'.
    **kwargs: additional query string parameters may be specified as named
        arguments to this function.

  Returns:
    A full url beginning with 'http[s]://'.
  """
  path_and_args = FormatURL(None, servlet_name, **kwargs)

  if host:
    # Rewrite just the domain part, preserving any ':port' suffix.
    domain_parts = host.split(':')
    domain_parts[0] = GetPreferredDomain(domain_parts[0])
    host = ':'.join(domain_parts)

  absolute_domain_url = '%s://%s' % (scheme, host)
  if project_name:
    return '%s/p/%s%s' % (absolute_domain_url, project_name, path_and_args)
  return absolute_domain_url + path_and_args
+
+
def FormatAbsoluteURL(
    mr, servlet_name, include_project=True, project_name=None,
    scheme=None, copy_params=True, **kwargs):
  """Return an absolute URL to a servlet with old and new params.

  Args:
    mr: info parsed from the current request.
    servlet_name: site or project-local url fragment of dest page.
    include_project: if True, include the project home url as part of the
        destination URL (as long as it is specified either in mr
        or as the project_name param.)
    project_name: the destination project name, to override
        mr.project_name if include_project is True.
    scheme: either 'http' or 'https', to override mr.request.scheme.
    copy_params: if True, copy well-known parameters from the existing request.
    **kwargs: additional query string parameters may be specified as named
        arguments to this function.

  Returns:
    A full url beginning with 'http[s]://'. The destination URL will be in
    the same domain as the current request.
  """
  path_and_args = FormatURL(
      mr if copy_params else None, servlet_name, **kwargs)

  if include_project:
    project_base = '/p/%s' % (project_name or mr.project_name)
  else:
    project_base = ''

  return '%s://%s%s%s' % (
      scheme or mr.request.scheme, mr.request.host, project_base,
      path_and_args)
+
+
def FormatMovedProjectURL(mr, moved_to):
  """Return a transformation of the given url into the given project.

  Args:
    mr: common information parsed from the HTTP request.
    moved_to: A string from a project's moved_to field that matches
        framework_bizobj.RE_PROJECT_NAME.

  Returns:
    The url transposed into the given destination project.
  """
  parsed = urlparse.urlparse(mr.current_page_url)
  # Strip off the leading "/p/<moved from project>" path prefix.
  stripped_path = '/' + parsed.path.split('/', 3)[3]
  rest_of_url = urlparse.urlunparse(
      ('', '', stripped_path, parsed.params, parsed.query, parsed.fragment))
  return '/p/%s%s' % (moved_to, rest_of_url)
+
+
def FormatURL(mr, servlet_path, **kwargs):
  """Return a project relative URL to a servlet with old and new params."""
  # The 'id' param, when present, always comes first; standard params not
  # overridden in **kwargs come next; remaining kwargs follow in sorted
  # order. See bugs.chromium.org/p/monorail/issues/detail?id=374
  all_params = []
  if kwargs.get('id'):
    all_params.append(('id', kwargs['id']))
  if mr:
    all_params.extend(
        (name, mr.GetParam(name)) for name in RECOGNIZED_PARAMS
        if name not in kwargs)

  # 'id' was already added above, so skip it here.
  remaining_kwargs = [item for item in kwargs.items() if item[0] != 'id']
  all_params.extend(sorted(remaining_kwargs))
  return _FormatQueryString(servlet_path, all_params)
+
+
def _FormatQueryString(url, params):
  """URLencode a list of parameters and attach them to the end of a URL."""
  encoded_pairs = [
      '%s=%s' % (name, urllib.quote(unicode(value).encode('utf-8')))
      for name, value in params if value is not None]
  if not encoded_pairs:
    return url
  # Append with '&' if the URL already has a query string, else with '?'.
  separator = '&' if '?' in url else '?'
  return '%s%s%s' % (url, separator, '&'.join(encoded_pairs))
+
+
def WordWrapSuperLongLines(s, max_cols=100):
  """Reformat input that was not word-wrapped by the browser.

  Rather than wrap the whole thing, we only wrap super-long lines and keep
  all the reasonable lines formatted as-is.

  Args:
    s: the string to be word-wrapped, it may have embedded newlines.
    max_cols: int maximum line length.

  Returns:
    Wrapped text string.
  """
  wrapped_text = '\n'.join(
      textwrap.fill(line, max_cols) for line in s.splitlines())

  # splitlines()/join() can drop one final blank line; restore it.
  if s.endswith(('\n', '\r')):
    wrapped_text += '\n'

  return wrapped_text
+
+
def StaticCacheHeaders():
  """Returns HTTP headers for static content, based on the current time."""
  expiry_timestamp = int(time.time()) + framework_constants.SECS_PER_YEAR
  headers = [
      ('Cache-Control',
       'max-age=%d, private' % framework_constants.SECS_PER_YEAR),
      ('Last-Modified', timestr.TimeForHTMLHeader()),
      ('Expires', timestr.TimeForHTMLHeader(when=expiry_timestamp)),
      ]
  logging.info('static headers are %r', headers)
  return headers
+
+
def ComputeListDeltas(old_list, new_list):
  """Given an old and new list, return the items added and removed.

  Args:
    old_list: old list of values for comparison.
    new_list: new list of values for comparison.

  Returns:
    Two lists: one with all the values added (in new_list but was not
    in old_list), and one with all the values removed (not in new_list
    but was in old_list). Element order within each list is unspecified.
  """
  if old_list == new_list:
    return [], []  # A common case: nothing was added or removed.

  old_set = set(old_list)
  new_set = set(new_list)
  return list(new_set - old_set), list(old_set - new_set)
+
+
def GetRoleName(effective_ids, project):
  """Determines the name of the role a member has for a given project.

  Args:
    effective_ids: set of user IDs to get the role name for.
    project: Project PB containing the different member lists.

  Returns:
    The name of the highest role held by any of the given users, or None
    if none of them is a member.
  """
  role_checks = [
      ('Owner', project.owner_ids),
      ('Committer', project.committer_ids),
      ('Contributor', project.contributor_ids),
  ]
  for role_name, member_ids in role_checks:
    if not effective_ids.isdisjoint(member_ids):
      return role_name
  return None
+
+
class UserSettings(object):
  """Abstract class providing static methods for user settings forms."""

  @classmethod
  def GatherUnifiedSettingsPageData(
      cls, logged_in_user_id, settings_user_view, settings_user):
    """Gather EZT variables needed for the unified user settings form.

    Args:
      logged_in_user_id: The user ID of the acting user.
      settings_user_view: The UserView of the target user.
      settings_user: The User PB of the target user.

    Returns:
      A dictionary giving the names and values of all the variables to
      be exported to EZT to support the unified user settings form template.
    """

    def ActionLastReset(action_limit):
      """Return a formatted time string for the last action limit reset."""
      if action_limit:
        return time.asctime(time.localtime(action_limit.reset_timestamp))
      return 'Never'

    def DefaultLifetimeLimit(action_type):
      """Return the default lifetime limit for the given type of action."""
      return actionlimit.ACTION_LIMITS[action_type][3]

    def DefaultPeriodSoftLimit(action_type):
      """Return the default period soft limit for the given type of action."""
      return actionlimit.ACTION_LIMITS[action_type][1]

    def DefaultPeriodHardLimit(action_type):
      """Return the default period hard limit for the given type of action."""
      return actionlimit.ACTION_LIMITS[action_type][2]

    # Each '(pb and pb.value) or Default...' expression below falls back to
    # the system-wide default when the user has no stored limit PB, or when
    # the stored value is falsy (0/unset).
    project_creation_lifetime_limit = (
        (settings_user.project_creation_limit and
         settings_user.project_creation_limit.lifetime_limit) or
        DefaultLifetimeLimit(actionlimit.PROJECT_CREATION))
    project_creation_soft_limit = (
        (settings_user.project_creation_limit and
         settings_user.project_creation_limit.period_soft_limit) or
        DefaultPeriodSoftLimit(actionlimit.PROJECT_CREATION))
    project_creation_hard_limit = (
        (settings_user.project_creation_limit and
         settings_user.project_creation_limit.period_hard_limit) or
        DefaultPeriodHardLimit(actionlimit.PROJECT_CREATION))
    issue_comment_lifetime_limit = (
        (settings_user.issue_comment_limit and
         settings_user.issue_comment_limit.lifetime_limit) or
        DefaultLifetimeLimit(actionlimit.ISSUE_COMMENT))
    issue_comment_soft_limit = (
        (settings_user.issue_comment_limit and
         settings_user.issue_comment_limit.period_soft_limit) or
        DefaultPeriodSoftLimit(actionlimit.ISSUE_COMMENT))
    issue_comment_hard_limit = (
        (settings_user.issue_comment_limit and
         settings_user.issue_comment_limit.period_hard_limit) or
        DefaultPeriodHardLimit(actionlimit.ISSUE_COMMENT))
    issue_attachment_lifetime_limit = (
        (settings_user.issue_attachment_limit and
         settings_user.issue_attachment_limit.lifetime_limit) or
        DefaultLifetimeLimit(actionlimit.ISSUE_ATTACHMENT))
    issue_attachment_soft_limit = (
        (settings_user.issue_attachment_limit and
         settings_user.issue_attachment_limit.period_soft_limit) or
        DefaultPeriodSoftLimit(actionlimit.ISSUE_ATTACHMENT))
    issue_attachment_hard_limit = (
        (settings_user.issue_attachment_limit and
         settings_user.issue_attachment_limit.period_hard_limit) or
        DefaultPeriodHardLimit(actionlimit.ISSUE_ATTACHMENT))
    issue_bulk_edit_lifetime_limit = (
        (settings_user.issue_bulk_edit_limit and
         settings_user.issue_bulk_edit_limit.lifetime_limit) or
        DefaultLifetimeLimit(actionlimit.ISSUE_BULK_EDIT))
    issue_bulk_edit_soft_limit = (
        (settings_user.issue_bulk_edit_limit and
         settings_user.issue_bulk_edit_limit.period_soft_limit) or
        DefaultPeriodSoftLimit(actionlimit.ISSUE_BULK_EDIT))
    issue_bulk_edit_hard_limit = (
        (settings_user.issue_bulk_edit_limit and
         settings_user.issue_bulk_edit_limit.period_hard_limit) or
        DefaultPeriodHardLimit(actionlimit.ISSUE_BULK_EDIT))
    api_request_lifetime_limit = (
        (settings_user.api_request_limit and
         settings_user.api_request_limit.lifetime_limit) or
        DefaultLifetimeLimit(actionlimit.API_REQUEST))
    api_request_soft_limit = (
        (settings_user.api_request_limit and
         settings_user.api_request_limit.period_soft_limit) or
        DefaultPeriodSoftLimit(actionlimit.API_REQUEST))
    api_request_hard_limit = (
        (settings_user.api_request_limit and
         settings_user.api_request_limit.period_hard_limit) or
        DefaultPeriodHardLimit(actionlimit.API_REQUEST))

    return {
        'settings_user': settings_user_view,
        'settings_user_pb': template_helpers.PBProxy(settings_user),
        'settings_user_is_banned': ezt.boolean(settings_user.banned),
        'settings_user_ignore_action_limits': (
            ezt.boolean(settings_user.ignore_action_limits)),
        'self': ezt.boolean(logged_in_user_id == settings_user_view.user_id),
        'project_creation_reset': (
            ActionLastReset(settings_user.project_creation_limit)),
        'issue_comment_reset': (
            ActionLastReset(settings_user.issue_comment_limit)),
        'issue_attachment_reset': (
            ActionLastReset(settings_user.issue_attachment_limit)),
        'issue_bulk_edit_reset': (
            ActionLastReset(settings_user.issue_bulk_edit_limit)),
        'api_request_reset': (
            ActionLastReset(settings_user.api_request_limit)),
        'project_creation_lifetime_limit': project_creation_lifetime_limit,
        'project_creation_soft_limit': project_creation_soft_limit,
        'project_creation_hard_limit': project_creation_hard_limit,
        'issue_comment_lifetime_limit': issue_comment_lifetime_limit,
        'issue_comment_soft_limit': issue_comment_soft_limit,
        'issue_comment_hard_limit': issue_comment_hard_limit,
        'issue_attachment_lifetime_limit': issue_attachment_lifetime_limit,
        'issue_attachment_soft_limit': issue_attachment_soft_limit,
        'issue_attachment_hard_limit': issue_attachment_hard_limit,
        'issue_bulk_edit_lifetime_limit': issue_bulk_edit_lifetime_limit,
        'issue_bulk_edit_soft_limit': issue_bulk_edit_soft_limit,
        'issue_bulk_edit_hard_limit': issue_bulk_edit_hard_limit,
        'api_request_lifetime_limit': api_request_lifetime_limit,
        'api_request_soft_limit': api_request_soft_limit,
        'api_request_hard_limit': api_request_hard_limit,
        'profile_url_fragment': (
            settings_user_view.profile_url[len('/u/'):]),
        'preview_on_hover': ezt.boolean(settings_user.preview_on_hover),
        }

  @classmethod
  def ProcessSettingsForm(
      cls, cnxn, user_service, post_data, user_id, user, admin=False):
    """Process the posted form data from the unified user settings form.

    Args:
      cnxn: connection to the SQL database.
      user_service: An instance of UserService for saving changes.
      post_data: The parsed post data from the form submission request.
      user_id: The user id of the target user.
      user: The user PB of the target user.
      admin: Whether settings reserved for admins are supported.
    """
    obscure_email = 'obscure_email' in post_data

    kwargs = {}
    if admin:
      # Ban state and action-limit exemptions may only be set by admins.
      kwargs.update(is_site_admin='site_admin' in post_data,
                    ignore_action_limits='ignore_action_limits' in post_data)
      kwargs.update(is_banned='banned' in post_data,
                    banned_reason=post_data.get('banned_reason', ''))

    # action limits
    action_limit_updates = {}
    for action_name in actionlimit.ACTION_TYPE_NAMES.iterkeys():
      reset_input = 'reset_' + action_name
      lifetime_input = action_name + '_lifetime_limit'
      soft_input = action_name + '_soft_limit'
      hard_input = action_name + '_hard_limit'
      pb_getter = action_name + '_limit'
      old_lifetime_limit = getattr(user, pb_getter).lifetime_limit
      old_soft_limit = getattr(user, pb_getter).period_soft_limit
      old_hard_limit = getattr(user, pb_getter).period_hard_limit

      # Try and get the new limit from post data.
      # If the user doesn't use an integer, act as if no change requested.
      def _GetLimit(post_data, limit_input, old_limit):
        try:
          new_limit = int(post_data[limit_input])
        except (KeyError, ValueError):
          new_limit = old_limit
        return new_limit

      new_lifetime_limit = _GetLimit(post_data, lifetime_input,
                                     old_lifetime_limit)
      new_soft_limit = _GetLimit(post_data, soft_input,
                                 old_soft_limit)
      new_hard_limit = _GetLimit(post_data, hard_input,
                                 old_hard_limit)

      # Record an update only when some limit actually changed to a
      # non-negative value; otherwise honor an explicit reset request.
      if ((new_lifetime_limit >= 0 and
           new_lifetime_limit != old_lifetime_limit) or
          (new_soft_limit >= 0 and new_soft_limit != old_soft_limit) or
          (new_hard_limit >= 0 and new_hard_limit != old_hard_limit)):
        action_limit_updates[action_name] = (
            new_soft_limit, new_hard_limit, new_lifetime_limit)
      elif reset_input in post_data:
        # None means: clear any custom limits back to the defaults.
        action_limit_updates[action_name] = None
    kwargs.update(action_limit_updates=action_limit_updates)

    user_service.UpdateUserSettings(
        cnxn, user_id, user, notify='notify' in post_data,
        notify_starred='notify_starred' in post_data,
        preview_on_hover='preview_on_hover' in post_data,
        obscure_email=obscure_email, **kwargs)
+
+
def GetHostPort():
  """Get string domain name and port number."""

  app_id = app_identity.get_application_id()
  # App ids may be qualified as '<domain>:<app_id>' for apps belonging to a
  # Google Apps domain; split the domain prefix off when present.
  if ':' in app_id:
    domain, app_id = app_id.split(':')
  else:
    domain = ''

  # NOTE(review): assumes apps under a 'google*' domain are served from
  # googleplex.com — confirm against the deployment configuration.
  if domain.startswith('google'):
    hostport = '%s.googleplex.com' % app_id
  else:
    hostport = '%s.appspot.com' % app_id

  return GetPreferredDomain(hostport)
+
+
def IssueCommentURL(hostport, project, local_id, seq_num=None):
  """Return a URL pointing directly to the specified comment.

  Args:
    hostport: string domain name (and port) used to build the absolute URL.
    project: Project PB of the project containing the issue.
    local_id: int local ID of the issue within the project.
    seq_num: optional int comment sequence number; when truthy, a '#c<n>'
      fragment is appended so the browser jumps to that comment.

  Returns:
    Absolute URL string for the issue detail page, with a comment anchor
    when seq_num is given.
  """
  detail_url = FormatAbsoluteURLForDomain(
      hostport, project.project_name, urls.ISSUE_DETAIL, id=local_id)
  if seq_num:
    detail_url += '#c%d' % seq_num

  return detail_url
+
+
def MurmurHash3_x86_32(key, seed=0x0):
  """Implements the x86/32-bit version of Murmur Hash 3.0.

  MurmurHash3 is written by Austin Appleby, and is placed in the public
  domain. See https://code.google.com/p/smhasher/ for details.

  This pure python implementation of the x86/32 bit version of MurmurHash3 is
  written by Fredrik Kihlander and also placed in the public domain.
  See https://github.com/wc-duck/pymmh3 for details.

  The MurmurHash3 algorithm is chosen for these reasons:
  * It is fast, even when implemented in pure python.
  * It is remarkably well distributed, and unlikely to cause collisions.
  * It is stable and unchanging (any improvements will be in MurmurHash4).
  * It is well-tested, and easily usable in other contexts (such as bulk
    data imports).

  Args:
    key (string): the data that you want hashed
    seed (int): An offset, treated as essentially part of the key.

  Returns:
    A 32-bit integer (can be interpreted as either signed or unsigned).
  """
  # Hash the UTF-8 byte representation so unicode input is well-defined.
  key = bytearray(key.encode('utf-8'))

  def fmix(h):
    """Final avalanche mix: spreads entropy across all 32 bits."""
    h ^= h >> 16
    h = (h * 0x85ebca6b) & 0xFFFFFFFF
    h ^= h >> 13
    h = (h * 0xc2b2ae35) & 0xFFFFFFFF
    h ^= h >> 16
    return h

  length = len(key)
  # Fixed: use floor division instead of int(length / 4); identical for
  # non-negative lengths and avoids a float round-trip on Python 3.
  nblocks = length // 4

  h1 = seed

  c1 = 0xcc9e2d51
  c2 = 0x1b873593

  # body: consume the key four bytes at a time, little-endian.
  # Fixed: range instead of Python-2-only xrange; removed stray semicolons.
  for block_start in range(0, nblocks * 4, 4):
    k1 = (key[block_start + 3] << 24 |
          key[block_start + 2] << 16 |
          key[block_start + 1] << 8 |
          key[block_start + 0])

    k1 = (c1 * k1) & 0xFFFFFFFF
    k1 = (k1 << 15 | k1 >> 17) & 0xFFFFFFFF  # ROTL32(k1, 15)
    k1 = (c2 * k1) & 0xFFFFFFFF

    h1 ^= k1
    h1 = (h1 << 13 | h1 >> 19) & 0xFFFFFFFF  # ROTL32(h1, 13)
    h1 = (h1 * 5 + 0xe6546b64) & 0xFFFFFFFF

  # tail: mix in the 0-3 leftover bytes (fall-through, like the C version).
  tail_index = nblocks * 4
  k1 = 0
  tail_size = length & 3

  if tail_size >= 3:
    k1 ^= key[tail_index + 2] << 16
  if tail_size >= 2:
    k1 ^= key[tail_index + 1] << 8
  if tail_size >= 1:
    k1 ^= key[tail_index + 0]

  if tail_size != 0:
    k1 = (k1 * c1) & 0xFFFFFFFF
    k1 = (k1 << 15 | k1 >> 17) & 0xFFFFFFFF  # ROTL32(k1, 15)
    k1 = (k1 * c2) & 0xFFFFFFFF
    h1 ^= k1

  # finalization: fold in the length and avalanche.
  return fmix(h1 ^ length)
+
+
def MakeRandomKey(length=RANDOM_KEY_LENGTH, chars=RANDOM_KEY_CHARACTERS):
  """Return a string with lots of random characters.

  Args:
    length: int number of characters to generate.
    chars: string alphabet to draw characters from.

  Returns:
    A random string of the requested length.
  """
  # Note: random.choice is not cryptographically strong; suitable for
  # non-security keys only.
  chars = [random.choice(chars) for _ in range(length)]
  return ''.join(chars)
+
+
def IsServiceAccount(email):
  """Return a boolean value whether this email is a service account."""
  # Any GAE service-account address qualifies immediately.
  if email.endswith('gserviceaccount.com'):
    return True
  # Otherwise check the allowlisted client emails from the client config.
  _, client_emails = (
      client_config_svc.GetClientConfigSvc().GetClientIDEmails())
  return email in client_emails
+
+
def GetPreferredDomain(domain):
  """Get preferred domain to display.

  The preferred domain replaces app_id for default version of monorail-prod
  and monorail-staging.

  Args:
    domain: string domain to look up, e.g. 'monorail-prod.appspot.com'.

  Returns:
    The configured preferred domain, or the input domain unchanged when no
    mapping exists in settings.preferred_domains.
  """
  return settings.preferred_domains.get(domain, domain)
diff --git a/appengine/monorail/framework/framework_views.py b/appengine/monorail/framework/framework_views.py
new file mode 100644
index 0000000..2b9453c
--- /dev/null
+++ b/appengine/monorail/framework/framework_views.py
@@ -0,0 +1,214 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""View classes to make it easy to display framework objects in EZT."""
+
+from third_party import ezt
+
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import permissions
+from framework import template_helpers
+from services import client_config_svc
+import settings
+
+
+_LABEL_DISPLAY_CHARS = 30
+_LABEL_PART_DISPLAY_CHARS = 15
+
+
class LabelView(object):
  """Wrapper class that makes it easier to display a label via EZT."""

  def __init__(self, label, config):
    """Make several values related to this label available as attrs.

    Args:
      label: artifact label string. E.g., 'Priority-High' or 'Frontend'.
      config: PB with a well_known_labels list, or None.
    """
    self.name = label
    self.tooltip = label
    self.is_restrict = ezt.boolean(permissions.IsRestrictLabel(label))

    # Pick up the docstring from the project's well-known labels, if any
    # matches case-insensitively.
    self.docstring = ''
    if config:
      for wkl in config.well_known_labels:
        if label.lower() == wkl.label.lower():
          self.docstring = wkl.label_docstring

    # Split 'Key-Value' labels so templates can render the parts separately;
    # one-word labels get an empty prefix.
    if '-' in label:
      self.prefix, self.value = label.split('-', 1)
    else:
      self.prefix, self.value = '', label
+
+
class StatusView(object):
  """Wrapper class that makes it easier to display a status via EZT."""

  def __init__(self, status, config):
    """Make several values related to this status available as attrs.

    Args:
      status: artifact status string. E.g., 'New' or 'Accepted'.
      config: PB with a well_known_statuses list, or None.
    """

    self.name = status
    self.tooltip = status

    # Statuses not found among the well-known ones default to "open".
    self.docstring = ''
    self.means_open = ezt.boolean(True)
    if config:
      for wks in config.well_known_statuses:
        if status.lower() == wks.status.lower():
          self.docstring = wks.status_docstring
          self.means_open = ezt.boolean(wks.means_open)
+
+
class UserView(object):
  """Wrapper class to easily display basic user information in a template."""

  def __init__(self, user_id, email, obscure_email):
    """Initialize the view from raw user data.

    Args:
      user_id: int user ID of the user being viewed.
      email: string email address, or None/'' for a deleted user.
      obscure_email: whether the user chose to obscure their address.
    """
    email = email or ''
    self.user_id = user_id
    self.email = email
    self.profile_url = '/u/%s/' % user_id
    self.obscure_email = obscure_email
    self.banned = ''  # Empty string reads as false in EZT templates.

    (self.username, self.domain,
     self.obscured_username) = ParseAndObscureAddress(email)
    # No need to obfuscate or reveal client email.
    # Instead display a human-readable username.
    if not self.email:
      self.display_name = 'a deleted user'
      self.obscure_email = ''
      self.profile_url = ''
    elif self.email in client_config_svc.GetServiceAccountMap():
      self.display_name = client_config_svc.GetServiceAccountMap()[self.email]
    elif not self.obscure_email:
      self.display_name = email
    else:
      self.display_name = '%s...@%s' % (self.obscured_username, self.domain)

  def RevealEmail(self):
    """Switch this view to show the full, unobscured email address."""
    if not self.email:
      return  # Deleted users have nothing to reveal.
    # Service accounts keep their human-readable display name.
    if self.email not in client_config_svc.GetServiceAccountMap():
      self.obscure_email = False
      self.display_name = self.email
      self.profile_url = '/u/%s/' % self.email
+
+
def MakeAllUserViews(cnxn, user_service, *list_of_user_id_lists):
  """Make a dict {user_id: user_view, ...} for all user IDs given.

  Args:
    cnxn: connection to the SQL database.
    user_service: UserService used to fetch the User PBs.
    *list_of_user_id_lists: any number of iterables of user IDs; they are
      merged and de-duplicated before one batched lookup.

  Returns:
    Dict mapping each distinct user ID to a UserView.
  """
  distinct_user_ids = set()
  distinct_user_ids.update(*list_of_user_id_lists)
  user_dict = user_service.GetUsersByIDs(cnxn, distinct_user_ids)
  return {user_id: UserView(user_id, user_pb.email, user_pb.obscure_email)
          for user_id, user_pb in user_dict.iteritems()}
+
+
def MakeUserView(cnxn, user_service, user_id):
  """Make a UserView for the given user ID.

  Args:
    cnxn: connection to the SQL database.
    user_service: UserService used to fetch the User PB.
    user_id: int user ID of the user to view.

  Returns:
    A UserView for the requested user.
  """
  user = user_service.GetUser(cnxn, user_id)
  return UserView(user_id, user.email, user.obscure_email)
+
+
def ParseAndObscureAddress(email):
  """Break the given email into username and domain, and obscure.

  Args:
    email: string email address to process

  Returns:
    A 3-tuple (username, domain, obscured_username).
    The obscured_username is truncated the same way that Google Groups does
    it: at most 8 characters are kept, and at least 1.
  """
  # str.partition handles addresses without an '@' gracefully: the domain
  # comes back as '', matching the previous split()-based logic.  Don't fail
  # if the User table has an unexpected email address format.
  username, _, user_domain = email.partition('@')

  # Ignore any '+tag' suffix when computing the obscured form.
  base_username = username.split('+')[0]
  cutoff_point = min(8, max(1, len(base_username) - 3))
  obscured_username = base_username[:cutoff_point]

  return username, user_domain, obscured_username
+
+
def _ShouldRevealEmail(auth, project, viewed_email):
  """Decide whether to publish a user's email address.

  Args:
    auth: The AuthData of the user viewing the email addresses.
    project: The project to which the viewed users belong.
    viewed_email: The email of the viewed user.

  Returns:
    True if email addresses should be published to the logged-in user.
  """
  # Case 1: Anon users don't see anything revealed.
  if auth.user_pb is None:
    return False

  # Case 2: site admins always see unobscured email addresses.
  if auth.user_pb.is_site_admin:
    return True

  # Case 3: Domain users in same-org-only projects always see unobscured addrs.
  # TODO(jrobbins): re-implement same_org

  # Case 4: Project members see the unobscured email of everyone in a project.
  if project and framework_bizobj.UserIsInProject(project, auth.effective_ids):
    return True

  # Case 5: Emails that end in privileged user domains see unobscured email
  # addresses.
  if framework_bizobj.IsPriviledgedDomainUser(auth.user_pb.email):
    return True

  # Case 6: Do not obscure your own email.
  if viewed_email and auth.user_pb.email == viewed_email:
    return True

  return False
+
+
def RevealAllEmailsToMembers(mr, users_by_id):
  """Allow project members to see unobscured email addresses in that project.

  Non project member addresses will be obscured.
  Site admins can see all email addresses unobscured.

  Args:
    mr: common info parsed from the user's request.
    users_by_id: dictionary of UserView's that will be displayed.

  Returns:
    Nothing, but the UserViews in users_by_id may be modified to
    publish email address.
  """
  # Mutates each qualifying UserView in place via RevealEmail().
  for user_view in users_by_id.itervalues():
    if _ShouldRevealEmail(mr.auth, mr.project, user_view.email):
      user_view.RevealEmail()
+
+
def RevealAllEmails(users_by_id):
  """Allow anyone to see unobscured email addresses of project members.

  The modified view objects should only be used to generate views for other
  project members.

  Args:
    users_by_id: dictionary of UserViews that will be displayed.

  Returns:
    Nothing, but the UserViews in users_by_id may be modified to
    publish email address.
  """
  # Unconditional reveal: callers are responsible for only showing these
  # views to users who are allowed to see them.
  for user_view in users_by_id.itervalues():
    user_view.RevealEmail()
diff --git a/appengine/monorail/framework/gcs_helpers.py b/appengine/monorail/framework/gcs_helpers.py
new file mode 100644
index 0000000..843eac9
--- /dev/null
+++ b/appengine/monorail/framework/gcs_helpers.py
@@ -0,0 +1,118 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Set of helpers for interacting with Google Cloud Storage."""
+
+import base64
+import logging
+import os
+import time
+import urllib
+import uuid
+
+from datetime import datetime, timedelta
+
+from google.appengine.api import app_identity
+from google.appengine.api import images
+from third_party import cloudstorage
+
+from framework import filecontent
+
+
+ATTACHMENT_TTL = timedelta(seconds=30)
+
+IS_DEV_APPSERVER = (
+ 'development' in os.environ.get('SERVER_SOFTWARE', '').lower())
+
+RESIZABLE_MIME_TYPES = ['image/png', 'image/jpg', 'image/jpeg', 'image/gif']
+
+DEFAULT_THUMB_WIDTH = 250
+DEFAULT_THUMB_HEIGHT = 200
+LOGO_THUMB_WIDTH = 110
+LOGO_THUMB_HEIGHT = 30
+
+
def _Now():
  """Return the current time as a naive UTC datetime."""
  # NOTE(review): presumably wrapped in a helper so tests can stub the
  # clock — confirm before inlining.
  return datetime.utcnow()
+
+
class UnsupportedMimeType(Exception):
  """Raised when an uploaded file's MIME type cannot be resized as an image."""
  pass
+
+
def DeleteObjectFromGCS(object_id):
  """Delete the object with the given id from the app's default GCS bucket."""
  # object_id already starts with '/', e.g. '/<project>/attachments/<guid>'.
  object_path = ('/' + app_identity.get_default_gcs_bucket_name() + object_id)
  cloudstorage.delete(object_path)
+
+
def StoreObjectInGCS(
    content, mime_type, project_id, thumb_width=DEFAULT_THUMB_WIDTH,
    thumb_height=DEFAULT_THUMB_HEIGHT):
  """Store an attachment blob (and a thumbnail, if an image) in GCS.

  Args:
    content: byte string content of the object to store.
    mime_type: string MIME type, stored as the GCS content type.
    project_id: ID of the project the attachment belongs to.
    thumb_width: int pixel width of the generated thumbnail.
    thumb_height: int pixel height of the generated thumbnail.

  Returns:
    The object id '/<project_id>/attachments/<guid>' (bucket name excluded).
  """
  bucket_name = app_identity.get_default_gcs_bucket_name()
  guid = uuid.uuid4()
  object_id = '/%s/attachments/%s' % (project_id, guid)
  object_path = '/' + bucket_name + object_id
  with cloudstorage.open(object_path, 'w', mime_type) as f:
    f.write(content)

  if mime_type in RESIZABLE_MIME_TYPES:
    # Create and save a thumbnail too.
    thumb_content = None
    try:
      thumb_content = images.resize(content, thumb_width, thumb_height)
    except Exception, e:
      # Do not raise exception for incorrectly formed images.
      # See https://bugs.chromium.org/p/monorail/issues/detail?id=597 for more
      # detail.
      logging.exception(e)
    if thumb_content:
      # Thumbnails are stored next to the original with a '-thumbnail' suffix.
      thumb_path = '%s-thumbnail' % object_path
      with cloudstorage.open(thumb_path, 'w', 'image/png') as f:
        f.write(thumb_content)

  return object_id
+
+
def CheckMimeTypeResizable(mime_type):
  """Raise UnsupportedMimeType unless mime_type is a resizable image type."""
  if mime_type not in RESIZABLE_MIME_TYPES:
    raise UnsupportedMimeType(
        'Please upload a logo with one of the following mime types:\n%s' %
        ', '.join(RESIZABLE_MIME_TYPES))
+
+
def StoreLogoInGCS(file_name, content, project_id):
  """Store a project logo in GCS after validating its MIME type.

  Args:
    file_name: string uploaded file name, used to guess the MIME type.
    content: byte string image content.
    project_id: ID of the project the logo belongs to.

  Returns:
    The GCS object id of the stored logo.

  Raises:
    UnsupportedMimeType: if the file name implies a non-resizable type.
  """
  mime_type = filecontent.GuessContentTypeFromFilename(file_name)
  CheckMimeTypeResizable(mime_type)
  if '\\' in file_name:  # IE insists on giving us the whole path.
    file_name = file_name[file_name.rindex('\\') + 1:]
  # NOTE(review): the trimmed file_name is not used below — possibly dead
  # code; confirm.
  return StoreObjectInGCS(
      content, mime_type, project_id, thumb_width=LOGO_THUMB_WIDTH,
      thumb_height=LOGO_THUMB_HEIGHT)
+
+
def SignUrl(gcs_filename):
  """Return a signed URL granting temporary read access to a GCS object.

  Args:
    gcs_filename: string '/bucket/object' path of the GCS object.

  Returns:
    A signed storage.googleapis.com URL (or a local /_ah/gcs URL on the
    dev appserver) that expires after ATTACHMENT_TTL.
  """
  expiration_dt = _Now() + ATTACHMENT_TTL
  # NOTE(review): time.mktime interprets the tuple as local time while
  # _Now() is UTC, so the effective TTL may be off by the UTC offset —
  # confirm.
  expiration = int(time.mktime(expiration_dt.timetuple()))
  # String-to-sign layout per the GCS signed-URL spec:
  # verb, MD5, content-type, expiration, resource path.
  signature_string = '\n'.join([
      'GET',
      '',  # Optional MD5, which we don't have.
      '',  # Optional content-type, which only applies to uploads.
      str(expiration),
      gcs_filename]).encode('utf-8')

  signature_bytes = app_identity.sign_blob(signature_string)[1]

  query_params = {'GoogleAccessId': app_identity.get_service_account_name(),
                  'Expires': str(expiration),
                  'Signature': base64.b64encode(signature_bytes)}

  result = 'https://storage.googleapis.com{resource}?{querystring}'

  if IS_DEV_APPSERVER:
    result = '/_ah/gcs{resource}?{querystring}'

  return result.format(
      resource=gcs_filename, querystring=urllib.urlencode(query_params))
+
diff --git a/appengine/monorail/framework/grid_view_helpers.py b/appengine/monorail/framework/grid_view_helpers.py
new file mode 100644
index 0000000..833d2ae
--- /dev/null
+++ b/appengine/monorail/framework/grid_view_helpers.py
@@ -0,0 +1,275 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions for displaying grids of project artifacts.
+
+A grid is a two-dimensional display of items where the user can choose
+the X and Y axes.
+"""
+
+import collections
+import logging
+
+from framework import framework_constants
+from framework import sorting
+from framework import template_helpers
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
+# We shorten long attribute values to fit into the table cells.
+_MAX_CELL_DISPLAY_CHARS = 70
+
+
def SortGridHeadings(col_name, heading_value_list, users_by_id, config,
                     asc_accessors):
  """Sort the grid headings according to well-known status and label order.

  Args:
    col_name: String column name that is used on that grid axis.
    heading_value_list: List of grid row or column heading values.
    users_by_id: Dict mapping user_ids to UserViews.
    config: ProjectIssueConfig PB for the current project.
    asc_accessors: Dict (col_name -> function()) for special columns.

  Returns:
    The same heading values, but sorted in a logical order.
  """
  # Decorate-sort-undecorate: build (sort_key, value) pairs, sort, strip.
  decorated_list = []
  fd = tracker_bizobj.FindFieldDef(col_name, config)
  if fd:  # Handle fields.
    for value in heading_value_list:
      field_value = tracker_bizobj.GetFieldValueWithRawValue(
          fd.field_type, None, users_by_id, value)
      decorated_list.append([field_value, field_value])
  elif col_name == 'status':
    # Well-known statuses sort by their configured position.
    wk_statuses = [wks.status.lower()
                   for wks in config.well_known_statuses]
    decorated_list = [(_WKSortingValue(value.lower(), wk_statuses), value)
                      for value in heading_value_list]

  elif col_name in asc_accessors:  # Special cols still sort alphabetically.
    decorated_list = [(value, value)
                      for value in heading_value_list]

  else:  # Anything else is assumed to be a label prefix
    wk_labels = [wkl.label.lower().split('-', 1)[-1]
                 for wkl in config.well_known_labels]
    decorated_list = [(_WKSortingValue(value.lower(), wk_labels), value)
                      for value in heading_value_list]

  decorated_list.sort()
  result = [decorated_tuple[1] for decorated_tuple in decorated_list]
  logging.info('Headers for %s are: %r', col_name, result)
  return result
+
+
def _WKSortingValue(value, well_known_list):
  """Return a value used to sort headings so that well-known ones are first."""
  if not value:
    return sorting.MAX_STRING  # Undefined values sort last.
  try:
    # well-known values sort by index
    return well_known_list.index(value)
  except ValueError:
    # odd-ball values lexicographically after all well-known ones.
    # NOTE: mixing int and str sort keys relies on Python 2 ordering.
    return value
+
+
def MakeGridData(
    artifacts, x_attr, x_headings, y_attr, y_headings, users_by_id,
    artifact_view_factory, all_label_values, config):
  """Return a list of grid row items for display by EZT.

  Args:
    artifacts: a list of issues to consider showing.
    x_attr: lowercase name of the attribute that defines the x-axis.
    x_headings: list of values for column headings.
    y_attr: lowercase name of the attribute that defines the y-axis.
    y_headings: list of values for row headings.
    users_by_id: dict {user_id: user_view, ...} for referenced users.
    artifact_view_factory: constructor for grid tiles.
    all_label_values: pre-parsed dictionary of values from the key-value
        labels on each issue: {issue_id: {key: [val,...], ...}, ...}
    config: ProjectIssueConfig PB for the current project.

  Returns:
    A list of EZTItems, each representing one grid row, and each having
    a nested list of grid cells.

    Each grid row has a row name, and a list of cells. Each cell has a
    list of tiles. Each tile represents one artifact. Artifacts are
    represented once in each cell that they match, so one artifact that
    has multiple values for a certain attribute can occur in multiple cells.
  """
  x_attr = x_attr.lower()
  y_attr = y_attr.lower()

  # A flat dictionary {(x, y): [cell, ...], ...} for the whole grid.
  x_y_data = collections.defaultdict(list)

  # Put each issue into the grid cell(s) where it belongs.
  for art in artifacts:
    label_value_dict = all_label_values[art.local_id]
    x_vals = GetArtifactAttr(
        art, x_attr, users_by_id, label_value_dict, config)
    y_vals = GetArtifactAttr(
        art, y_attr, users_by_id, label_value_dict, config)
    tile = artifact_view_factory(art)

    # Put the current issue into each cell where it belongs, which will usually
    # be exactly 1 cell, but it could be a few.  '--' means "axis unused".
    if x_attr != '--' and y_attr != '--':  # User specified both axes.
      for x in x_vals:
        for y in y_vals:
          x_y_data[x, y].append(tile)
    elif y_attr != '--':  # User only specified Y axis.
      for y in y_vals:
        x_y_data['All', y].append(tile)
    elif x_attr != '--':  # User only specified X axis.
      for x in x_vals:
        x_y_data[x, 'All'].append(tile)
    else:  # User specified neither axis.
      x_y_data['All', 'All'].append(tile)

  # Convert the dictionary to a list-of-lists so that EZT can iterate over it.
  grid_data = []
  for y in y_headings:
    cells_in_row = []
    for x in x_headings:
      tiles = x_y_data[x, y]

      # Each cell carries a query that narrows the search to that cell.
      drill_down = ''
      if x_attr != '--':
        drill_down = MakeDrillDownSearch(x_attr, x)
      if y_attr != '--':
        drill_down += MakeDrillDownSearch(y_attr, y)

      cells_in_row.append(template_helpers.EZTItem(
          tiles=tiles, count=len(tiles), drill_down=drill_down))
    grid_data.append(template_helpers.EZTItem(
        grid_y_heading=y, cells_in_row=cells_in_row))

  return grid_data
+
+
def MakeDrillDownSearch(attr, value):
  """Constructs search term for drill-down.

  Args:
    attr: lowercase name of the attribute to narrow the search on.
    value: value to narrow the search to.

  Returns:
    String with user-query term to narrow a search to the given attr value.
  """
  # The "no value" heading maps to a negated has: query.
  if value == framework_constants.NO_VALUES:
    return '-has:%s ' % attr
  else:
    return '%s=%s ' % (attr, value)
+
+
def MakeLabelValuesDict(art):
  """Return a dict of label values and a list of one-word labels.

  Args:
    art: artifact object, e.g., an issue PB.

  Returns:
    A dict {prefix: [suffix,...], ...} for each key-value label.
    One-word labels (no '-') are not included.
  """
  label_values = collections.defaultdict(list)
  for label_name in tracker_bizobj.GetLabels(art):
    if '-' in label_name:
      # Only the first '-' splits key from value; values may contain dashes.
      key, value = label_name.split('-', 1)
      label_values[key.lower()].append(value)

  return label_values
+
+
def GetArtifactAttr(
    art, attribute_name, users_by_id, label_attr_values_dict, config):
  """Return the requested attribute values of the given artifact.

  Args:
    art: a tracked artifact with labels, local_id, summary, stars, and owner.
    attribute_name: lowercase string name of attribute to get.
    users_by_id: dictionary of UserViews already created.
    label_attr_values_dict: dictionary {'key': [value, ...], }.
    config: ProjectIssueConfig PB for the current project.

  Returns:
    A list of string attribute values, or [framework_constants.NO_VALUES]
    if the artifact has no value for that attribute.
  """
  # '--' means "axis unused": no values at all.
  if attribute_name == '--':
    return []
  # Built-in single-valued attributes.
  if attribute_name == 'id':
    return [art.local_id]
  if attribute_name == 'summary':
    return [art.summary]
  if attribute_name == 'status':
    return [tracker_bizobj.GetStatus(art)]
  if attribute_name == 'stars':
    return [art.star_count]
  if attribute_name == 'attachments':
    return [art.attachment_count]
  # TODO(jrobbins): support blocked on, blocking, and mergedinto.
  if attribute_name == 'reporter':
    return [users_by_id[art.reporter_id].display_name]
  if attribute_name == 'owner':
    owner_id = tracker_bizobj.GetOwnerId(art)
    if not owner_id:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[owner_id].display_name]
  if attribute_name == 'cc':
    cc_ids = tracker_bizobj.GetCcIds(art)
    if not cc_ids:
      return [framework_constants.NO_VALUES]
    else:
      return [users_by_id[cc_id].display_name for cc_id in cc_ids]
  if attribute_name == 'component':
    # Both direct and derived components count.
    comp_ids = list(art.component_ids) + list(art.derived_component_ids)
    if not comp_ids:
      return [framework_constants.NO_VALUES]
    else:
      paths = []
      for comp_id in comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
        if cd:
          paths.append(cd.path)
      return paths

  # Check to see if it is a field. Process as field only if it is not an enum
  # type because enum types are stored as key-value labels.
  fd = tracker_bizobj.FindFieldDef(attribute_name, config)
  if fd and fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
    values = []
    for fv in art.field_values:
      if fv.field_id == fd.field_id:
        value = tracker_bizobj.GetFieldValueWithRawValue(
            fd.field_type, fv, users_by_id, None)
        values.append(value)
    return values

  # Since it is not a built-in attribute or a field, it must be a key-value
  # label.
  return label_attr_values_dict.get(
      attribute_name, [framework_constants.NO_VALUES])
+
+
def AnyArtifactHasNoAttr(
    artifacts, attr_name, users_by_id, all_label_values, config):
  """Return true if any artifact does not have a value for attr_name.

  Args:
    artifacts: list of artifacts to check.
    attr_name: string attribute name to check for.
    users_by_id: dict of UserViews for referenced users.
    all_label_values: dict {local_id: {key: [val, ...], ...}} of parsed labels.
    config: ProjectIssueConfig PB for the current project.

  Returns:
    True if at least one artifact lacks a value for the attribute.
  """
  # TODO(jrobbins): all_label_values needs to be keyed by issue_id to allow
  # cross-project grid views.
  for art in artifacts:
    vals = GetArtifactAttr(
        art, attr_name.lower(), users_by_id, all_label_values[art.local_id],
        config)
    if framework_constants.NO_VALUES in vals:
      return True

  return False
diff --git a/appengine/monorail/framework/jsonfeed.py b/appengine/monorail/framework/jsonfeed.py
new file mode 100644
index 0000000..a2cb6d5
--- /dev/null
+++ b/appengine/monorail/framework/jsonfeed.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file defines a subclass of Servlet for JSON feeds.
+
+A "feed" is a servlet that is accessed by another part of our system and that
+responds with a JSON value rather than HTML to display in a browser.
+"""
+
+import httplib
+import json
+import logging
+
+from google.appengine.api import app_identity
+
+import settings
+
+from framework import framework_constants
+from framework import permissions
+from framework import servlet
+from framework import xsrf
+
+# This causes a JS error for a hacker trying to do a cross-site inclusion.
+XSSI_PREFIX = ")]}'\n"
+
+
+class JsonFeed(servlet.Servlet):
+  """A convenient base class for JSON feeds."""
+
+  # By default, JSON output is compact. Subclasses can set this to
+  # an integer, like 4, for pretty-printed output.
+  JSON_INDENT = None
+
+  # Some JSON handlers can only be accessed from our own app.
+  CHECK_SAME_APP = False
+
+  def HandleRequest(self, _mr):
+    """Override this method to implement handling of the request.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      A dictionary of json data.
+    """
+    raise servlet.MethodNotSupportedError()
+
+  def _DoRequestHandling(self, request, mr):
+    """Do permission checking, page processing, and response formatting."""
+    try:
+      # CHECK_SECURITY_TOKEN is inherited from servlet.Servlet (not defined
+      # in this class); InternalTask below turns it off.
+      if self.CHECK_SECURITY_TOKEN and mr.auth.user_id:
+        # Validate the XSRF token with the specific request path for this
+        # servlet. But, not every XHR request has a distinct token, so just
+        # use 'xhr' for ones that don't.
+        # TODO(jrobbins): make specific tokens for:
+        # user and project stars, issue options, check names.
+        try:
+          logging.info('request in jsonfeed is %r', request)
+          xsrf.ValidateToken(mr.token, mr.auth.user_id, request.path)
+        except xsrf.TokenIncorrect:
+          # Fall back to the generic XHR token before rejecting the request.
+          logging.info('using token path "xhr"')
+          xsrf.ValidateToken(mr.token, mr.auth.user_id, xsrf.XHR_SERVLET_PATH)
+
+      if self.CHECK_SAME_APP and not settings.dev_mode:
+        # NOTE(review): X-Appengine-Inbound-Appid is presumably set only by
+        # the App Engine infrastructure for app-to-app requests — confirm.
+        calling_app_id = request.headers.get('X-Appengine-Inbound-Appid')
+        if calling_app_id != app_identity.get_application_id():
+          self.response.status = httplib.FORBIDDEN
+          return
+
+      self._CheckForMovedProject(mr, request)
+      self.AssertBasePermission(mr)
+
+      json_data = self.HandleRequest(mr)
+
+      self._RenderJsonResponse(json_data)
+      # Tell the base servlet that the response has already been written, so
+      # it should not render any template.  This exception is control flow,
+      # not an error, and intentionally escapes the try block below.
+      raise servlet.AlreadySentResponseException()
+
+    except permissions.PermissionException as e:
+      logging.info('Trapped PermissionException %s', e)
+      self.response.status = httplib.FORBIDDEN
+
+  # pylint: disable=unused-argument
+  # pylint: disable=arguments-differ
+  def get(self, project_name=None, viewed_username=None):
+    """Collect page-specific and generic info, then render the page.
+
+    Args:
+      project_name: string project name parsed from the URL by webapp2,
+        but we also parse it out in our code.
+      viewed_username: string user email parsed from the URL by webapp2,
+        but we also parse it out in our code.
+    """
+    self._DoRequestHandling(self.mr.request, self.mr)
+
+  # pylint: disable=unused-argument
+  # pylint: disable=arguments-differ
+  def post(self, project_name=None, viewed_username=None):
+    """Parse the request, check base perms, and call form-specific code."""
+    self._DoRequestHandling(self.mr.request, self.mr)
+
+  def _RenderJsonResponse(self, json_data):
+    """Serialize the data as JSON so that it can be sent to the browser."""
+    json_str = json.dumps(json_data, indent=self.JSON_INDENT)
+    logging.debug(
+        'Sending JSON response: %r length: %r',
+        json_str[:framework_constants.LOGGING_MAX_LENGTH], len(json_str))
+    self.response.content_type = framework_constants.CONTENT_TYPE_JSON
+    # The XSSI prefix breaks cross-site script inclusion of this response.
+    self.response.write(XSSI_PREFIX)
+    self.response.write(json_str)
+
+
+class InternalTask(JsonFeed):
+  """Internal tasks are JSON feeds that can only be reached by our own code."""
+
+  # Cron jobs and task queue workers cannot obtain XSRF tokens, so skip that
+  # check.  NOTE(review): access control for these handlers presumably comes
+  # from app configuration or CHECK_SAME_APP in subclasses — confirm.
+  CHECK_SECURITY_TOKEN = False
diff --git a/appengine/monorail/framework/monorailrequest.py b/appengine/monorail/framework/monorailrequest.py
new file mode 100644
index 0000000..8e4dcd0
--- /dev/null
+++ b/appengine/monorail/framework/monorailrequest.py
@@ -0,0 +1,691 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to hold information parsed from a request.
+
+To simplify our servlets and avoid duplication of code, we parse some
+info out of the request as soon as we get it and then pass a MonorailRequest
+object to the servlet-specific request handler methods.
+"""
+
+import endpoints
+import logging
+import re
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import app_identity
+from google.appengine.api import oauth
+from google.appengine.api import users
+
+import webapp2
+
+import settings
+from framework import framework_constants
+from framework import framework_views
+from framework import permissions
+from framework import sql
+from framework import template_helpers
+from proto import api_pb2_v1
+from proto import user_pb2
+from services import user_svc
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+_HOSTPORT_RE = re.compile('^[-a-z0-9.]+(:\d+)?$', re.I)
+
+
+class AuthData(object):
+  """This object holds authentication data about a user.
+
+  This is used by MonorailRequest as it determines which user the
+  requester is authenticated as and fetches the user's data. It can
+  also be used to lookup perms for user IDs specified in issue fields.
+
+  Attributes:
+    user_id: The user ID of the user (or 0 if not signed in).
+    effective_ids: A set of user IDs that includes the signed in user's
+        direct user ID and the user IDs of all their user groups.
+        This set will be empty for anonymous users.
+    user_view: UserView object for the signed-in user.
+    user_pb: User object for the signed-in user.
+    email: email address for the user, or None.
+  """
+
+  def __init__(self):
+    # Defaults represent an anonymous (signed-out) visitor.
+    self.user_id = 0
+    self.effective_ids = set()
+    self.user_view = None
+    self.user_pb = user_pb2.MakeUser()
+    self.email = None
+
+  @classmethod
+  def FromRequest(cls, cnxn, services):
+    """Determine auth information from the request and fetches user data.
+
+    If everything works and the user is signed in, then all of the public
+    attributes of the AuthData instance will be filled in appropriately.
+
+    Args:
+      cnxn: connection to the SQL database.
+      services: Interface to all persistence storage backends.
+
+    Returns:
+      A new AuthData object.
+    """
+    user = users.get_current_user()
+    if user is None:
+      return cls()
+    else:
+      # We create a User row for each user who visits the site.
+      # TODO(jrobbins): we should really only do it when they take action.
+      return cls.FromEmail(cnxn, user.email(), services, autocreate=True)
+
+  @classmethod
+  def FromEmail(cls, cnxn, email, services, autocreate=False):
+    """Determine auth information for the given user email address.
+
+    Args:
+      cnxn: monorail connection to the database.
+      email: string email address of the user.
+      services: connections to backend servers.
+      autocreate: set to True to create a new row in the Users table if needed.
+
+    Returns:
+      A new AuthData object.
+
+    Raises:
+      user_svc.NoSuchUserException: If the user of the email does not exist.
+    """
+    auth = cls()
+    auth.email = email
+    if email:
+      auth.user_id = services.user.LookupUserID(
+          cnxn, email, autocreate=autocreate)
+      assert auth.user_id
+
+    cls._FinishInitialization(cnxn, auth, services)
+    return auth
+
+  @classmethod
+  def FromUserID(cls, cnxn, user_id, services):
+    """Determine auth information for the given user ID.
+
+    Args:
+      cnxn: monorail connection to the database.
+      user_id: int user ID of the user.
+      services: connections to backend servers.
+
+    Returns:
+      A new AuthData object.
+    """
+    auth = cls()
+    auth.user_id = user_id
+    if auth.user_id:
+      auth.email = services.user.LookupUserEmail(cnxn, user_id)
+
+    cls._FinishInitialization(cnxn, auth, services)
+    return auth
+
+  @classmethod
+  def _FinishInitialization(cls, cnxn, auth, services):
+    """Fill in the rest of the fields based on the user_id."""
+    # TODO(jrobbins): re-implement same_org
+    if auth.user_id:
+      # effective_ids includes the user's own ID plus all their user groups.
+      auth.effective_ids = services.usergroup.LookupMemberships(
+          cnxn, auth.user_id)
+      auth.effective_ids.add(auth.user_id)
+      auth.user_pb = services.user.GetUser(cnxn, auth.user_id)
+      if auth.user_pb:
+        auth.user_view = framework_views.UserView(
+            auth.user_id, auth.email,
+            auth.user_pb.obscure_email)
+
+
+class MonorailApiRequest(object):
+ """A class to hold information parsed from the Endpoints API request."""
+
+ # pylint: disable=attribute-defined-outside-init
+ def __init__(self, request, services):
+ requester = (
+ endpoints.get_current_user() or
+ oauth.get_current_user(
+ framework_constants.OAUTH_SCOPE))
+ requester_email = requester.email().lower()
+ self.cnxn = sql.MonorailConnection()
+ self.auth = AuthData.FromEmail(
+ self.cnxn, requester_email, services)
+ self.me_user_id = self.auth.user_id
+ self.viewed_username = None
+ self.viewed_user_auth = None
+ self.project_name = None
+ self.project = None
+ self.issue = None
+ self.config = None
+ self.granted_perms = set()
+
+ # query parameters
+ self.params = {
+ 'can': 1,
+ 'start': 0,
+ 'num': 100,
+ 'q': '',
+ 'sort': '',
+ 'groupby': '',
+ 'projects': []}
+ self.use_cached_searches = True
+ self.warnings = []
+ self.errors = template_helpers.EZTError()
+ self.mode = None
+
+ if hasattr(request, 'projectId'):
+ self.project_name = request.projectId
+ self.project = services.project.GetProjectByName(
+ self.cnxn, self.project_name)
+ self.params['projects'].append(self.project_name)
+ self.config = services.config.GetProjectConfig(
+ self.cnxn, self.project_id)
+ if hasattr(request, 'additionalProject'):
+ self.params['projects'].extend(request.additionalProject)
+ self.params['projects'] = list(set(self.params['projects']))
+ if hasattr(request, 'issueId'):
+ self.issue = services.issue.GetIssueByLocalID(
+ self.cnxn, self.project_id, request.issueId)
+ self.granted_perms = tracker_bizobj.GetGrantedPerms(
+ self.issue, self.auth.effective_ids, self.config)
+ if hasattr(request, 'userId'):
+ self.viewed_username = request.userId.lower()
+ if self.viewed_username == 'me':
+ self.viewed_username = requester_email
+ self.viewed_user_auth = AuthData.FromEmail(
+ self.cnxn, self.viewed_username, services)
+ elif hasattr(request, 'groupName'):
+ self.viewed_username = request.groupName.lower()
+ try:
+ self.viewed_user_auth = AuthData.FromEmail(
+ self.cnxn, self.viewed_username, services)
+ except user_svc.NoSuchUserException:
+ self.viewed_user_auth = None
+ self.perms = permissions.GetPermissions(
+ self.auth.user_pb, self.auth.effective_ids, self.project)
+
+ # Build q.
+ if hasattr(request, 'q') and request.q:
+ self.params['q'] = request.q
+ if hasattr(request, 'publishedMax') and request.publishedMax:
+ self.params['q'] += ' opened<=%d' % request.publishedMax
+ if hasattr(request, 'publishedMin') and request.publishedMin:
+ self.params['q'] += ' opened>=%d' % request.publishedMin
+ if hasattr(request, 'updatedMax') and request.updatedMax:
+ self.params['q'] += ' modified<=%d' % request.updatedMax
+ if hasattr(request, 'updatedMin') and request.updatedMin:
+ self.params['q'] += ' modified>=%d' % request.updatedMin
+ if hasattr(request, 'owner') and request.owner:
+ self.params['q'] += ' owner:%s' % request.owner
+ if hasattr(request, 'status') and request.status:
+ self.params['q'] += ' status:%s' % request.status
+ if hasattr(request, 'label') and request.label:
+ self.params['q'] += ' label:%s' % request.label
+
+ if hasattr(request, 'can') and request.can:
+ if request.can == api_pb2_v1.CannedQuery.all:
+ self.params['can'] = 1
+ elif request.can == api_pb2_v1.CannedQuery.new:
+ self.params['can'] = 6
+ elif request.can == api_pb2_v1.CannedQuery.open:
+ self.params['can'] = 2
+ elif request.can == api_pb2_v1.CannedQuery.owned:
+ self.params['can'] = 3
+ elif request.can == api_pb2_v1.CannedQuery.reported:
+ self.params['can'] = 4
+ elif request.can == api_pb2_v1.CannedQuery.starred:
+ self.params['can'] = 5
+ elif request.can == api_pb2_v1.CannedQuery.to_verify:
+ self.params['can'] = 7
+ else: # Endpoints should have caught this.
+ raise InputException(
+ 'Canned query %s is not supported.', request.can)
+ if hasattr(request, 'startIndex') and request.startIndex:
+ self.params['start'] = request.startIndex
+ if hasattr(request, 'maxResults') and request.maxResults:
+ self.params['num'] = request.maxResults
+ if hasattr(request, 'sort') and request.sort:
+ self.params['sort'] = request.sort
+
+ self.query_project_names = self.GetParam('projects')
+ self.group_by_spec = self.GetParam('groupby')
+ self.sort_spec = self.GetParam('sort')
+ self.query = self.GetParam('q')
+ self.can = self.GetParam('can')
+ self.start = self.GetParam('start')
+ self.num = self.GetParam('num')
+
+ @property
+ def project_id(self):
+ return self.project.project_id if self.project else None
+
+ def GetParam(self, query_param_name, default_value=None,
+ _antitamper_re=None):
+ return self.params.get(query_param_name, default_value)
+
+ def GetPositiveIntParam(self, query_param_name, default_value=None):
+ """Returns 0 if the user-provided value is less than 0."""
+ return max(self.GetParam(query_param_name, default_value=default_value),
+ 0)
+
+
+class MonorailRequest(object):
+ """A class to hold information parsed from the HTTP request.
+
+ The goal of MonorailRequest is to do almost all URL path and query string
+ procesing in one place, which makes the servlet code simpler.
+
+ Attributes:
+ cnxn: connection to the SQL databases.
+ logged_in_user_id: int user ID of the signed-in user, or None.
+ effective_ids: set of signed-in user ID and all their user group IDs.
+ user_pb: User object for the signed in user.
+ project_name: string name of the current project.
+ project_id: int ID of the current projet.
+ viewed_username: string username of the user whose profile is being viewed.
+ can: int "canned query" number to scope the user's search.
+ num: int number of results to show per pagination page.
+ start: int position in result set to show on this pagination page.
+ etc: there are many more, all read-only.
+ """
+
+ # pylint: disable=attribute-defined-outside-init
+ def __init__(self, params=None):
+ """Initialize the MonorailRequest object."""
+ self.form_overrides = {}
+ if params:
+ self.form_overrides.update(params)
+ self.warnings = []
+ self.errors = template_helpers.EZTError()
+ self.debug_enabled = False
+ self.use_cached_searches = True
+ self.cnxn = sql.MonorailConnection()
+
+ self.auth = AuthData() # Authentication info for logged-in user
+
+ self.project_name = None
+ self.project = None
+
+ self.viewed_username = None
+ self.viewed_user_auth = AuthData()
+
+ @property
+ def project_id(self):
+ return self.project.project_id if self.project else None
+
+ def CleanUp(self):
+ """Close the database connection so that the app does not run out."""
+ if self.cnxn:
+ self.cnxn.Close()
+ self.cnxn = None
+
+ def ParseRequest(self, request, services, prof, do_user_lookups=True):
+ """Parse tons of useful info from the given request object.
+
+ Args:
+ request: webapp2 Request object w/ path and query params.
+ services: connections to backend servers including DB.
+ prof: Profiler instance.
+ do_user_lookups: Set to False to disable lookups during testing.
+ """
+ with prof.Phase('basic parsing'):
+ self.request = request
+ self.current_page_url = request.url
+ self.current_page_url_encoded = urllib.quote_plus(self.current_page_url)
+
+ # Only accept a hostport from the request that looks valid.
+ if not _HOSTPORT_RE.match(request.host):
+ raise InputException('request.host looks funny: %r', request.host)
+
+ logging.info('Request: %s', self.current_page_url)
+
+ with prof.Phase('path parsing'):
+ viewed_user_val, self.project_name = _ParsePathIdentifiers(
+ self.request.path)
+ self.viewed_username = _GetViewedEmail(
+ viewed_user_val, self.cnxn, services)
+ with prof.Phase('qs parsing'):
+ self._ParseQueryParameters()
+ with prof.Phase('overrides parsing'):
+ self._ParseFormOverrides()
+
+ if not self.project: # It can be already set in unit tests.
+ self._LookupProject(services, prof)
+ if do_user_lookups:
+ if self.viewed_username:
+ self._LookupViewedUser(services, prof)
+ self._LookupLoggedInUser(services, prof)
+ # TODO(jrobbins): re-implement HandleLurkerViewingSelf()
+
+ prod_debug_allowed = self.perms.HasPerm(
+ permissions.VIEW_DEBUG, self.auth.user_id, None)
+ self.debug_enabled = (request.params.get('debug') and
+ (settings.dev_mode or prod_debug_allowed))
+ # temporary option for perf testing on staging instance.
+ if request.params.get('disable_cache'):
+ if settings.dev_mode or 'staging' in request.host:
+ self.use_cached_searches = False
+
+ def _ParseQueryParameters(self):
+ """Parse and convert all the query string params used in any servlet."""
+ self.start = self.GetPositiveIntParam('start', default_value=0)
+ self.num = self.GetPositiveIntParam('num', default_value=100)
+ # Prevent DoS attacks that try to make us serve really huge result pages.
+ self.num = min(self.num, settings.max_artifact_search_results_per_page)
+
+ self.invalidation_timestep = self.GetIntParam(
+ 'invalidation_timestep', default_value=0)
+
+ self.continue_issue_id = self.GetIntParam(
+ 'continue_issue_id', default_value=0)
+ self.redir = self.GetParam('redir')
+
+ # Search scope, a.k.a., canned query ID
+ # TODO(jrobbins): make configurable
+ self.can = self.GetIntParam(
+ 'can', default_value=tracker_constants.OPEN_ISSUES_CAN)
+
+ # Search query
+ self.query = self.GetParam('q', default_value='').strip()
+
+ # Sorting of search results (needed for result list and flipper)
+ self.sort_spec = self.GetParam(
+ 'sort', default_value='',
+ antitamper_re=framework_constants.SORTSPEC_RE)
+
+ # Note: This is set later in request handling by ComputeColSpec().
+ self.col_spec = None
+
+ # Grouping of search results (needed for result list and flipper)
+ self.group_by_spec = self.GetParam(
+ 'groupby', default_value='',
+ antitamper_re=framework_constants.SORTSPEC_RE)
+
+ # For issue list and grid mode.
+ self.cursor = self.GetParam('cursor')
+ self.preview = self.GetParam('preview')
+ self.mode = self.GetParam('mode', default_value='list')
+ self.x = self.GetParam('x', default_value='')
+ self.y = self.GetParam('y', default_value='')
+ self.cells = self.GetParam('cells', default_value='ids')
+
+ # For the dashboard and issue lists included in the dashboard.
+ self.ajah = self.GetParam('ajah') # AJAH = Asychronous Javascript And HTML
+ self.table_title = self.GetParam('table_title')
+ self.panel_id = self.GetIntParam('panel')
+
+ # For pagination of updates lists
+ self.before = self.GetPositiveIntParam('before')
+ self.after = self.GetPositiveIntParam('after')
+
+ # For cron tasks and backend calls
+ self.lower_bound = self.GetIntParam('lower_bound')
+ self.upper_bound = self.GetIntParam('upper_bound')
+ self.shard_id = self.GetIntParam('shard_id')
+
+ # For specifying which objects to operate on
+ self.local_id = self.GetIntParam('id')
+ self.local_id_list = self.GetIntListParam('ids')
+ self.seq = self.GetIntParam('seq')
+ self.aid = self.GetIntParam('aid')
+ self.specified_user_id = self.GetIntParam('u', default_value=0)
+ self.specified_logged_in_user_id = self.GetIntParam(
+ 'logged_in_user_id', default_value=0)
+ self.specified_me_user_id = self.GetIntParam(
+ 'me_user_id', default_value=0)
+ self.specified_project = self.GetParam('project')
+ self.specified_project_id = self.GetIntParam('project_id')
+ self.query_project_names = self.GetListParam('projects', default_value=[])
+ self.template_name = self.GetParam('template')
+ self.component_path = self.GetParam('component')
+ self.field_name = self.GetParam('field')
+
+ # For image attachments
+ self.inline = bool(self.GetParam('inline'))
+ self.thumb = bool(self.GetParam('thumb'))
+
+ # For JS callbacks
+ self.token = self.GetParam('token')
+ self.starred = bool(self.GetIntParam('starred'))
+
+ # For issue reindexing utility servlet
+ self.auto_submit = self.GetParam('auto_submit')
+
+ def _ParseFormOverrides(self):
+ """Support deep linking by allowing the user to set form fields via QS."""
+ allowed_overrides = {
+ 'template_name': self.GetParam('template_name'),
+ 'initial_summary': self.GetParam('summary'),
+ 'initial_description': (self.GetParam('description') or
+ self.GetParam('comment')),
+ 'initial_comment': self.GetParam('comment'),
+ 'initial_status': self.GetParam('status'),
+ 'initial_owner': self.GetParam('owner'),
+ 'initial_cc': self.GetParam('cc'),
+ 'initial_blocked_on': self.GetParam('blockedon'),
+ 'initial_blocking': self.GetParam('blocking'),
+ 'initial_merge_into': self.GetIntParam('mergeinto'),
+ 'initial_components': self.GetParam('components'),
+
+ # For the people pages
+ 'initial_add_members': self.GetParam('add_members'),
+ 'initially_expanded_form': ezt.boolean(self.GetParam('expand_form')),
+
+ # For user group admin pages
+ 'initial_name': (self.GetParam('group_name') or
+ self.GetParam('proposed_project_name')),
+ }
+
+ # Only keep the overrides that were actually provided in the query string.
+ self.form_overrides.update(
+ (k, v) for (k, v) in allowed_overrides.iteritems()
+ if v is not None)
+
+ def _LookupViewedUser(self, services, prof):
+ """Get information about the viewed user (if any) from the request."""
+ try:
+ with prof.Phase('get viewed user, if any'):
+ self.viewed_user_auth = AuthData.FromEmail(
+ self.cnxn, self.viewed_username, services, autocreate=False)
+ except user_svc.NoSuchUserException:
+ logging.info('could not find user %r', self.viewed_username)
+ webapp2.abort(404, 'user not found')
+
+ if not self.viewed_user_auth.user_id:
+ webapp2.abort(404, 'user not found')
+
+ def _LookupProject(self, services, prof):
+ """Get information about the current project (if any) from the request."""
+ with prof.Phase('get current project, if any'):
+ if not self.project_name:
+ logging.info('no project_name, so no project')
+ else:
+ self.project = services.project.GetProjectByName(
+ self.cnxn, self.project_name)
+ if not self.project:
+ webapp2.abort(404, 'invalid project')
+
+ def _LookupLoggedInUser(self, services, prof):
+ """Get information about the signed-in user (if any) from the request."""
+ with prof.Phase('get user info, if any'):
+ self.auth = AuthData.FromRequest(self.cnxn, services)
+ self.me_user_id = (self.GetIntParam('me') or
+ self.viewed_user_auth.user_id or self.auth.user_id)
+
+ with prof.Phase('looking up signed in user permissions'):
+ self.perms = permissions.GetPermissions(
+ self.auth.user_pb, self.auth.effective_ids, self.project)
+
+ def ComputeColSpec(self, config):
+ """Set col_spec based on param, default in the config, or site default."""
+ if self.col_spec is not None:
+ return # Already set.
+ default_col_spec = ''
+ if config:
+ default_col_spec = config.default_col_spec
+
+ col_spec = self.GetParam(
+ 'colspec', default_value=default_col_spec,
+ antitamper_re=framework_constants.COLSPEC_RE)
+
+ if not col_spec:
+ # If col spec is still empty then default to the global col spec.
+ col_spec = tracker_constants.DEFAULT_COL_SPEC
+
+ self.col_spec = ' '.join(ParseColSpec(col_spec))
+
+ def PrepareForReentry(self, echo_data):
+ """Expose the results of form processing as if it was a new GET.
+
+ This method is called only when the user submits a form with invalid
+ information which they are being asked to correct it. Updating the MR
+ object allows the normal servlet get() method to populate the form with
+ the entered values and error messages.
+
+ Args:
+ echo_data: dict of {page_data_key: value_to_reoffer, ...} that will
+ override whatever HTML form values are nomally shown to the
+ user when they initially view the form. This allows them to
+ fix user input that was not valid.
+ """
+ self.form_overrides.update(echo_data)
+
+ def GetParam(self, query_param_name, default_value=None,
+ antitamper_re=None):
+ """Get a query parameter from the URL as a utf8 string."""
+ value = self.request.params.get(query_param_name)
+ assert value is None or isinstance(value, unicode)
+ using_default = value is None
+ if using_default:
+ value = default_value
+
+ if antitamper_re and not antitamper_re.match(value):
+ if using_default:
+ logging.error('Default value fails antitamper for %s field: %s',
+ query_param_name, value)
+ else:
+ logging.info('User seems to have tampered with %s field: %s',
+ query_param_name, value)
+ raise InputException()
+
+ return value
+
+ def GetIntParam(self, query_param_name, default_value=None):
+ """Get an integer param from the URL or default."""
+ value = self.request.params.get(query_param_name)
+ if value is None:
+ return default_value
+
+ try:
+ return int(value)
+ except (TypeError, ValueError):
+ return default_value
+
+ def GetPositiveIntParam(self, query_param_name, default_value=None):
+ """Returns 0 if the user-provided value is less than 0."""
+ return max(self.GetIntParam(query_param_name, default_value=default_value),
+ 0)
+
+ def GetListParam(self, query_param_name, default_value=None):
+ """Get a list of strings from the URL or default."""
+ params = self.request.params.get(query_param_name)
+ if params is None:
+ return default_value
+ if not params:
+ return []
+ return params.split(',')
+
+ def GetIntListParam(self, query_param_name, default_value=None):
+ """Get a list of ints from the URL or default."""
+ param_list = self.GetListParam(query_param_name)
+ if param_list is None:
+ return default_value
+
+ try:
+ return [int(p) for p in param_list]
+ except (TypeError, ValueError):
+ return default_value
+
+
+def _ParsePathIdentifiers(path):
+ """Parse out the workspace being requested (if any).
+
+ Args:
+ path: A string beginning with the request's path info.
+
+ Returns:
+ (viewed_user_val, project_name).
+ """
+ viewed_user_val = None
+ project_name = None
+
+ # Strip off any query params
+ split_path = path.lstrip('/').split('?')[0].split('/')
+
+ if len(split_path) >= 2:
+ if split_path[0] == 'p':
+ project_name = split_path[1]
+ if split_path[0] == 'u':
+ viewed_user_val = urllib.unquote(split_path[1])
+ if split_path[0] == 'g':
+ viewed_user_val = urllib.unquote(split_path[1])
+
+ return viewed_user_val, project_name
+
+
+def _GetViewedEmail(viewed_user_val, cnxn, services):
+  """Returns the viewed user's email.
+
+  Args:
+    viewed_user_val: Could be either int (user_id) or str (email).
+    cnxn: connection to the SQL database.
+    services: Interface to all persistence storage backends.
+
+  Returns:
+    viewed_email
+  """
+  if not viewed_user_val:
+    return None
+
+  # A purely numeric value is treated as a user ID to look up; anything
+  # else (ValueError from int()) is assumed to already be an email address.
+  try:
+    viewed_userid = int(viewed_user_val)
+    viewed_email = services.user.LookupUserEmail(cnxn, viewed_userid)
+    if not viewed_email:
+      # Unknown user ID: abort with 404 rather than returning None.
+      logging.info('userID %s not found', viewed_userid)
+      webapp2.abort(404, 'user not found')
+  except ValueError:
+    viewed_email = viewed_user_val
+
+  return viewed_email
+
+
+def ParseColSpec(col_spec):
+  """Split a string column spec into a list of column names.
+
+  Args:
+    col_spec: a unicode string containing a list of labels.
+
+  Returns:
+    A list of the extracted labels. Non-alphanumeric
+    characters other than the period will be stripped from the text.
+  """
+  # COLSPEC_COL_RE (framework_constants) defines the allowed column tokens.
+  return framework_constants.COLSPEC_COL_RE.findall(col_spec)
+
+
+class Error(Exception):
+  """Base class for errors from this module."""
+  # Catching Error catches every exception type defined in this module.
+  pass
+
+
+class InputException(Error):
+  """Error in user input processing."""
+  # Raised, e.g., when a query parameter fails an antitamper check or the
+  # request host looks invalid.
+  pass
diff --git a/appengine/monorail/framework/paginate.py b/appengine/monorail/framework/paginate.py
new file mode 100644
index 0000000..fde7ebe
--- /dev/null
+++ b/appengine/monorail/framework/paginate.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that help display pagination widgets for result sets."""
+
+import logging
+
+from third_party import ezt
+
+import settings
+from framework import framework_helpers
+
+
+class VirtualPagination(object):
+  """Class to calc Prev and Next pagination links based on result counts."""
+
+  def __init__(self, mr, total_count, items_per_page,
+               list_page_url=None, count_up=True,
+               start_param='start', num_param='num', max_num=None):
+    """Given 'num' and 'start' params, determine Prev and Next links.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      total_count: total number of artifacts that satisfy the query.
+      items_per_page: number of items to display on each page, e.g., 25.
+      list_page_url: URL of the web application page that is displaying
+        the list of artifacts.  Used to build the Prev and Next URLs.
+        If None, no URLs will be built.
+      count_up: if False, count down from total_count.
+      start_param: query string parameter name to use for the start
+        of the pagination page.
+      num_param: query string parameter name to use for the number of items
+        to show on a pagination page.
+      max_num: optional limit on the value of the num param.  If not given,
+        settings.max_artifact_search_results_per_page is used.
+    """
+    self.total_count = total_count
+    self.prev_url = ''
+    self.reload_url = ''
+    self.next_url = ''
+
+    if max_num is None:
+      max_num = settings.max_artifact_search_results_per_page
+
+    # Clamp the user-supplied page size to the configured maximum.
+    self.num = mr.GetPositiveIntParam(num_param, items_per_page)
+    self.num = min(self.num, max_num)
+
+    if count_up:
+      # Paginating forward: start defaults to 0 and grows toward total_count.
+      self.start = mr.GetPositiveIntParam(start_param, 0)
+      self.last = min(self.total_count, self.start + self.num)
+      prev_start = max(0, self.start - self.num)
+      next_start = self.start + self.num
+    else:
+      # Counting down: start defaults to total_count and shrinks toward 0.
+      self.start = mr.GetPositiveIntParam(start_param, self.total_count)
+      self.last = max(0, self.start - self.num)
+      prev_start = min(self.total_count, self.start + self.num)
+      next_start = self.start - self.num
+
+    if list_page_url:
+      if mr.project_name:
+        list_servlet_rel_url = '/p/%s%s' % (
+            mr.project_name, list_page_url)
+      else:
+        list_servlet_rel_url = list_page_url
+
+      self.reload_url = framework_helpers.FormatURL(
+          mr, list_servlet_rel_url,
+          **{start_param: self.start, num_param: self.num})
+
+      # Only emit a Prev link when there actually is a previous page.
+      if prev_start != self.start:
+        self.prev_url = framework_helpers.FormatURL(
+            mr, list_servlet_rel_url,
+            **{start_param: prev_start, num_param: self.num})
+      if ((count_up and next_start < self.total_count) or
+          (not count_up and next_start >= 1)):
+        self.next_url = framework_helpers.FormatURL(
+            mr, list_servlet_rel_url,
+            **{start_param: next_start, num_param: self.num})
+
+    # Hide the pagination widget when the current page is empty.
+    self.visible = ezt.boolean(self.last != self.start)
+
+    # Adjust indices to one-based values for display to users.
+    if count_up:
+      self.start += 1
+    else:
+      self.last += 1
+
+  def DebugString(self):
+    """Return a string that is useful in on-page debugging."""
+    return '%s - %s of %s; prev_url:%s; next_url:%s' % (
+        self.start, self.last, self.total_count, self.prev_url, self.next_url)
+
+
+class ArtifactPagination(VirtualPagination):
+  """Class to calc Prev and Next pagination links based on a results list."""
+
+  def __init__(
+      self, mr, results, items_per_page, list_page_url, total_count=None,
+      limit_reached=False, skipped=0):
+    """Given 'num' and 'start' params, determine Prev and Next links.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      results: a list of artifact ids that satisfy the query.
+      items_per_page: number of items to display on each page, e.g., 25.
+      list_page_url: URL of the web application page that is displaying
+        the list of artifacts.  Used to build the Prev and Next URLs.
+      total_count: specify total result count rather than the length of results
+      limit_reached: optional boolean that indicates that more results could
+        not be fetched because a limit was reached.
+      skipped: optional int number of items that were skipped and left off the
+        front of results.
+    """
+    if total_count is None:
+      total_count = skipped + len(results)
+    super(ArtifactPagination, self).__init__(
+        mr, total_count, items_per_page, list_page_url=list_page_url)
+
+    self.limit_reached = ezt.boolean(limit_reached)
+    # Determine which of those results should be visible on the current page.
+    # The superclass converted self.start to a one-based index, so subtract 1;
+    # also subtract the items that were skipped off the front of results.
+    range_start = self.start - 1 - skipped
+    range_end = range_start + self.num
+    assert 0 <= range_start <= range_end
+    self.visible_results = results[range_start:range_end]
diff --git a/appengine/monorail/framework/pbproxy_test_pb2.py b/appengine/monorail/framework/pbproxy_test_pb2.py
new file mode 100644
index 0000000..48bf399
--- /dev/null
+++ b/appengine/monorail/framework/pbproxy_test_pb2.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Message classes for use by template_helpers_test."""
+
+from protorpc import messages
+
+
+class PBProxyExample(messages.Message):
+  """A simple protocol buffer to test template_helpers.PBProxy."""
+  # One plain string field and one boolean field with a default, so tests
+  # can exercise both unset and defaulted attribute access through PBProxy.
+  foo = messages.StringField(1)
+  bar = messages.BooleanField(2, default=False)
+
+
+class PBProxyNested(messages.Message):
+  """A simple protocol buffer to test template_helpers.PBProxy."""
+  # Covers nested-message, repeated-scalar, and repeated-message fields.
+  nested = messages.MessageField(PBProxyExample, 1)
+  multiple_strings = messages.StringField(2, repeated=True)
+  multiple_pbes = messages.MessageField(PBProxyExample, 3, repeated=True)
diff --git a/appengine/monorail/framework/permissions.py b/appengine/monorail/framework/permissions.py
new file mode 100644
index 0000000..e5b8404
--- /dev/null
+++ b/appengine/monorail/framework/permissions.py
@@ -0,0 +1,959 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions to implement permission checking.
+
+The main data structure is a simple map from (user role, project status,
+project_access_level) to specific perms.
+
+A perm is simply a string that indicates that the user has a given
+permission. The servlets and templates can test whether the current
+user has permission to see a UI element or perform an action by
+testing for the presence of the corresponding perm in the user's
+permission set.
+
+The user role is one of admin, owner, member, outsider user, or anon.
+The project status is one of the project states defined in project_pb2,
+or a special constant defined below. Likewise for access level.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+import settings
+from framework import framework_bizobj
+from framework import framework_constants
+from proto import project_pb2
+from proto import site_pb2
+from proto import usergroup_pb2
+from tracker import tracker_bizobj
+
+# Constants that define permissions.
+# Note that perms with a leading "_" can never be granted
+# to users who are not site admins.
+VIEW = 'View'
+EDIT_PROJECT = 'EditProject'
+CREATE_PROJECT = 'CreateProject'
+PUBLISH_PROJECT = '_PublishProject'  # for making "doomed" projects LIVE
+VIEW_DEBUG = '_ViewDebug'  # on-page debugging info
+EDIT_OTHER_USERS = '_EditOtherUsers'  # can edit other user's prefs, ban, etc.
+CUSTOMIZE_PROCESS = 'CustomizeProcess'  # can use some enterprise features
+VIEW_EXPIRED_PROJECT = '_ViewExpiredProject'  # view long-deleted projects
+# View the list of contributors even in hub-and-spoke projects.
+VIEW_CONTRIBUTOR_LIST = 'ViewContributorList'
+
+# Quota
+VIEW_QUOTA = 'ViewQuota'
+EDIT_QUOTA = 'EditQuota'
+
+# Permissions for editing user groups
+CREATE_GROUP = 'CreateGroup'
+EDIT_GROUP = 'EditGroup'
+DELETE_GROUP = 'DeleteGroup'
+VIEW_GROUP = 'ViewGroup'
+
+# Perms for Source tools
+# TODO(jrobbins): Monorail is just issue tracking with no version control, so
+# phase out use of the term "Commit", sometime after Monorail's initial launch.
+COMMIT = 'Commit'
+
+# Perms for issue tracking
+CREATE_ISSUE = 'CreateIssue'
+EDIT_ISSUE = 'EditIssue'
+EDIT_ISSUE_OWNER = 'EditIssueOwner'
+EDIT_ISSUE_SUMMARY = 'EditIssueSummary'
+EDIT_ISSUE_STATUS = 'EditIssueStatus'
+EDIT_ISSUE_CC = 'EditIssueCc'
+DELETE_ISSUE = 'DeleteIssue'
+ADD_ISSUE_COMMENT = 'AddIssueComment'
+VIEW_INBOUND_MESSAGES = 'ViewInboundMessages'
+# Note, there is no separate DELETE_ATTACHMENT perm.  We
+# allow a user to delete an attachment iff they could soft-delete
+# the comment that holds the attachment.
+
+# Note: the "_" in the perm name makes it impossible for a
+# project owner to grant it to anyone as an extra perm.
+ADMINISTER_SITE = '_AdministerSite'
+
+# Permissions to soft-delete artifact comment
+DELETE_ANY = 'DeleteAny'
+DELETE_OWN = 'DeleteOwn'
+
+# Granting this allows owners to delegate some team management work.
+EDIT_ANY_MEMBER_NOTES = 'EditAnyMemberNotes'
+
+# Permission to star/unstar any artifact.
+SET_STAR = 'SetStar'
+
+# Permission to flag any artifact as spam.
+FLAG_SPAM = 'FlagSpam'
+VERDICT_SPAM = 'VerdictSpam'
+MODERATE_SPAM = 'ModerateSpam'
+
+# Groupings of the standard perms, used by the project admin UI to offer
+# them in categories.  Any perm not listed in STANDARD_PERMISSIONS is
+# treated as a custom permission (see GetCustomPermissions below).
+STANDARD_ADMIN_PERMISSIONS = [
+    EDIT_PROJECT, CREATE_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
+    EDIT_OTHER_USERS, CUSTOMIZE_PROCESS,
+    VIEW_QUOTA, EDIT_QUOTA, ADMINISTER_SITE,
+    EDIT_ANY_MEMBER_NOTES, VERDICT_SPAM, MODERATE_SPAM]
+
+STANDARD_ISSUE_PERMISSIONS = [
+    VIEW, EDIT_ISSUE, ADD_ISSUE_COMMENT, DELETE_ISSUE, FLAG_SPAM]
+
+# Monorail has no source control, but keep COMMIT for backward compatibility.
+STANDARD_SOURCE_PERMISSIONS = [COMMIT]
+
+STANDARD_COMMENT_PERMISSIONS = [DELETE_OWN, DELETE_ANY]
+
+STANDARD_OTHER_PERMISSIONS = [CREATE_ISSUE, FLAG_SPAM, SET_STAR]
+
+STANDARD_PERMISSIONS = (STANDARD_ADMIN_PERMISSIONS +
+                        STANDARD_ISSUE_PERMISSIONS +
+                        STANDARD_SOURCE_PERMISSIONS +
+                        STANDARD_COMMENT_PERMISSIONS +
+                        STANDARD_OTHER_PERMISSIONS)
+
+# roles
+SITE_ADMIN_ROLE = 'admin'
+OWNER_ROLE = 'owner'
+COMMITTER_ROLE = 'committer'
+CONTRIBUTOR_ROLE = 'contributor'
+USER_ROLE = 'user'
+ANON_ROLE = 'anon'
+
+# Project state out-of-band values for keys in _PERMISSIONS_TABLE.
+UNDEFINED_STATUS = 'undefined_status'
+UNDEFINED_ACCESS = 'undefined_access'
+WILDCARD_ACCESS = 'wildcard_access'
+
+class PermissionSet(object):
+  """Class to represent the set of permissions available to the user."""
+
+  def __init__(self, perm_names, consider_restrictions=True):
+    """Create a PermissionSet with the given permissions.
+
+    Args:
+      perm_names: a list of permission name strings.
+      consider_restrictions: if true, the user's permissions can be blocked
+          by restriction labels on an artifact.  Project owners and site
+          admins do not consider restrictions so that they cannot
+          "lock themselves out" of editing an issue.
+    """
+    # Perms are matched case-insensitively everywhere, so store lowercased.
+    self.perm_names = frozenset(p.lower() for p in perm_names)
+    self.consider_restrictions = consider_restrictions
+
+  def __getattr__(self, perm_name):
+    """Easy permission testing in EZT.  E.g., [if-any perms.format_drive].
+
+    NOTE(review): this catch-all resolves *any* missing attribute to an
+    ezt boolean rather than raising AttributeError, which can surprise
+    code that probes attributes (e.g. copy/pickle protocols).
+    """
+    return ezt.boolean(self.HasPerm(perm_name, None, None))
+
+  def CanUsePerm(
+      self, perm_name, effective_ids, project, restriction_labels,
+      granted_perms=None):
+    """Return True if the user can use the given permission.
+
+    Args:
+      perm_name: string name of permission, e.g., 'EditIssue'.
+      effective_ids: set of int user IDs for the user (including any groups),
+          or an empty set if user is not signed in.
+      project: Project PB for the project being accessed, or None if not
+          in a project.
+      restriction_labels: list of strings that restrict permission usage.
+          Each must be a well-formed restriction label with at least two
+          dashes, as guaranteed by GetRestrictions/IsRestrictLabel.
+      granted_perms: optional list of lowercase strings of permissions that the
+          user is granted only within the scope of one issue, e.g., by being
+          named in a user-type custom field that grants permissions.
+
+    Restriction labels have 3 parts, e.g.:
+    'Restrict-EditIssue-InnerCircle' blocks the use of just the
+    EditIssue permission, unless the user also has the InnerCircle
+    permission.  This allows fine-grained restrictions on specific
+    actions, such as editing, commenting, or deleting.
+
+    Restriction labels and permissions are case-insensitive.
+
+    Returns:
+      True if the user can use the given permission, or False
+      if they cannot (either because they don't have that permission
+      or because it is blocked by a relevant restriction label).
+    """
+    # TODO(jrobbins): room for performance improvement: avoid set creation and
+    # repeated string operations.
+    granted_perms = granted_perms or set()
+    perm_lower = perm_name.lower()
+    if perm_lower in granted_perms:
+      return True
+
+    # Accumulate the requested perm plus every perm that a matching
+    # restriction label additionally requires.
+    needed_perms = {perm_lower}
+    if self.consider_restrictions:
+      for label in restriction_labels:
+        label = label.lower()
+        # format: Restrict-Action-ToThisPerm
+        _kw, requested_perm, needed_perm = label.split('-', 2)
+        if requested_perm == perm_lower and needed_perm not in granted_perms:
+          needed_perms.add(needed_perm)
+
+    if not effective_ids:
+      effective_ids = {framework_constants.NO_USER_SPECIFIED}
+    # Id X might have perm A and Y might have B, if both A and B are needed
+    # True should be returned.
+    for perm in needed_perms:
+      if not any(
+          self.HasPerm(perm, user_id, project) for user_id in effective_ids):
+        return False
+
+    return True
+
+  def HasPerm(self, perm_name, user_id, project):
+    """Return True if the user has the given permission (ignoring user groups).
+
+    Args:
+      perm_name: string name of permission, e.g., 'EditIssue'.
+      user_id: int user id of the user, or None if user is not signed in.
+      project: Project PB for the project being accessed, or None if not
+          in a project.
+
+    Returns:
+      True if the user has the given perm.
+    """
+    # TODO(jrobbins): room for performance improvement: pre-compute
+    # extra perms (maybe merge them into the perms object), avoid
+    # redundant call to lower().
+    # Extra perms are per-member strings stored on the project PB.
+    extra_perms = [p.lower() for p in GetExtraPerms(project, user_id)]
+    perm_name = perm_name.lower()
+    return perm_name in self.perm_names or perm_name in extra_perms
+
+  def DebugString(self):
+    """Return a useful string to show when debugging."""
+    return 'PermissionSet(%s)' % ', '.join(sorted(self.perm_names))
+
+  def __repr__(self):
+    return '%s(%r)' % (self.__class__.__name__, self.perm_names)
+
+
+# Predefined PermissionSets for each (role, project state) combination.
+# These are the values of _PERMISSIONS_TABLE below.
+EMPTY_PERMISSIONSET = PermissionSet([])
+
+READ_ONLY_PERMISSIONSET = PermissionSet([VIEW])
+
+USER_PERMISSIONSET = PermissionSet([
+    VIEW, FLAG_SPAM, SET_STAR,
+    CREATE_ISSUE, ADD_ISSUE_COMMENT,
+    DELETE_OWN])
+
+CONTRIBUTOR_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW,
+     FLAG_SPAM, SET_STAR,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT,
+     DELETE_OWN])
+
+CONTRIBUTOR_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW])
+
+COMMITTER_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, COMMIT, VIEW_CONTRIBUTOR_LIST,
+     FLAG_SPAM, SET_STAR, VIEW_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, VIEW_INBOUND_MESSAGES,
+     DELETE_OWN])
+
+COMMITTER_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST,
+     VIEW_INBOUND_MESSAGES, VIEW_QUOTA])
+
+# consider_restrictions=False so owners cannot lock themselves out of
+# their own artifacts via restriction labels.
+OWNER_ACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST, EDIT_PROJECT, COMMIT,
+     FLAG_SPAM, VERDICT_SPAM, SET_STAR, VIEW_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE,
+     VIEW_INBOUND_MESSAGES,
+     DELETE_ANY, EDIT_ANY_MEMBER_NOTES],
+    consider_restrictions=False)
+
+OWNER_INACTIVE_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST, EDIT_PROJECT,
+     VIEW_INBOUND_MESSAGES, VIEW_QUOTA],
+    consider_restrictions=False)
+
+ADMIN_PERMISSIONSET = PermissionSet(
+    [VIEW, VIEW_CONTRIBUTOR_LIST,
+     CREATE_PROJECT, EDIT_PROJECT, PUBLISH_PROJECT, VIEW_DEBUG,
+     COMMIT, CUSTOMIZE_PROCESS, FLAG_SPAM, VERDICT_SPAM, SET_STAR,
+     ADMINISTER_SITE, VIEW_EXPIRED_PROJECT, EDIT_OTHER_USERS,
+     VIEW_QUOTA, EDIT_QUOTA,
+     CREATE_ISSUE, ADD_ISSUE_COMMENT, EDIT_ISSUE, DELETE_ISSUE,
+     VIEW_INBOUND_MESSAGES,
+     DELETE_ANY, EDIT_ANY_MEMBER_NOTES,
+     CREATE_GROUP, EDIT_GROUP, DELETE_GROUP, VIEW_GROUP,
+     MODERATE_SPAM],
+    consider_restrictions=False)
+
+# Perms for the borg service account that syncs user groups (see
+# settings.borg_service_account in GetPermissions).
+GROUP_IMPORT_BORG_PERMISSIONSET = PermissionSet(
+    [CREATE_GROUP, VIEW_GROUP, EDIT_GROUP])
+
+
+# Permissions for project pages, e.g., the project summary page
+# Keyed by (role, project status, project access level); see
+# _LookupPermset for the fallback rules applied at lookup time.
+_PERMISSIONS_TABLE = {
+
+    # Project owners can view and edit artifacts in a LIVE project.
+    (OWNER_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      OWNER_ACTIVE_PERMISSIONSET,
+
+    # Project owners can view, but not edit artifacts in ARCHIVED.
+    # Note: EDIT_PROJECT is not enough permission to change an ARCHIVED project
+    # back to LIVE if a delete_time was set.
+    (OWNER_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      OWNER_INACTIVE_PERMISSIONSET,
+
+    # Project members can view their own project, regardless of state.
+    (COMMITTER_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      COMMITTER_ACTIVE_PERMISSIONSET,
+    (COMMITTER_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      COMMITTER_INACTIVE_PERMISSIONSET,
+
+    # Project contributors can view their own project, regardless of state.
+    (CONTRIBUTOR_ROLE, project_pb2.ProjectState.LIVE, WILDCARD_ACCESS):
+      CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+    (CONTRIBUTOR_ROLE, project_pb2.ProjectState.ARCHIVED, WILDCARD_ACCESS):
+      CONTRIBUTOR_INACTIVE_PERMISSIONSET,
+
+    # Non-members users can read and comment in projects with access == ANYONE
+    (USER_ROLE, project_pb2.ProjectState.LIVE,
+     project_pb2.ProjectAccess.ANYONE):
+      USER_PERMISSIONSET,
+
+    # Anonymous users can only read projects with access == ANYONE.
+    (ANON_ROLE, project_pb2.ProjectState.LIVE,
+     project_pb2.ProjectAccess.ANYONE):
+      READ_ONLY_PERMISSIONSET,
+
+    # Permissions for site pages, e.g., creating a new project
+    (USER_ROLE, UNDEFINED_STATUS, UNDEFINED_ACCESS):
+      PermissionSet([CREATE_PROJECT, CREATE_GROUP]),
+    }
+
+
+def GetPermissions(user, effective_ids, project):
+  """Return a permission set appropriate for the user and project.
+
+  Args:
+    user: The User PB for the signed-in user, or None for anon users.
+    effective_ids: set of int user IDs for the current user and all user
+        groups that s/he is a member of.  This will be an empty set for
+        anonymous users.
+    project: either a Project protobuf, or None for a page whose scope is
+        wider than a single project.
+
+  Returns:
+    a PermissionSet object for the current user and project (or for
+    site-wide operations if project is None).
+
+  If an exact match for the user's role and project status is found, that is
+  returned. Otherwise, we look for permissions for the user's role that is
+  not specific to any project status, or not specific to any project access
+  level.  If neither of those are defined, we give the user an empty
+  permission set.
+  """
+  # Site admins get ADMIN_PERMISSIONSET regardless of groups or projects.
+  if user and user.is_site_admin:
+    return ADMIN_PERMISSIONSET
+
+  # Grant the borg job permission to view/edit groups
+  if user and user.email == settings.borg_service_account:
+    return GROUP_IMPORT_BORG_PERMISSIONSET
+
+  # Anon users don't need to accumulate anything.
+  if not effective_ids:
+    role, status, access = _GetPermissionKey(None, project)
+    return _LookupPermset(role, status, access)
+
+  effective_perms = set()
+  consider_restrictions = True
+
+  # Check for signed-in user with no roles in the current project.
+  if not project or not framework_bizobj.UserIsInProject(
+      project, effective_ids):
+    # The computed role is deliberately ignored here: a signed-in
+    # non-member is always treated as USER_ROLE; only status and access
+    # from the key are used.
+    role, status, access = _GetPermissionKey(None, project)
+    return _LookupPermset(USER_ROLE, status, access)
+
+  # Signed-in user gets the union of all his/her PermissionSets from the table.
+  for user_id in effective_ids:
+    role, status, access = _GetPermissionKey(user_id, project)
+    role_perms = _LookupPermset(role, status, access)
+    # Accumulate a union of all the user's permissions.
+    effective_perms.update(role_perms.perm_names)
+    # If any role allows the user to ignore restriction labels, then
+    # ignore them overall.
+    if not role_perms.consider_restrictions:
+      consider_restrictions = False
+
+  return PermissionSet(
+      effective_perms, consider_restrictions=consider_restrictions)
+
+
+def _LookupPermset(role, status, access):
+  """Lookup the appropriate PermissionSet in _PERMISSIONS_TABLE.
+
+  Args:
+    role: a string indicating the user's role in the project.
+    status: a Project PB status value, or UNDEFINED_STATUS.
+    access: a Project PB access value, or UNDEFINED_ACCESS.
+
+  Returns:
+    A PermissionSet that is appropriate for that kind of user in that
+    project context.  Tries an exact (role, status, access) match first,
+    then falls back to a WILDCARD_ACCESS entry for the same role/status,
+    and finally to EMPTY_PERMISSIONSET.
+  """
+  if (role, status, access) in _PERMISSIONS_TABLE:
+    return _PERMISSIONS_TABLE[(role, status, access)]
+  elif (role, status, WILDCARD_ACCESS) in _PERMISSIONS_TABLE:
+    return _PERMISSIONS_TABLE[(role, status, WILDCARD_ACCESS)]
+  else:
+    return EMPTY_PERMISSIONSET
+
+
+def _GetPermissionKey(user_id, project, expired_before=None):
+  """Return a permission lookup key appropriate for the user and project.
+
+  Args:
+    user_id: int user ID, or None for anonymous visitors.
+    project: Project PB, or None for site-wide pages.
+    expired_before: optional timestamp passed through to IsExpired,
+        for testing.
+
+  Returns:
+    A (role, status, access) tuple usable as a _PERMISSIONS_TABLE key.
+  """
+  if user_id is None:
+    role = ANON_ROLE
+  elif project and IsExpired(project, expired_before=expired_before):
+    role = USER_ROLE  # Do not honor roles in expired projects.
+  elif project and user_id in project.owner_ids:
+    role = OWNER_ROLE
+  elif project and user_id in project.committer_ids:
+    role = COMMITTER_ROLE
+  elif project and user_id in project.contributor_ids:
+    role = CONTRIBUTOR_ROLE
+  else:
+    role = USER_ROLE
+
+  # TODO(jrobbins): re-implement same_org
+
+  if project is None:
+    status = UNDEFINED_STATUS
+  else:
+    status = project.state
+
+  if project is None:
+    access = UNDEFINED_ACCESS
+  else:
+    access = project.access
+
+  return role, status, access
+
+
+def GetExtraPerms(project, member_id):
+  """Return a list of extra perms for the user in the project.
+
+  Args:
+    project: Project PB for the current project.
+    member_id: user id of a project owner, member, or contributor.
+
+  Returns:
+    A list of strings for the extra perms granted to the
+    specified user in this project.  The list will often be empty.
+  """
+
+  extra_perms = FindExtraPerms(project, member_id)
+
+  if extra_perms:
+    # Copy so that callers cannot mutate the ExtraPerms PB's own list.
+    return list(extra_perms.perms)
+  else:
+    return []
+
+
+def FindExtraPerms(project, member_id):
+  """Return a ExtraPerms PB for the given user in the project.
+
+  Args:
+    project: Project PB for the current project, or None if the user is
+        not currently in a project.
+    member_id: user ID of a project owner, member, or contributor.
+
+  Returns:
+    An ExtraPerms PB, or None.
+  """
+  if not project:
+    # TODO(jrobbins): maybe define extra perms for site-wide operations.
+    return None
+
+  # Users who have no current role cannot have any extra perms.  Don't
+  # consider effective_ids (which includes user groups) for this check.
+  if not framework_bizobj.UserIsInProject(project, {member_id}):
+    return None
+
+  # Linear scan; extra_perms lists are expected to be short.
+  for extra_perms in project.extra_perms:
+    if extra_perms.member_id == member_id:
+      return extra_perms
+
+  return None
+
+
+def GetCustomPermissions(project):
+  """Return a sorted iterable of custom perms granted in a project.
+
+  A custom perm is any extra-perm string that is not one of the
+  STANDARD_PERMISSIONS defined above.
+  """
+  custom_permissions = set()
+  for extra_perms in project.extra_perms:
+    for perm in extra_perms.perms:
+      if perm not in STANDARD_PERMISSIONS:
+        custom_permissions.add(perm)
+
+  return sorted(custom_permissions)
+
+
+def UserCanViewProject(user, effective_ids, project, expired_before=None):
+  """Return True if the user can view the given project.
+
+  Args:
+    user: User protobuf for the user trying to view the project.
+    effective_ids: set of int user IDs of the user trying to view the project
+        (including any groups), or an empty set for anonymous users.
+    project: the Project protobuf to check.
+    expired_before: optional time value for testing.
+
+  Returns:
+    True if the user should be allowed to view the project.
+  """
+  perms = GetPermissions(user, effective_ids, project)
+
+  # Expired (long-archived) projects need the stronger, admin-only perm.
+  if IsExpired(project, expired_before=expired_before):
+    needed_perm = VIEW_EXPIRED_PROJECT
+  else:
+    needed_perm = VIEW
+
+  return perms.CanUsePerm(needed_perm, effective_ids, project, [])
+
+
+def IsExpired(project, expired_before=None):
+  """Return True if a project deletion has been pending long enough already.
+
+  Args:
+    project: The project being viewed.
+    expired_before: If supplied, this method will return True only if the
+        project expired before the given time.
+
+  Returns:
+    True if the project is eligible for reaping.
+  """
+  # Only ARCHIVED projects can expire.
+  if project.state != project_pb2.ProjectState.ARCHIVED:
+    return False
+
+  if expired_before is None:
+    expired_before = int(time.time())
+
+  # delete_time of 0/None means no deletion was ever scheduled.
+  return project.delete_time and project.delete_time < expired_before
+
+
+def CanDelete(logged_in_user_id, effective_ids, perms, deleted_by_user_id,
+              creator_user_id, project, restrictions, granted_perms=None):
+  """Returns true if user has delete permission.
+
+  Args:
+    logged_in_user_id: int user id of the logged in user.
+    effective_ids: set of int user IDs for the user (including any groups),
+        or an empty set if user is not signed in.
+    perms: instance of PermissionSet describing the current user's permissions.
+    deleted_by_user_id: int user ID of the user having previously deleted this
+        comment, or None, if the comment has never been deleted.
+    creator_user_id: int user ID of the user having created this comment.
+    project: Project PB for the project being accessed, or None if not
+        in a project.
+    restrictions: list of strings that restrict permission usage.
+    granted_perms: optional list of strings of permissions that the user is
+        granted only within the scope of one issue, e.g., by being named in
+        a user-type custom field that grants permissions.
+
+  Returns:
+    True if the logged in user has delete permissions.
+  """
+
+  # User is not logged in or has no permissions.
+  if not logged_in_user_id or not perms:
+    return False
+
+  # Site admin or project owners can delete any comment.
+  permit_delete_any = perms.CanUsePerm(
+      DELETE_ANY, effective_ids, project, restrictions,
+      granted_perms=granted_perms)
+  if permit_delete_any:
+    return True
+
+  # Users cannot undelete unless they deleted.
+  if deleted_by_user_id and deleted_by_user_id != logged_in_user_id:
+    return False
+
+  # Users can delete their own items.
+  # NOTE: granted_perms is intentionally not passed here; per-issue grants
+  # do not extend DELETE_OWN -- TODO confirm this is deliberate.
+  permit_delete_own = perms.CanUsePerm(
+      DELETE_OWN, effective_ids, project, restrictions)
+  if permit_delete_own and creator_user_id == logged_in_user_id:
+    return True
+
+  return False
+
+
+def CanView(effective_ids, perms, project, restrictions, granted_perms=None):
+  """Checks if user has permission to view an issue.
+
+  Thin wrapper around PermissionSet.CanUsePerm for the VIEW perm.
+  """
+  return perms.CanUsePerm(
+      VIEW, effective_ids, project, restrictions, granted_perms=granted_perms)
+
+
+def CanCreateProject(perms):
+  """Return True if the given user may create a project.
+
+  Args:
+    perms: Permissionset for the current user.
+
+  Returns:
+    True if the user should be allowed to create a project.
+  """
+  # "ANYONE" means anyone who has the needed perm.
+  if (settings.project_creation_restriction ==
+      site_pb2.UserTypeRestriction.ANYONE):
+    return perms.HasPerm(CREATE_PROJECT, None, None)
+
+  if (settings.project_creation_restriction ==
+      site_pb2.UserTypeRestriction.ADMIN_ONLY):
+    return perms.HasPerm(ADMINISTER_SITE, None, None)
+
+  # Any other restriction setting (e.g. NO_ONE) denies creation.
+  return False
+
+
+def CanCreateGroup(perms):
+  """Return True if the given user may create a user group.
+
+  Args:
+    perms: Permissionset for the current user.
+
+  Returns:
+    True if the user should be allowed to create a group.
+  """
+  # "ANYONE" means anyone who has the needed perm.
+  if (settings.group_creation_restriction ==
+      site_pb2.UserTypeRestriction.ANYONE):
+    return perms.HasPerm(CREATE_GROUP, None, None)
+
+  if (settings.group_creation_restriction ==
+      site_pb2.UserTypeRestriction.ADMIN_ONLY):
+    return perms.HasPerm(ADMINISTER_SITE, None, None)
+
+  # Any other restriction setting denies group creation.
+  return False
+
+
+def CanEditGroup(perms, effective_ids, group_owner_ids):
+  """Return True if the given user may edit a user group.
+
+  Args:
+    perms: Permissionset for the current user.
+    effective_ids: set of user IDs for the logged in user.
+    group_owner_ids: set of user IDs of the user group owners.
+
+  Returns:
+    True if the user should be allowed to edit the group: either they hold
+    the site-level EDIT_GROUP perm, or they are one of the group's owners.
+  """
+  return (perms.HasPerm(EDIT_GROUP, None, None) or
+          not effective_ids.isdisjoint(group_owner_ids))
+
+
+def CanViewGroup(perms, effective_ids, group_settings, member_ids, owner_ids,
+                 user_project_ids):
+  """Return True if the given user may view a user group.
+
+  Args:
+    perms: Permissionset for the current user.
+    effective_ids: set of user IDs for the logged in user.
+    group_settings: PB of UserGroupSettings.
+    member_ids: A list of member ids of this user group.
+    owner_ids: A list of owner ids of this user group.
+    user_project_ids: A list of project ids which the user has a role.
+
+  Returns:
+    True if the user should be allowed to view the group.
+  """
+  if perms.HasPerm(VIEW_GROUP, None, None):
+    return True
+  # The user could view this group with membership of some projects which are
+  # friends of the group.
+  if (group_settings.friend_projects and user_project_ids
+      and (set(group_settings.friend_projects) & set(user_project_ids))):
+    return True
+  # Otherwise fall back to the group's own visibility setting.
+  visibility = group_settings.who_can_view_members
+  if visibility == usergroup_pb2.MemberVisibility.OWNERS:
+    return not effective_ids.isdisjoint(owner_ids)
+  elif visibility == usergroup_pb2.MemberVisibility.MEMBERS:
+    return (not effective_ids.isdisjoint(member_ids) or
+            not effective_ids.isdisjoint(owner_ids))
+  else:
+    # Any other visibility value (e.g. ANYONE) means the group is public.
+    return True
+
+
+def IsBanned(user, user_view):
+  """Return True if this user is banned from using our site.
+
+  Args:
+    user: User PB, or None for anonymous visitors.
+    user_view: UserView for the same user, or None; used only for its
+        email domain.
+
+  Returns:
+    True if the user may not use the site.
+  """
+  if user is None:
+    return False  # Anyone is welcome to browse
+
+  if user.banned:
+    return True  # We checked the "Banned" checkbox for this user.
+
+  if user_view:
+    if user_view.domain in settings.banned_user_domains:
+      return True  # Some spammers create many accounts with the same domain.
+
+  return False
+
+
+def CanViewContributorList(mr):
+  """Return True if we should display the list project contributors.
+
+  This is used on the project summary page, when deciding to offer the
+  project People page link, and when generating autocomplete options
+  that include project members.
+
+  Args:
+    mr: commonly used info parsed from the request.
+
+  Returns:
+    True if we should display the project contributor list.
+  """
+  if not mr.project:
+    return False  # We are not even in a project context.
+
+  if not mr.project.only_owners_see_contributors:
+    return True  # Contributor list is not restricted.
+
+  # If it is hub-and-spoke, check for the perm that allows the user to
+  # view it anyway.
+  return mr.perms.HasPerm(
+      VIEW_CONTRIBUTOR_LIST, mr.auth.user_id, mr.project)
+
+
+def ShouldCheckForAbandonment(mr):
+  """Return True if user should be warned before changing/deleting their role.
+
+  Args:
+    mr: common info parsed from the user's request.
+
+  Returns:
+    True if user should be warned before changing/deleting their role.
+  """
+  # Note: No need to warn admins because they won't lose access anyway.
+  if mr.perms.CanUsePerm(
+      ADMINISTER_SITE, mr.auth.effective_ids, mr.project, []):
+    return False
+
+  # Only users who can currently edit the project risk locking themselves out.
+  return mr.perms.CanUsePerm(
+      EDIT_PROJECT, mr.auth.effective_ids, mr.project, [])
+
+
+# For speed, we remember labels that we have already classified as being
+# restriction labels or not being restriction labels.  These sets are for
+# restrictions in general, not for any particular perm.
+# NOTE(review): these module-level caches grow without bound over the
+# process lifetime and are shared across requests -- presumably acceptable
+# because the label vocabulary is small; confirm.
+_KNOWN_RESTRICTION_LABELS = set()
+_KNOWN_NON_RESTRICTION_LABELS = set()
+
+
+def IsRestrictLabel(label, perm=''):
+  """Returns True if a given label is a restriction label.
+
+  Args:
+    label: string for the label to examine.
+    perm: a permission that can be restricted (e.g. 'View' or 'Edit').
+        Defaults to '' to mean 'any'.
+
+  Returns:
+    True if a given label is a restriction label (of the specified perm)
+  """
+  if label in _KNOWN_NON_RESTRICTION_LABELS:
+    return False
+  # The positive cache only applies to the general (any-perm) question.
+  if not perm and label in _KNOWN_RESTRICTION_LABELS:
+    return True
+
+  prefix = ('restrict-%s-' % perm.lower()) if perm else 'restrict-'
+  # Require at least two dashes so the label has all three parts:
+  # Restrict-Action-NeededPerm.
+  is_restrict = label.lower().startswith(prefix) and label.count('-') >= 2
+
+  if is_restrict:
+    _KNOWN_RESTRICTION_LABELS.add(label)
+  elif not perm:
+    # Only cache negatives for the general question; a label that is not a
+    # restriction of one perm may still be a restriction of another.
+    _KNOWN_NON_RESTRICTION_LABELS.add(label)
+
+  return is_restrict
+
+
+def HasRestrictions(issue, perm=''):
+  """Return True if the issue has any restrictions (on the specified perm).
+
+  Checks both explicit labels and derived (rule-generated) labels.
+  """
+  return (
+      any(IsRestrictLabel(lab, perm=perm) for lab in issue.labels) or
+      any(IsRestrictLabel(lab, perm=perm) for lab in issue.derived_labels))
+
+
+def GetRestrictions(issue):
+  """Return a list of restriction labels on the given issue.
+
+  Labels are lowercased, matching the case-insensitive comparison done
+  in PermissionSet.CanUsePerm.
+  """
+  if not issue:
+    return []
+
+  return [lab.lower() for lab in tracker_bizobj.GetLabels(issue)
+          if IsRestrictLabel(lab)]
+
+
+def CanViewIssue(
+    effective_ids, perms, project, issue, allow_viewing_deleted=False,
+    granted_perms=None):
+  """Checks if user has permission to view an artifact.
+
+  Args:
+    effective_ids: set of user IDs for the logged in user and any user
+        group memberships.  Should be an empty set for anon users.
+    perms: PermissionSet for the user.
+    project: Project PB for the project that contains this issue.
+    issue: Issue PB for the issue being viewed.
+    allow_viewing_deleted: True if the user should be allowed to view
+        deleted artifacts.
+    granted_perms: optional list of strings of permissions that the user is
+        granted only within the scope of one issue, e.g., by being named in
+        a user-type custom field that grants permissions.
+
+  Returns:
+    True iff the user can view the specified issue.
+  """
+  if issue.deleted and not allow_viewing_deleted:
+    # No one can view a deleted issue.  If the user can undelete, that
+    # goes through the custom 404 page.
+    return False
+
+  # Check to see if the user can view anything in the project.
+  if not perms.CanUsePerm(VIEW, effective_ids, project, []):
+    return False
+
+  # Fast path: unrestricted issues are visible to anyone who can view
+  # the project.
+  if not HasRestrictions(issue):
+    return True
+
+  return CanViewRestrictedIssueInVisibleProject(
+      effective_ids, perms, project, issue, granted_perms=granted_perms)
+
+
+def CanViewRestrictedIssueInVisibleProject(
+    effective_ids, perms, project, issue, granted_perms=None):
+  """Return True if the user can view this issue.  Assumes project is OK."""
+  # The reporter, owner, and CC'd users can always see the issue.
+  # In effect, these fields override artifact restriction labels.
+  if effective_ids:
+    if (issue.reporter_id in effective_ids or
+        tracker_bizobj.GetOwnerId(issue) in effective_ids or
+        not effective_ids.isdisjoint(tracker_bizobj.GetCcIds(issue))):
+      return True
+
+  # Otherwise, apply the usual permission checking.
+  return CanView(
+      effective_ids, perms, project, GetRestrictions(issue),
+      granted_perms=granted_perms)
+
+
+def CanEditIssue(effective_ids, perms, project, issue, granted_perms=None):
+  """Return True if a user can edit an issue.
+
+  Args:
+    effective_ids: set of user IDs for the logged in user and any user
+        group memberships.  Should be an empty set for anon users.
+    perms: PermissionSet for the user.
+    project: Project PB for the project that contains this issue.
+    issue: Issue PB for the issue being viewed.
+    granted_perms: optional list of strings of permissions that the user is
+        granted only within the scope of one issue, e.g., by being named in
+        a user-type custom field that grants permissions.
+
+  Returns:
+    True iff the user can edit the specified issue.
+  """
+  # TODO(jrobbins): We need to actually grant View+EditIssue in most cases.
+  # So, always grant View whenever there is any granted perm.
+  # Editing requires at least being able to see the issue.
+  if not CanViewIssue(
+      effective_ids, perms, project, issue, granted_perms=granted_perms):
+    return False
+
+  # The issue owner can always edit the issue.
+  if effective_ids:
+    if tracker_bizobj.GetOwnerId(issue) in effective_ids:
+      return True
+
+  # Otherwise, apply the usual permission checking.
+  return perms.CanUsePerm(
+      EDIT_ISSUE, effective_ids, project, GetRestrictions(issue),
+      granted_perms=granted_perms)
+
+
+def CanCommentIssue(effective_ids, perms, project, issue, granted_perms=None):
+  """Return True if a user can comment on an issue.
+
+  Delegates to CanUsePerm with the issue's restriction labels applied.
+  """
+
+  return perms.CanUsePerm(
+      ADD_ISSUE_COMMENT, effective_ids, project,
+      GetRestrictions(issue), granted_perms=granted_perms)
+
+
+def CanViewComponentDef(effective_ids, perms, project, component_def):
+  """Return True if a user can view the given component definition."""
+  if not effective_ids.isdisjoint(component_def.admin_ids):
+    return True  # Component admins can view that component.
+
+  # TODO(jrobbins): check restrictions on the component definition.
+  return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditComponentDef(effective_ids, perms, project, component_def, config):
+  """Return True if a user can edit the given component definition.
+
+  Admins of the component itself, admins of any ancestor component, and
+  anyone with EDIT_PROJECT may edit it.
+  """
+  if not effective_ids.isdisjoint(component_def.admin_ids):
+    return True  # Component admins can edit that component.
+
+  # Check to see if user is admin of any parent component.
+  parent_components = tracker_bizobj.FindAncestorComponents(
+      config, component_def)
+  for parent in parent_components:
+    if not effective_ids.isdisjoint(parent.admin_ids):
+      return True
+
+  return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+def CanViewFieldDef(effective_ids, perms, project, field_def):
+  """Return True if a user can view the given field definition."""
+  if not effective_ids.isdisjoint(field_def.admin_ids):
+    return True  # Field admins can view that field.
+
+  # TODO(jrobbins): check restrictions on the field definition.
+  return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditFieldDef(effective_ids, perms, project, field_def):
+  """Return True if a user can edit the given field definition."""
+  if not effective_ids.isdisjoint(field_def.admin_ids):
+    return True  # Field admins can edit that field.
+
+  # Otherwise, only project-level editors may change field definitions.
+  return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+def CanViewTemplate(effective_ids, perms, project, template):
+  """Return True if a user can view the given issue template."""
+  if not effective_ids.isdisjoint(template.admin_ids):
+    return True  # template admins can view that template.
+
+  # Members-only templates are only shown to members, other templates are
+  # shown to any user that is generally allowed to view project content.
+  if template.members_only:
+    return framework_bizobj.UserIsInProject(project, effective_ids)
+  else:
+    return perms.CanUsePerm(VIEW, effective_ids, project, [])
+
+
+def CanEditTemplate(effective_ids, perms, project, template):
+  """Return True if a user can edit the given issue template."""
+  if not effective_ids.isdisjoint(template.admin_ids):
+    return True  # Template admins can edit that template.
+
+  # Otherwise, only project-level editors may change templates.
+  return perms.CanUsePerm(EDIT_PROJECT, effective_ids, project, [])
+
+
+class Error(Exception):
+  """Base class for errors from this module, so callers can catch them all."""
+
+
+class PermissionException(Error):
+  """The user is not authorized to make the current request."""
+
+
+class BannedUserException(Error):
+  """The user has been banned from using our service."""
diff --git a/appengine/monorail/framework/profiler.py b/appengine/monorail/framework/profiler.py
new file mode 100644
index 0000000..7c34891
--- /dev/null
+++ b/appengine/monorail/framework/profiler.py
@@ -0,0 +1,92 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A simple profiler object to track how time is spent on a request.
+
+The profiler is called from application code at the beginning and
+end of each major phase and subphase of processing. The profiler
+object keeps track of how much time was spent on each phase or subphase.
+
+This class is useful when developers need to understand where
+server-side time is being spent. It includes durations in
+milliseconds, and a simple bar chart on the HTML page.
+
+On-page debugging and performance info is useful because it makes it easier
+to explore performance interactively.
+"""
+
+import logging
+import time
+
+from contextlib import contextmanager
+
+
class Profiler(object):
  """Records and helps display how request-processing time was spent.

  A Profiler holds a tree of phase objects rooted at top_phase: each phase
  may hold subphases, and each one represents some meaningful part of this
  application's HTTP request processing.
  """

  _COLORS = ['900', '090', '009', '360', '306', '036',
             '630', '630', '063', '333']

  def __init__(self):
    """Start a fresh profile containing only the implicit overall phase."""
    self.top_phase = _Phase('overall profile', -1, None)
    self.current_phase = self.top_phase
    self.next_color = 0

  @contextmanager
  def Phase(self, name='unspecified phase'):
    """Context manager that opens a (sub)phase on entry, ends it on exit."""
    # Cycle through the fixed palette as phases are created.
    bar_color = self._COLORS[self.next_color % len(self._COLORS)]
    self.next_color += 1
    self.current_phase = _Phase(name, bar_color, self.current_phase)
    try:
      yield
    finally:
      # End() returns the parent, restoring the enclosing phase even if
      # the body raised.
      self.current_phase = self.current_phase.End()

  def LogStats(self):
    """Log sufficiently-long phases and subphases, for debugging purposes."""
    self.top_phase.LogStats()
+
+
+class _Phase(object):
+ """A _Phase instance represents a period of time during request processing."""
+
+ def __init__(self, name, color, parent):
+ """Initialize a (sub)phase with the given name and current system clock."""
+ self.start = time.time()
+ self.name = name
+ self.color = color
+ self.subphases = []
+ self.elapsed_seconds = None
+ self.ms = 'in_progress' # shown if the phase never records a finish.
+ self.uncategorized_ms = None
+ self.parent = parent
+ if self.parent is not None:
+ self.parent._RegisterSubphase(self)
+
+ def _RegisterSubphase(self, subphase):
+ """Add a subphase to this phase."""
+ self.subphases.append(subphase)
+
+ def End(self):
+ """Record the time between the start and end of this (sub)phase."""
+ self.elapsed_seconds = time.time() - self.start
+ self.ms = str(int(self.elapsed_seconds * 1000))
+ categorized = sum(sub.elapsed_seconds for sub in self.subphases)
+ self.uncategorized_ms = int((self.elapsed_seconds - categorized) * 1000)
+ return self.parent
+
+ def LogStats(self):
+ # Phases that took longer than 30ms are interesting.
+ if self.elapsed_seconds > 0.03:
+ logging.info('%5s: %s', self.ms, self.name)
+ for subphase in self.subphases:
+ subphase.LogStats()
diff --git a/appengine/monorail/framework/ratelimiter.py b/appengine/monorail/framework/ratelimiter.py
new file mode 100644
index 0000000..0c968df
--- /dev/null
+++ b/appengine/monorail/framework/ratelimiter.py
@@ -0,0 +1,178 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Request rate limiting implementation.
+
+This is intended to be used for automatic DDoS protection.
+
+"""
+
+import datetime
+import logging
+import settings
+import time
+
+from infra_libs import ts_mon
+
+from google.appengine.api import memcache
+from google.appengine.api.modules import modules
+from google.appengine.api import users
+
# Number of one-minute buckets summed together for each rate-limit check.
N_MINUTES = 5
# Lifetime of each per-minute counter in memcache.
EXPIRE_AFTER_SECS = 60 * 60
DEFAULT_LIMIT = 300  # 300 requests in 5 minutes is 1 QPS.

# Placeholder used in cache keys in place of an email for anonymous requests.
ANON_USER = 'anon'

# Request header that App Engine populates with the request's country code.
COUNTRY_HEADER = 'X-AppEngine-Country'

COUNTRY_LIMITS = {
  # Two-letter country code: max requests per N_MINUTES
  # This limit will apply to all requests coming
  # from this country.
  # To add a country code, see GAE logs and use the
  # appropriate code from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
  # E.g., 'cn': 300, # Limit to 1 QPS.
}

# Modules not in this list will not have rate limiting applied by this
# class.
MODULE_WHITELIST = ['default']
+
def _CacheKeys(request, now_sec):
  """Build the memcache key sets used to count this request.

  Returns a tuple (keysets, country, ip, user_email).  keysets is a list of
  lists of strings: within each inner list the keys share a prefix and carry
  a timestamp suffix, starting with the most recent minute and stepping back
  one minute at a time.
  """
  now = datetime.datetime.fromtimestamp(now_sec)
  country = request.headers.get(COUNTRY_HEADER, 'ZZ')
  ip = request.remote_addr
  user = users.get_current_user()
  user_email = user.email() if user else ANON_USER

  # Minute-granularity buckets, newest first.
  minute_buckets = [
      (now - datetime.timedelta(minutes=m)).replace(second=0, microsecond=0)
      for m in range(N_MINUTES)]

  # Each prefix is an <IP, country, user_email> triple.
  prefixes = []
  if user:
    # All logged-in users get a per-user rate limit, regardless of IP
    # and country.
    prefixes.append(['ALL', 'ALL', user.email()])
  else:
    # All anon requests get a per-IP ratelimit.
    prefixes.append([ip, 'ALL', 'ALL'])
  if country in COUNTRY_LIMITS:
    # All requests from a problematic country get a per-country rate limit,
    # regardless of the user (even a non-logged-in one) or IP.
    prefixes.append(['ALL', country, 'ALL'])

  keysets = [
      ['ratelimit-%s-%s' % ('-'.join(prefix), str(bucket))
       for bucket in minute_buckets]
      for prefix in prefixes]

  return keysets, country, ip, user_email
+
class RateLimiter(object):
  """Tracks request counts in memcache and rejects over-limit requests.

  Counters are kept per user, per IP, and (for listed countries) per
  country, in per-minute memcache buckets.  CheckStart raises
  RateLimitExceeded when a request goes over its limit; CheckEnd charges
  extra points for requests that were expensive to serve.
  """

  blocked_requests = ts_mon.CounterMetric(
      'monorail/ratelimiter/blocked_request')
  limit_exceeded = ts_mon.CounterMetric(
      'monorail/ratelimiter/rate_exceeded')
  cost_thresh_exceeded = ts_mon.CounterMetric(
      'monorail/ratelimiter/cost_thresh_exceeded')
  checks = ts_mon.CounterMetric(
      'monorail/ratelimiter/check')

  def __init__(self, _cache=memcache, fail_open=True, **_kwargs):
    """fail_open: if True, allow the request when the memcache check errors.

    _cache is accepted but currently unused.
    """
    self.fail_open = fail_open

  def CheckStart(self, request, now=None):
    """Count this request and raise RateLimitExceeded if over any limit.

    Args:
      request: the incoming webapp2 request.
      now: optional float timestamp; defaults to the current time.
    """
    if (modules.get_current_module_name() not in MODULE_WHITELIST or
        users.is_current_user_admin()):
      return  # Only whitelisted modules are limited, and admins never are.
    logging.info('X-AppEngine-Country: %s' %
                 request.headers.get(COUNTRY_HEADER, 'ZZ'))

    if now is None:
      now = time.time()

    keysets, country, ip, user_email = _CacheKeys(request, now)
    # There are either two or three sets of keys in keysets.
    # Three if the user's country is in COUNTRY_LIMITS, otherwise two.
    for keys in keysets:
      count = 0
      try:
        counters = memcache.get_multi(keys)
        count = sum(counters.values())
        self.checks.increment({'type': 'success'})
      except Exception as e:
        logging.error(e)
        if not self.fail_open:
          # We cannot check the counters and may not fail open, so treat
          # the request as over-limit.  Bug fix: this path now records
          # 'fail_closed' (and the continue path below records
          # 'fail_open'); the two labels were previously swapped.
          self.checks.increment({'type': 'fail_closed'})
          raise RateLimitExceeded(country=country, ip=ip, user_email=user_email)
        self.checks.increment({'type': 'fail_open'})

      limit = COUNTRY_LIMITS.get(country, DEFAULT_LIMIT)
      if count > limit:
        # Since webapp2 won't let us return a 429 error code
        # <http://tools.ietf.org/html/rfc6585#section-4>, we can't
        # monitor rate limit exceeded events with our standard tools.
        # We return a 400 with a custom error message to the client,
        # and this logging is so we can monitor it internally.
        logging.info('Rate Limit Exceeded: %s, %s, %s, %d' % (
            country, ip, user_email, count))

        self.limit_exceeded.increment()

        if settings.ratelimiting_enabled:
          self.blocked_requests.increment()
          raise RateLimitExceeded(country=country, ip=ip, user_email=user_email)

      k = keys[0]
      # Only update the latest *time* bucket for each prefix (reverse chron).
      memcache.add(k, 0, time=EXPIRE_AFTER_SECS)
      memcache.incr(k, initial_value=0)

  def CheckEnd(self, request, now, start_time):
    """If a request was expensive to process, charge some extra points
    against this set of buckets.

    We pass in both now and start_time so we can update the buckets
    based on keys created from start_time instead of now.
    now and start_time are float seconds.
    """
    if (modules.get_current_module_name() not in MODULE_WHITELIST or
        not settings.ratelimiting_cost_enabled):
      return

    elapsed_ms = (now - start_time) * 1000
    # Would it kill the python lib maintainers to have timedelta.total_ms()?
    if elapsed_ms < settings.ratelimiting_cost_thresh_ms:
      return

    # TODO: Look into caching the keys instead of generating them twice
    # for every request. Say, return them from CheckStart so they can
    # be passed back in here later.
    keysets, country, ip, user_email = _CacheKeys(request, start_time)
    for keys in keysets:
      # NOTE(review): this logs and charges cost_thresh_exceeded once per
      # keyset (so 2-3 times per request); confirm that is intended.
      logging.info('Rate Limit Cost Threshold Exceeded: %s, %s, %s' % (
          country, ip, user_email))
      self.cost_thresh_exceeded.increment_by(settings.ratelimiting_cost_penalty)

      # Only update the latest *time* bucket for each prefix (reverse chron).
      k = keys[0]
      memcache.add(k, 0, time=EXPIRE_AFTER_SECS)
      memcache.incr(k, initial_value=0)
+
class RateLimitExceeded(Exception):
  """Raised when a request goes over its rate limit; carries request info."""

  def __init__(self, country=None, ip=None, user_email=None, **_kwargs):
    # Extra keyword arguments are accepted and ignored.
    self.country = country
    self.ip = ip
    self.user_email = user_email

  def __str__(self):
    return 'RateLimitExceeded: {}, {}, {}'.format(
        self.country, self.ip, self.user_email)
diff --git a/appengine/monorail/framework/reap.py b/appengine/monorail/framework/reap.py
new file mode 100644
index 0000000..4c2bbd7
--- /dev/null
+++ b/appengine/monorail/framework/reap.py
@@ -0,0 +1,119 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to handle cron requests to expunge doomed and deletable projects."""
+
+import logging
+import time
+
+from framework import jsonfeed
+
+RUN_DURATION_LIMIT = 50 * 60 # 50 minutes
+
+
class Reap(jsonfeed.InternalTask):
  """Look for doomed and deletable projects and delete them."""

  def HandleRequest(self, mr):
    """Update/Delete doomed and deletable projects as needed.

    Args:
      mr: common information parsed from the HTTP request.

    Returns:
      Results dictionary in JSON format. The JSON will look like this:
      {
        'doomed_project_ids': <int>,
        'expunged_project_ids': <int>
      }
      doomed_project_ids are the projects which have been marked as deletable.
      expunged_project_ids are the projects that have either been completely
      expunged or are in the midst of being expunged.
    """
    doomed_project_ids = self._MarkDoomedProjects(mr.cnxn)
    expunged_project_ids = self._ExpungeDeletableProjects(mr.cnxn)
    return {
        'doomed_project_ids': doomed_project_ids,
        'expunged_project_ids': expunged_project_ids,
    }

  def _MarkDoomedProjects(self, cnxn):
    """No longer needed projects get doomed, and this marks them deletable."""
    now = int(time.time())
    doomed_project_rows = self.services.project.project_tbl.Select(
        cnxn, cols=['project_id'],
        # We only match projects with real timestamps and not delete_time = 0.
        where=[('delete_time < %s', [now]), ('delete_time != %s', [0])],
        state='archived', limit=1000)
    doomed_project_ids = [row[0] for row in doomed_project_rows]
    for project_id in doomed_project_ids:
      self.services.project.MarkProjectDeletable(
          cnxn, project_id, self.services.config)

    return doomed_project_ids

  def _ExpungeDeletableProjects(self, cnxn):
    """Chip away at deletable projects until they are gone."""
    request_deadline = time.time() + RUN_DURATION_LIMIT

    deletable_project_rows = self.services.project.project_tbl.Select(
        cnxn, cols=['project_id'], state='deletable', limit=100)
    deletable_project_ids = [row[0] for row in deletable_project_rows]
    # expunged_project_ids will contain projects that have either been
    # completely expunged or are in the midst of being expunged.
    expunged_project_ids = set()
    for project_id in deletable_project_ids:
      for _part in self._ExpungeParts(cnxn, project_id):
        expunged_project_ids.add(project_id)
        # Stop partway through if the request deadline approaches; the
        # remaining work is picked up on the next cron run.
        if time.time() > request_deadline:
          return list(expunged_project_ids)

    return list(expunged_project_ids)

  def _ExpungeParts(self, cnxn, project_id):
    """Delete all data from the specified project, one part at a time.

    This method purges all data associated with the specified project. The
    following is purged:
    * All issues of the project.
    * Project config.
    * Saved queries.
    * Filter rules.
    * Former locations.
    * Local ID counters.
    * Quick edit history.
    * Item stars.
    * Project from the DB.

    Returns a generator whose return values can be either issue
    ids or the specified project id. The returned values are intended to be
    iterated over and not read.
    """
    # Purge all issues of the project.
    # NOTE(review): this loop breaks unconditionally after one pass, so at
    # most 1000 issues are expunged per call -- confirm whether it was
    # meant to repeat until no issues remain.
    while True:
      issue_id_rows = self.services.issue.issue_tbl.Select(
          cnxn, cols=['id'], project_id=project_id, limit=1000)
      issue_ids = [row[0] for row in issue_id_rows]
      for issue_id in issue_ids:
        self.services.issue_star.ExpungeStars(cnxn, issue_id)
      self.services.issue.ExpungeIssues(cnxn, issue_ids)
      yield issue_ids
      break

    # All project purge functions are called with cnxn and project_id.
    project_purge_functions = (
      self.services.config.ExpungeConfig,
      self.services.features.ExpungeSavedQueriesExecuteInProject,
      self.services.features.ExpungeFilterRules,
      self.services.issue.ExpungeFormerLocations,
      self.services.issue.ExpungeLocalIDCounters,
      self.services.features.ExpungeQuickEditHistory,
      self.services.project_star.ExpungeStars,
      self.services.project.ExpungeProject,
    )

    for f in project_purge_functions:
      f(cnxn, project_id)
      yield project_id
diff --git a/appengine/monorail/framework/registerpages_helpers.py b/appengine/monorail/framework/registerpages_helpers.py
new file mode 100644
index 0000000..5a86336
--- /dev/null
+++ b/appengine/monorail/framework/registerpages_helpers.py
@@ -0,0 +1,72 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file sets up all the urls for monorail pages."""
+
+
+import httplib
+import logging
+
+import webapp2
+
+
def MakeRedirect(redirect_to_this_uri, permanent=True):
  """Return a new request handler class that redirects to the given URL."""

  # Decide the response code once, when the handler class is built.
  status_code = httplib.MOVED_PERMANENTLY if permanent else httplib.FOUND

  class Redirect(webapp2.RequestHandler):
    """Response handler that issues a redirect to another URI."""

    def get(self, **_kw):
      """Send the 301/302 response code and write the Location: redirect."""
      self.response.location = redirect_to_this_uri
      self.response.headers.add('Strict-Transport-Security',
                                'max-age=31536000; includeSubDomains')
      self.response.status = status_code

  return Redirect
+
+
def MakeRedirectInScope(uri_in_scope, scope, permanent=True):
  """Redirect to a URI within a given scope, e.g., per project or user.

  Args:
    uri_in_scope: a uri within a project or user starting with a slash.
    scope: a string indicating the uri-space scope:
      p for project pages
      u for user pages
      g for group pages
    permanent: True for a HTTP 301 permanently moved response code,
      otherwise a HTTP 302 temporarily moved response will be used.

  Example:
    self._SetupProjectPage(
       redirect.MakeRedirectInScope('/newpage', 'p'), '/oldpage')

  Returns:
    A class that can be used with webapp2.
  """
  assert uri_in_scope.startswith('/')
  status_code = httplib.MOVED_PERMANENTLY if permanent else httplib.FOUND

  class RedirectInScope(webapp2.RequestHandler):
    """A handler that redirects to another URI in the same scope."""

    def get(self, **_kw):
      """Send the 301/302 response code and write the Location: redirect."""
      # Path looks like /<scope>/<project_or_user>/...; rebuild the target
      # URL inside the same project or user space.
      split_path = self.request.path.lstrip('/').split('/')
      if len(split_path) > 1:
        project_or_user = split_path[1]
        self.response.location = '//%s/%s/%s%s' % (
            self.request.host, scope, project_or_user, uri_in_scope)
      else:
        self.response.location = '/'

      self.response.headers.add('Strict-Transport-Security',
                                'max-age=31536000; includeSubDomains')
      self.response.status = status_code

  return RedirectInScope
diff --git a/appengine/monorail/framework/servlet.py b/appengine/monorail/framework/servlet.py
new file mode 100644
index 0000000..ce1e25e
--- /dev/null
+++ b/appengine/monorail/framework/servlet.py
@@ -0,0 +1,909 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Base classes for Monorail servlets.
+
+This base class provides HTTP get() and post() methods that
+conveniently drive the process of parsing the request, checking base
+permissions, gathering common page information, gathering
+page-specific information, and adding on-page debugging information
+(when appropriate). Subclasses can simply implement the page-specific
+logic.
+
+Summary of page classes:
+ Servlet: abstract base class for all Monorail servlets.
+ _ContextDebugItem: displays page_data elements for on-page debugging.
+"""
+
+import httplib
+import json
+import logging
+import os
+import time
+import urllib
+
+from third_party import ezt
+
+from google.appengine.api import users
+
+import webapp2
+
+import settings
+from features import savedqueries_helpers
+from framework import actionlimit
+from framework import alerts
+from framework import captcha
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import permissions
+from framework import profiler
+from framework import ratelimiter
+from framework import servlet_helpers
+from framework import template_helpers
+from framework import urls
+from framework import xsrf
+from proto import project_pb2
+from search import query2ast
+from services import issue_svc
+from services import project_svc
+from services import secrets_svc
+from services import user_svc
+from tracker import tracker_views
+
+NONCE_LENGTH = 32
+
+if not settings.unit_test_mode:
+ import MySQLdb
+
+
class MethodNotSupportedError(NotImplementedError):
  """Raised when a servlet does not support the attempted HTTP method.

  GatherPageData raises this to reject GET requests, and ProcessFormData
  raises it to reject POST requests, on servlets that do not support them.
  """
+
+
class Servlet(webapp2.RequestHandler):
  """Base class for all Monorail servlets.

  Defines a framework of methods that build up parts of the EZT page data.

  Subclasses should override GatherPageData and/or ProcessFormData to
  handle requests.
  """

  _MAIN_TAB_MODE = None  # Normally overridden in subclasses to be one of these:

  MAIN_TAB_NONE = 't0'
  MAIN_TAB_DASHBOARD = 't1'
  MAIN_TAB_ISSUES = 't2'
  MAIN_TAB_PEOPLE = 't3'
  MAIN_TAB_PROCESS = 't4'
  MAIN_TAB_UPDATES = 't5'
  MAIN_TAB_ADMIN = 't6'
  PROCESS_TAB_SUMMARY = 'st1'
  PROCESS_TAB_STATUSES = 'st3'
  PROCESS_TAB_LABELS = 'st4'
  PROCESS_TAB_RULES = 'st5'
  PROCESS_TAB_TEMPLATES = 'st6'
  PROCESS_TAB_COMPONENTS = 'st7'
  PROCESS_TAB_VIEWS = 'st8'
  ADMIN_TAB_META = 'st1'
  ADMIN_TAB_ADVANCED = 'st9'

  # Most forms require a security token, however if a form is really
  # just redirecting to a search GET request without writing any data,
  # subclass can override this to allow anonymous use.
  CHECK_SECURITY_TOKEN = True

  # Most forms just ignore fields that have value "". Subclasses can override
  # if needed.
  KEEP_BLANK_FORM_VALUES = False

  # Most forms use regular forms, but subclasses that accept attached files can
  # override this to be True.
  MULTIPART_POST_BODY = False

  # This value should not typically be overridden.
  _TEMPLATE_PATH = framework_constants.TEMPLATE_PATH

  _PAGE_TEMPLATE = None  # Normally overridden in subclasses.
  _ELIMINATE_BLANK_LINES = False

  _CAPTCHA_ACTION_TYPES = []  # Override this in subclass to add captcha.

  _MISSING_PERMISSIONS_TEMPLATE = 'sitewide/403-page.ezt'
+
+ def __init__(self, request, response, services=None,
+ content_type='text/html; charset=UTF-8'):
+ """Load and parse the template, saving it for later use."""
+ super(Servlet, self).__init__(request, response)
+ if self._PAGE_TEMPLATE: # specified in subclasses
+ template_path = self._TEMPLATE_PATH + self._PAGE_TEMPLATE
+ self.template = template_helpers.GetTemplate(
+ template_path, eliminate_blank_lines=self._ELIMINATE_BLANK_LINES)
+ else:
+ self.template = None
+
+ self._missing_permissions_template = template_helpers.MonorailTemplate(
+ self._TEMPLATE_PATH + self._MISSING_PERMISSIONS_TEMPLATE)
+ self.services = services or self.app.config.get('services')
+ self.content_type = content_type
+ self.profiler = profiler.Profiler()
+ self.mr = None
+ self.ratelimiter = ratelimiter.RateLimiter()
+
  def dispatch(self):
    """Do common stuff then dispatch the request to get() or put() methods."""
    handler_start_time = time.time()

    logging.info('\n\n\nRequest handler: %r', self)

    self.mr = monorailrequest.MonorailRequest()

    # Rate-limit before doing any real work; raises RateLimitExceeded,
    # which is trapped below.
    self.ratelimiter.CheckStart(self.request)
    self.response.headers.add('Strict-Transport-Security',
        'max-age=31536000; includeSubDomains')

    if self.services.cache_manager:
      # TODO(jrobbins): don't do this step if invalidation_timestep was
      # passed via the request and matches our last timestep
      try:
        with self.profiler.Phase('distributed invalidation'):
          self.services.cache_manager.DoDistributedInvalidation(self.mr.cnxn)

      except MySQLdb.OperationalError as e:
        logging.exception(e)
        self.redirect('/database-maintenance', abort=True)

    try:
      with self.profiler.Phase('parsing request and doing lookups'):
        self.mr.ParseRequest(self.request, self.services, self.profiler)

      self.response.headers['X-Frame-Options'] = 'SAMEORIGIN'
      # Hand off to webapp2, which routes to this servlet's get()/post().
      webapp2.RequestHandler.dispatch(self)

    except user_svc.NoSuchUserException as e:
      logging.warning('Trapped NoSuchUserException %s', e)
      self.abort(404, 'user not found')

    except monorailrequest.InputException as e:
      logging.info('Rejecting invalid input: %r', e)
      self.response.status = httplib.BAD_REQUEST

    except project_svc.NoSuchProjectException as e:
      logging.info('Rejecting invalid request: %r', e)
      self.response.status = httplib.BAD_REQUEST

    except xsrf.TokenIncorrect as e:
      logging.info('Bad XSRF token: %r', e.message)
      self.response.status = httplib.BAD_REQUEST

    except AlreadySentResponseException:
      # If servlet already sent response, then do nothing more. E.g.,
      # when serving attachment content, we do not use templates.
      pass

    except permissions.BannedUserException as e:
      logging.warning('The user has been banned')
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, urls.BANNED, include_project=False, copy_params=False)
      self.redirect(url, abort=True)

    except actionlimit.ExcessiveActivityException:
      logging.info('Excessive Activity Exception %r', self.mr.auth.user_id)
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, urls.EXCESSIVE_ACTIVITY,
          include_project=False, copy_params=False)
      self.redirect(url, abort=True)

    except ratelimiter.RateLimitExceeded as e:
      logging.info('RateLimitExceeded Exception %s', e)
      self.response.status = httplib.BAD_REQUEST
      self.response.body = 'Slow your roll.'

    finally:
      # Always clean up the request and charge rate-limit cost points,
      # even when an exception was trapped above.
      self.mr.CleanUp()
      self.ratelimiter.CheckEnd(self.request, time.time(), handler_start_time)

    total_processing_time = time.time() - handler_start_time
    logging.warn('Processed request in %d ms',
                 int(total_processing_time * 1000))
    if settings.enable_profiler_logging:
      self.profiler.LogStats()
+
+ def _AddHelpDebugPageData(self, page_data):
+ with self.profiler.Phase('help and debug data'):
+ page_data.update(self.GatherHelpData(self.mr, page_data))
+ page_data.update(self.GatherDebugData(self.mr, page_data))
+
  # pylint: disable=unused-argument
  def get(self, **kwargs):
    """Collect page-specific and generic info, then render the page.

    Args:
      Any path components parsed by webapp2 will be in kwargs, but we do
      our own parsing later anyway, so ignore them for now.
    """
    page_data = {}
    # Per-request nonce used both in the CSP header and in <script> tags.
    nonce = framework_helpers.MakeRandomKey(length=NONCE_LENGTH)
    try:
      csp_header = 'Content-Security-Policy'
      csp_scheme = 'https:'
      if settings.dev_mode:
        # In dev mode, only report violations rather than enforcing them.
        csp_header = 'Content-Security-Policy-Report-Only'
        csp_scheme = 'http:'
      user_agent = self.mr.request.headers.get('User-Agent', '')
      csp_supports_nonce = (
          ('Chrome' in user_agent or 'Firefox' in user_agent) and
          ('Edge' not in user_agent))
      self.response.headers.add(csp_header,
          ("default-src %(scheme)s ; "
           "script-src"
           " 'unsafe-inline'"  # Only counts in browsers that lack CSP2.
           # NOTE(review): CSP3 spells this keyword 'strict-dynamic';
           # confirm 'unsafe-dynamic' (an older draft name) is intended.
           " 'unsafe-dynamic'"  # Allows <script nonce> to load more.
           " https://www.gstatic.com/recaptcha/api2/"
           " %(csp_self)s 'nonce-%(nonce)s'; "
           "child-src https://www.google.com/recaptcha/; "
           "frame-src https://www.google.com/recaptcha/; "
           "img-src %(scheme)s data: blob: ; "
           "style-src %(scheme)s 'unsafe-inline'; "
           "object-src 'none'; "
           "report-uri /csp.do" % {
           'nonce': nonce,
           'scheme': csp_scheme,
           'csp_self': '' if csp_supports_nonce else "'self'",
           }))

      page_data.update(self._GatherFlagData(self.mr))

      # Page-specific work happens in this call.
      page_data.update(self._DoPageProcessing(self.mr, nonce))

      self._AddHelpDebugPageData(page_data)

      with self.profiler.Phase('rendering template'):
        self._RenderResponse(page_data)

    except (MethodNotSupportedError, NotImplementedError) as e:
      # Instead of these pages throwing 500s display the 404 message and log.
      # The motivation of this is to minimize 500s on the site to keep alerts
      # meaningful during fuzzing. For more context see
      # https://bugs.chromium.org/p/monorail/issues/detail?id=659
      logging.warning('Trapped NotImplementedError %s', e)
      self.abort(404, 'invalid page')
    except query2ast.InvalidQueryError as e:
      logging.warning('Trapped InvalidQueryError: %s', e)
      logging.exception(e)
      msg = e.message if e.message else 'invalid query'
      self.abort(400, msg)
    except permissions.PermissionException as e:
      logging.warning('Trapped PermissionException %s', e)
      if not self.mr.auth.user_id:
        # If not logged in, let them log in
        url = _SafeCreateLoginURL(self.mr)
        self.redirect(url, abort=True)
      else:
        # Display the missing permissions template.
        self.response.status = httplib.FORBIDDEN
        page_data = {'reason': e.message}
        with self.profiler.Phase('gather base data'):
          page_data.update(self.GatherBaseData(self.mr, nonce))
        self._AddHelpDebugPageData(page_data)
        self._missing_permissions_template.WriteResponse(
            self.response, page_data, content_type=self.content_type)
+
+ def SetCacheHeaders(self, response):
+ """Set headers to allow the response to be cached."""
+ headers = framework_helpers.StaticCacheHeaders()
+ for name, value in headers:
+ response.headers[name] = value
+
+ def GetTemplate(self, _page_data):
+ """Get the template to use for writing the http response.
+
+ Defaults to self.template. This method can be overwritten in subclasses
+ to allow dynamic template selection based on page_data.
+
+ Args:
+ _page_data: A dict of data for ezt rendering, containing base ezt
+ data, captcha data, page data, and debug data.
+
+ Returns:
+ The template to be used for writing the http response.
+ """
+ return self.template
+
+ def _GatherFlagData(self, mr):
+ page_data = {
+ 'recaptcha_public_key': secrets_svc.GetRecaptchaPublicKey(),
+ 'project_stars_enabled': ezt.boolean(
+ settings.enable_project_stars),
+ 'user_stars_enabled': ezt.boolean(settings.enable_user_stars),
+ 'can_create_project': ezt.boolean(
+ permissions.CanCreateProject(mr.perms)),
+ 'can_create_group': ezt.boolean(
+ permissions.CanCreateGroup(mr.perms)),
+ }
+
+ return page_data
+
+ def _RenderResponse(self, page_data):
+ logging.info('rendering response len(page_data) is %r', len(page_data))
+ self.GetTemplate(page_data).WriteResponse(
+ self.response, page_data, content_type=self.content_type)
+
  def ProcessFormData(self, mr, post_data):
    """Handle form data and redirect appropriately.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.

    Raises:
      MethodNotSupportedError: always, unless overridden by the subclass.
    """
    raise MethodNotSupportedError()
+
  def post(self, **kwargs):
    """Parse the request, check base perms, and call form-specific code."""
    try:
      # Page-specific work happens in this call.
      self._DoFormProcessing(self.request, self.mr)

    except permissions.PermissionException as e:
      logging.warning('Trapped permission-related exception "%s".', e)
      # TODO(jrobbins): can we do better than an error page? not much.
      self.response.status = httplib.BAD_REQUEST

    except issue_svc.MidAirCollisionException as e:
      logging.info('Mid-air collision detected.')
      # Send the user to the collision page so they can reconcile their
      # edits with the concurrent change.
      collision_page_url = urls.ARTIFACT_COLLISION
      url = framework_helpers.FormatAbsoluteURL(
          self.mr, collision_page_url, copy_params=False,
          name=e.name, continue_issue_id=e.continue_issue_id,
          ts=int(time.time()))
      self.redirect(url, abort=True)
+
  def _DoCommonRequestProcessing(self, request, mr):
    """Do common processing dependent on having the user and project pbs."""
    with self.profiler.Phase('basic processing'):
      # A moved project is handled (via redirect) before permissions are
      # checked; AssertBasePermission raises PermissionException on failure.
      self._CheckForMovedProject(mr, request)
      self.AssertBasePermission(mr)
+
  def _DoPageProcessing(self, mr, nonce):
    """Do user lookups and gather page-specific ezt data.

    Args:
      mr: commonly used info parsed from the request.
      nonce: per-request random string used in CSP and script tags.

    Returns:
      Dict of EZT data: base data, captcha data, then page data.
    """
    with self.profiler.Phase('common request data'):
      self._DoCommonRequestProcessing(self.request, mr)
      page_data = self.GatherBaseData(mr, nonce)
      page_data.update(self.GatherCaptchaData(mr))

    with self.profiler.Phase('page processing'):
      page_data.update(self.GatherPageData(mr))
      # Form overrides are applied last, so they win over gathered values.
      page_data.update(mr.form_overrides)
      template_helpers.ExpandLabels(page_data)

    return page_data
+
  def _DoFormProcessing(self, request, mr):
    """Do user lookups and handle form data."""
    self._DoCommonRequestProcessing(request, mr)

    if self.CHECK_SECURITY_TOKEN:
      # Raises xsrf.TokenIncorrect on a bad or missing token.
      xsrf.ValidateToken(
          request.POST.get('token'), mr.auth.user_id, request.path)

    redirect_url = self.ProcessFormData(mr, request.POST)

    # Most forms redirect the user to a new URL on success. If no
    # redirect_url was returned, the form handler must have already
    # sent a response. E.g., bounced the user back to the form with
    # invalid form fields highlighted.
    if redirect_url:
      self.redirect(redirect_url, abort=True)
    else:
      assert self.response.body
+
  def _CheckForMovedProject(self, mr, request):
    """If the project moved, redirect there or to an informational page."""
    if not mr.project:
      return  # We are on a site-wide or user page.
    if not mr.project.moved_to:
      return  # This project has not moved.
    admin_url = '/p/%s%s' % (mr.project_name, urls.ADMIN_META)
    if request.path.startswith(admin_url):
      return  # It moved, but we are near the page that can un-move it.

    logging.info('project %s has moved: %s', mr.project.project_name,
                 mr.project.moved_to)

    moved_to = mr.project.moved_to
    if framework_bizobj.RE_PROJECT_NAME.match(moved_to):
      # moved_to names a project on this site, so redirect directly there.
      # Use the redir query parameter to avoid redirect loops.
      if mr.redir is None:
        url = framework_helpers.FormatMovedProjectURL(mr, moved_to)
        if '?' in url:
          url += '&redir=1'
        else:
          url += '?redir=1'
        logging.info('trusted move to a new project on our site')
        self.redirect(url, abort=True)

    logging.info('not a trusted move, will display link to user to click')
    # Attach the project name as a url param instead of generating a /p/
    # link to the destination project.
    url = framework_helpers.FormatAbsoluteURL(
        mr, urls.PROJECT_MOVED,
        include_project=False, copy_params=False, project=mr.project_name)
    self.redirect(url, abort=True)
+
+ def CheckPerm(self, mr, perm, art=None, granted_perms=None):
+ """Return True if the user can use the requested permission."""
+ return servlet_helpers.CheckPerm(
+ mr, perm, art=art, granted_perms=granted_perms)
+
+ def MakePagePerms(self, mr, art, *perm_list, **kwargs):
+ """Make an EZTItem with a set of permissions needed in a given template.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ art: a project artifact, such as an issue.
+ *perm_list: any number of permission names that are referenced
+ in the EZT template.
+ **kwargs: dictionary that may include 'granted_perms' list of permissions
+ granted to the current user specifically on the current page.
+
+ Returns:
+ An EZTItem with one attribute for each permission and the value
+ of each attribute being an ezt.boolean(). True if the user
+ is permitted to do that action on the given artifact, or
+ False if not.
+ """
+ granted_perms = kwargs.get('granted_perms')
+ page_perms = template_helpers.EZTItem()
+ for perm in perm_list:
+ setattr(
+ page_perms, perm,
+ ezt.boolean(self.CheckPerm(
+ mr, perm, art=art, granted_perms=granted_perms)))
+
+ return page_perms
+
  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page.

    Subclasses should call super, then check additional permissions
    and raise a PermissionException if the user is not authorized to
    do something.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      PermissionException: If the user does not have permission to view
        the current page.
    """
    servlet_helpers.AssertBasePermission(mr)
+
  def GatherBaseData(self, mr, nonce):
    """Return a dict of info used on almost all pages.

    Args:
      mr: commonly used info parsed from the request.
      nonce: random value echoed into the page template.
        (assumed to be for script nonce / CSP use — TODO confirm at caller)

    Returns:
      A dict of template data common to all EZT pages.
    """
    project = mr.project

    # Project-derived display fields; defaults apply on non-project pages.
    project_summary = ''
    project_alert = None
    project_read_only = False
    project_home_page = ''
    project_thumbnail_url = ''
    if project:
      project_summary = project.summary
      project_alert = _CalcProjectAlert(project)
      # NOTE: this stores the reason *string*; its truthiness is what is
      # used as the read-only flag below.
      project_read_only = project.read_only_reason
      project_home_page = project.home_page
      project_thumbnail_url = tracker_views.LogoView(project).thumbnail_url

    # If we have both a project and a logged in user, we need to check if the
    # user has starred that project.
    with self.profiler.Phase('project star'):
      is_project_starred = False
      if mr.project and mr.auth.user_id:
        is_project_starred = self.services.project_star.IsItemStarredBy(
            mr.cnxn, mr.project_id, mr.auth.user_id)

    project_view = None
    if mr.project:
      # TODO(jrobbins): should this be a ProjectView?
      project_view = template_helpers.PBProxy(mr.project)

    app_version = os.environ.get('CURRENT_VERSION_ID')

    viewed_username = None
    if mr.viewed_user_auth.user_view:
      viewed_username = mr.viewed_user_auth.user_view.username

    # Issue-grid and canned-query data; only computed on project pages.
    grid_x_attr = None
    grid_y_attr = None
    canned_query_views = []
    issue_entry_url = 'entry'
    if mr.project_id and self.services.config:
      with self.profiler.Phase('getting config'):
        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
        canned_queries = self.services.features.GetCannedQueriesByProjectID(
            mr.cnxn, mr.project_id)
      # Explicit request values override the project's configured defaults.
      grid_x_attr = (mr.x or config.default_x_attr).lower()
      grid_y_attr = (mr.y or config.default_y_attr).lower()
      canned_query_views = [
          savedqueries_helpers.SavedQueryView(sq, idx + 1, None, None)
          for idx, sq in enumerate(canned_queries)]
      issue_entry_url = _ComputeIssueEntryURL(mr, config)

    # Personal saved queries, restricted to ones that execute in this
    # project (or all, when not on a project page).
    if mr.auth.user_id and self.services.features:
      with self.profiler.Phase('getting saved queries'):
        saved_queries = self.services.features.GetSavedQueriesByUserID(
            mr.cnxn, mr.me_user_id)
        saved_query_views = [
            savedqueries_helpers.SavedQueryView(sq, idx + 1, None, None)
            for idx, sq in enumerate(saved_queries)
            if (mr.project_id in sq.executes_in_project_ids or
                not mr.project_id)]
    else:
      saved_query_views = []

    viewing_self = mr.auth.user_id == mr.viewed_user_auth.user_id
    # Site admins may see any user's saved-queries subtab.
    offer_saved_queries_subtab = (
        viewing_self or mr.auth.user_pb and mr.auth.user_pb.is_site_admin)

    login_url = _SafeCreateLoginURL(mr)
    logout_url = _SafeCreateLogoutURL(mr)
    logout_url_goto_home = users.create_logout_url('/')

    base_data = {
        # EZT does not have constants for True and False, so we pass them in.
        'True': ezt.boolean(True),
        'False': ezt.boolean(False),

        'site_name': settings.site_name,
        'show_search_metadata': ezt.boolean(False),
        'page_template': self._PAGE_TEMPLATE,
        'main_tab_mode': self._MAIN_TAB_MODE,
        'project_summary': project_summary,
        'project_home_page': project_home_page,
        'project_thumbnail_url': project_thumbnail_url,

        'hostport': mr.request.host,
        'absolute_base_url': '%s://%s' % (mr.request.scheme, mr.request.host),
        'project_home_url': None,
        'link_rel_canonical': None,  # For specifying <link rel="canonical">
        'projectname': mr.project_name,
        'project': project_view,
        'project_is_restricted': ezt.boolean(_ProjectIsRestricted(mr)),
        'offer_contributor_list': ezt.boolean(
            permissions.CanViewContributorList(mr)),
        'logged_in_user': mr.auth.user_view,
        'form_token': None,  # Set to a value below iff the user is logged in.
        'form_token_path': None,
        'token_expires_sec': None,
        'xhr_token': None,  # Set to a value below iff the user is logged in.
        'flag_spam_token': None,
        'nonce': nonce,
        'perms': mr.perms,
        'warnings': mr.warnings,
        'errors': mr.errors,

        'viewed_username': viewed_username,
        'viewed_user': mr.viewed_user_auth.user_view,
        'viewed_user_pb': template_helpers.PBProxy(
            mr.viewed_user_auth.user_pb),
        'viewing_self': ezt.boolean(viewing_self),
        'viewed_user_id': mr.viewed_user_auth.user_id,
        'offer_saved_queries_subtab': ezt.boolean(offer_saved_queries_subtab),

        'currentPageURL': mr.current_page_url,
        'currentPageURLEncoded': mr.current_page_url_encoded,
        'login_url': login_url,
        'logout_url': logout_url,
        'logout_url_goto_home': logout_url_goto_home,
        'continue_issue_id': mr.continue_issue_id,
        'feedback_email': settings.feedback_email,
        'category_css': None,  # Used to specify a category of stylesheet
        'page_css': None,  # Used to add a stylesheet to a specific page.

        'can': mr.can,
        'query': mr.query,
        'colspec': None,
        'sortspec': mr.sort_spec,

        'grid_x_attr': grid_x_attr,
        'grid_y_attr': grid_y_attr,
        'grid_cell_mode': mr.cells,
        'grid_mode': None,

        'issue_entry_url': issue_entry_url,
        'canned_queries': canned_query_views,
        'saved_queries': saved_query_views,
        'is_cross_project': ezt.boolean(False),

        # for project search (some also used in issue search)
        'start': mr.start,
        'num': mr.num,
        'groupby': mr.group_by_spec,
        # Search box width grows with the query, within configured bounds.
        'q_field_size': (
            min(framework_constants.MAX_ARTIFACT_SEARCH_FIELD_SIZE,
                max(framework_constants.MIN_ARTIFACT_SEARCH_FIELD_SIZE,
                    len(mr.query) + framework_constants.AUTOSIZE_STEP))),
        'mode': None,  # Display mode, e.g., grid mode.
        'ajah': mr.ajah,
        'table_title': mr.table_title,

        'alerts': alerts.AlertsView(mr),  # For alert.ezt
        'project_alert': project_alert,

        'title': None,  # First part of page title
        'title_summary': None,  # Appended to title on artifact detail pages

        # TODO(jrobbins): make sure that the templates use
        # project_read_only for project-mutative actions and if any
        # uses of read_only remain.
        'project_read_only': ezt.boolean(project_read_only),
        'site_read_only': ezt.boolean(settings.read_only),
        'banner_time': servlet_helpers.GetBannerTime(settings.banner_time),
        'read_only': ezt.boolean(settings.read_only or project_read_only),
        'site_banner_message': settings.banner_message,
        'robots_no_index': None,
        'analytics_id': settings.analytics_id,

        'is_project_starred': ezt.boolean(is_project_starred),

        'app_version': app_version,
        'viewing_user_page': ezt.boolean(False),
        }

    if mr.project:
      base_data['project_home_url'] = '/p/%s' % mr.project_name

    # Always add an anti-xsrf token when the user is logged in.
    if mr.auth.user_id:
      form_token_path = self._FormHandlerURL(mr.request.path)
      base_data['form_token'] = xsrf.GenerateToken(
          mr.auth.user_id, form_token_path)
      base_data['form_token_path'] = form_token_path
      base_data['token_expires_sec'] = xsrf.TokenExpiresSec()
      base_data['xhr_token'] = xsrf.GenerateToken(
          mr.auth.user_id, xsrf.XHR_SERVLET_PATH)
      base_data['flag_spam_token'] = xsrf.GenerateToken(
          mr.auth.user_id, '/p/%s%s.do' % (
              mr.project_name, urls.ISSUE_FLAGSPAM_JSON))

    return base_data
+
+ def _FormHandlerURL(self, path):
+ """Return the form handler for the main form on a page."""
+ if path.endswith('/'):
+ return path + 'edit.do'
+ elif path.endswith('.do'):
+ return path # This happens as part of PleaseCorrect().
+ else:
+ return path + '.do'
+
  def GatherCaptchaData(self, mr):
    """If this page needs a captcha, return captcha info for use in EZT.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      A dict with one 'show_captcha' ezt.boolean entry.
    """
    if (mr.project and
        framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
      # Don't show users CAPTCHAs within their own projects.
      return {'show_captcha': ezt.boolean(False)}

    # Show a CAPTCHA if any of this servlet's action types has hit its limit.
    show_captcha = any(actionlimit.NeedCaptcha(mr.auth.user_pb, action_type)
                       for action_type in self._CAPTCHA_ACTION_TYPES)
    return {'show_captcha': ezt.boolean(show_captcha)}
+
  def GatherPageData(self, mr):
    """Return a dict of page-specific ezt data.

    Subclasses must override this to provide the template data for the
    specific page being served.

    Raises:
      MethodNotSupportedError: always, in this base class.
    """
    raise MethodNotSupportedError()
+
  # pylint: disable=unused-argument
  def GatherHelpData(self, mr, page_data):
    """Return a dict of values to drive on-page user help.

    Subclasses may override this to offer page-specific help cues.

    Args:
      mr: common information parsed from the HTTP request.
      page_data: Dictionary of base and page template data.

    Returns:
      A dict of values to drive on-page user help, to be added to page_data.
    """
    return {
        'cue': None,  # for cues.ezt; no help cue by default.
        }
+
+ def GatherDebugData(self, mr, page_data):
+ """Return debugging info for display at the very bottom of the page."""
+ if mr.debug_enabled:
+ debug = [_ContextDebugCollection('Page data', page_data)]
+ return {
+ 'dbg': 'on',
+ 'debug': debug,
+ 'profiler': self.profiler,
+ }
+ else:
+ if '?' in mr.current_page_url:
+ debug_url = mr.current_page_url + '&debug=1'
+ else:
+ debug_url = mr.current_page_url + '?debug=1'
+
+ return {
+ 'debug_uri': debug_url,
+ 'dbg': 'off',
+ 'debug': [('none', 'recorded')],
+ }
+
  def CheckCaptcha(self, mr, post_data):
    """Check the provided CAPTCHA solution and add an error if it is wrong.

    Args:
      mr: commonly used info parsed from the request.  On failure,
        mr.errors.captcha is set.
      post_data: HTTP POST form data containing 'g-recaptcha-response'.
    """
    if (mr.project and
        framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
      return  # Don't check a user's actions within their own projects.

    if not any(actionlimit.NeedCaptcha(mr.auth.user_pb, action_type)
               for action_type in self._CAPTCHA_ACTION_TYPES):
      return  # no captcha was needed.

    remote_ip = mr.request.remote_addr
    captcha_response = post_data.get('g-recaptcha-response')
    correct, _msg = captcha.Verify(remote_ip, captcha_response)
    if not correct:
      logging.info('BZzzz! Bad captcha solution.')
      mr.errors.captcha = 'Captcha check failed.'
+
  def CountRateLimitedActions(self, mr, action_counts):
    """Count attempted actions against non-member's action limits.

    Note that users can take any number of actions in their own projects.

    Args:
      mr: commonly used info parsed from the request.
      action_counts: {action_type: delta, ... }
        a dictionary mapping action type constants to the number of times
        that action was performed during the current request (usually 1).
    """
    if (mr.project and
        framework_bizobj.UserIsInProject(mr.project, mr.auth.effective_ids)):
      # Don't count a user's actions within their own projects...
      return

    for action_type in action_counts:
      actionlimit.CountAction(
          mr.auth.user_pb, action_type, delta=action_counts[action_type])

    # Persist the updated action counters stored on the user PB.
    self.services.user.UpdateUser(mr.cnxn, mr.auth.user_id, mr.auth.user_pb)
+
  def PleaseCorrect(self, mr, **echo_data):
    """Show the same form again so that the user can correct their input.

    Args:
      mr: commonly used info parsed from the request.
      **echo_data: values to echo back into the re-rendered form fields.
    """
    mr.PrepareForReentry(echo_data)
    self.get()  # Re-render the page through the normal GET handler.
+
+
def _CalcProjectAlert(project):
  """Return a string to be shown as red text explaining the project state.

  Args:
    project: Project PB for the current project.

  Returns:
    An alert message string, or None if the project is in a normal state.
    Note that the moved/deletion/archived messages below overwrite the
    read-only message, so only one alert is ever shown.
  """

  project_alert = None

  if project.read_only_reason:
    project_alert = 'READ-ONLY: %s.' % project.read_only_reason
  if project.moved_to:
    project_alert = 'This project has moved to: %s.' % project.moved_to
  elif project.delete_time:
    delay_seconds = project.delete_time - time.time()
    # Floor division: partial days count as zero remaining days.
    delay_days = delay_seconds // framework_constants.SECS_PER_DAY
    if delay_days <= 0:
      project_alert = 'Scheduled for deletion today.'
    else:
      days_word = 'day' if delay_days == 1 else 'days'
      project_alert = (
          'Scheduled for deletion in %d %s.' % (delay_days, days_word))
  elif project.state == project_pb2.ProjectState.ARCHIVED:
    project_alert = 'Project is archived: read-only by members only.'

  return project_alert
+
+
class _ContextDebugItem(object):
  """Wrapper class to generate on-screen debugging output."""

  def __init__(self, key, val):
    """Store the key and generate a string for the value."""
    self.key = key
    if isinstance(val, list):
      nested_debug_strs = [self.StringRep(v) for v in val]
      self.val = '[%s]' % ', '.join(nested_debug_strs)
    else:
      self.val = self.StringRep(val)

  def StringRep(self, val):
    """Make a useful string representation of the given value.

    The broad excepts are deliberate: this is best-effort rendering for a
    debug panel, and must never raise for any value type.
    """
    try:
      # Preferred: the object's own debug formatting, if it provides one.
      return val.DebugString()
    except Exception:
      try:
        # Fall back to the object's attribute dict.
        return str(val.__dict__)
      except Exception:
        # Last resort: repr() works for any value.
        return repr(val)
+
+
class _ContextDebugCollection(object):
  """Attach a title to a dictionary for exporting as a table of debug info."""

  def __init__(self, title, collection):
    # title: heading for this group of debug values.
    # collection: dict of name -> value to display, sorted by key for a
    # stable on-screen ordering.  (iterkeys() is Python 2 only.)
    self.title = title
    self.collection = [_ContextDebugItem(key, collection[key])
                       for key in sorted(collection.iterkeys())]
+
+
def _ProjectIsRestricted(mr):
  """Return True if the mr has a 'private' project."""
  if not mr.project:
    # No current project: return the original falsy value unchanged
    # (matches the short-circuit `and` behavior).
    return mr.project
  return mr.project.access != project_pb2.ProjectAccess.ANYONE
+
+
def _ComputeIssueEntryURL(mr, config):
  """Compute the URL to use for the "New issue" subtab.

  Args:
    mr: commonly used info parsed from the request.
    config: ProjectIssueConfig for the current project.

  Returns:
    A URL string to use.  It will be simply "entry" in the non-customized
    case.  Otherwise it will be a fully qualified URL that includes some
    query string parameters.
  """
  if not config.custom_issue_entry_url:
    return 'entry'

  base_url = config.custom_issue_entry_url
  sep = '&' if '?' in base_url else '?'
  # Give the external entry page a token valid for posting back to our
  # issue entry form handler.
  token = xsrf.GenerateToken(
      mr.auth.user_id, '/p/%s%s%s' % (mr.project_name, urls.ISSUE_ENTRY, '.do'))
  role_name = framework_helpers.GetRoleName(mr.auth.effective_ids, mr.project)

  continue_url = urllib.quote(framework_helpers.FormatAbsoluteURL(
      mr, urls.ISSUE_ENTRY + '.do'))

  return '%s%stoken=%s&role=%s&continue=%s' % (
      base_url, sep, urllib.quote(token),
      urllib.quote(role_name or ''), continue_url)
+
+
def _SafeCreateLoginURL(mr):
  """Make a login URL w/ a detailed continue URL, otherwise use a short one."""
  try:
    return users.create_login_url(mr.current_page_url)
  except users.RedirectTooLongError:
    # The continue URL was too long; fall back to a short, stable location.
    fallback = '/p/%s' % mr.project_name if mr.project_name else '/'
    return users.create_login_url(fallback)
+
+
def _SafeCreateLogoutURL(mr):
  """Make a logout URL w/ a detailed continue URL, otherwise use a short one."""
  try:
    return users.create_logout_url(mr.current_page_url)
  except users.RedirectTooLongError:
    # The continue URL was too long; fall back to a short, stable location.
    fallback = '/p/%s' % mr.project_name if mr.project_name else '/'
    return users.create_logout_url(fallback)
+
+
class Error(Exception):
  """Base class for errors from this module."""
  pass


class AlreadySentResponseException(Error):
  """The servlet already responded, no need to render a page template."""
  pass
diff --git a/appengine/monorail/framework/servlet_helpers.py b/appengine/monorail/framework/servlet_helpers.py
new file mode 100644
index 0000000..be79be3
--- /dev/null
+++ b/appengine/monorail/framework/servlet_helpers.py
@@ -0,0 +1,114 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions used by the Monorail servlet base class."""
+
+import datetime
+import logging
+import time
+
+from framework import permissions
+from framework import template_helpers
+
+
# Day names indexed by datetime.date.weekday() (Monday == 0).
_WEEKDAY = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday']
+
+
def GetBannerTime(timestamp):
  """Converts a timestamp into EZT-ready data so it can appear in the banner.

  Args:
    timestamp: timestamp expressed in the following format:
         [year,month,day,hour,minute,second]
         e.g. [2009,3,20,21,45,50] represents March 20 2009 9:45:50 PM

  Returns:
    EZT-ready data used to display the time inside the banner message,
    or None if no timestamp was given.
  """
  if timestamp is None:
    return None

  ts = datetime.datetime(*map(int, timestamp))

  # Human-readable weekday and 'hour:min AM/PM' fallback for users with
  # javascript disabled.
  weekday_name = _WEEKDAY[ts.weekday()]
  clock_text = ts.strftime('%I:%M%p')

  # Milliseconds since the epoch, for users with javascript enabled.
  epoch_millis = time.mktime(ts.timetuple()) * 1000

  return template_helpers.EZTItem(
      ts=epoch_millis, year=ts.year, month=ts.month, day=ts.day, hour=ts.hour,
      minute=ts.minute, second=ts.second, weekday=weekday_name,
      hour_min=clock_text)
+
+
def AssertBasePermissionForUser(user, user_view):
  """Verify user permissions and state.

  Args:
    user: user_pb2.User protocol buffer for the user
    user_view: framework.views.UserView for the user

  Raises:
    BannedUserException: If the user is banned from the site.
  """
  if permissions.IsBanned(user, user_view):
    raise permissions.BannedUserException(
        'You have been banned from using this site')
+
+
def AssertBasePermission(mr):
  """Make sure that the logged in user can view the requested page.

  Args:
    mr: common information parsed from the HTTP request.

  Returns:
    Nothing

  Raises:
    BannedUserException: If the user is banned.
    PermissionException: If the user does not have permission to view.
  """
  AssertBasePermissionForUser(mr.auth.user_pb, mr.auth.user_view)

  # On project pages, the user must also have VIEW permission.
  if mr.project_name and not CheckPerm(mr, permissions.VIEW):
    logging.info('your perms are %r', mr.perms)
    raise permissions.PermissionException(
        'User is not allowed to view this project')
+
+
def CheckPerm(mr, perm, art=None, granted_perms=None):
  """Convenience method that makes permission checks easier.

  Args:
    mr: common information parsed from the HTTP request.
    perm: A permission constant, defined in module framework.permissions
    art: Optional artifact pb
    granted_perms: optional set of perms granted specifically in that artifact.

  Returns:
    A boolean, whether the request can be satisfied, given the permission.
  """
  # Restriction labels on the artifact (if any) can further limit access.
  return mr.perms.CanUsePerm(
      perm, mr.auth.effective_ids, mr.project,
      permissions.GetRestrictions(art), granted_perms=granted_perms)
+
+
def CheckPermForProject(mr, perm, project, art=None):
  """Convenience method that makes permission checks for projects easier.

  Unlike CheckPerm(), this recomputes the user's permission set for the
  given project, which need not be the project of the current request.

  Args:
    mr: common information parsed from the HTTP request.
    perm: A permission constant, defined in module framework.permissions
    project: The project to enforce permissions for.
    art: Optional artifact pb

  Returns:
    A boolean, whether the request can be satisfied, given the permission.
  """
  perms = permissions.GetPermissions(
      mr.auth.user_pb, mr.auth.effective_ids, project)
  return perms.CanUsePerm(
      perm, mr.auth.effective_ids, project, permissions.GetRestrictions(art))
diff --git a/appengine/monorail/framework/sorting.py b/appengine/monorail/framework/sorting.py
new file mode 100644
index 0000000..282935c
--- /dev/null
+++ b/appengine/monorail/framework/sorting.py
@@ -0,0 +1,480 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for sorting lists of project artifacts.
+
+This module exports the SortArtifacts function that does sorting of
+Monorail business objects (e.g., an issue). The sorting is done by
+extracting relevant values from the PB using a dictionary of
+accessor functions.
+
+The desired sorting directives are specified in part of the user's
+HTTP request. This sort spec consists of the names of the columns
+with optional minus signs to indicate descending sort order.
+
+The tool configuration object also affects sorting. When sorting by
+key-value labels, the well-known labels are considered to come
+before any non-well-known labels, and those well-known labels sort in
+the order in which they are defined in the tool config PB.
+"""
+
+import logging
+
+import settings
+from framework import framework_constants
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
class DescendingValue(object):
  """A wrapper which reverses the sort order of values.

  NOTE: comparison uses __cmp__ and the cmp() builtin, which exist only
  in Python 2.
  """

  @classmethod
  def MakeDescendingValue(cls, obj):
    """Make a value that sorts in the reverse order as obj."""
    if isinstance(obj, int):
      # Negating an int reverses its order without needing a wrapper.
      return -obj
    if obj == MAX_STRING:
      return MIN_STRING
    if obj == MIN_STRING:
      return MAX_STRING
    if isinstance(obj, list):
      # Reverse each element, and the list itself, so list comparisons
      # come out reversed too.
      return [cls.MakeDescendingValue(item) for item in reversed(obj)]
    return DescendingValue(obj)

  def __init__(self, val):
    self.val = val

  def __cmp__(self, other):
    """Return -1, 0, or 1 base on the reverse of the normal sort order."""
    if isinstance(other, DescendingValue):
      return cmp(other.val, self.val)
    else:
      return cmp(other, self.val)

  def __repr__(self):
    return 'DescendingValue(%r)' % self.val
+
+
# A string that sorts after every other string, and one that sorts before them.
# '~' is the last printable ASCII character, so '~~~' compares after ordinary
# label and status strings; MIN_STRING is its descending-order mirror.
MAX_STRING = '~~~'
MIN_STRING = DescendingValue(MAX_STRING)


# RAMCache {issue_id: {column_name: sort_key, ...}, ...}
# Populated lazily by SortArtifacts(); set up by InitializeArtValues().
art_values_cache = None
+
+
def InitializeArtValues(services):
  """Set up the module-global RAM cache of per-issue sort-key values."""
  global art_values_cache
  art_values_cache = services.cache_manager.MakeCache(
      'issue', max_size=settings.issue_cache_max_size)
+
+
def SortArtifacts(
    mr, artifacts, config, accessors, username_cols=None, users_by_id=None):
  """Return a list of artifacts sorted by the user's sort specification.

  In the following, an "accessor" is a function(art) -> [field_value, ...].

  Args:
    mr: commonly used info parsed from the request, including query.
    artifacts: an unsorted list of project artifact PBs.
    config: Project config PB instance that defines the sort order for
        labels and statuses in this project.
    accessors: dictionary of (column_name -> accessor) to get values
        from the artifacts.
    username_cols: optional list of lowercase column names that will show
        user names.
    users_by_id: optional dictionary {user_id: user_view,...} for all users
        who participate in the list of artifacts.

  Returns:
    A sorted list of artifacts.

  Note: if username_cols is supplied, then users_by_id should be too.

  The approach to sorting is to construct a comprehensive sort key for
  each artifact. To create the sort key, we (a) build lists with a
  variable number of fields to sort on, and (b) allow individual
  fields to be sorted in descending order.  Even with the time taken
  to build the sort keys, calling sorted() with the key seems to be
  faster overall than doing multiple stable-sorts or doing one sort
  using a multi-field comparison function.
  """
  sort_directives = ComputeSortDirectives(mr, config)

  # Build a list of accessors that will extract sort keys from the issues.
  accessor_pairs = [
      (sd, _MakeCombinedSortKeyAccessor(
          sd, config, accessors, username_cols, users_by_id))
      for sd in sort_directives]

  def SortKey(art):
    """Make a sort_key for the given artifact, used by sorted() below."""
    # Reuse previously computed per-column sort keys from the RAM cache,
    # computing and caching any that are missing for this sort spec.
    if art_values_cache.HasItem(art.issue_id):
      art_values = art_values_cache.GetItem(art.issue_id)
    else:
      art_values = {}

    sort_key = []
    for sd, accessor in accessor_pairs:
      if sd not in art_values:
        art_values[sd] = accessor(art)
      sort_key.append(art_values[sd])

    art_values_cache.CacheItem(art.issue_id, art_values)
    return sort_key

  return sorted(artifacts, key=SortKey)
+
+
def ComputeSortDirectives(mr, config, tie_breaker='id'):
  """Return a list with sort directives to be used in sorting.

  Args:
    mr: commonly used info parsed from the request, including query.
    config: Project config PB instance that defines the sort order for
        labels and statuses in this project.
    tie_breaker: column name to add to the end of the sort spec if it is
        not already somewhere in the sort spec.

  Returns:
    A list of lower-case column names, each one may have a leading
    minus-sign.
  """
  # The user's grouping and sorting choices take precedence over the
  # project's default sort spec.
  combined_spec = ' '.join(
      [mr.group_by_spec, mr.sort_spec, config.default_sort_spec])

  # Keep only the first directive that mentions any given field; a later
  # directive on the same field could never influence the ordering.
  directives = []
  seen_fields = set()
  for directive in combined_spec.lower().split():
    field_name = directive.lstrip('-')
    if field_name in seen_fields:
      continue
    seen_fields.add(field_name)
    directives.append(directive)

  # Add in the project name so that the overall ordering is completely
  # defined in cross-project search.  Otherwise, issues jump up and
  # down on each reload of the same query, and prev/next links get
  # messed up.  It's a no-op in single projects.  The tie_breaker makes
  # the ordering total within one project.
  for implicit in ('project', tie_breaker):
    if implicit not in directives:
      directives.append(implicit)

  return directives
+
+
def _MakeCombinedSortKeyAccessor(
    sort_directive, config, accessors, username_cols, users_by_id):
  """Return an accessor that extracts a sort key for a UI table column.

  Args:
    sort_directive: string with column name and optional leading minus sign,
        for combined columns, it may have slashes, e.g., "-priority/pri".
    config: ProjectIssueConfig instance that defines the sort order for
        labels and statuses in this project.
    accessors: dictionary of (column_name -> accessor) to get values
        from the artifacts.
    username_cols: list of lowercase names of columns that contain user names.
    users_by_id: dictionary {user_id: user_view,...} for all users
        who participate in the list of artifacts (e.g., owners, reporters, cc).

  Returns:
    A single accessor function that can be applied to an issue to extract
    the relevant sort key value.

  The strings for status and labels are converted to lower case in
  this method so that they sort like case-insensitive enumerations.
  Any component-specific field of the artifact is sorted according to the
  value returned by the accessors defined in that component.  Those
  accessor functions should lower case string values for fields where
  case-insensitive sorting is desired.
  """
  if sort_directive.startswith('-'):
    combined_col_name = sort_directive[1:]
    descending = True
  else:
    combined_col_name = sort_directive
    descending = False

  wk_labels = [wkl.label for wkl in config.well_known_labels]
  # NOTE: this local list rebinds the name 'accessors', shadowing the
  # dict parameter of the same name (which was already passed along above).
  accessors = [
      _MakeSingleSortKeyAccessor(
          col_name, config, accessors, username_cols, users_by_id, wk_labels)
      for col_name in combined_col_name.split('/')]

  # The most common case is that we sort on a single column, like "priority".
  if len(accessors) == 1:
    return _MaybeMakeDescending(accessors[0], descending)

  # Less commonly, we are sorting on a combined column like "priority/pri".
  def CombinedAccessor(art):
    """Flatten and sort the values for each column in a combined column."""
    key_part = []
    for single_accessor in accessors:
      value = single_accessor(art)
      if isinstance(value, list):
        key_part.extend(value)
      else:
        key_part.append(value)
    return sorted(key_part)

  return _MaybeMakeDescending(CombinedAccessor, descending)
+
+
def _MaybeMakeDescending(accessor, descending):
  """If descending is True, return a new function that reverses accessor."""
  if descending:
    # Wrap each ascending sort key so that it compares in reverse order.
    return lambda art: DescendingValue.MakeDescendingValue(accessor(art))

  return accessor
+
+
def _MakeSingleSortKeyAccessor(
    col_name, config, accessors, username_cols, users_by_id, wk_labels):
  """Return an accessor function for a single simple UI column.

  Args:
    col_name: lowercase name of one simple column, e.g., "priority".
    config: ProjectIssueConfig for the current project.
    accessors: dictionary of (column_name -> accessor).
    username_cols: list of lowercase column names that contain user names.
    users_by_id: dictionary {user_id: user_view, ...}.
    wk_labels: list of well-known label strings from the config.

  Returns:
    An accessor function(art) -> sort key value for that column.
  """
  # Case 1. Handle built-in fields: status, component.
  if col_name == 'status':
    wk_statuses = [wks.status for wks in config.well_known_statuses]
    return _IndexOrLexical(wk_statuses, accessors[col_name])

  if col_name == 'component':
    # Components sort by their path order, case-insensitively.
    comp_defs = sorted(config.component_defs, key=lambda cd: cd.path.lower())
    comp_ids = [cd.component_id for cd in comp_defs]
    return _IndexListAccessor(comp_ids, accessors[col_name])

  # Case 2. Any other defined accessor functions.
  if col_name in accessors:
    if username_cols and col_name in username_cols:
      # sort users by email address rather than user ids.
      return _UserEditNameAccessor(users_by_id, accessors[col_name])
    else:
      return accessors[col_name]

  # Case 3. Anything else is assumed to be a label prefix or custom field.
  # TODO(jrobbins): user-valued custom fields. Find them at top of loop.
  fd_list = [
      fd for fd in config.field_defs
      if (fd.field_name.lower() == col_name and
          fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE)]
  return _IndexOrLexicalList(
      wk_labels, fd_list, col_name, users_by_id)
+
+
# Sentinel index for well-known values that do not belong to the column
# currently being sorted.
IGNORABLE_INDICATOR = -1


def _PrecomputeSortIndexes(values, col_name):
  """Precompute indexes of strings in the values list for fast lookup later.

  Args:
    values: list of well-known value strings, in their configured order.
    col_name: lowercase column name used as a prefix filter, or '' to
        index every value.

  Returns:
    A dict {value: index} including lowercase variants of each value.
  """
  # Make a dictionary that immediately gives us the index of any value
  # in the list, and also add the same values in all-lower letters.  In
  # the case where two values differ only by case, the later value wins,
  # which is fine.
  indexes = {}
  if col_name:
    prefix = col_name + '-'
  else:
    prefix = ''
  for idx, val in enumerate(values):
    if val.lower().startswith(prefix):
      indexes[val] = idx
      indexes[val.lower()] = idx
    else:
      # Values belonging to other columns are flagged as ignorable.
      indexes[val] = IGNORABLE_INDICATOR
      indexes[val.lower()] = IGNORABLE_INDICATOR

  return indexes
+
+
def _UserEditNameAccessor(users_by_id, base_accessor):
  """Make an accessor that returns a list of user edit names for sorting.

  Args:
    users_by_id: dictionary {user_id: user_view, ...} for all participants
        in the entire list of artifacts.
    base_accessor: an accessor function f(artifact) -> user_id.

  Returns:
    An accessor f(artifact) -> value that can be used in sorting
    the decorated list.
  """

  def Accessor(art):
    """Return a user edit name for the given artifact's base_accessor."""
    id_or_id_list = base_accessor(art)
    # The base accessor may yield one user id or a list of them.
    if isinstance(id_or_id_list, list):
      emails = [users_by_id[user_id].email
                for user_id in id_or_id_list]
    else:
      emails = [users_by_id[id_or_id_list].email]

    # Artifacts with no users at all sort last via MAX_STRING.
    return sorted(emails) or MAX_STRING

  return Accessor
+
+
def _MakeColumnAccessor(col_name):
  """Make an accessor for an issue's labels that have col_name as a prefix.

  Args:
    col_name: string column name.

  Returns:
    An accessor that can be applied to an artifact to return a list of
    labels that have col_name as a prefix.

  For example, _MakeColumnAccessor('priority')(issue) could result in
  [], or ['priority-high'], or a longer list for multi-valued labels.
  """
  prefix = col_name + '-'

  def Accessor(art):
    """Return a list of label values on the given artifact."""
    # Lowercase so that labels sort like case-insensitive enumerations.
    result = [label.lower() for label in tracker_bizobj.GetLabels(art)
              if label.lower().startswith(prefix)]
    return result

  return Accessor
+
+
def _IndexOrLexical(wk_values, base_accessor):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: a list of well-known status values from the config.
    base_accessor: function that gets a field from a given issue.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key.

  For example, when used to sort issue statuses, these accessors return an
  integer for well-known statuses, a string for odd-ball statuses, and an
  extreme value key for issues with no status.  That causes issues to appear
  in the expected order with odd-ball issues sorted lexicographically after
  the ones with well-known status values, and issues with no defined status at
  the very end.
  """
  well_known_value_indexes = _PrecomputeSortIndexes(wk_values, '')

  def Accessor(art):
    """Custom-made function to return a specific value of any issue."""
    value = base_accessor(art)
    if not value:
      # Undefined values sort last.
      return MAX_STRING

    try:
      # Well-known values sort by index.  Ascending sorting has positive ints
      # in well_known_value_indexes.
      return well_known_value_indexes[value]
    except KeyError:
      # Odd-ball values after well-known and lexicographically.
      return value.lower()

  return Accessor
+
+
def _IndexListAccessor(wk_values, base_accessor):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: a list of well-known values from the config.
    base_accessor: function that gets a field from a given issue.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key: a sorted list of int ranks (MAX_STRING for odd-ball values),
    or MAX_STRING itself when the issue has no values at all.
  """
  rank_by_value = {}
  for rank, val in enumerate(wk_values):
    rank_by_value[val] = rank  # A later duplicate value wins, as before.

  def Accessor(art):
    """Return sorted ranks of the issue's values, or MAX_STRING if none."""
    values = base_accessor(art)
    if not values:
      return MAX_STRING  # Undefined values sort last.

    ranks = [rank_by_value.get(val, MAX_STRING) for val in values]
    ranks.sort()
    return ranks

  return Accessor
+
+
def _IndexOrLexicalList(wk_values, fd_list, col_name, users_by_id):
  """Return an accessor to score an artifact based on a user-defined ordering.

  Args:
    wk_values: A list of well-known labels from the config.
    fd_list: list of FieldDef PBs that match the column name. These might not
        all have the same field_type. Enum-type field are not included.
    col_name: lowercase string name of the column that will be sorted on.
    users_by_id: A dictionary {user_id: user_view}.

  Returns:
    An accessor that can be applied to an issue to return a suitable
    sort key.
  """
  well_known_value_indexes = _PrecomputeSortIndexes(wk_values, col_name)

  def Accessor(art):
    """Return sorted field/label sort keys, or MAX_STRING when none exist."""
    sort_keys = _SortableFieldValues(art, fd_list, users_by_id)
    sort_keys += _SortableLabelValues(art, col_name, well_known_value_indexes)
    if not sort_keys:
      return MAX_STRING  # issues with no value sort to the end of the list.
    sort_keys.sort()
    return sort_keys

  return Accessor
+
+
def _SortableFieldValues(art, fd_list, users_by_id):
  """Return a list of field values relevant to one UI table column."""
  sortable_values = []
  # Iterate field defs in the outer loop so that output stays grouped by
  # field def, exactly as callers have always seen it.
  for fd in fd_list:
    matching_fvs = [fv for fv in art.field_values
                    if fv.field_id == fd.field_id]
    for fv in matching_fvs:
      sortable_values.append(tracker_bizobj.GetFieldValue(fv, users_by_id))

  return sortable_values
+
+
def _SortableLabelValues(art, col_name, well_known_value_indexes):
  """Return a list of ints and strings for labels relevant to one UI column.

  Args:
    art: artifact (e.g., issue) whose labels are examined.
    col_name: lowercase string name of the column being sorted on.
    well_known_value_indexes: dict {label: int rank, odd-ball string value,
        or IGNORABLE_INDICATOR}. This dict is also used as a cache: labels
        classified here are written back into it so later artifacts in the
        same sort skip the string parsing below.

  Returns:
    List of sort keys: int ranks for well-known values and lowercase string
    values for odd-ball labels whose key matches col_name.
  """
  sortable_value_list = []
  for label in tracker_bizobj.GetLabels(art):
    idx_or_lex = well_known_value_indexes.get(label)
    if idx_or_lex == IGNORABLE_INDICATOR:
      continue  # Label is known to not have the desired prefix.
    if idx_or_lex is None:
      # First time we see this label in this sort: classify and cache it.
      if '-' not in label:
        # Skip an irrelevant OneWord label and remember to ignore it later.
        well_known_value_indexes[label] = IGNORABLE_INDICATOR
        continue
      key, value = label.lower().split('-', 1)
      if key == col_name:
        # Label is a key-value label with an odd-ball value, remember it
        idx_or_lex = value
        well_known_value_indexes[label] = value
      else:
        # Label was a key-value label that is not relevant to this column.
        # Remember to ignore it later.
        well_known_value_indexes[label] = IGNORABLE_INDICATOR
        continue

    sortable_value_list.append(idx_or_lex)

  return sortable_value_list
diff --git a/appengine/monorail/framework/sql.py b/appengine/monorail/framework/sql.py
new file mode 100644
index 0000000..223912d
--- /dev/null
+++ b/appengine/monorail/framework/sql.py
@@ -0,0 +1,745 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of classes for interacting with tables in SQL."""
+
+import logging
+import random
+import re
+import sys
+import time
+
+import settings
+
+if not settings.unit_test_mode:
+ import MySQLdb
+
+from framework import framework_helpers
+
+
+# MonorailConnection maintains a dictionary of connections to SQL databases.
+# Each is identified by an int shard ID.
+# And there is one connection to the master DB identified by key MASTER_CNXN.
+MASTER_CNXN = 'master_cnxn'
+
+
@framework_helpers.retry(2, delay=1, backoff=2)
def MakeConnection(instance, database):
  """Open and return a new MySQLdb connection, retrying on failure."""
  logging.info('About to connect to SQL instance %r db %r', instance, database)
  if settings.unit_test_mode:
    raise ValueError('unit tests should not need real database connections')

  if settings.dev_mode:
    # The local dev server talks to a local MySQL daemon over TCP.
    connect_kwargs = {'host': '127.0.0.1', 'port': 3306}
  else:
    # In production, Cloud SQL is reached through a unix domain socket.
    connect_kwargs = {'unix_socket': '/cloudsql/' + instance}

  return MySQLdb.connect(
      db=database, user='root', charset='utf8', **connect_kwargs)
+
+
class MonorailConnection(object):
  """Create and manage connections to the SQL servers.

  We only store connections in the context of a single user request, not
  across user requests. The main purpose of this class is to make using
  sharded tables easier.
  """

  def __init__(self):
    # Lazily-opened connections, keyed by MASTER_CNXN or an int shard ID.
    self.sql_cnxns = {}  # {MASTER_CNXN: cnxn, shard_id: cnxn, ...}

  def GetMasterConnection(self):
    """Return a connection to the master SQL DB, opening it on first use."""
    if MASTER_CNXN not in self.sql_cnxns:
      self.sql_cnxns[MASTER_CNXN] = MakeConnection(
          settings.db_instance, settings.db_database_name)
      logging.info(
          'created a master connection %r', self.sql_cnxns[MASTER_CNXN])

    return self.sql_cnxns[MASTER_CNXN]

  def GetConnectionForShard(self, shard_id):
    """Return a connection to the DB replica that will be used for shard_id."""
    if settings.dev_mode:
      # The local dev server has only one database, so all shards map to it.
      return self.GetMasterConnection()

    if shard_id not in self.sql_cnxns:
      # NOTE(review): the modulo uses num_logical_shards, presumably to fold
      # logical shard IDs onto the available physical replicas -- confirm.
      physical_shard_id = shard_id % settings.num_logical_shards
      shard_instance_name = (
          settings.physical_db_name_format % physical_shard_id)
      self.sql_cnxns[shard_id] = MakeConnection(
          shard_instance_name, settings.db_database_name)
      logging.info('created a replica connection for shard %d', shard_id)

    return self.sql_cnxns[shard_id]

  def Execute(self, stmt_str, stmt_args, shard_id=None, commit=True):
    """Execute the given SQL statement on one of the relevant databases."""
    if shard_id is None:
      # No shard was specified, so hit the master.
      sql_cnxn = self.GetMasterConnection()
    else:
      sql_cnxn = self.GetConnectionForShard(shard_id)

    return self._ExecuteWithSQLConnection(
        sql_cnxn, stmt_str, stmt_args, commit=commit)

  def _ExecuteWithSQLConnection(
      self, sql_cnxn, stmt_str, stmt_args, commit=True):
    """Execute a statement on the given database and return a cursor.

    INSERT and REPLACE statements go through executemany(), so for those
    stmt_args must be a sequence of row-value tuples; every other statement
    gets a single execute() with stmt_args as the placeholder values.
    Non-SELECT statements are committed unless commit=False.
    """
    cursor = sql_cnxn.cursor()
    start_time = time.time()
    if stmt_str.startswith('INSERT') or stmt_str.startswith('REPLACE'):
      logging.info('SQL stmt_str: \n%s', stmt_str)
      logging.info('SQL stmt_args: %r', stmt_args)
      cursor.executemany(stmt_str, stmt_args)
    else:
      # The % interpolation here is only for logging; MySQLdb does the real
      # escaping when the statement is executed below.
      logging.info('SQL stmt: \n%s', (stmt_str % tuple(stmt_args)))
      cursor.execute(stmt_str, args=stmt_args)
    logging.info('%d rows in %d ms', cursor.rowcount,
                 int((time.time() - start_time) * 1000))
    if commit and not stmt_str.startswith('SELECT'):
      start_time = time.time()
      try:
        sql_cnxn.commit()
      except MySQLdb.DatabaseError:
        # NOTE(review): a failed commit is rolled back and silently
        # swallowed here; callers never learn the write was lost.
        # Confirm this best-effort behavior is intended.
        sql_cnxn.rollback()
      logging.info('commit took %d ms',
                   int((time.time() - start_time) * 1000))

    return cursor

  def Commit(self):
    """Explicitly commit any pending txns. Normally done automatically."""
    sql_cnxn = self.GetMasterConnection()
    start_time = time.time()
    try:
      sql_cnxn.commit()
    except MySQLdb.DatabaseError:
      logging.exception('Commit failed for cnxn, rolling back')
      sql_cnxn.rollback()
    logging.info('final commit took %d ms',
                 int((time.time() - start_time) * 1000))

  def Close(self):
    """Safely close any connections that are still open."""
    for sql_cnxn in self.sql_cnxns.itervalues():
      try:
        sql_cnxn.close()
      except MySQLdb.DatabaseError:
        # This might happen if the cnxn is somehow already closed.
        # NOTE(review): the message mentions ProgrammingError but the
        # handler catches DatabaseError -- confirm which was intended.
        logging.exception('ProgrammingError when trying to close cnxn')
+
+
class SQLTableManager(object):
  """Helper class to make it easier to deal with an SQL table."""

  def __init__(self, table_name):
    self.table_name = table_name

  def Select(
      self, cnxn, distinct=False, cols=None, left_joins=None,
      joins=None, where=None, or_where_conds=False, group_by=None,
      order_by=None, limit=None, offset=None, shard_id=None, use_clause=None,
      **kwargs):
    """Compose and execute an SQL SELECT statement on this table.

    Args:
      cnxn: MonorailConnection to the databases.
      distinct: If True, add DISTINCT keyword.
      cols: List of columns to retrieve, defaults to '*'.
      left_joins: List of LEFT JOIN (str, args) pairs.
      joins: List of regular JOIN (str, args) pairs.
      where: List of (str, args) for WHERE clause.
      or_where_conds: Set to True to use OR in the WHERE conds.
      group_by: List of strings for GROUP BY clause.
      order_by: List of (str, args) for ORDER BY clause.
      limit: Optional LIMIT on the number of rows returned.
      offset: Optional OFFSET when using LIMIT.
      shard_id: Int ID of the shard to query.
      use_clause: Optional string USE clause to tell the DB which index to use.
      **kwargs: WHERE-clause equality and set-membership conditions.

    Keyword args are used to build up more WHERE conditions that compare
    column values to constants. Keyword argument foo='bar' translates to 'foo
    = "bar"', and foo=[3, 4, 5] translates to 'foo IN (3, 4, 5)'.

    Returns:
      A list of rows, each row is a tuple of values for the requested cols.
    """
    cols = cols or ['*']  # If columns not specified, retrieve all columns.
    stmt = Statement.MakeSelect(
        self.table_name, cols, distinct=distinct,
        or_where_conds=or_where_conds)
    if use_clause:
      stmt.AddUseClause(use_clause)
    stmt.AddJoinClauses(left_joins or [], left=True)
    stmt.AddJoinClauses(joins or [])
    stmt.AddWhereTerms(where or [], **kwargs)
    stmt.AddGroupByTerms(group_by or [])
    stmt.AddOrderByTerms(order_by or [])
    stmt.SetLimitAndOffset(limit, offset)
    stmt_str, stmt_args = stmt.Generate()

    cursor = cnxn.Execute(stmt_str, stmt_args, shard_id=shard_id)
    rows = cursor.fetchall()
    return rows

  def SelectRow(
      self, cnxn, cols=None, default=None, where=None, **kwargs):
    """Run a query that is expected to return just one row.

    Args:
      cnxn: MonorailConnection object.
      cols: List of columns to retrieve, defaults to '*'.
      default: Value returned when no row matches.
      where: Optional list of (str, args) WHERE conditions.
      **kwargs: WHERE-clause equality and set-membership conditions.

    Returns:
      The single matching row tuple, or default if there was none.

    Raises:
      ValueError: if more than one row matched.
    """
    # DISTINCT so that duplicate rows produced by joins count as one result.
    rows = self.Select(cnxn, distinct=True, cols=cols, where=where, **kwargs)
    if len(rows) == 1:
      return rows[0]
    elif not rows:
      logging.info('SelectRow got 0 results, so using default %r', default)
      return default
    else:
      raise ValueError(
          'SelectRow got %d results, expected only 1' % len(rows))

  def SelectValue(self, cnxn, col, default=None, where=None, **kwargs):
    """Run a query that is expected to return just one row w/ one value."""
    row = self.SelectRow(
        cnxn, cols=[col], default=[default], where=where, **kwargs)
    return row[0]

  def InsertRows(
      self, cnxn, cols, row_values, replace=False, ignore=False,
      commit=True, return_generated_ids=False):
    """Insert all the given rows.

    Args:
      cnxn: MonorailConnection object.
      cols: List of column names to set.
      row_values: List of lists with values to store. The length of each
          nested list should be equal to len(cols).
      replace: Set to True if inserted values should replace existing DB rows
          that have the same DB keys.
      ignore: Set to True to ignore rows that would duplicate existing DB keys.
      commit: Set to False if this operation is part of a series of operations
          that should not be committed until the final one is done.
      return_generated_ids: Set to True to return a list of generated
          autoincrement IDs for inserted rows. This requires us to insert rows
          one at a time.

    Returns:
      If return_generated_ids is set to True, this method returns a list of
      the auto-increment IDs generated by the DB. Otherwise, [] is returned.
      If row_values is empty, None is returned and nothing is executed.
    """
    if not row_values:
      return None  # Nothing to insert

    generated_ids = []
    if return_generated_ids:
      # We must insert the rows one-at-a-time to know the generated IDs.
      for row_value in row_values:
        stmt = Statement.MakeInsert(
            self.table_name, cols, [row_value], replace=replace, ignore=ignore)
        stmt_str, stmt_args = stmt.Generate()
        cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
        if cursor.lastrowid:
          generated_ids.append(cursor.lastrowid)
      return generated_ids

    stmt = Statement.MakeInsert(
        self.table_name, cols, row_values, replace=replace, ignore=ignore)
    stmt_str, stmt_args = stmt.Generate()
    cnxn.Execute(stmt_str, stmt_args, commit=commit)
    return []

  def InsertRow(
      self, cnxn, replace=False, ignore=False, commit=True, **kwargs):
    """Insert a single row into the table.

    Args:
      cnxn: MonorailConnection object.
      replace: Set to True if inserted values should replace existing DB rows
          that have the same DB keys.
      ignore: Set to True to ignore rows that would duplicate existing DB keys.
      commit: Set to False if this operation is part of a series of operations
          that should not be committed until the final one is done.
      **kwargs: column=value assignments to specify what to store in the DB.

    Returns:
      The generated autoincrement ID of the key column if one was generated.
      Otherwise, return None.
    """
    cols = sorted(kwargs.keys())
    row = tuple(kwargs[col] for col in cols)
    generated_ids = self.InsertRows(
        cnxn, cols, [row], replace=replace, ignore=ignore,
        commit=commit, return_generated_ids=True)
    if generated_ids:
      return generated_ids[0]
    else:
      return None

  def Update(self, cnxn, delta, where=None, commit=True, **kwargs):
    """Update one or more rows.

    Args:
      cnxn: MonorailConnection object.
      delta: Dictionary of {column: new_value} assignments.
      where: Optional list of WHERE conditions saying which rows to update.
      commit: Set to False if this operation is part of a series of operations
          that should not be committed until the final one is done.
      **kwargs: WHERE-clause equality and set-membership conditions.

    Returns:
      Int number of rows updated.
    """
    if not delta:
      return 0  # Nothing is being changed

    stmt = Statement.MakeUpdate(self.table_name, delta)
    stmt.AddWhereTerms(where, **kwargs)
    stmt_str, stmt_args = stmt.Generate()

    cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
    return cursor.rowcount

  def IncrementCounterValue(self, cnxn, col_name, where=None, **kwargs):
    """Atomically increment a counter stored in MySQL, return new value.

    Args:
      cnxn: MonorailConnection object.
      col_name: int column to increment.
      where: Optional list of WHERE conditions saying which rows to update.
      **kwargs: WHERE-clause equality and set-membership conditions. The
          where and kwargs together should narrow the update down to exactly
          one row.

    Returns:
      The new, post-increment value of the counter.
    """
    stmt = Statement.MakeIncrement(self.table_name, col_name)
    stmt.AddWhereTerms(where, **kwargs)
    stmt_str, stmt_args = stmt.Generate()

    cursor = cnxn.Execute(stmt_str, stmt_args)
    assert cursor.rowcount == 1, (
        'missing or ambiguous counter: %r' % cursor.rowcount)
    # MakeIncrement routes the new value through LAST_INSERT_ID(), which
    # MySQLdb exposes via lastrowid.
    return cursor.lastrowid

  def Delete(self, cnxn, where=None, commit=True, **kwargs):
    """Delete the specified table rows.

    Args:
      cnxn: MonorailConnection object.
      where: Optional list of WHERE conditions saying which rows to delete.
      commit: Set to False if this operation is part of a series of operations
          that should not be committed until the final one is done.
      **kwargs: WHERE-clause equality and set-membership conditions.

    Returns:
      Int number of rows deleted.
    """
    # Deleting the whole table is never intended in Monorail.
    assert where or kwargs

    stmt = Statement.MakeDelete(self.table_name)
    stmt.AddWhereTerms(where, **kwargs)
    stmt_str, stmt_args = stmt.Generate()

    cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
    return cursor.rowcount
+
+
class Statement(object):
  """A class to help build complex SQL statements w/ full escaping.

  Start with a Make*() method, then fill in additional clauses as needed,
  then call Generate() to return the SQL string and argument list. We pass
  the string and args to MySQLdb separately so that it can do escaping on
  the arg values as appropriate to prevent SQL-injection attacks.

  The only values that are not escaped by MySQLdb are the table names
  and column names, and bits of SQL syntax, all of which is hard-coded
  in our application.
  """

  @classmethod
  def MakeSelect(cls, table_name, cols, distinct=False, or_where_conds=False):
    """Construct a SELECT statement."""
    assert _IsValidTableName(table_name)
    assert all(_IsValidColumnName(col) for col in cols)
    main_clause = 'SELECT%s %s FROM %s' % (
        (' DISTINCT' if distinct else ''), ', '.join(cols), table_name)
    return cls(main_clause, or_where_conds=or_where_conds)

  @classmethod
  def MakeInsert(
      cls, table_name, cols, new_values, replace=False, ignore=False):
    """Construct an INSERT statement."""
    if replace:
      # Replacement is done via INSERT ... ON DUPLICATE KEY UPDATE rather
      # than MySQL REPLACE; see MakeReplace for the reason.
      return cls.MakeReplace(table_name, cols, new_values, ignore)
    assert _IsValidTableName(table_name)
    assert all(_IsValidColumnName(col) for col in cols)
    ignore_word = ' IGNORE' if ignore else ''
    main_clause = 'INSERT%s INTO %s (%s)' % (
        ignore_word, table_name, ', '.join(cols))
    return cls(main_clause, insert_args=new_values)

  @classmethod
  def MakeReplace(
      cls, table_name, cols, new_values, ignore=False):
    """Construct an INSERT...ON DUPLICATE KEY UPDATE... statement.

    Uses the INSERT/UPDATE syntax because REPLACE is literally a DELETE
    followed by an INSERT, which doesn't play well with foreign keys.
    INSERT/UPDATE is an atomic check of whether the primary key exists,
    followed by an INSERT if it doesn't or an UPDATE if it does.
    """
    assert _IsValidTableName(table_name)
    assert all(_IsValidColumnName(col) for col in cols)
    ignore_word = ' IGNORE' if ignore else ''
    main_clause = 'INSERT%s INTO %s (%s)' % (
        ignore_word, table_name, ', '.join(cols))
    return cls(main_clause, insert_args=new_values, duplicate_update_cols=cols)

  @classmethod
  def MakeUpdate(cls, table_name, delta):
    """Construct an UPDATE statement."""
    assert _IsValidTableName(table_name)
    assert all(_IsValidColumnName(col) for col in delta.iterkeys())
    update_strs = []
    update_args = []
    for col, val in delta.iteritems():
      update_strs.append(col + '=%s')
      update_args.append(val)

    main_clause = 'UPDATE %s SET %s' % (
        table_name, ', '.join(update_strs))
    return cls(main_clause, update_args=update_args)

  @classmethod
  def MakeIncrement(cls, table_name, col_name, step=1):
    """Construct an UPDATE statement that increments and returns a counter."""
    assert _IsValidTableName(table_name)
    assert _IsValidColumnName(col_name)

    # LAST_INSERT_ID() stashes the new value so the caller can read it back
    # from cursor.lastrowid after executing.
    main_clause = (
        'UPDATE %s SET %s = LAST_INSERT_ID(%s + %%s)' % (
            table_name, col_name, col_name))
    update_args = [step]
    return cls(main_clause, update_args=update_args)

  @classmethod
  def MakeDelete(cls, table_name):
    """Construct a DELETE statement."""
    assert _IsValidTableName(table_name)
    main_clause = 'DELETE FROM %s' % table_name
    return cls(main_clause)

  def __init__(
      self, main_clause, insert_args=None, update_args=None,
      duplicate_update_cols=None, or_where_conds=False):
    self.main_clause = main_clause  # E.g., SELECT or DELETE
    self.or_where_conds = or_where_conds
    self.insert_args = insert_args or []  # For INSERT statements
    self.update_args = update_args or []  # For UPDATEs
    self.duplicate_update_cols = duplicate_update_cols or []  # For REPLACE-ish

    self.use_clauses = []
    self.join_clauses, self.join_args = [], []
    self.where_conds, self.where_args = [], []
    self.group_by_terms, self.group_by_args = [], []
    self.order_by_terms, self.order_by_args = [], []
    self.limit, self.offset = None, None

  def Generate(self):
    """Return an SQL string having %s placeholders and args to fill them in."""
    clauses = [self.main_clause] + self.use_clauses + self.join_clauses
    if self.where_conds:
      if self.or_where_conds:
        clauses.append('WHERE ' + '\n  OR '.join(self.where_conds))
      else:
        clauses.append('WHERE ' + '\n  AND '.join(self.where_conds))
    if self.group_by_terms:
      clauses.append('GROUP BY ' + ', '.join(self.group_by_terms))
    if self.order_by_terms:
      clauses.append('ORDER BY ' + ', '.join(self.order_by_terms))

    if self.limit and self.offset:
      clauses.append('LIMIT %d OFFSET %d' % (self.limit, self.offset))
    elif self.limit:
      clauses.append('LIMIT %d' % self.limit)
    elif self.offset:
      # MySQL requires a LIMIT whenever OFFSET is used.
      clauses.append('LIMIT %d OFFSET %d' % (sys.maxint, self.offset))

    if self.insert_args:
      clauses.append('VALUES (' + PlaceHolders(self.insert_args[0]) + ')')
      args = self.insert_args
      if self.duplicate_update_cols:
        clauses.append('ON DUPLICATE KEY UPDATE %s' % (
            ', '.join(['%s=VALUES(%s)' % (col, col)
                       for col in self.duplicate_update_cols])))
      assert not (self.join_args + self.update_args + self.where_args +
                  self.group_by_args + self.order_by_args)
    else:
      args = (self.join_args + self.update_args + self.where_args +
              self.group_by_args + self.order_by_args)
      assert not (self.insert_args + self.duplicate_update_cols)

    args = _BoolsToInts(args)
    stmt_str = '\n'.join(clause for clause in clauses if clause)

    assert _IsValidStatement(stmt_str), stmt_str
    return stmt_str, args

  def AddUseClause(self, use_clause):
    """Add a USE clause (giving the DB a hint about which indexes to use)."""
    assert _IsValidUseClause(use_clause), use_clause
    self.use_clauses.append(use_clause)

  def AddJoinClauses(self, join_pairs, left=False):
    """Save JOIN clauses based on the given list of join conditions."""
    for join, args in join_pairs:
      assert _IsValidJoin(join), join
      assert join.count('%s') == len(args), join
      self.join_clauses.append(
          ' %sJOIN %s' % (('LEFT ' if left else ''), join))
      self.join_args.extend(args)

  def AddGroupByTerms(self, group_by_term_list):
    """Save info needed to generate the GROUP BY clause."""
    assert all(_IsValidGroupByTerm(term) for term in group_by_term_list)
    self.group_by_terms.extend(group_by_term_list)

  def AddOrderByTerms(self, order_by_pairs):
    """Save info needed to generate the ORDER BY clause."""
    for term, args in order_by_pairs:
      assert _IsValidOrderByTerm(term), term
      assert term.count('%s') == len(args), term
      self.order_by_terms.append(term)
      self.order_by_args.extend(args)

  def SetLimitAndOffset(self, limit, offset):
    """Save info needed to generate the LIMIT OFFSET clause."""
    self.limit = limit
    self.offset = offset

  def AddWhereTerms(self, where_cond_pairs, **kwargs):
    """Add terms to the WHERE clause.

    Args:
      where_cond_pairs: list of (condition string, args) pairs.
      **kwargs: equality and set-membership conditions; a key ending in
          '_not' negates the comparison.
    """
    where_cond_pairs = where_cond_pairs or []

    for cond, args in where_cond_pairs:
      assert _IsValidWhereCond(cond), cond
      assert cond.count('%s') == len(args), cond
      self.where_conds.append(cond)
      self.where_args.extend(args)

    for col, val in sorted(kwargs.items()):
      assert _IsValidColumnName(col), col
      eq = True
      if col.endswith('_not'):
        col = col[:-4]
        eq = False

      if isinstance(val, set):
        val = list(val)  # MySQL interface cannot handle sets.

      if val is None or val == []:
        op = 'IS' if eq else 'IS NOT'
        self.where_conds.append(col + ' ' + op + ' NULL')
      elif isinstance(val, list):
        op = 'IN' if eq else 'NOT IN'
        # Sadly, MySQLdb cannot escape lists, so we flatten to multiple "%s"s
        self.where_conds.append(
            col + ' ' + op + ' (' + PlaceHolders(val) + ')')
        self.where_args.extend(val)
      else:
        op = '=' if eq else '!='
        self.where_conds.append(col + ' ' + op + ' %s')
        self.where_args.append(val)
+
+
def PlaceHolders(sql_args):
  """Return a comma-separated list of %s placeholders for the given args."""
  return ','.join(['%s'] * len(sql_args))
+
+
# Regex building blocks used to validate the hard-coded parts of SQL
# statements. Only table/column names and bits of SQL syntax are checked
# here; user-supplied values are escaped by MySQLdb separately.
TABLE_PAT = '[A-Z][_a-zA-Z0-9]+'
COLUMN_PAT = '[a-z][_a-z]+'
COMPARE_OP_PAT = '(<|>|=|!=|>=|<=|LIKE|NOT LIKE)'
# Named fragments expanded into patterns by _MakeRE via str.format().
SHORTHAND = {
    'table': TABLE_PAT,
    'column': COLUMN_PAT,
    'tab_col': r'(%s\.)?%s' % (TABLE_PAT, COLUMN_PAT),  # optional table prefix
    'placeholder': '%s',  # That's a literal %s that gets passed to MySQLdb
    'multi_placeholder': '%s(, ?%s)*',
    'compare_op': COMPARE_OP_PAT,
    'opt_asc_desc': '( ASC| DESC)?',
    'opt_alias': '( AS %s)?' % TABLE_PAT,
    'email_cond': (r'LOWER\(User\d+\.email\) '
                   r'(%s %%s|IN \(%%s(, ?%%s)*\))' % COMPARE_OP_PAT),
    }
+
+
def _MakeRE(regex_str):
  """Return a compiled regex, expanding our {shorthand} names first."""
  expanded = regex_str.format(**SHORTHAND)
  return re.compile(expanded)
+
+
TABLE_RE = _MakeRE('^{table}$')
TAB_COL_RE = _MakeRE('^{tab_col}$')
USE_CLAUSE_RE = _MakeRE(
    r'^USE INDEX \({column}\) USE INDEX FOR ORDER BY \({column}\)$')
# Allowed forms for SELECT column expressions.
COLUMN_RE_LIST = [
    TAB_COL_RE,
    _MakeRE(r'\*'),
    _MakeRE(r'COUNT\(\*\)'),
    _MakeRE(r'COUNT\({tab_col}\)'),
    _MakeRE(r'MAX\({tab_col}\)'),
    _MakeRE(r'MIN\({tab_col}\)'),
    ]
# Allowed forms for JOIN clauses.
JOIN_RE_LIST = [
    TABLE_RE,
    _MakeRE(
        r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
        r'( AND {tab_col} = {tab_col})?'
        r'( AND {tab_col} IN \({multi_placeholder}\))?$'),
    _MakeRE(
        r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
        r'( AND {tab_col} IN \({multi_placeholder}\))?$'),
    _MakeRE(
        r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
        r'( AND {tab_col} = {tab_col})?'
        r' AND {tab_col} = {placeholder}$'),
    _MakeRE(
        r'^{table}{opt_alias} ON {tab_col} = {tab_col} AND {email_cond}$'),
    _MakeRE(
        r'^{table}{opt_alias} ON '
        r'\({tab_col} = {tab_col} OR {tab_col} = {tab_col}\)$'),
    _MakeRE(
        r'^\({table} AS {table} JOIN User AS {table} '
        r'ON {tab_col} = {tab_col} '
        r'AND {email_cond}\) ON Issue.id = {tab_col}'),
    _MakeRE(
        r'^{table} AS {table} ON {tab_col} = {tab_col} '
        r'LEFT JOIN {table} AS {table} ON {tab_col} = {tab_col}'),
    ]
# Allowed forms for ORDER BY terms.
ORDER_BY_RE_LIST = [
    _MakeRE(r'^{tab_col}{opt_asc_desc}$'),
    _MakeRE(r'^LOWER\({tab_col}\){opt_asc_desc}$'),
    _MakeRE(r'^ISNULL\({tab_col}\){opt_asc_desc}$'),
    _MakeRE(r'^FIELD\({tab_col}, {multi_placeholder}\){opt_asc_desc}$'),
    _MakeRE(r'^FIELD\(IF\(ISNULL\({tab_col}\), {tab_col}, {tab_col}\), '
            r'{multi_placeholder}\){opt_asc_desc}$'),
    ]
GROUP_BY_RE_LIST = [
    TAB_COL_RE,
    ]
# Allowed forms for WHERE conditions.
WHERE_COND_RE_LIST = [
    _MakeRE(r'^TRUE$'),
    _MakeRE(r'^FALSE$'),
    _MakeRE(r'^{tab_col} IS NULL$'),
    _MakeRE(r'^{tab_col} IS NOT NULL$'),
    _MakeRE(r'^{tab_col} {compare_op} {tab_col}$'),
    _MakeRE(r'^{tab_col} {compare_op} {placeholder}$'),
    _MakeRE(r'^{tab_col} %% {placeholder} = {placeholder}$'),
    _MakeRE(r'^{tab_col} IN \({multi_placeholder}\)$'),
    _MakeRE(r'^{tab_col} NOT IN \({multi_placeholder}\)$'),
    _MakeRE(r'^LOWER\({tab_col}\) IS NULL$'),
    _MakeRE(r'^LOWER\({tab_col}\) IS NOT NULL$'),
    _MakeRE(r'^LOWER\({tab_col}\) {compare_op} {placeholder}$'),
    _MakeRE(r'^LOWER\({tab_col}\) IN \({multi_placeholder}\)$'),
    _MakeRE(r'^LOWER\({tab_col}\) NOT IN \({multi_placeholder}\)$'),
    _MakeRE(r'^LOWER\({tab_col}\) LIKE {placeholder}$'),
    _MakeRE(r'^LOWER\({tab_col}\) NOT LIKE {placeholder}$'),
    _MakeRE(r'^timestep < \(SELECT MAX\(j.timestep\) FROM Invalidate AS j '
            r'WHERE j.kind = %s '
            r'AND j.cache_key = Invalidate.cache_key\)$'),
    # The continuation strings below must be raw strings: '\(' is an
    # invalid escape sequence in a regular string literal.
    _MakeRE(r'^\({tab_col} IS NULL OR {tab_col} {compare_op} {placeholder}\) '
            r'AND \({tab_col} IS NULL OR {tab_col} {compare_op} {placeholder}'
            r'\)$'),
    _MakeRE(r'^\({tab_col} IS NOT NULL AND {tab_col} {compare_op} '
            r'{placeholder}\) OR \({tab_col} IS NOT NULL AND {tab_col} '
            r'{compare_op} {placeholder}\)$'),
    ]

# Note: We never use ';' for multiple statements, '@' for SQL variables, or
# any quoted strings in stmt_str (quotes are put in by MySQLdb for args).
STMT_STR_RE = re.compile(
    r'\A(SELECT|UPDATE|DELETE|INSERT|REPLACE) [-+=!<>%*.,()\w\s]+\Z',
    re.MULTILINE)
+
+
def _IsValidTableName(table_name):
  """Return a truthy match if table_name fits the allowed table pattern."""
  # Returns the re match object (truthy) or None, not a bool.
  return TABLE_RE.match(table_name)
+
+
def _IsValidColumnName(column_expr):
  """Return True if column_expr matches one of the allowed column forms."""
  for allowed_re in COLUMN_RE_LIST:
    if allowed_re.match(column_expr):
      return True
  return False
+
+
def _IsValidUseClause(use_clause):
  """Return a truthy match if use_clause is an allowed USE INDEX clause."""
  # Returns the re match object (truthy) or None, not a bool.
  return USE_CLAUSE_RE.match(use_clause)
+
+
def _IsValidJoin(join):
  """Return True if the join clause matches one of our allowed forms."""
  for allowed_re in JOIN_RE_LIST:
    if allowed_re.match(join):
      return True
  return False
+
+
def _IsValidOrderByTerm(term):
  """Return True if term matches one of our allowed ORDER BY forms."""
  for allowed_re in ORDER_BY_RE_LIST:
    if allowed_re.match(term):
      return True
  return False
+
+
def _IsValidGroupByTerm(term):
  """Return True if term matches one of our allowed GROUP BY forms."""
  for allowed_re in GROUP_BY_RE_LIST:
    if allowed_re.match(term):
      return True
  return False
+
+
def _IsValidWhereCond(cond):
  """Return True if cond is one of the WHERE conditions that we allow."""
  # Strip one leading NOT and one pair of enclosing parens before matching.
  stripped = cond
  if stripped.startswith('NOT '):
    stripped = stripped[len('NOT '):]
  if stripped.startswith('(') and stripped.endswith(')'):
    stripped = stripped[1:-1]

  for allowed_re in WHERE_COND_RE_LIST:
    if allowed_re.match(stripped):
      return True

  # Compound conditions are valid when every sub-condition is valid.
  # OR is checked before AND, matching the original precedence.
  for connective in (' OR ', ' AND '):
    if connective in stripped:
      return all(
          _IsValidWhereCond(part) for part in stripped.split(connective))

  return False
+
+
def _IsValidStatement(stmt_str):
  """Final check to make sure there is no funny junk sneaking in somehow."""
  match = STMT_STR_RE.match(stmt_str)
  if not match:
    # Preserve the original falsy return value (None) for callers.
    return match
  # '--' starts a SQL comment, which we never generate ourselves.
  return '--' not in stmt_str
+
+
def _BoolsToInts(arg_list):
  """Convert any True values to 1s and Falses to 0s.

  Google's copy of MySQLdb has bool-to-int conversion disabled,
  and yet it seems to be needed otherwise they are converted
  to strings and always interpreted as 0 (which is FALSE).

  Args:
    arg_list: (nested) list of SQL statement argument values, which may
        include some boolean values.

  Returns:
    The same list, but with True replaced by 1 and False replaced by 0.
  """
  def _Convert(item):
    """Map one argument value; recurse into nested sequences."""
    if isinstance(item, (list, tuple)):
      return _BoolsToInts(item)
    # Identity checks so that ordinary ints 1 and 0 pass through unchanged.
    if item is True:
      return 1
    if item is False:
      return 0
    return item

  return [_Convert(item) for item in arg_list]
diff --git a/appengine/monorail/framework/table_view_helpers.py b/appengine/monorail/framework/table_view_helpers.py
new file mode 100644
index 0000000..1ca8098
--- /dev/null
+++ b/appengine/monorail/framework/table_view_helpers.py
@@ -0,0 +1,627 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions for displaying lists of project artifacts.
+
+This file exports classes TableRow and TableCell that help
+represent HTML table rows and cells. These classes make rendering
+HTML tables that list project artifacts much easier to do with EZT.
+"""
+
+import collections
+import logging
+
+from third_party import ezt
+
+from framework import framework_constants
+from framework import template_helpers
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
def ComputeUnshownColumns(results, shown_columns, config, built_in_cols):
  """Return a sorted list of unshown columns that the user could add.

  Args:
    results: list of search result PBs. Each must have labels.
    shown_columns: list of column names to be used in results table.
    config: harmonized config for the issue search, including all
        well known labels and custom fields.
    built_in_cols: list of other column names that are built into the tool.
        E.g., star count, or creation date.

  Returns:
    List of column names to append to the "..." menu.
  """
  shown_lower = {col.lower() for col in shown_columns}
  unshown_lower = set()     # lowercase names collected so far
  unshown = []              # original-case names, first spelling seen wins
  seen_labels_lower = set() # whole labels already examined, lowercase

  def _AddColumnIfNew(col):
    """Record col unless it is already shown or already collected."""
    col_lower = col.lower()
    if col_lower not in shown_lower and col_lower not in unshown_lower:
      unshown.append(col)
      unshown_lower.add(col_lower)

  def _AddLabelPrefixIfNew(label_name):
    """Record the Key part of a Key-Value label, examining each label once."""
    label_lower = label_name.lower()
    if label_lower in seen_labels_lower:
      return
    seen_labels_lower.add(label_lower)
    if '-' in label_name:
      _AddColumnIfNew(label_name.split('-', 1)[0])

  # The user can always add any of the default columns.
  for col in config.default_col_spec.split():
    _AddColumnIfNew(col)

  # The user can always add any of the built-in columns.
  for col in built_in_cols:
    _AddColumnIfNew(col)

  # The user can add a column for any well-known label.
  for wkl in config.well_known_labels:
    _AddLabelPrefixIfNew(wkl.label)

  # The user can add a column for any custom field.
  known_field_ids = set()
  for fd in config.field_defs:
    known_field_ids.add(fd.field_id)
    _AddColumnIfNew(fd.field_name)

  # The user can add a column for any key-value label or field in the results.
  for result in results:
    for label_name in tracker_bizobj.GetLabels(result):
      _AddLabelPrefixIfNew(label_name)
    for field_value in result.field_values:
      if field_value.field_id in known_field_ids:
        continue
      known_field_ids.add(field_value.field_id)
      fd = tracker_bizobj.FindFieldDefByID(field_value.field_id, config)
      if fd:  # fd is None for a foreign field, which we don't display.
        _AddColumnIfNew(fd.field_name)

  return sorted(unshown)
+
+
def ExtractUniqueValues(columns, artifact_list, users_by_id, config):
  """Build a nested list of unique values so the user can auto-filter.

  Args:
    columns: a list of lowercase column name strings, which may contain
      combined columns like "priority/pri".
    artifact_list: a list of artifacts in the complete set of search results.
    users_by_id: dict mapping user_ids to UserViews.
    config: ProjectIssueConfig PB for the current project.

  Returns:
    [EZTItem(col1, colname1, [val11, val12,...]), ...]
    A list of EZTItems, each of which has a col_index, column_name,
    and a list of unique values that appear in that column.
  """
  # Each inner dict maps lowercased value -> original-case value, which
  # dedupes values that differ only in capitalization.
  column_values = {col_name: {} for col_name in columns}

  # For each combined column "a/b/c", add entries that point from "a" back
  # to "a/b/c", from "b" back to "a/b/c", and from "c" back to "a/b/c".
  combined_column_parts = collections.defaultdict(list)
  for col in columns:
    if '/' in col:
      for col_part in col.split('/'):
        combined_column_parts[col_part].append(col)

  unique_labels = set()
  for art in artifact_list:
    unique_labels.update(tracker_bizobj.GetLabels(art))

  for label in unique_labels:
    if '-' in label:
      # Key-Value labels contribute their value to the key's column.
      col, val = label.split('-', 1)
      col = col.lower()
      if col in column_values:
        column_values[col][val.lower()] = val
      if col in combined_column_parts:
        for combined_column in combined_column_parts[col]:
          column_values[combined_column][val.lower()] = val
    else:
      # OneWord labels show up in the summary column.
      if 'summary' in column_values:
        column_values['summary'][label.lower()] = label

  # TODO(jrobbins): Consider refactoring some of this to tracker_bizobj
  # or a new builtins.py to reduce duplication.
  if 'reporter' in column_values:
    for art in artifact_list:
      reporter_id = art.reporter_id
      if reporter_id and reporter_id in users_by_id:
        reporter_username = users_by_id[reporter_id].display_name
        column_values['reporter'][reporter_username] = reporter_username

  if 'owner' in column_values:
    for art in artifact_list:
      owner_id = tracker_bizobj.GetOwnerId(art)
      if owner_id and owner_id in users_by_id:
        owner_username = users_by_id[owner_id].display_name
        column_values['owner'][owner_username] = owner_username

  if 'cc' in column_values:
    for art in artifact_list:
      cc_ids = tracker_bizobj.GetCcIds(art)
      for cc_id in cc_ids:
        if cc_id and cc_id in users_by_id:
          cc_username = users_by_id[cc_id].display_name
          column_values['cc'][cc_username] = cc_username

  if 'component' in column_values:
    for art in artifact_list:
      all_comp_ids = list(art.component_ids) + list(art.derived_component_ids)
      for component_id in all_comp_ids:
        cd = tracker_bizobj.FindComponentDefByID(component_id, config)
        if cd:
          column_values['component'][cd.path] = cd.path

  if 'stars' in column_values:
    for art in artifact_list:
      star_count = art.star_count
      column_values['stars'][star_count] = star_count

  if 'status' in column_values:
    for art in artifact_list:
      status = tracker_bizobj.GetStatus(art)
      if status:
        column_values['status'][status.lower()] = status

  # TODO(jrobbins): merged into, blocked on, and blocking. And, the ability
  # to parse a user query on those fields and do a SQL search.

  if 'attachments' in column_values:
    for art in artifact_list:
      attachment_count = art.attachment_count
      column_values['attachments'][attachment_count] = attachment_count

  # Add all custom field values if the custom field name is a shown column.
  # Maps field_id -> (column name or the 'NOT_SHOWN' sentinel, field type),
  # so each field def is looked up at most once.
  field_id_to_col = {}
  for art in artifact_list:
    for fv in art.field_values:
      field_col, field_type = field_id_to_col.get(fv.field_id, (None, None))
      if field_col == 'NOT_SHOWN':
        continue
      if field_col is None:
        fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
        if not fd:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_col = fd.field_name.lower()
        field_type = fd.field_type
        if field_col not in column_values:
          field_id_to_col[fv.field_id] = 'NOT_SHOWN', None
          continue
        field_id_to_col[fv.field_id] = field_col, field_type

      if field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
        continue  # Already handled by label parsing
      elif field_type == tracker_pb2.FieldTypes.INT_TYPE:
        val = fv.int_value
      elif field_type == tracker_pb2.FieldTypes.STR_TYPE:
        val = fv.str_value
      elif field_type == tracker_pb2.FieldTypes.USER_TYPE:
        user = users_by_id.get(fv.user_id)
        val = user.email if user else framework_constants.NO_USER_NAME
      elif field_type == tracker_pb2.FieldTypes.DATE_TYPE:
        val = fv.int_value  # TODO(jrobbins): convert to date
      elif field_type == tracker_pb2.FieldTypes.BOOL_TYPE:
        val = 'Yes' if fv.int_value else 'No'
      else:
        # Bug fix: an unrecognized/unset field type used to fall through
        # with val unbound (NameError on the first such value) or holding
        # a stale value from the previous iteration.  Skip it instead.
        continue

      column_values[field_col][val] = val

  # TODO(jrobbins): make the capitalization of well-known unique label and
  # status values match the way it is written in the issue config.

  # Return EZTItems for each column in left-to-right display order.
  result = []
  for i, col_name in enumerate(columns):
    # TODO(jrobbins): sort each set of column values top-to-bottom, by the
    # order specified in the project artifact config. For now, just sort
    # lexicographically to make expected output defined.
    sorted_col_values = sorted(column_values[col_name].values())
    result.append(template_helpers.EZTItem(
        col_index=i, column_name=col_name, filter_values=sorted_col_values))

  return result
+
+
def MakeTableData(
    visible_results, logged_in_user_id, starred_items,
    lower_columns, lower_group_by, users_by_id, cell_factories,
    id_accessor, related_issues, config):
  """Return a list of list row objects for display by EZT.

  Args:
    visible_results: list of artifacts to display on one pagination page.
    logged_in_user_id: user ID of the signed in user, or None.
    starred_items: list of IDs/names of items in the current project
      that the signed in user has starred.
    lower_columns: list of column names to display, all lowercase. These can
      be combined column names, e.g., 'priority/pri'.
    lower_group_by: list of column names that define row groups, all lowercase.
    users_by_id: dict mapping user IDs to UserViews.
    cell_factories: dict of functions that each create TableCell objects.
    id_accessor: function that maps from an artifact to the ID/name that might
      be in the starred items list.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    config: ProjectIssueConfig PB for the current project.

  Returns:
    A list of TableRow objects, one for each visible result.
  """
  table_data = []

  # Group-by names may carry a '-' prefix (presumably a sort-direction
  # marker — confirm at call sites); strip it before factory lookup.
  group_cell_factories = [
      ChooseCellFactory(group.strip('-'), cell_factories, config)
      for group in lower_group_by]

  # Make a list of cell factories, one for each column.
  factories_to_use = [
      ChooseCellFactory(col, cell_factories, config) for col in lower_columns]

  current_group = None
  for idx, art in enumerate(visible_results):
    owner_is_me = ezt.boolean(
        logged_in_user_id and
        tracker_bizobj.GetOwnerId(art) == logged_in_user_id)
    row = MakeRowData(
        art, lower_columns, owner_is_me, users_by_id, factories_to_use,
        related_issues, config)
    row.starred = ezt.boolean(id_accessor(art) in starred_items)
    row.idx = idx  # EZT does not have loop counters, so add idx.
    table_data.append(row)
    row.group = None

    # Also include group information for the first row in each group.
    # TODO(jrobbins): This seems like more overhead than we need for the
    # common case where no new group heading row is to be inserted.
    group = MakeRowData(
        art, [group_name.strip('-') for group_name in lower_group_by],
        owner_is_me, users_by_id, group_cell_factories, related_issues,
        config)
    for cell, group_name in zip(group.cells, lower_group_by):
      cell.group_name = group_name
    # TableRow.__cmp__ compares cell values, so == means "same group as
    # the previous row".  Only the first row of each group gets row.group
    # set; later rows just bump that group's row count.
    if group == current_group:
      current_group.rows_in_group += 1
    else:
      row.group = group
      current_group = group
      current_group.rows_in_group = 1

  return table_data
+
+
def MakeRowData(
    art, columns, owner_is_me, users_by_id, cell_factory_list,
    related_issues, config):
  """Make a TableRow for use by EZT when rendering HTML table of results.

  Args:
    art: a project artifact PB
    columns: list of lower-case column names
    owner_is_me: boolean indicating that the logged in user is the owner
      of the current artifact
    users_by_id: dictionary {user_id: UserView} with each UserView having
      a "display_name" member.
    cell_factory_list: list of functions that each create TableCell
      objects for a given column.
    related_issues: dict {issue_id: issue} of pre-fetched related issues.
    config: ProjectIssueConfig PB for the current project.

  Returns:
    A TableRow object for use by EZT to render a table of results.
  """
  # Flatten combined columns like 'priority/pri' into their parts.
  # split('/') on a name without '/' yields just that name.
  flattened_columns = set()
  for col in columns:
    flattened_columns.update(col.split('/'))

  # Group all "Key-Value" labels by key, and separate the "OneWord" labels.
  label_values = collections.defaultdict(list)
  non_col_labels = []
  _AccumulateLabelValues(
      art.labels, flattened_columns, label_values, non_col_labels)
  _AccumulateLabelValues(
      art.derived_labels, flattened_columns, label_values,
      non_col_labels, is_derived=True)

  # Build up a list of TableCell objects for this row, one per column.
  cells = []
  for i, col in enumerate(columns):
    cell = cell_factory_list[i](
        art, col, users_by_id, non_col_labels, label_values, related_issues,
        config)
    cell.col_index = i
    cells.append(cell)

  return TableRow(cells, owner_is_me)
+
+
def _AccumulateLabelValues(
    labels, columns, label_values, non_col_labels, is_derived=False):
  """Parse OneWord and Key-Value labels for display in a list page.

  Args:
    labels: a list of label strings.
    columns: a list of column names.
    label_values: mutable dictionary {key: [value, ...]} of label values
      seen so far.
    non_col_labels: mutable list of OneWord labels seen so far.
    is_derived: true if these labels were derived via rules.

  Returns:
    Nothing. But, the given label_values dictionary will grow to hold
    the values of the key-value labels passed in, and the non_col_labels
    list will grow to hold the OneWord labels passed in. These are shown
    in label columns, and in the summary column, respectively.
  """
  for label_name in labels:
    if '-' in label_name:
      parts = label_name.split('-')
      # Try every possible split point so that a multi-dash label like
      # 'OS-Chrome-Mac' can supply a value for a column named either
      # 'os' (value 'Chrome-Mac') or 'os-chrome' (value 'Mac').
      for pivot in range(1, len(parts)):
        column_name = '-'.join(parts[:pivot])
        value = '-'.join(parts[pivot:])
        column_name = column_name.lower()
        if column_name in columns:
          label_values[column_name].append((value, is_derived))
    else:
      non_col_labels.append((label_name, is_derived))
+
+
class TableRow(object):
  """A tiny auxiliary class to represent a row in an HTML table."""

  def __init__(self, cells, owner_is_me):
    """Initialize the table row with the given data."""
    self.cells = cells
    self.owner_is_me = ezt.boolean(owner_is_me)  # Shows tiny ">" on my issues.
    # Used by MakeTableData for layout.
    self.idx = None
    self.group = None
    self.rows_in_group = None
    self.starred = None

  def __cmp__(self, other):
    """A row is == if each cell is == to the cells in the other row."""
    # Python 2 comparison protocol; a falsy other compares as greater
    # than self (-1 means self < other).
    return cmp(self.cells, other.cells) if other else -1

  def DebugString(self):
    """Return a string that is useful for on-page debugging."""
    return 'TR(%s)' % self.cells
+
+
# TODO(jrobbins): also add unsortable... or change this to a list of operations
# that can be done.
# Tag strings stored in TableCell.type to identify how each cell's
# content should be treated when rendered.
CELL_TYPE_ID = 'ID'
CELL_TYPE_SUMMARY = 'summary'
CELL_TYPE_ATTR = 'attr'
CELL_TYPE_UNFILTERABLE = 'unfilterable'
+
+
class TableCell(object):
  """Helper class to represent a table cell when rendering using EZT."""

  # Should instances of this class be rendered with whitespace:nowrap?
  # Subclasses can override this constant, e.g., issuelist TableCellOwner.
  NOWRAP = ezt.boolean(False)

  def __init__(self, cell_type, explicit_values,
               derived_values=None, non_column_labels=None, align='',
               sort_values=True):
    """Store all the given data for later access by EZT.

    Args:
      cell_type: one of the CELL_TYPE_* tag strings, stored in self.type.
      explicit_values: list of values to show in the cell.
      derived_values: optional list of values derived via rules; these are
        appended after the explicit values and marked is_derived.
      non_column_labels: optional list of (label, is_derived) pairs for
        labels that do not belong to any shown column.
      align: CSS alignment hint, e.g., 'right' for numeric cells.
      sort_values: if True, each group of values is shown sorted.
    """
    self.type = cell_type
    self.align = align
    self.col_index = 0  # Is set afterward
    self.values = []
    if non_column_labels:
      self.non_column_labels = [
          template_helpers.EZTItem(value=v, is_derived=ezt.boolean(d))
          for v, d in non_column_labels]
    else:
      self.non_column_labels = []

    # Explicit values come first, then derived values; each group is
    # sorted independently when sort_values is set.
    for v in (sorted(explicit_values) if sort_values else explicit_values):
      self.values.append(CellItem(v))

    if derived_values:
      for v in (sorted(derived_values) if sort_values else derived_values):
        self.values.append(CellItem(v, is_derived=True))

  def __cmp__(self, other):
    """A cell is == if each value is == to the values in the other cells."""
    # Python 2 comparison protocol; a falsy other compares as greater.
    return cmp(self.values, other.values) if other else -1

  def DebugString(self):
    """Return a short debugging string with this cell's type and values."""
    return 'TC(%r, %r, %r)' % (
        self.type,
        [v.DebugString() for v in self.values],
        self.non_column_labels)
+
+
def CompositeTableCell(columns_to_combine, cell_factories):
  """Cell factory that combines multiple cells in a combined column.

  Args:
    columns_to_combine: list of sub-column names, e.g., ['priority', 'pri'].
    cell_factories: dict of functions that each create TableCell objects.

  Returns:
    A TableCell subclass (not an instance) whose instances hold the
    concatenated values and non-column labels of all the sub-columns'
    cells.  A new class is created per call so it can close over
    columns_to_combine and cell_factories.
  """

  class FactoryClass(TableCell):
    def __init__(self, art, _col, users_by_id,
        non_col_labels, label_values, related_issues, config):
      # Start as an empty unfilterable cell, then merge in each sub-cell.
      TableCell.__init__(self, CELL_TYPE_UNFILTERABLE, [])

      for sub_col in columns_to_combine:
        sub_factory = ChooseCellFactory(sub_col, cell_factories, config)
        sub_cell = sub_factory(
            art, sub_col, users_by_id, non_col_labels, label_values,
            related_issues, config)
        self.non_column_labels.extend(sub_cell.non_column_labels)
        self.values.extend(sub_cell.values)

  return FactoryClass
+
+
class CellItem(object):
  """Simple class to display one part of a table cell's value, with style."""

  def __init__(self, item, is_derived=False):
    # item: the value to display; is_derived is exposed as an EZT bool so
    # templates can style rule-derived values differently.
    self.item = item
    self.is_derived = ezt.boolean(is_derived)

  def __cmp__(self, other):
    """Compare by wrapped item; a falsy other compares as greater."""
    return cmp(self.item, other.item) if other else -1

  def DebugString(self):
    """Return a short debugging string, marking derived values."""
    if self.is_derived:
      return 'CI(derived: %r)' % self.item
    else:
      return 'CI(%r)' % self.item
+
+
class TableCellKeyLabels(TableCell):
  """TableCell subclass specifically for showing user-defined label values."""

  def __init__(
      self, _art, col, _users_by_id, _non_col_labels,
      label_values, _related_issues, _config):
    # Partition this column's (value, is_derived) pairs in a single pass.
    explicit_values = []
    derived_values = []
    for value, is_derived in label_values.get(col, []):
      if is_derived:
        derived_values.append(value)
      else:
        explicit_values.append(value)
    TableCell.__init__(self, CELL_TYPE_ATTR, explicit_values,
                       derived_values=derived_values)
+
+
class TableCellProject(TableCell):
  """TableCell subclass for showing an artifact's project name."""

  # pylint: disable=unused-argument
  def __init__(
      self, art, col, users_by_id, non_col_labels, label_values,
      _related_issues, _config):
    super(TableCellProject, self).__init__(
        CELL_TYPE_ATTR, [art.project_name])
+
+
class TableCellStars(TableCell):
  """TableCell subclass for showing an artifact's star count."""

  # pylint: disable=unused-argument
  def __init__(
      self, art, col, users_by_id, non_col_labels, label_values,
      _related_issues, _config):
    # Numeric cell, so right-align it.
    super(TableCellStars, self).__init__(
        CELL_TYPE_ATTR, [art.star_count], align='right')
+
+
class TableCellSummary(TableCell):
  """TableCell subclass for showing an artifact's summary."""

  # pylint: disable=unused-argument
  def __init__(
      self, art, col, users_by_id, non_col_labels, label_values,
      _related_issues, _config):
    # The summary cell also carries the artifact's OneWord labels.
    super(TableCellSummary, self).__init__(
        CELL_TYPE_SUMMARY, [art.summary],
        non_column_labels=non_col_labels)
+
+
class TableCellCustom(TableCell):
  """Abstract TableCell subclass specifically for showing custom fields."""

  def __init__(
      self, art, col, users_by_id, _non_col_labels,
      _label_values, _related_issues, config):
    explicit_values = []
    derived_values = []
    for fv in art.field_values:
      # TODO(jrobbins): for cross-project search this could be a list.
      fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
      # NOTE(review): assumes fd is never None here; a foreign field's
      # value would raise AttributeError — confirm callers filter those.
      if fd.field_name.lower() == col:
        val = self.ExtractValue(fv, users_by_id)
        if fv.derived:
          derived_values.append(val)
        else:
          explicit_values.append(val)

    TableCell.__init__(self, CELL_TYPE_ATTR, explicit_values,
                       derived_values=derived_values)

  def ExtractValue(self, fv, _users_by_id):
    """Overridden by subclasses to pull the typed value out of fv."""
    return 'field-id-%d-not-implemented-yet' % fv.field_id
+
+
class TableCellCustomInt(TableCellCustom):
  """TableCell subclass specifically for showing custom int fields."""

  def ExtractValue(self, fv, _users_by_id):
    """Return the field value's stored integer."""
    return fv.int_value
+
+
class TableCellCustomStr(TableCellCustom):
  """TableCell subclass specifically for showing custom str fields."""

  def ExtractValue(self, fv, _users_by_id):
    """Return the field value's stored string."""
    return fv.str_value
+
+
class TableCellCustomUser(TableCellCustom):
  """TableCell subclass specifically for showing custom user fields."""

  def ExtractValue(self, fv, users_by_id):
    """Return the user's email, or a placeholder for unknown user IDs."""
    try:
      return users_by_id[fv.user_id].email
    except KeyError:
      return 'USER_%d' % fv.user_id
+
+
class TableCellCustomDate(TableCellCustom):
  """TableCell subclass specifically for showing custom date fields."""

  def ExtractValue(self, fv, _users_by_id):
    # TODO(jrobbins): convert timestamp to formatted date and time
    return fv.int_value
+
+
class TableCellCustomBool(TableCellCustom):
  """TableCell subclass specifically for showing custom bool fields."""

  def ExtractValue(self, fv, _users_by_id):
    """Return 'Yes' or 'No' based on the field value's int_value."""
    return 'Yes' if fv.int_value else 'No'
+
+
# Maps each custom field type to the TableCell subclass that renders it.
# ENUM fields are represented as Key-Value labels, so they reuse the
# label cell class.
_CUSTOM_FIELD_CELL_FACTORIES = {
    tracker_pb2.FieldTypes.ENUM_TYPE: TableCellKeyLabels,
    tracker_pb2.FieldTypes.INT_TYPE: TableCellCustomInt,
    tracker_pb2.FieldTypes.STR_TYPE: TableCellCustomStr,
    tracker_pb2.FieldTypes.USER_TYPE: TableCellCustomUser,
    tracker_pb2.FieldTypes.DATE_TYPE: TableCellCustomDate,
    tracker_pb2.FieldTypes.BOOL_TYPE: TableCellCustomBool,
}
+
+
def ChooseCellFactory(col, cell_factories, config):
  """Return the CellFactory to use for the given column."""
  # An explicitly registered factory takes precedence.
  if col in cell_factories:
    return cell_factories[col]

  # A combined column like 'priority/pri' merges its parts' cells.
  if '/' in col:
    return CompositeTableCell(col.split('/'), cell_factories)

  # A custom field column uses the factory for its field type;
  # anything else is treated as a Key-Value label column.
  fd = tracker_bizobj.FindFieldDef(col, config)
  if fd:
    return _CUSTOM_FIELD_CELL_FACTORIES[fd.field_type]
  return TableCellKeyLabels
diff --git a/appengine/monorail/framework/template_helpers.py b/appengine/monorail/framework/template_helpers.py
new file mode 100644
index 0000000..5127699
--- /dev/null
+++ b/appengine/monorail/framework/template_helpers.py
@@ -0,0 +1,309 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Some utility classes for interacting with templates."""
+
+import cgi
+import cStringIO
+import httplib
+import logging
+import time
+import types
+
+from third_party import ezt
+
+from protorpc import messages
+
+import settings
+from framework import framework_constants
+
+
+_DISPLAY_VALUE_TRAILING_CHARS = 8
+_DISPLAY_VALUE_TIP_CHARS = 120
+
+
class PBProxy(object):
  """Wraps a Protocol Buffer so it is easy to access from a template."""

  def __init__(self, pb):
    # Name-mangled (stored as _PBProxy__pb), so it never collides with a
    # proxied field name looked up in __getattr__.
    self.__pb = pb

  def __getattr__(self, name):
    """Make the getters template friendly.

    Pseudo-hack alert: When attributes end with _bool, they are converted
    into EZT style bools. I.e., if false return None, if true return True.

    Args:
      name: the name of the attribute to get.

    Returns:
      The value of that attribute (as an EZT bool if the name ends with _bool).
    """
    # Only invoked for names not found on the instance/class, so any real
    # attribute set on the proxy wins automatically.
    if name.endswith('_bool'):
      bool_name = name
      name = name[0:-5]
    else:
      bool_name = None

    # Make it possible for a PBProxy-local attribute to override the protocol
    # buffer field, or even to allow attributes to be added to the PBProxy that
    # the protocol buffer does not even have.
    if name in self.__dict__:
      if callable(self.__dict__[name]):
        val = self.__dict__[name]()
      else:
        val = self.__dict__[name]

      if bool_name:
        return ezt.boolean(val)
      return val

    if bool_name:
      # return an ezt.boolean for the named field.
      return ezt.boolean(getattr(self.__pb, name))

    val = getattr(self.__pb, name)

    if isinstance(val, messages.Enum):
      return int(val)  # TODO(jrobbins): use str() instead

    if isinstance(val, messages.Message):
      return PBProxy(val)

    # Return a list of values whose Message entries
    # have been wrapped in PBProxies.
    if isinstance(val, (list, messages.FieldList)):
      list_to_return = []
      for v in val:
        if isinstance(v, messages.Message):
          list_to_return.append(PBProxy(v))
        else:
          list_to_return.append(v)
      return list_to_return

    return val

  def DebugString(self):
    """Return a string representation that is useful in debugging."""
    return 'PBProxy(%s)' % self.__pb

  def __eq__(self, other):
    # Disable warning about accessing other.__pb.
    # pylint: disable=protected-access
    # NOTE(review): no matching __ne__ is defined; in Python 2, != will
    # not use this __eq__ — confirm callers only compare with ==.
    return isinstance(other, PBProxy) and self.__pb == other.__pb
+
+
_templates = {}  # Cache of parsed templates, keyed by path and options.


def GetTemplate(
    template_path, compress_whitespace=True, eliminate_blank_lines=False,
    base_format=ezt.FORMAT_HTML):
  """Make a MonorailTemplate if needed, or reuse one if possible.

  Args:
    template_path: path to the EZT template file.
    compress_whitespace: whether runs of whitespace should be compressed.
    eliminate_blank_lines: whether blank output lines should be dropped.
    base_format: EZT output format, e.g., ezt.FORMAT_HTML.

  Returns:
    A shared MonorailTemplate instance for the given path and options.
  """
  # Bug fix: the cache key must include every constructor option.  It
  # previously omitted eliminate_blank_lines, so two call sites differing
  # only in that option would share one (wrongly configured) template.
  key = (template_path, compress_whitespace, eliminate_blank_lines,
         base_format)
  if key in _templates:
    return _templates[key]

  template = MonorailTemplate(
      template_path, compress_whitespace=compress_whitespace,
      eliminate_blank_lines=eliminate_blank_lines, base_format=base_format)
  _templates[key] = template
  return template
+
+
class cStringIOUnicodeWrapper(object):
  """Wrapper on cStringIO.StringIO that encodes unicode as UTF-8 as it goes."""

  def __init__(self):
    self.buffer = cStringIO.StringIO()

  def write(self, s):
    """Write s to the buffer, UTF-8-encoding unicode strings first."""
    if isinstance(s, unicode):
      s = s.encode('utf-8')
    self.buffer.write(s)

  def getvalue(self):
    """Return everything written so far as a single byte string."""
    return self.buffer.getvalue()
+
+
# Response prefixes that browsers may content-sniff (e.g., treating the
# page as a PDF).  WriteResponse replaces each with its harmless
# substitute when 'prevent_sniffing' is set in the page data.
SNIFFABLE_PATTERNS = {
  '%PDF-': '%NoNoNo-',
}
+
+
class MonorailTemplate(object):
  """A template with additional functionality.

  Wraps an EZT template, parsing it lazily and offering response writing
  with optional blank-line elimination and anti-sniffing replacement.
  """

  def __init__(self, template_path, compress_whitespace=True,
      eliminate_blank_lines=False, base_format=ezt.FORMAT_HTML):
    """Store the template options; parsing is deferred to GetTemplate()."""
    self.template_path = template_path
    self.template = None  # Parsed lazily by GetTemplate().
    self.compress_whitespace = compress_whitespace
    self.base_format = base_format
    self.eliminate_blank_lines = eliminate_blank_lines

  def WriteResponse(self, response, data, content_type=None):
    """Write the parsed and filled in template to http server.

    Args:
      response: HTTP response object to write to.
      data: dict of template data; 'http_response_code' and
        'prevent_sniffing' keys are also honored here.
      content_type: optional content type to set on the response.
    """
    if content_type:
      response.content_type = content_type

    response.status = data.get('http_response_code', httplib.OK)
    whole_page = self.GetResponse(data)
    if data.get('prevent_sniffing'):
      # Neutralize byte prefixes that browsers would content-sniff.
      for sniff_pattern, sniff_replacement in SNIFFABLE_PATTERNS.items():
        whole_page = whole_page.replace(sniff_pattern, sniff_replacement)
    start = time.time()
    response.write(whole_page)
    logging.info('wrote response in %dms', int((time.time() - start) * 1000))

  def GetResponse(self, data):
    """Generate the text from the template and return it as a string."""
    template = self.GetTemplate()
    start = time.time()
    buf = cStringIOUnicodeWrapper()
    template.generate(buf, data)
    whole_page = buf.getvalue()
    logging.info('rendering took %dms', int((time.time() - start) * 1000))
    logging.info('whole_page len is %r', len(whole_page))
    if self.eliminate_blank_lines:
      # Drop lines that are empty or all-whitespace to shrink the page.
      lines = whole_page.split('\n')
      whole_page = '\n'.join(line for line in lines if line.strip())
      logging.info('smaller whole_page len is %r', len(whole_page))
      logging.info('smaller rendering took %dms',
                   int((time.time() - start) * 1000))
    return whole_page

  def GetTemplate(self):
    """Parse the EZT template, or return an already parsed one."""
    # We don't operate directly on self.template to avoid races.
    template = self.template

    # In dev_mode the template is reparsed on every call — presumably so
    # local template edits show up without a restart; confirm.
    if template is None or settings.dev_mode:
      start = time.time()
      template = ezt.Template(
          fname=self.template_path,
          compress_whitespace=self.compress_whitespace,
          base_format=self.base_format)
      logging.info('parsed in %dms', int((time.time() - start) * 1000))
      self.template = template

    return template

  def GetTemplatePath(self):
    """Accessor for the template path specified in the constructor.

    Returns:
      The string path for the template file provided to the constructor.
    """
    return self.template_path
+
+
class EZTError(object):
  """This class is a helper class to pass errors to EZT.

  This class is used to hold information that will be passed to EZT but might
  be unset. All unset values return None (ie EZT False)
  Example: page errors
  """

  def __getattr__(self, _name):
    """This is the EZT retrieval function."""
    # Only called for attributes not actually set, so any unset error
    # name reads as None (EZT false) instead of raising AttributeError.
    return None

  def AnyErrors(self):
    """Return True if any error attribute has been set on this instance."""
    return len(self.__dict__) != 0

  def DebugString(self):
    """Return a string representation that is useful in debugging."""
    return 'EZTError(%s)' % self.__dict__

  def SetError(self, name, value):
    """Set the named error attribute to the given value/message."""
    self.__setattr__(name, value)

  def SetCustomFieldError(self, field_id, value):
    """Append an error item for the custom field with the given ID."""
    # This access works because of the custom __getattr__.
    # pylint: disable=access-member-before-definition
    # pylint: disable=attribute-defined-outside-init
    if self.custom_fields is None:
      self.custom_fields = []
    self.custom_fields.append(EZTItem(field_id=field_id, message=value))

  # Expose AnyErrors as a read-only property for EZT access.
  any_errors = property(AnyErrors, None)
+
def FitUnsafeText(text, length):
  """Trim some unsafe (unescaped) text to a specific length.

  Three periods are appended if trimming occurs. Note that we cannot use
  the ellipsis character (&hellip) because this is unescaped text.

  Args:
    text: the string to fit (ASCII or unicode); may also be None/empty.
    length: the length to trim to.

  Returns:
    An ASCII or unicode string fitted to the given length; an empty
    string if text was falsy.
  """
  if not text:
    return ""

  return text if len(text) <= length else text[:length] + '...'
+
+
def BytesKbOrMb(num_bytes):
  """Return a human-readable string representation of a number of bytes."""
  if num_bytes < 1024:
    return '%d bytes' % num_bytes            # e.g., 128 bytes

  kibibytes = num_bytes / 1024.0
  if kibibytes < 99:
    return '%.1f KB' % kibibytes             # e.g., 23.4 KB
  if kibibytes < 1024:
    return '%d KB' % int(kibibytes)          # e.g., 219 KB

  mebibytes = kibibytes / 1024.0
  if mebibytes < 99:
    return '%.1f MB' % mebibytes             # e.g., 21.9 MB
  return '%d MB' % int(mebibytes)            # e.g., 100 MB
+
+
class EZTItem(object):
  """A class that makes a collection of fields easily accessible in EZT."""

  def __init__(self, **kwargs):
    """Store all the given key-value pairs as fields of this object."""
    vars(self).update(kwargs)

  def __repr__(self):
    # Sorted for a deterministic repr; iteritems() is Python 2.
    fields = ', '.join('%r: %r' % (k, v) for k, v in
                       sorted(vars(self).iteritems()))
    return '%s({%s})' % (self.__class__.__name__, fields)
+
+
def ExpandLabels(page_data):
  """If page_data has a 'labels' list, expand it into 'label1', etc.

  Args:
    page_data: Template data which may include a 'labels' field.
  """
  label_list = page_data.get('labels', [])
  if isinstance(label_list, types.StringTypes):
    # A comma-separated string is accepted in place of a real list.
    label_list = [label.strip() for label in page_data['labels'].split(',')]

  # Every label gets its own numbered key; the remaining slots up to
  # MAX_LABELS are filled with empty strings.
  for i, label in enumerate(label_list):
    page_data['label%d' % i] = label
  for i in range(len(label_list), framework_constants.MAX_LABELS):
    page_data['label%d' % i] = ''
+
+
class TextRun(object):
  """A fragment of user-entered text that needs to be safely displayed."""

  def __init__(self, content, tag=None, href=None):
    self.content = content
    self.tag = tag    # presumably an HTML tag name — confirm at call sites.
    self.href = href  # link target, when this run renders as a link.
    # Optional presentation attributes, set by callers after construction.
    self.title = None
    self.css_class = None
diff --git a/appengine/monorail/framework/test/__init__.py b/appengine/monorail/framework/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/framework/test/__init__.py
diff --git a/appengine/monorail/framework/test/actionlimit_test.py b/appengine/monorail/framework/test/actionlimit_test.py
new file mode 100644
index 0000000..707c5c6
--- /dev/null
+++ b/appengine/monorail/framework/test/actionlimit_test.py
@@ -0,0 +1,165 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for actionlimit module."""
+
+import time
+import unittest
+
+from framework import actionlimit
+from proto import user_pb2
+
+
+class ActionLimitTest(unittest.TestCase):
+  """Tests for framework.actionlimit rate limits, lifetime caps, and captchas."""
+  def testNeedCaptchaNoUser(self):
+    action = actionlimit.ISSUE_COMMENT
+    self.assertFalse(actionlimit.NeedCaptcha(None, action))
+
+  def testNeedCaptchaAuthUserNoPreviousActions(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    self.assertFalse(actionlimit.NeedCaptcha(user, action))
+
+  def testNeedCaptchaAuthUserLifetimeExcessiveActivityException(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    life_max = actionlimit.ACTION_LIMITS[action][3]
+
+    for _i in range(0, life_max):
+      actionlimit.CountAction(user, action)
+
+    self.assertRaises(
+        actionlimit.ExcessiveActivityException,
+        actionlimit.NeedCaptcha, user, action)
+
+  def testNeedCaptchaAuthUserLifetimeIgnoresTimeout(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    (period, _soft_limit, _hard_limit,
+     life_max) = actionlimit.ACTION_LIMITS[action]
+    now = int(time.time())
+    later = now + period + 1 # a future in which our timestamp is expired
+
+    for _i in range(0, life_max):
+      actionlimit.CountAction(user, action, now=now)
+
+    self.assertRaises(
+        actionlimit.ExcessiveActivityException,
+        actionlimit.NeedCaptcha, user, action, now=later)
+
+  # TODO(jrobbins): write a soft limit captcha test.
+
+  def testNeedCaptchaAuthUserHardLimitExcessiveActivityException(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    (_period, _soft_limit, hard_limit,
+     _life_max) = actionlimit.ACTION_LIMITS[action]
+
+    for _i in range(0, hard_limit):
+      actionlimit.CountAction(user, action)
+
+    self.assertRaises(
+        actionlimit.ExcessiveActivityException,
+        actionlimit.NeedCaptcha, user, action)
+
+  def testNeedCaptchaAuthUserHardLimitRespectsTimeout(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    (period, _soft_limit, hard_limit,
+     _life_max) = actionlimit.ACTION_LIMITS[action]
+    now = int(time.time())
+    later = now + period + 1 # a future in which our timestamp is expired
+
+    for _i in range(0, hard_limit):
+      actionlimit.CountAction(user, action, now=now)
+
+    # if we didn't pass later, we'd get an exception
+    self.assertFalse(actionlimit.NeedCaptcha(user, action, now=later))
+
+  def testNeedCaptchaNoLifetimeLimit(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    life_max = actionlimit.ACTION_LIMITS[action][3]
+    actionlimit.GetLimitPB(user, action).lifetime_count = life_max + 1
+
+    self.assertRaises(
+        actionlimit.ExcessiveActivityException,
+        actionlimit.NeedCaptcha, user, action, skip_lifetime_check=False)
+    self.assertFalse(
+        actionlimit.NeedCaptcha(user, action, skip_lifetime_check=True))
+
+  def testCountActionResetRecentActions(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    limit = actionlimit.GetLimitPB(user, action)
+    limit.recent_count = 10
+    limit.reset_timestamp = 11
+
+    limit = actionlimit.GetLimitPB(user, action)  # re-fetch to confirm values stuck
+    self.assertEqual(10, limit.recent_count)
+    self.assertEqual(11, limit.reset_timestamp)
+
+    actionlimit.ResetRecentActions(user, action)
+
+    limit = actionlimit.GetLimitPB(user, action)
+    self.assertEqual(0, limit.recent_count)
+    self.assertEqual(0, limit.reset_timestamp)
+
+  def testCountActionIncrementsRecentCount(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    (_period, soft_limit, _hard_limit,
+     _life_max) = actionlimit.ACTION_LIMITS[action]
+
+    for i in range(1, soft_limit):
+      actionlimit.CountAction(user, action)
+      limit = actionlimit.GetLimitPB(user, action)
+      self.assertEqual(i, limit.recent_count)
+      self.assertEqual(i, limit.lifetime_count)
+
+  def testCountActionPeriodExpiration(self):
+    action = actionlimit.ISSUE_COMMENT
+    user = user_pb2.User()
+    (period, soft_limit, _hard_limit,
+     _life_max) = actionlimit.ACTION_LIMITS[action]
+    now = int(time.time())
+    later = now + period + 1 # a future in which our timestamp is expired
+
+    for i in range(1, soft_limit):
+      actionlimit.CountAction(user, action, now=now)
+      limit = actionlimit.GetLimitPB(user, action)
+      self.assertEqual(i, limit.recent_count)
+      self.assertEqual(i, limit.lifetime_count)
+
+    actionlimit.CountAction(user, action, now=now)
+    self.assertEqual(soft_limit, limit.recent_count)
+    self.assertEqual(soft_limit, limit.lifetime_count)
+
+    actionlimit.CountAction(user, action, now=later)  # expired period resets recent_count
+    self.assertEqual(1, limit.recent_count)
+    self.assertEqual(soft_limit + 1, limit.lifetime_count)
+
+  def testCustomizeLifetimeLimit(self):
+    user = user_pb2.User()
+
+    self.assertIsNone(user.get_assigned_value('issue_comment_limit'))
+    actionlimit.CustomizeLimit(user, actionlimit.ISSUE_COMMENT, 10, 100, 500)
+    self.assertIsNotNone(user.get_assigned_value('issue_comment_limit'))
+    limit = user.issue_comment_limit
+
+    # sets the specified limit
+    self.assertIsNotNone(limit.get_assigned_value('lifetime_limit'))
+    self.assertEqual(500, limit.lifetime_limit)
+    self.assertEqual(10, limit.period_soft_limit)
+    self.assertEqual(100, limit.period_hard_limit)
+
+    # sets initial values to zero
+    self.assertEqual(0, limit.recent_count)
+    self.assertEqual(0, limit.reset_timestamp)
+
+
+if __name__ == '__main__':
+  unittest.main()  # allow running this test module directly
diff --git a/appengine/monorail/framework/test/alerts_test.py b/appengine/monorail/framework/test/alerts_test.py
new file mode 100644
index 0000000..cd420e2
--- /dev/null
+++ b/appengine/monorail/framework/test/alerts_test.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for alert display helpers."""
+
+import time
+import unittest
+
+from third_party import ezt
+
+from framework import alerts
+from testing import fake
+from testing import testing_helpers
+
+
+class AlertsViewTest(unittest.TestCase):
+  """Tests for alerts.AlertsView timestamp-gated display."""
+  def testTimestamp(self):
+    """Tests that alerts are only shown when the timestamp is valid."""
+    project = fake.Project(project_name='testproj')
+
+    now = int(time.time())
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/testproj/?updated=10&ts=%s' % now, project=project)
+    alerts_view = alerts.AlertsView(mr)
+    self.assertEqual(10, alerts_view.updated)
+    self.assertEqual(ezt.boolean(True), alerts_view.show)
+
+    now -= 10  # a stale timestamp must not show the alert
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/testproj/?updated=10&ts=%s' % now, project=project)
+    alerts_view = alerts.AlertsView(mr)
+    self.assertEqual(ezt.boolean(False), alerts_view.show)
+
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/testproj/?updated=10', project=project)  # no ts param at all
+    alerts_view = alerts.AlertsView(mr)
+    self.assertEqual(ezt.boolean(False), alerts_view.show)
+
+
+if __name__ == '__main__':
+  unittest.main()  # allow running this test module directly
diff --git a/appengine/monorail/framework/test/artifactcollision_test.py b/appengine/monorail/framework/test/artifactcollision_test.py
new file mode 100644
index 0000000..f907d44
--- /dev/null
+++ b/appengine/monorail/framework/test/artifactcollision_test.py
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for artifactcollision module."""
+
+import unittest
+
+from framework import artifactcollision
+from services import service_manager
+from testing import testing_helpers
+
+
+class ArtifactCollisionTest(unittest.TestCase):
+  """Tests for the ArtifactCollision servlet's page data."""
+  def setUp(self):
+    self.services = service_manager.Services()
+    self.servlet = artifactcollision.ArtifactCollision(
+        'rerq', 'res', services=self.services)  # dummy request/response values
+    self.mr = testing_helpers.MakeMonorailRequest(
+        params={'name': 'artifact'}, method='POST')
+    self.mr.project_name = 'monorail'
+    self.mr.continue_issue_id = '123'
+
+  def testGatherPageData(self):
+    page_data = self.servlet.GatherPageData(self.mr)
+    self.assertEqual('artifact', page_data['artifact_name'])
+    self.assertEqual('/p/monorail/issues/detail?id=123',
+                     page_data['artifact_detail_url'])
diff --git a/appengine/monorail/framework/test/banned_test.py b/appengine/monorail/framework/test/banned_test.py
new file mode 100644
index 0000000..45f2ad3
--- /dev/null
+++ b/appengine/monorail/framework/test/banned_test.py
@@ -0,0 +1,55 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.framework.banned."""
+
+import unittest
+
+import webapp2
+
+from framework import banned
+from framework import monorailrequest
+from services import service_manager
+from testing import testing_helpers
+
+
+class BannedTest(unittest.TestCase):
+  """Tests for the Banned servlet: only banned users may view the page."""
+  def testAssertBasePermission(self):
+    servlet = banned.Banned(
+        'request', 'response', services=service_manager.Services())
+
+    mr = monorailrequest.MonorailRequest()
+    mr.auth.user_id = 0L # Anon user cannot see banned page.
+    try:
+      servlet.AssertBasePermission(mr)
+      self.fail()  # AssertBasePermission should have raised a 404
+    except webapp2.HTTPException as e:
+      self.assertEquals(404, e.code)
+
+    mr.auth.user_id = 111L # User who is not banned cannot view banned page.
+    try:
+      servlet.AssertBasePermission(mr)
+      self.fail()  # AssertBasePermission should have raised a 404
+    except webapp2.HTTPException as e:
+      self.assertEquals(404, e.code)
+
+    # This should not throw exception.
+    mr.auth.user_pb.banned = 'spammer'
+    servlet.AssertBasePermission(mr)
+
+  def testGatherPageData(self):
+    servlet = banned.Banned(
+        'request', 'response', services=service_manager.Services())
+    self.assertNotEquals(servlet.template, None)
+
+    _request, mr = testing_helpers.GetRequestObjects()
+    page_data = servlet.GatherPageData(mr)
+
+    self.assertEquals(None, page_data['currentPageURLEncoded'])
+
+
+if __name__ == '__main__':
+  unittest.main()  # allow running this test module directly
diff --git a/appengine/monorail/framework/test/captcha_test.py b/appengine/monorail/framework/test/captcha_test.py
new file mode 100644
index 0000000..19996e5
--- /dev/null
+++ b/appengine/monorail/framework/test/captcha_test.py
@@ -0,0 +1,84 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the captcha module."""
+
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+from framework import captcha
+
+
+class CaptchaTest(unittest.TestCase):
+  """Tests for captcha.Verify() and its use of the reCAPTCHA backend."""
+  def setUp(self):
+    self.mox = mox.Mox()
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_user_stub()
+    self.testbed.init_memcache_stub()
+    self.testbed.init_datastore_v3_stub()
+
+  def tearDown(self):
+    self.mox.UnsetStubs()
+    self.mox.ResetAll()
+
+  def testVerify_NoGuess(self):
+    self.mox.StubOutWithMock(captcha, '_AskRecaptcha')
+    # We are verifying that _AskRecaptcha is not called.
+    self.mox.ReplayAll()
+    self.assertEqual(
+        (False, 'incorrect-captcha-sol'),
+        captcha.Verify('1.2.3.4', ''))
+    self.mox.VerifyAll()
+
+  def testVerify_NullGuess(self):
+    self.mox.StubOutWithMock(captcha, '_AskRecaptcha')
+    # We are verifying that _AskRecaptcha is not called.
+    self.mox.ReplayAll()
+    self.assertEqual(
+        (False, 'incorrect-captcha-sol'),
+        captcha.Verify('1.2.3.4', None))
+    self.mox.VerifyAll()
+
+  def testVerify_WrongGuessErrorCodes(self):  # renamed: was shadowed by testVerify_WrongGuess below
+    self.mox.StubOutWithMock(captcha, '_AskRecaptcha')
+    captcha._AskRecaptcha(
+        '1.2.3.4', 'some challenge').AndReturn(  # expectation now matches the Verify() call
+        {'success': False, 'error-codes': ['invalid-input-response']})
+    self.mox.ReplayAll()
+    self.assertEqual(
+        (False, ['invalid-input-response']),
+        captcha.Verify('1.2.3.4', 'some challenge'))
+    self.mox.VerifyAll()
+
+  def testVerify_CorrectGuess(self):
+    self.mox.StubOutWithMock(captcha, '_AskRecaptcha')
+    captcha._AskRecaptcha(
+        '1.2.3.4', 'matching').AndReturn({'success':True})
+    self.mox.ReplayAll()
+
+    result = captcha.Verify('1.2.3.4', 'matching')
+
+    self.mox.VerifyAll()
+    self.assertEqual((True, ''), result)
+
+  def testVerify_WrongGuess(self):
+    self.mox.StubOutWithMock(captcha, '_AskRecaptcha')
+    captcha._AskRecaptcha(
+        '1.2.3.4', 'non-matching').AndReturn({'success': False})
+    self.mox.ReplayAll()
+
+    result = captcha.Verify('1.2.3.4', 'non-matching')
+
+    self.mox.VerifyAll()
+    self.assertEqual((False, 'incorrect-captcha-sol'), result)
+
+
+if __name__ == '__main__':
+  unittest.main()  # allow running this test module directly
diff --git a/appengine/monorail/framework/test/emailfmt_test.py b/appengine/monorail/framework/test/emailfmt_test.py
new file mode 100644
index 0000000..af8a700
--- /dev/null
+++ b/appengine/monorail/framework/test/emailfmt_test.py
@@ -0,0 +1,726 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for monorail.framework.emailfmt."""
+
+import unittest
+
+import settings
+from framework import emailfmt
+from framework import framework_views
+from proto import project_pb2
+from testing import testing_helpers
+
+from google.appengine.api import apiproxy_stub_map
+
+
+class EmailFmtTest(unittest.TestCase):
+  """Tests for emailfmt message parsing, validation, and address extraction."""
+  @unittest.skipIf('memcache' not in
+                   apiproxy_stub_map.apiproxy._APIProxyStubMap__stub_map,
+                   'memcache api proxy will not be found')
+  def testValidateReferencesHeader(self):
+    project = project_pb2.Project()
+    project.project_name = 'open-open'
+    subject = 'slipped disk'
+    expected = emailfmt.MakeMessageID(
+        'jrobbins@gmail.com', subject,
+        '%s@%s' % (project.project_name, emailfmt.MailDomain()))
+    self.assertTrue(
+        emailfmt.ValidateReferencesHeader(
+            expected, project, 'jrobbins@gmail.com', subject))
+
+    self.assertFalse(
+        emailfmt.ValidateReferencesHeader(
+            expected, project, 'jrobbins@gmail.com', 'something else'))
+
+    self.assertFalse(
+        emailfmt.ValidateReferencesHeader(
+            expected, project, 'someoneelse@gmail.com', subject))
+
+    project.project_name = 'other-project'
+    self.assertFalse(
+        emailfmt.ValidateReferencesHeader(
+            expected, project, 'jrobbins@gmail.com', subject))
+
+  def testParseEmailMessage(self):
+    msg = testing_helpers.MakeMessage(testing_helpers.HEADER_LINES, 'awesome!')
+
+    (from_addr, to_addrs, cc_addrs, references, subject,
+     body) = emailfmt.ParseEmailMessage(msg)
+
+    self.assertEqual('user@example.com', from_addr)
+    self.assertEqual(['proj@monorail.example.com'], to_addrs)
+    self.assertEqual(['ningerso@chromium.org'], cc_addrs)
+    # Expected msg-id was generated from a previous known-good test run.
+    self.assertEqual(['<0=969704940193871313=13442892928193434663='
+                      'proj@monorail.example.com>'],
+                     references)
+    self.assertEqual('Issue 123 in proj: broken link', subject)
+    self.assertEqual('awesome!', body)
+
+    references_header = ('References', '<1234@foo.com> <5678@bar.com>')
+    msg = testing_helpers.MakeMessage(
+        testing_helpers.HEADER_LINES + [references_header], 'awesome!')
+    (from_addr, to_addrs, cc_addrs, references, subject,
+     body) = emailfmt.ParseEmailMessage(msg)
+    self.assertItemsEqual(
+        ['<5678@bar.com>',
+         '<0=969704940193871313=13442892928193434663='
+         'proj@monorail.example.com>',
+         '<1234@foo.com>'],
+        references)
+
+  def testParseEmailMessage_Bulk(self):
+    for precedence in ['Bulk', 'Junk']:
+      msg = testing_helpers.MakeMessage(
+          testing_helpers.HEADER_LINES + [('Precedence', precedence)],
+          'I am on vacation!')
+
+      (from_addr, to_addrs, cc_addrs, in_reply_to, subject,  # NOTE(review): this slot is 'references' above
+       body) = emailfmt.ParseEmailMessage(msg)
+
+      self.assertEqual('', from_addr)
+      self.assertEqual([], to_addrs)
+      self.assertEqual([], cc_addrs)
+      self.assertEqual('', in_reply_to)
+      self.assertEqual('', subject)
+      self.assertEqual('', body)
+
+  def testExtractAddrs(self):
+    header_val = ''
+    self.assertEqual(
+        [], emailfmt._ExtractAddrs(header_val))
+
+    header_val = 'J. Robbins <a@b.com>, c@d.com,\n Nick "Name" Dude <e@f.com>'
+    self.assertEqual(
+        ['a@b.com', 'c@d.com', 'e@f.com'],
+        emailfmt._ExtractAddrs(header_val))
+
+    header_val = ('hot: J. O\'Robbins <a@b.com>; '
+                  'cool: "friendly" <e.g-h@i-j.k-L.com>')
+    self.assertEqual(
+        ['a@b.com', 'e.g-h@i-j.k-L.com'],
+        emailfmt._ExtractAddrs(header_val))
+
+  def CheckIdentifiedValues(
+      self, project_addr, subject, expected_project_name, expected_local_id):
+    """Testing helper function to check 3 results against expected values."""
+    project_name, local_id = emailfmt.IdentifyProjectAndIssue(
+        project_addr, subject)
+    self.assertEqual(expected_project_name, project_name)
+    self.assertEqual(expected_local_id, local_id)
+
+  def testIdentifyProjectAndIssues(self):
+    self.CheckIdentifiedValues(
+        'proj@monorail.example.com',
+        'Issue 123 in proj: the dogs wont eat the dogfood',
+        'proj', 123)
+
+    self.CheckIdentifiedValues(
+        'Proj@MonoRail.Example.Com',
+        'Issue 123 in proj: the dogs wont eat the dogfood',
+        'proj', 123)
+
+    self.CheckIdentifiedValues(
+        'proj-4-u@test-example3.com',
+        'Issue 123 in proj-4-u: this one goes to: 11',
+        'proj-4-u', 123)
+
+    self.CheckIdentifiedValues(
+        'night@monorail.example.com',
+        'Issue 451 in day: something is fishy',
+        None, 451)
+
+    self.CheckIdentifiedValues(
+        'no_reply@chromium.org',
+        'Issue 234 in project foo: ignore this one',
+        None, None)
+
+  def testStripSubjectPrefixes(self):
+    self.assertEqual(
+        '',
+        emailfmt._StripSubjectPrefixes(''))
+
+    self.assertEqual(
+        'this is it',
+        emailfmt._StripSubjectPrefixes('this is it'))
+
+    self.assertEqual(
+        'this is it',
+        emailfmt._StripSubjectPrefixes('re: this is it'))
+
+    self.assertEqual(
+        'this is it',
+        emailfmt._StripSubjectPrefixes('Re: Fwd: aw:this is it'))
+
+    self.assertEqual(
+        'This - . IS it',
+        emailfmt._StripSubjectPrefixes('This - . IS it'))
+
+
+class MailDomainTest(unittest.TestCase):
+  """Tests for emailfmt.MailDomain()."""
+  def testTrivialCases(self):
+    self.assertEqual(
+        'testbed-test.appspotmail.com',
+        emailfmt.MailDomain())
+
+
+class NoReplyAddressTest(unittest.TestCase):
+  """Tests for emailfmt.NoReplyAddress() with and without a commenter."""
+  def testNoCommenter(self):
+    self.assertEqual(
+        'no_reply@testbed-test.appspotmail.com',
+        emailfmt.NoReplyAddress())
+
+  def testWithCommenter(self):
+    commenter_view = framework_views.UserView(111L, 'user@example.com', True)
+    self.assertEqual(
+        'user@example.com via Monorail <no_reply@testbed-test.appspotmail.com>',
+        emailfmt.NoReplyAddress(
+            commenter_view=commenter_view, reveal_addr=True))
+
+  def testObscuredCommenter(self):
+    commenter_view = framework_views.UserView(111L, 'user@example.com', True)
+    self.assertEqual(
+        'u...@example.com via Monorail <no_reply@testbed-test.appspotmail.com>',
+        emailfmt.NoReplyAddress(
+            commenter_view=commenter_view, reveal_addr=False))
+
+
+class FormatFromAddrTest(unittest.TestCase):
+  """Tests for emailfmt.FormatFromAddr() address formatting and obscuring."""
+  def setUp(self):
+    self.project = project_pb2.Project(project_name='proj')
+
+  def testNoCommenter(self):
+    self.assertEqual(settings.send_email_as,
+                     emailfmt.FormatFromAddr(self.project))
+
+  def testNoCommenterWithNoReply(self):
+    self.assertEqual(settings.send_noreply_email_as,
+                     emailfmt.FormatFromAddr(self.project, can_reply_to=False))
+
+  def testWithCommenter(self):
+    commenter_view = framework_views.UserView(111L, 'user@example.com', True)
+    self.assertEqual(
+        'user@example.com via Monorail <%s>' % settings.send_email_as,
+        emailfmt.FormatFromAddr(
+            self.project, commenter_view=commenter_view, reveal_addr=True))
+
+  def testObscuredCommenter(self):
+    commenter_view = framework_views.UserView(111L, 'user@example.com', True)
+    self.assertEqual(
+        'u...@example.com via Monorail <%s>' % settings.send_email_as,
+        emailfmt.FormatFromAddr(
+            self.project, commenter_view=commenter_view, reveal_addr=False))
+
+  def testServiceAccountCommenter(self):
+    johndoe_bot = '123456789@developer.gserviceaccount.com'
+    commenter_view = framework_views.UserView(111L, johndoe_bot, True)
+    self.assertEqual(
+        ('johndoe@example.com via Monorail <%s>' % settings.send_email_as),
+        emailfmt.FormatFromAddr(
+            self.project, commenter_view=commenter_view, reveal_addr=False))
+
+
+class NormalizeHeaderWhitespaceTest(unittest.TestCase):
+  """Tests for emailfmt.NormalizeHeader() whitespace collapsing and truncation."""
+  def testTrivialCases(self):
+    self.assertEqual(
+        '',
+        emailfmt.NormalizeHeader(''))
+
+    self.assertEqual(
+        '',
+        emailfmt.NormalizeHeader(' \t\n'))
+
+    self.assertEqual(
+        'a',
+        emailfmt.NormalizeHeader('a'))
+
+    self.assertEqual(
+        'a b',
+        emailfmt.NormalizeHeader(' a b '))
+
+  def testLongSummary(self):
+    big_string = 'x' * 500
+    self.assertEqual(
+        big_string[:emailfmt.MAX_HEADER_CHARS_CONSIDERED],
+        emailfmt.NormalizeHeader(big_string))
+
+    big_string = 'x y ' * 500
+    self.assertEqual(
+        big_string[:emailfmt.MAX_HEADER_CHARS_CONSIDERED],
+        emailfmt.NormalizeHeader(big_string))
+
+    big_string = 'x ' * 100
+    self.assertEqual(
+        'x ' * 99 + 'x',
+        emailfmt.NormalizeHeader(big_string))
+
+  def testNormalCase(self):
+    self.assertEqual(
+        '[a] b: c d',
+        emailfmt.NormalizeHeader('[a] b:\tc\n\td'))
+
+
+class MakeMessageIDTest(unittest.TestCase):
+  """Tests for emailfmt.MakeMessageID() determinism and domain handling."""
+  @unittest.skipIf('memcache' not in
+                   apiproxy_stub_map.apiproxy._APIProxyStubMap__stub_map,
+                   'memcache api proxy will not be found')
+  def testMakeMessageIDTest(self):
+    message_id = emailfmt.MakeMessageID(
+        'to@to.com', 'subject', 'from@from.com')
+    self.assertTrue(message_id.startswith('<0='))
+    self.assertEqual('testbed-test.appspotmail.com>',
+                     message_id.split('@')[-1])
+
+    settings.mail_domain = None  # NOTE(review): never restored; may leak into later tests — confirm
+    message_id = emailfmt.MakeMessageID(
+        'to@to.com', 'subject', 'from@from.com')
+    self.assertTrue(message_id.startswith('<0='))
+    self.assertEqual('testbed-test.appspotmail.com>',
+                     message_id.split('@')[-1])
+
+    message_id = emailfmt.MakeMessageID(
+        'to@to.com', 'subject', 'from@from.com')
+    self.assertTrue(message_id.startswith('<0='))
+    self.assertEqual('testbed-test.appspotmail.com>',
+                     message_id.split('@')[-1])
+
+    message_id_ws_1 = emailfmt.MakeMessageID(
+        'to@to.com',
+        'this is a very long subject that is sure to be wordwrapped by gmail',
+        'from@from.com')
+    message_id_ws_2 = emailfmt.MakeMessageID(
+        'to@to.com',
+        'this is a very long subject that \n\tis sure to be '
+        'wordwrapped \t\tby gmail',
+        'from@from.com')
+    self.assertEqual(message_id_ws_1, message_id_ws_2)
+
+
+class GetReferencesTest(unittest.TestCase):
+  """Tests for emailfmt.GetReferences() threading behavior."""
+  def testNotPartOfThread(self):
+    refs = emailfmt.GetReferences(
+        'a@a.com', 'hi', None, emailfmt.NoReplyAddress())
+    self.assertEqual(0, len(refs))
+
+  @unittest.skipIf('memcache' not in
+                   apiproxy_stub_map.apiproxy._APIProxyStubMap__stub_map,
+                   'memcache api proxy will not be found')
+  def testAnywhereInThread(self):
+    refs = emailfmt.GetReferences(
+        'a@a.com', 'hi', 0, emailfmt.NoReplyAddress())
+    self.assertTrue(len(refs))
+    self.assertTrue(refs.startswith('<0='))
+
+
+class StripQuotedTextTest(unittest.TestCase):
+  """Tests for emailfmt.StripQuotedText() quoted-reply and signature removal."""
+  def CheckExpected(self, expected_output, test_input):  # helper, not a test
+    actual_output = emailfmt.StripQuotedText(test_input)
+    self.assertEqual(expected_output, actual_output)
+
+  def testAllNewText(self):
+    self.CheckExpected('', '')
+    self.CheckExpected('', '\n')
+    self.CheckExpected('', '\n\n')
+    self.CheckExpected('new', 'new')
+    self.CheckExpected('new', '\nnew\n')
+    self.CheckExpected('new\ntext', '\nnew\ntext\n')
+    self.CheckExpected('new\n\ntext', '\nnew\n\ntext\n')
+
+  def testQuotedLines(self):
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         '> something you said\n'
+         '> that took two lines'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         '> something you said\n'
+         '> that took two lines'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('> something you said\n'
+         '> that took two lines\n'
+         'new\n'
+         'text\n'
+         '\n'))
+
+    self.CheckExpected(
+        ('newtext'),
+        ('> something you said\n'
+         '> that took two lines\n'
+         'newtext'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Mon, Jan 1, 2023, So-and-so <so@and-so.com> Wrote:\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Mon, Jan 1, 2023, So-and-so <so@and-so.com> Wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Mon, Jan 1, 2023, user@example.com via Monorail\n'
+         '<monorail@chromium.com> Wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Jan 14, 2016 6:19 AM, "user@example.com via Monorail" <\n'
+         'monorail@chromium.com> Wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Jan 14, 2016 6:19 AM, "user@example.com via Monorail" <\n'
+         'monorail@monorail-prod.appspotmail.com> wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Mon, Jan 1, 2023, So-and-so so@and-so.com wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Wed, Sep 8, 2010 at 6:56 PM, So =AND= <so@gmail.com>wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'On Mon, Jan 1, 2023, So-and-so <so@and-so.com> Wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'project-name@testbed-test.appspotmail.com wrote:\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'project-name@testbed-test.appspotmail.com a \xc3\xa9crit :\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         'project.domain.com@testbed-test.appspotmail.com a \xc3\xa9crit :\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         '2023/01/4 <so@and-so.com>\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         '\n'
+         'text'),
+        ('new\n'
+         '2023/01/4 <so-and@so.com>\n'
+         '\n'
+         '> something you said\n'
+         '> > in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+  def testBoundaryLines(self):
+
+    self.CheckExpected(
+        ('new'),
+        ('new\n'
+         '---- forwarded message ======\n'
+         '\n'
+         'something you said\n'
+         '> in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new'),
+        ('new\n'
+         '-----Original Message-----\n'
+         '\n'
+         'something you said\n'
+         '> in response to some other junk\n'
+         '\n'
+         'text\n'))
+
+    self.CheckExpected(
+        ('new'),
+        ('new\n'
+         '\n'
+         'Updates:\n'
+         '\tStatus: Fixed\n'
+         '\n'
+         'notification text\n'))
+
+    self.CheckExpected(
+        ('new'),
+        ('new\n'
+         '\n'
+         'Comment #1 on issue 9 by username: Is there ...'
+         'notification text\n'))
+
+  def testSignatures(self):
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '-- \n'
+         'Name\n'
+         'phone\n'
+         'funny quote, or legal disclaimers\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '--\n'
+         'Name\n'
+         'phone\n'
+         'funny quote, or legal disclaimers\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '--\n'
+         'Name\n'
+         'ginormous signature\n'
+         'phone\n'
+         'address\n'
+         'address\n'
+         'address\n'
+         'homepage\n'
+         'social network A\n'
+         'social network B\n'
+         'social network C\n'
+         'funny quote\n'
+         '4 lines about why email should be short\n'
+         'legal disclaimers\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '_______________\n'
+         'Name\n'
+         'phone\n'
+         'funny quote, or legal disclaimers\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Thanks,\n'
+         'Name\n'
+         '\n'
+         '_______________\n'
+         'Name\n'
+         'phone\n'
+         'funny quote, or legal disclaimers\n'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Thanks,\n'
+         'Name'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Cheers,\n'
+         'Name'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Regards\n'
+         'Name'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'best regards'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'THX'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Thank you,\n'
+         'Name'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Sent from my iPhone'))
+
+    self.CheckExpected(
+        ('new\n'
+         'text'),
+        ('new\n'
+         'text\n'
+         '\n'
+         'Sent from my iPod'))
+
+
+if __name__ == '__main__':
+  unittest.main()  # allow running this test module directly
diff --git a/appengine/monorail/framework/test/filecontent_test.py b/appengine/monorail/framework/test/filecontent_test.py
new file mode 100644
index 0000000..7802d71
--- /dev/null
+++ b/appengine/monorail/framework/test/filecontent_test.py
@@ -0,0 +1,188 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the filecontent module."""
+
+import unittest
+
+from framework import filecontent
+
+
+class MimeTest(unittest.TestCase):
+  """Tests for content-type guessing in the filecontent module."""
+
+ _TEST_EXTENSIONS_TO_CTYPES = {
+ 'html': 'text/plain',
+ 'htm': 'text/plain',
+ 'jpg': 'image/jpeg',
+ 'jpeg': 'image/jpeg',
+ 'pdf': 'application/pdf',
+ }
+
+ _CODE_EXTENSIONS = [
+ 'py', 'java', 'mf', 'bat', 'sh', 'php', 'vb', 'pl', 'sql',
+ ]
+
+ def testCommonExtensions(self):
+ """Tests some common extensions for their expected content types."""
+ for ext, ctype in self._TEST_EXTENSIONS_TO_CTYPES.iteritems():
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('file.%s' % ext),
+ ctype)
+
+ def testCaseDoesNotMatter(self):
+ """Ensure that case (upper/lower) of extension does not matter."""
+ for ext, ctype in self._TEST_EXTENSIONS_TO_CTYPES.iteritems():
+ ext = ext.upper()
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('file.%s' % ext),
+ ctype)
+
+ for ext in self._CODE_EXTENSIONS:
+ ext = ext.upper()
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('code.%s' % ext),
+ 'text/plain')
+
+ def testCodeIsText(self):
+ """Ensure that code extensions are text/plain."""
+ for ext in self._CODE_EXTENSIONS:
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('code.%s' % ext),
+ 'text/plain')
+
+ def testNoExtensionIsText(self):
+ """Ensure that no extension indicates text/plain."""
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('noextension'),
+ 'text/plain')
+
+ def testUnknownExtension(self):
+    """Ensure that an obviously unknown extension is treated as binary."""
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('f.madeupextension'),
+ 'application/octet-stream')
+
+ def testNoShockwaveFlash(self):
+ """Ensure that Shockwave files will NOT be served w/ that content type."""
+ self.assertEqual(
+ filecontent.GuessContentTypeFromFilename('bad.swf'),
+ 'application/octet-stream')
+
+
+class DecodeFileContentsTest(unittest.TestCase):
+
+ def IsBinary(self, contents):
+ _contents, is_binary, _is_long = (
+ filecontent.DecodeFileContents(contents))
+ return is_binary
+
+ def testFileIsBinaryEmpty(self):
+ self.assertFalse(self.IsBinary(''))
+
+ def testFileIsBinaryShortText(self):
+ self.assertFalse(self.IsBinary('This is some plain text.'))
+
+ def testLineLengthDetection(self):
+ unicode_str = (
+ u'Some non-ascii chars - '
+ u'\xa2\xfa\xb6\xe7\xfc\xea\xd0\xf4\xe6\xf0\xce\xf6\xbe')
+ short_line = unicode_str.encode('iso-8859-1')
+ long_line = (unicode_str * 100)[:filecontent._MAX_SOURCE_LINE_LEN_LOWER+1]
+ long_line = long_line.encode('iso-8859-1')
+
+ lines = [short_line] * 100
+ lines.append(long_line)
+
+ # High lower ratio - text
+ self.assertFalse(self.IsBinary('\n'.join(lines)))
+
+ lines.extend([long_line] * 99)
+
+ # 50/50 lower/upper ratio - binary
+ self.assertTrue(self.IsBinary('\n'.join(lines)))
+
+ # Single line too long - binary
+ lines = [short_line] * 100
+ lines.append(short_line * 100) # Very long line
+ self.assertTrue(self.IsBinary('\n'.join(lines)))
+
+ def testFileIsBinaryLongText(self):
+ self.assertFalse(self.IsBinary('This is plain text. \n' * 100))
+ # long utf-8 lines are OK
+ self.assertFalse(self.IsBinary('This one long line. ' * 100))
+
+ def testFileIsBinaryLongBinary(self):
+ bin_string = ''.join([chr(c) for c in range(122, 252)])
+ self.assertTrue(self.IsBinary(bin_string * 100))
+
+ def testFileIsTextByPath(self):
+ bin_string = ''.join([chr(c) for c in range(122, 252)] * 100)
+ unicode_str = (
+ u'Some non-ascii chars - '
+ u'\xa2\xfa\xb6\xe7\xfc\xea\xd0\xf4\xe6\xf0\xce\xf6\xbe')
+ long_line = (unicode_str * 100)[:filecontent._MAX_SOURCE_LINE_LEN_LOWER+1]
+ long_line = long_line.encode('iso-8859-1')
+
+ for contents in [bin_string, long_line]:
+ self.assertTrue(filecontent.DecodeFileContents(contents, path=None)[1])
+ self.assertTrue(filecontent.DecodeFileContents(contents, path='')[1])
+ self.assertTrue(filecontent.DecodeFileContents(contents, path='foo')[1])
+ self.assertTrue(
+ filecontent.DecodeFileContents(contents, path='foo.bin')[1])
+ self.assertTrue(
+ filecontent.DecodeFileContents(contents, path='foo.zzz')[1])
+ for path in ['a/b/Makefile.in', 'README', 'a/file.js', 'b.txt']:
+ self.assertFalse(
+ filecontent.DecodeFileContents(contents, path=path)[1])
+
+ def testFileIsBinaryByCommonExtensions(self):
+ contents = 'this is not examined'
+ self.assertTrue(filecontent.DecodeFileContents(
+ contents, path='junk.zip')[1])
+ self.assertTrue(filecontent.DecodeFileContents(
+ contents, path='JUNK.ZIP')[1])
+ self.assertTrue(filecontent.DecodeFileContents(
+ contents, path='/build/HelloWorld.o')[1])
+ self.assertTrue(filecontent.DecodeFileContents(
+ contents, path='/build/Hello.class')[1])
+ self.assertTrue(filecontent.DecodeFileContents(
+ contents, path='/trunk/libs.old/swing.jar')[1])
+
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='HelloWorld.cc')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='Hello.java')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='README')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='READ.ME')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='README.txt')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='README.TXT')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='/trunk/src/com/monorail/Hello.java')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='/branches/1.2/resource.el')[1])
+ self.assertFalse(filecontent.DecodeFileContents(
+ contents, path='/wiki/PageName.wiki')[1])
+
+ def testUnreasonablyLongFile(self):
+ contents = '\n' * (filecontent.SOURCE_FILE_MAX_LINES + 2)
+ _contents, is_binary, is_long = filecontent.DecodeFileContents(
+ contents)
+ self.assertFalse(is_binary)
+ self.assertTrue(is_long)
+
+ contents = '\n' * 100
+ _contents, is_binary, is_long = filecontent.DecodeFileContents(
+ contents)
+ self.assertFalse(is_binary)
+ self.assertFalse(is_long)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/framework_bizobj_test.py b/appengine/monorail/framework/test/framework_bizobj_test.py
new file mode 100644
index 0000000..2574e09
--- /dev/null
+++ b/appengine/monorail/framework/test/framework_bizobj_test.py
@@ -0,0 +1,147 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for monorail.framework.framework_bizobj."""
+
+import unittest
+
+from framework import framework_bizobj
+from framework import framework_constants
+from proto import project_pb2
+
+
+class ArtifactTest(unittest.TestCase):
+
+ def testMergeLabels(self):
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ [], [], [], [])
+ self.assertEquals(merged_labels, [])
+ self.assertEquals(update_add, [])
+ self.assertEquals(update_remove, [])
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['a', 'b'], [], [], [])
+ self.assertEquals(merged_labels, ['a', 'b'])
+ self.assertEquals(update_add, [])
+ self.assertEquals(update_remove, [])
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['a', 'b', 'd'], ['c'], ['d'], [])
+ self.assertEquals(merged_labels, ['a', 'b', 'c'])
+ self.assertEquals(update_add, ['c'])
+ self.assertEquals(update_remove, ['d'])
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['a', 'b', 'd'], ['d'], ['e'], [])
+ self.assertEquals(merged_labels, ['a', 'b', 'd'])
+ self.assertEquals(update_add, []) # d was already there.
+ self.assertEquals(update_remove, []) # there was no e.
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['Priority-Medium', 'OpSys-OSX'], ['Hot'], ['OpSys-OSX'], ['Priority'])
+ self.assertEquals(merged_labels, ['Priority-Medium', 'Hot'])
+ self.assertEquals(update_add, ['Hot'])
+ self.assertEquals(update_remove, ['OpSys-OSX'])
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['Priority-Medium', 'OpSys-OSX'], ['Priority-High', 'OpSys-Win'], [],
+ ['Priority'])
+ self.assertEquals(merged_labels,
+ ['OpSys-OSX', 'Priority-High', 'OpSys-Win'])
+ self.assertEquals(update_add, ['Priority-High', 'OpSys-Win'])
+ self.assertEquals(update_remove, [])
+
+ (merged_labels, update_add, update_remove) = framework_bizobj.MergeLabels(
+ ['Priority-Medium', 'OpSys-OSX'], [], ['Priority-Medium', 'OpSys-Win'],
+ ['Priority'])
+ self.assertEquals(merged_labels, ['OpSys-OSX'])
+ self.assertEquals(update_add, [])
+ self.assertEquals(update_remove, ['Priority-Medium'])
+
+
+class CanonicalizeLabelTest(unittest.TestCase):
+
+ def testCanonicalizeLabel(self):
+ self.assertEqual(None, framework_bizobj.CanonicalizeLabel(None))
+ self.assertEqual('FooBar', framework_bizobj.CanonicalizeLabel('Foo Bar '))
+ self.assertEqual('Foo.Bar',
+ framework_bizobj.CanonicalizeLabel('Foo . Bar '))
+ self.assertEqual('Foo-Bar',
+ framework_bizobj.CanonicalizeLabel('Foo - Bar '))
+
+
+class IsValidProjectNameTest(unittest.TestCase):
+
+ def testBadChars(self):
+ self.assertFalse(framework_bizobj.IsValidProjectName('spa ce'))
+ self.assertFalse(framework_bizobj.IsValidProjectName('under_score'))
+ self.assertFalse(framework_bizobj.IsValidProjectName('name.dot'))
+ self.assertFalse(framework_bizobj.IsValidProjectName('pie#sign$'))
+ self.assertFalse(framework_bizobj.IsValidProjectName('(who?)'))
+
+ def testBadHyphen(self):
+ self.assertFalse(framework_bizobj.IsValidProjectName('name-'))
+ self.assertFalse(framework_bizobj.IsValidProjectName('-name'))
+ self.assertTrue(framework_bizobj.IsValidProjectName('project-name'))
+
+ def testMinimumLength(self):
+ self.assertFalse(framework_bizobj.IsValidProjectName('x'))
+ self.assertTrue(framework_bizobj.IsValidProjectName('xy'))
+
+ def testMaximumLength(self):
+ self.assertFalse(framework_bizobj.IsValidProjectName(
+ 'x' * (framework_constants.MAX_PROJECT_NAME_LENGTH + 1)))
+ self.assertTrue(framework_bizobj.IsValidProjectName(
+ 'x' * (framework_constants.MAX_PROJECT_NAME_LENGTH)))
+
+ def testInvalidName(self):
+ self.assertFalse(framework_bizobj.IsValidProjectName(''))
+ self.assertFalse(framework_bizobj.IsValidProjectName('000'))
+
+ def testValidName(self):
+ self.assertTrue(framework_bizobj.IsValidProjectName('098asd'))
+ self.assertTrue(framework_bizobj.IsValidProjectName('one-two-three'))
+
+
+class UserIsInProjectTest(unittest.TestCase):
+
+ def testUserIsInProject(self):
+ p = project_pb2.Project()
+ self.assertFalse(framework_bizobj.UserIsInProject(p, {10}))
+ self.assertFalse(framework_bizobj.UserIsInProject(p, set()))
+
+ p.owner_ids.extend([1, 2, 3])
+ p.committer_ids.extend([4, 5, 6])
+ p.contributor_ids.extend([7, 8, 9])
+ self.assertTrue(framework_bizobj.UserIsInProject(p, {1}))
+ self.assertTrue(framework_bizobj.UserIsInProject(p, {4}))
+ self.assertTrue(framework_bizobj.UserIsInProject(p, {7}))
+ self.assertFalse(framework_bizobj.UserIsInProject(p, {10}))
+
+ # Membership via group membership
+ self.assertTrue(framework_bizobj.UserIsInProject(p, {10, 4}))
+
+ # Membership via several group memberships
+ self.assertTrue(framework_bizobj.UserIsInProject(p, {1, 4}))
+
+ # Several irrelevant group memberships
+ self.assertFalse(framework_bizobj.UserIsInProject(p, {10, 11, 12}))
+
+
+class AllProjectMembersTest(unittest.TestCase):
+
+ def testAllProjectMembers(self):
+ p = project_pb2.Project()
+ self.assertEqual(framework_bizobj.AllProjectMembers(p), [])
+
+ p.owner_ids.extend([1, 2, 3])
+ p.committer_ids.extend([4, 5, 6])
+ p.contributor_ids.extend([7, 8, 9])
+ self.assertEqual(framework_bizobj.AllProjectMembers(p),
+ [1, 2, 3, 4, 5, 6, 7, 8, 9])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/framework_helpers_test.py b/appengine/monorail/framework/test/framework_helpers_test.py
new file mode 100644
index 0000000..79ac20e
--- /dev/null
+++ b/appengine/monorail/framework/test/framework_helpers_test.py
@@ -0,0 +1,442 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the framework_helpers module."""
+
+import unittest
+
+import mox
+import time
+
+from framework import framework_helpers
+from framework import framework_views
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class HelperFunctionsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.time = self.mox.CreateMock(framework_helpers.time)
+ framework_helpers.time = self.time # Point to a mocked out time module.
+
+ def tearDown(self):
+ framework_helpers.time = time # Point back to the time module.
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testRetryDecorator_ExceedFailures(self):
+ class Tracker(object):
+ func_called = 0
+ tracker = Tracker()
+
+ # Use a function that always fails.
+ @framework_helpers.retry(2, delay=1, backoff=2)
+ def testFunc(tracker):
+ tracker.func_called += 1
+ raise Exception('Failed')
+
+ self.time.sleep(1).AndReturn(None)
+ self.time.sleep(2).AndReturn(None)
+ self.mox.ReplayAll()
+ with self.assertRaises(Exception):
+ testFunc(tracker)
+ self.mox.VerifyAll()
+ self.assertEquals(3, tracker.func_called)
+
+ def testRetryDecorator_EventuallySucceed(self):
+ class Tracker(object):
+ func_called = 0
+ tracker = Tracker()
+
+ # Use a function that succeeds on the 2nd attempt.
+ @framework_helpers.retry(2, delay=1, backoff=2)
+ def testFunc(tracker):
+ tracker.func_called += 1
+ if tracker.func_called < 2:
+ raise Exception('Failed')
+
+ self.time.sleep(1).AndReturn(None)
+ self.mox.ReplayAll()
+ testFunc(tracker)
+ self.mox.VerifyAll()
+ self.assertEquals(2, tracker.func_called)
+
+ def testGetRoleName(self):
+ proj = project_pb2.Project()
+ proj.owner_ids.append(111L)
+ proj.committer_ids.append(222L)
+ proj.contributor_ids.append(333L)
+
+ self.assertEquals(None, framework_helpers.GetRoleName(set(), proj))
+
+ self.assertEquals(
+ 'Owner', framework_helpers.GetRoleName({111L}, proj))
+ self.assertEquals(
+ 'Committer', framework_helpers.GetRoleName({222L}, proj))
+ self.assertEquals(
+ 'Contributor', framework_helpers.GetRoleName({333L}, proj))
+
+ self.assertEquals(
+ 'Owner',
+ framework_helpers.GetRoleName({111L, 222L, 999L}, proj))
+ self.assertEquals(
+ 'Committer',
+ framework_helpers.GetRoleName({222L, 333L, 999L}, proj))
+ self.assertEquals(
+ 'Contributor',
+ framework_helpers.GetRoleName({333L, 999L}, proj))
+
+
+class UrlFormattingTest(unittest.TestCase):
+ """Tests for URL formatting."""
+
+ def setUp(self):
+ self.services = service_manager.Services(user=fake.UserService())
+
+ def testFormatMovedProjectURL(self):
+ """Project foo has been moved to bar. User is visiting /p/foo/..."""
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.current_page_url = '/p/foo/'
+ self.assertEqual(
+ '/p/bar/',
+ framework_helpers.FormatMovedProjectURL(mr, 'bar'))
+
+ mr.current_page_url = '/p/foo/issues/list'
+ self.assertEqual(
+ '/p/bar/issues/list',
+ framework_helpers.FormatMovedProjectURL(mr, 'bar'))
+
+ mr.current_page_url = '/p/foo/issues/detail?id=123'
+ self.assertEqual(
+ '/p/bar/issues/detail?id=123',
+ framework_helpers.FormatMovedProjectURL(mr, 'bar'))
+
+ mr.current_page_url = '/p/foo/issues/detail?id=123#c7'
+ self.assertEqual(
+ '/p/bar/issues/detail?id=123#c7',
+ framework_helpers.FormatMovedProjectURL(mr, 'bar'))
+
+ def testFormatURL(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ path = '/dude/wheres/my/car'
+ url = framework_helpers.FormatURL(mr, path)
+ self.assertEqual(path, url)
+
+ def testFormatURLWithRecognizedParams(self):
+ params = {}
+ query = []
+ for name in framework_helpers.RECOGNIZED_PARAMS:
+ params[name] = name
+ query.append('%s=%s' % (name, name))
+ path = '/dude/wheres/my/car'
+ expected = '%s?%s' % (path, '&'.join(query))
+ mr = testing_helpers.MakeMonorailRequest(path=expected)
+ url = framework_helpers.FormatURL(mr, path) # No added params.
+ self.assertEqual(expected, url)
+
+ def testFormatURLWithKeywordArgs(self):
+ params = {}
+ query_pairs = []
+ for name in framework_helpers.RECOGNIZED_PARAMS:
+ params[name] = name
+ if name is not 'can' and name is not 'start':
+ query_pairs.append('%s=%s' % (name, name))
+ path = '/dude/wheres/my/car'
+ mr = testing_helpers.MakeMonorailRequest(
+ path='%s?%s' % (path, '&'.join(query_pairs)))
+ query_pairs.append('can=yep')
+ query_pairs.append('start=486')
+ query_string = '&'.join(query_pairs)
+ expected = '%s?%s' % (path, query_string)
+ url = framework_helpers.FormatURL(mr, path, can='yep', start=486)
+ self.assertEqual(expected, url)
+
+ def testFormatURLWithKeywordArgsAndID(self):
+ params = {}
+ query_pairs = []
+ query_pairs.append('id=200') # id should be the first parameter.
+ for name in framework_helpers.RECOGNIZED_PARAMS:
+ params[name] = name
+ if name is not 'can' and name is not 'start':
+ query_pairs.append('%s=%s' % (name, name))
+ path = '/dude/wheres/my/car'
+ mr = testing_helpers.MakeMonorailRequest(
+ path='%s?%s' % (path, '&'.join(query_pairs)))
+ query_pairs.append('can=yep')
+ query_pairs.append('start=486')
+ query_string = '&'.join(query_pairs)
+ expected = '%s?%s' % (path, query_string)
+ url = framework_helpers.FormatURL(mr, path, can='yep', start=486, id=200)
+ self.assertEqual(expected, url)
+
+ def testFormatURLWithStrangeParams(self):
+ mr = testing_helpers.MakeMonorailRequest(path='/foo?start=0')
+ url = framework_helpers.FormatURL(
+ mr, '/foo', r=0, path='/foo/bar', sketchy='/foo/ bar baz ')
+ self.assertEqual(
+ '/foo?start=0&path=/foo/bar&r=0&sketchy=/foo/%20bar%20baz%20',
+ url)
+
+ def testFormatAbsoluteURL(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/some-path',
+ headers={'Host': 'www.test.com'})
+ self.assertEqual(
+ 'http://www.test.com/p/proj/some/path',
+ framework_helpers.FormatAbsoluteURL(mr, '/some/path'))
+
+ def testFormatAbsoluteURL_CommonRequestParams(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/some-path?foo=bar&can=1',
+ headers={'Host': 'www.test.com'})
+ self.assertEqual(
+ 'http://www.test.com/p/proj/some/path?can=1',
+ framework_helpers.FormatAbsoluteURL(mr, '/some/path'))
+ self.assertEqual(
+ 'http://www.test.com/p/proj/some/path',
+ framework_helpers.FormatAbsoluteURL(
+ mr, '/some/path', copy_params=False))
+
+ def testFormatAbsoluteURL_NoProject(self):
+ path = '/some/path'
+ _request, mr = testing_helpers.GetRequestObjects(
+ headers={'Host': 'www.test.com'}, path=path)
+ url = framework_helpers.FormatAbsoluteURL(mr, path, include_project=False)
+ self.assertEqual(url, 'http://www.test.com/some/path')
+
+
+class WordWrapSuperLongLinesTest(unittest.TestCase):
+
+ def testEmptyLogMessage(self):
+ msg = ''
+ wrapped_msg = framework_helpers.WordWrapSuperLongLines(msg)
+ self.assertEqual(wrapped_msg, '')
+
+ def testShortLines(self):
+ msg = 'one\ntwo\nthree\n'
+ wrapped_msg = framework_helpers.WordWrapSuperLongLines(msg)
+ expected = 'one\ntwo\nthree\n'
+ self.assertEqual(wrapped_msg, expected)
+
+ def testOneLongLine(self):
+ msg = ('This is a super long line that just goes on and on '
+ 'and it seems like it will never stop because it is '
+ 'super long and it was entered by a user who had no '
+ 'familiarity with the return key.')
+ wrapped_msg = framework_helpers.WordWrapSuperLongLines(msg)
+ expected = ('This is a super long line that just goes on and on and it '
+ 'seems like it will never stop because it\n'
+ 'is super long and it was entered by a user who had no '
+ 'familiarity with the return key.')
+ self.assertEqual(wrapped_msg, expected)
+
+ msg2 = ('This is a super long line that just goes on and on '
+ 'and it seems like it will never stop because it is '
+ 'super long and it was entered by a user who had no '
+ 'familiarity with the return key. '
+ 'This is a super long line that just goes on and on '
+ 'and it seems like it will never stop because it is '
+ 'super long and it was entered by a user who had no '
+ 'familiarity with the return key.')
+ wrapped_msg2 = framework_helpers.WordWrapSuperLongLines(msg2)
+ expected2 = ('This is a super long line that just goes on and on and it '
+ 'seems like it will never stop because it\n'
+ 'is super long and it was entered by a user who had no '
+ 'familiarity with the return key. This is a\n'
+ 'super long line that just goes on and on and it seems like '
+ 'it will never stop because it is super\n'
+ 'long and it was entered by a user who had no familiarity '
+ 'with the return key.')
+ self.assertEqual(wrapped_msg2, expected2)
+
+ def testMixOfShortAndLong(self):
+ msg = ('[Author: mpcomplete]\n'
+ '\n'
+ # Description on one long line
+ 'Fix a memory leak in JsArray and JsObject for the IE and NPAPI '
+ 'ports. Each time you call GetElement* or GetProperty* to '
+ 'retrieve string or object token, the token would be leaked. '
+ 'I added a JsScopedToken to ensure that the right thing is '
+ 'done when the object leaves scope, depending on the platform.\n'
+ '\n'
+ 'R=zork\n'
+ 'CC=google-gears-eng@googlegroups.com\n'
+ 'DELTA=108 (52 added, 36 deleted, 20 changed)\n'
+ 'OCL=5932446\n'
+ 'SCL=5933728\n')
+ wrapped_msg = framework_helpers.WordWrapSuperLongLines(msg)
+ expected = (
+ '[Author: mpcomplete]\n'
+ '\n'
+ 'Fix a memory leak in JsArray and JsObject for the IE and NPAPI '
+ 'ports. Each time you call\n'
+ 'GetElement* or GetProperty* to retrieve string or object token, the '
+ 'token would be leaked. I added\n'
+ 'a JsScopedToken to ensure that the right thing is done when the '
+ 'object leaves scope, depending on\n'
+ 'the platform.\n'
+ '\n'
+ 'R=zork\n'
+ 'CC=google-gears-eng@googlegroups.com\n'
+ 'DELTA=108 (52 added, 36 deleted, 20 changed)\n'
+ 'OCL=5932446\n'
+ 'SCL=5933728\n')
+ self.assertEqual(wrapped_msg, expected)
+
+
+class ComputeListDeltasTest(unittest.TestCase):
+
+ def DoOne(self, old=None, new=None, added=None, removed=None):
+ """Run one call to the target method and check expected results."""
+ actual_added, actual_removed = framework_helpers.ComputeListDeltas(
+ old, new)
+ self.assertItemsEqual(added, actual_added)
+ self.assertItemsEqual(removed, actual_removed)
+
+ def testEmptyLists(self):
+ self.DoOne(old=[], new=[], added=[], removed=[])
+ self.DoOne(old=[1, 2], new=[], added=[], removed=[1, 2])
+ self.DoOne(old=[], new=[1, 2], added=[1, 2], removed=[])
+
+ def testUnchanged(self):
+ self.DoOne(old=[1], new=[1], added=[], removed=[])
+ self.DoOne(old=[1, 2], new=[1, 2], added=[], removed=[])
+ self.DoOne(old=[1, 2], new=[2, 1], added=[], removed=[])
+
+ def testCompleteChange(self):
+ self.DoOne(old=[1, 2], new=[3, 4], added=[3, 4], removed=[1, 2])
+
+ def testGeneralChange(self):
+ self.DoOne(old=[1, 2], new=[2], added=[], removed=[1])
+ self.DoOne(old=[1], new=[1, 2], added=[2], removed=[])
+ self.DoOne(old=[1, 2], new=[2, 3], added=[3], removed=[1])
+
+
+class UserSettingsTest(unittest.TestCase):
+
+ def testGatherUnifiedSettingsPageData(self):
+ email_options = []
+
+ class UserSettingsStub(framework_helpers.UserSettings):
+
+ # pylint: disable=unused-argument
+ @classmethod
+ def _GetEmailOptions(cls, user_view, conn_pool):
+ return email_options
+
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.auth.user_view = framework_views.UserView(100, 'user@invalid', True)
+ mr.auth.user_view.profile_url = '/u/profile/url'
+ page_data = UserSettingsStub.GatherUnifiedSettingsPageData(
+ mr.auth.user_id, mr.auth.user_view, mr.auth.user_pb)
+
+ expected_keys = [
+ 'api_request_reset',
+ 'api_request_lifetime_limit',
+ 'api_request_hard_limit',
+ 'api_request_soft_limit',
+ 'settings_user',
+ 'settings_user_pb',
+ 'settings_user_is_banned',
+ 'settings_user_ignore_action_limits',
+ 'self',
+ 'project_creation_reset',
+ 'issue_comment_reset',
+ 'issue_attachment_reset',
+ 'issue_bulk_edit_reset',
+ 'project_creation_lifetime_limit',
+ 'project_creation_soft_limit',
+ 'project_creation_hard_limit',
+ 'issue_comment_lifetime_limit',
+ 'issue_comment_soft_limit',
+ 'issue_comment_hard_limit',
+ 'issue_attachment_lifetime_limit',
+ 'issue_attachment_soft_limit',
+ 'issue_attachment_hard_limit',
+ 'issue_bulk_edit_lifetime_limit',
+ 'issue_bulk_edit_hard_limit',
+ 'issue_bulk_edit_soft_limit',
+ 'profile_url_fragment',
+ 'preview_on_hover',
+ ]
+ self.assertItemsEqual(expected_keys, page_data.keys())
+
+ self.assertEqual('profile/url', page_data['profile_url_fragment'])
+ # TODO(jrobbins): Test action limit support
+
+ # TODO(jrobbins): Test ProcessForm.
+
+
+class MurmurHash3Test(unittest.TestCase):
+
+ def testMurmurHash(self):
+ test_data = [
+ ('', 0),
+ ('agable@chromium.org', 4092810879),
+ (u'jrobbins@chromium.org', 904770043),
+ ('seanmccullough%google.com@gtempaccount.com', 1301269279),
+ ('rmistry+monorail@chromium.org', 4186878788),
+ ('jparent+foo@', 2923900874),
+ ('@example.com', 3043483168),
+ ]
+ hashes = [framework_helpers.MurmurHash3_x86_32(x)
+ for (x, _) in test_data]
+ self.assertListEqual(hashes, [e for (_, e) in test_data])
+
+ def testMurmurHashWithSeed(self):
+ test_data = [
+ ('', 1113155926, 2270882445),
+ ('agable@chromium.org', 772936925, 3995066671),
+ (u'jrobbins@chromium.org', 1519359761, 1273489513),
+ ('seanmccullough%google.com@gtempaccount.com', 49913829, 1202521153),
+ ('rmistry+monorail@chromium.org', 314860298, 3636123309),
+ ('jparent+foo@', 195791379, 332453977),
+ ('@example.com', 521490555, 257496459),
+ ]
+ hashes = [framework_helpers.MurmurHash3_x86_32(x, s)
+ for (x, s, _) in test_data]
+ self.assertListEqual(hashes, [e for (_, _, e) in test_data])
+
+
+class MakeRandomKeyTest(unittest.TestCase):
+
+ def testMakeRandomKey_Normal(self):
+ key1 = framework_helpers.MakeRandomKey()
+ key2 = framework_helpers.MakeRandomKey()
+ self.assertEqual(128, len(key1))
+ self.assertEqual(128, len(key2))
+ self.assertNotEqual(key1, key2)
+
+ def testMakeRandomKey_Length(self):
+ key = framework_helpers.MakeRandomKey()
+ self.assertEqual(128, len(key))
+ key16 = framework_helpers.MakeRandomKey(length=16)
+ self.assertEqual(16, len(key16))
+
+ def testMakeRandomKey_Chars(self):
+ key = framework_helpers.MakeRandomKey(chars='a', length=4)
+ self.assertEqual('aaaa', key)
+
+
+class IsServiceAccountTest(unittest.TestCase):
+
+ def testIsServiceAccount(self):
+ appspot = 'abc@appspot.gserviceaccount.com'
+ developer = '@developer.gserviceaccount.com'
+ bugdroid = 'bugdroid1@chromium.org'
+ user = 'test@example.com'
+
+ self.assertTrue(framework_helpers.IsServiceAccount(appspot))
+ self.assertTrue(framework_helpers.IsServiceAccount(developer))
+ self.assertTrue(framework_helpers.IsServiceAccount(bugdroid))
+ self.assertFalse(framework_helpers.IsServiceAccount(user))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/framework_views_test.py b/appengine/monorail/framework/test/framework_views_test.py
new file mode 100644
index 0000000..1d821eb
--- /dev/null
+++ b/appengine/monorail/framework/test/framework_views_test.py
@@ -0,0 +1,198 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for framework_views classes."""
+
+import unittest
+
+from framework import framework_views
+from framework import monorailrequest
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+import settings
+
+
+LONG_STR = 'VeryLongStringThatCertainlyWillNotFit'
+LONG_PART_STR = 'OnePartThatWillNotFit-OneShort'
+
+
+class LabelViewTest(unittest.TestCase):
+
+ def testLabelView(self):
+ view = framework_views.LabelView('', None)
+ self.assertEquals('', view.name)
+
+ view = framework_views.LabelView('Priority-High', None)
+ self.assertEquals('Priority-High', view.name)
+ self.assertIsNone(view.is_restrict)
+ self.assertEquals('Priority-High', view.tooltip)
+ self.assertEquals('', view.docstring)
+ self.assertEquals('Priority', view.prefix)
+ self.assertEquals('High', view.value)
+
+ view = framework_views.LabelView('%s-%s' % (LONG_STR, LONG_STR), None)
+ self.assertEquals('%s-%s' % (LONG_STR, LONG_STR), view.name)
+ self.assertEquals('%s-%s' % (LONG_STR, LONG_STR), view.tooltip)
+ self.assertEquals('', view.docstring)
+ self.assertEquals(LONG_STR, view.prefix)
+ self.assertEquals(LONG_STR, view.value)
+
+ view = framework_views.LabelView(LONG_PART_STR, None)
+ self.assertEquals(LONG_PART_STR, view.name)
+ self.assertEquals(LONG_PART_STR, view.tooltip)
+ self.assertEquals('', view.docstring)
+ self.assertEquals('OnePartThatWillNotFit', view.prefix)
+ self.assertEquals('OneShort', view.value)
+
+ config = tracker_pb2.ProjectIssueConfig()
+ config.well_known_labels.append(tracker_pb2.LabelDef(
+ label='Priority-High', label_docstring='Must ship in this milestone'))
+
+ view = framework_views.LabelView('Priority-High', config)
+ self.assertEquals('Must ship in this milestone', view.docstring)
+
+ view = framework_views.LabelView('Priority-Foo', config)
+ self.assertEquals('', view.docstring)
+
+ view = framework_views.LabelView('Restrict-View-Commit', None)
+ self.assertTrue(view.is_restrict)
+
+
+class StatusViewTest(unittest.TestCase):
+
+ def testStatusView(self):
+ view = framework_views.StatusView('', None)
+ self.assertEquals('', view.name)
+
+ view = framework_views.StatusView('Accepted', None)
+ self.assertEquals('Accepted', view.name)
+ self.assertEquals('Accepted', view.tooltip)
+ self.assertEquals('', view.docstring)
+ self.assertEquals('yes', view.means_open)
+
+ view = framework_views.StatusView(LONG_STR, None)
+ self.assertEquals(LONG_STR, view.name)
+ self.assertEquals(LONG_STR, view.tooltip)
+ self.assertEquals('', view.docstring)
+ self.assertEquals('yes', view.means_open)
+
+ config = tracker_pb2.ProjectIssueConfig()
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='SlamDunk', status_docstring='Code fixed and taught a lesson',
+ means_open=False))
+
+ view = framework_views.StatusView('SlamDunk', config)
+ self.assertEquals('Code fixed and taught a lesson', view.docstring)
+ self.assertFalse(view.means_open)
+
+ view = framework_views.StatusView('SlammedBack', config)
+ self.assertEquals('', view.docstring)
+
+
+class RevealEmailsToMembersTest(unittest.TestCase):
+
+ def setUp(self):
+ project = project_pb2.Project()
+ project.owner_ids.append(111L)
+ project.committer_ids.append(222L)
+ project.contributor_ids.append(333L)
+ project.contributor_ids.append(888L)
+ user = user_pb2.User()
+ user.is_site_admin = False
+ self.mr = monorailrequest.MonorailRequest()
+ self.mr.project = project
+ self.mr.auth.user_pb = user
+
+ def CheckRevealAllToMember(
+ self, logged_in_user_id, expected, viewed_user_id=333L, group_id=None):
+ user_view = framework_views.UserView(
+ viewed_user_id, 'user@example.com', True)
+
+ if group_id:
+ pass # xxx re-implement groups
+
+ users_by_id = {333L: user_view}
+ self.mr.auth.user_id = logged_in_user_id
+ self.mr.auth.effective_ids = {logged_in_user_id}
+ # Assert display name is obscured before the reveal.
+ self.assertEqual('u...@example.com', user_view.display_name)
+ # Assert profile url contains user ID before the reveal.
+ self.assertEqual('/u/%s/' % viewed_user_id, user_view.profile_url)
+ framework_views.RevealAllEmailsToMembers(self.mr, users_by_id)
+ self.assertEqual(expected, not user_view.obscure_email)
+ if expected:
+ # Assert display name is now revealed.
+ self.assertEqual('user@example.com', user_view.display_name)
+ # Assert profile url contains the email.
+ self.assertEqual('/u/user@example.com/', user_view.profile_url)
+ else:
+ # Assert display name is still hidden.
+ self.assertEqual('u...@example.com', user_view.display_name)
+ # Assert profile url still contains user ID.
+ self.assertEqual('/u/%s/' % viewed_user_id, user_view.profile_url)
+
+ def testRevealEmailsToPriviledgedDomain(self):
+ for priviledged_user_domain in settings.priviledged_user_domains:
+ self.mr.auth.user_pb.email = 'test@' + priviledged_user_domain
+ self.CheckRevealAllToMember(100001L, True)
+
+ def testRevealEmailToSelf(self):
+ self.mr.auth.user_pb.email = 'user@example.com'
+ self.CheckRevealAllToMember(100001L, True)
+
+ def testRevealAllEmailsToMembers_Collaborators(self):
+ self.CheckRevealAllToMember(0L, False)
+ self.CheckRevealAllToMember(111L, True)
+ self.CheckRevealAllToMember(222L, True)
+ self.CheckRevealAllToMember(333L, True)
+ self.CheckRevealAllToMember(444L, False)
+
+ # Viewed user has indirect role in the project via a group.
+ self.CheckRevealAllToMember(0, False, group_id=888L)
+ self.CheckRevealAllToMember(111L, True, group_id=888L)
+ # xxx re-implement
+ # self.CheckRevealAllToMember(
+ # 111, True, viewed_user_id=444L, group_id=888L)
+
+ # Logged in user has indirect role in the project via a group.
+ self.CheckRevealAllToMember(888L, True)
+
+ def testRevealAllEmailsToMembers_Admins(self):
+ self.CheckRevealAllToMember(555L, False)
+ self.mr.auth.user_pb.is_site_admin = True
+ self.CheckRevealAllToMember(555L, True)
+
+
+class RevealAllEmailsTest(unittest.TestCase):
+
+ def testRevealAllEmail(self):
+ users_by_id = {
+ 111L: framework_views.UserView(111L, 'a@a.com', True),
+ 222L: framework_views.UserView(222L, 'b@b.com', True),
+ 333L: framework_views.UserView(333L, 'c@c.com', True),
+ 999L: framework_views.UserView(999L, 'z@z.com', True),
+ }
+ # Assert display names are obscured before the reveal.
+ self.assertEqual('a...@a.com', users_by_id[111L].display_name)
+ self.assertEqual('b...@b.com', users_by_id[222L].display_name)
+ self.assertEqual('c...@c.com', users_by_id[333L].display_name)
+ self.assertEqual('z...@z.com', users_by_id[999L].display_name)
+
+ framework_views.RevealAllEmails(users_by_id)
+
+ self.assertFalse(users_by_id[111L].obscure_email)
+ self.assertFalse(users_by_id[222L].obscure_email)
+ self.assertFalse(users_by_id[333L].obscure_email)
+ self.assertFalse(users_by_id[999L].obscure_email)
+ # Assert display names are now revealed.
+ self.assertEqual('a@a.com', users_by_id[111L].display_name)
+ self.assertEqual('b@b.com', users_by_id[222L].display_name)
+ self.assertEqual('c@c.com', users_by_id[333L].display_name)
+ self.assertEqual('z@z.com', users_by_id[999L].display_name)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/gcs_helpers_test.py b/appengine/monorail/framework/test/gcs_helpers_test.py
new file mode 100644
index 0000000..1a9af1b
--- /dev/null
+++ b/appengine/monorail/framework/test/gcs_helpers_test.py
@@ -0,0 +1,134 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the framework_helpers module."""
+
+import unittest
+import uuid
+
+import mox
+
+from google.appengine.api import app_identity
+from google.appengine.api import images
+from third_party import cloudstorage
+
+from framework import filecontent
+from framework import gcs_helpers
+from testing import fake
+from testing import testing_helpers
+
+
+class GcsHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeleteObjectFromGCS(self):
+ object_id = 'aaaaa'
+ bucket_name = 'test_bucket'
+ object_path = '/' + bucket_name + object_id
+
+ self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
+ app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
+
+ self.mox.StubOutWithMock(cloudstorage, 'delete')
+ cloudstorage.delete(object_path)
+
+ self.mox.ReplayAll()
+
+ gcs_helpers.DeleteObjectFromGCS(object_id)
+ self.mox.VerifyAll()
+
+ def testStoreObjectInGCS_ResizableMimeType(self):
+ guid = 'aaaaa'
+ project_id = 100
+ object_id = '/%s/attachments/%s' % (project_id, guid)
+ bucket_name = 'test_bucket'
+ object_path = '/' + bucket_name + object_id
+ mime_type = 'image/png'
+ content = 'content'
+ thumb_content = 'thumb_content'
+
+ self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
+ app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
+
+ self.mox.StubOutWithMock(uuid, 'uuid4')
+ uuid.uuid4().AndReturn(guid)
+
+ self.mox.StubOutWithMock(cloudstorage, 'open')
+ cloudstorage.open(object_path, 'w', mime_type).AndReturn(fake.FakeFile())
+ cloudstorage.open(object_path + '-thumbnail', 'w', mime_type).AndReturn(
+ fake.FakeFile())
+
+ self.mox.StubOutWithMock(images, 'resize')
+ images.resize(content, gcs_helpers.DEFAULT_THUMB_WIDTH,
+ gcs_helpers.DEFAULT_THUMB_HEIGHT).AndReturn(thumb_content)
+
+ self.mox.ReplayAll()
+
+ ret_id = gcs_helpers.StoreObjectInGCS(
+ content, mime_type, project_id, gcs_helpers.DEFAULT_THUMB_WIDTH,
+ gcs_helpers.DEFAULT_THUMB_HEIGHT)
+ self.mox.VerifyAll()
+ self.assertEquals(object_id, ret_id)
+
+ def testStoreObjectInGCS_NotResizableMimeType(self):
+ guid = 'aaaaa'
+ project_id = 100
+ object_id = '/%s/attachments/%s' % (project_id, guid)
+ bucket_name = 'test_bucket'
+ object_path = '/' + bucket_name + object_id
+ mime_type = 'not_resizable_mime_type'
+ content = 'content'
+
+ self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
+ app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)
+
+ self.mox.StubOutWithMock(uuid, 'uuid4')
+ uuid.uuid4().AndReturn(guid)
+
+ self.mox.StubOutWithMock(cloudstorage, 'open')
+ cloudstorage.open(object_path, 'w', mime_type).AndReturn(fake.FakeFile())
+
+ self.mox.ReplayAll()
+
+ ret_id = gcs_helpers.StoreObjectInGCS(
+ content, mime_type, project_id, gcs_helpers.DEFAULT_THUMB_WIDTH,
+ gcs_helpers.DEFAULT_THUMB_HEIGHT)
+ self.mox.VerifyAll()
+ self.assertEquals(object_id, ret_id)
+
+ def testCheckMemeTypeResizable(self):
+ for resizable_mime_type in gcs_helpers.RESIZABLE_MIME_TYPES:
+ gcs_helpers.CheckMimeTypeResizable(resizable_mime_type)
+
+ with self.assertRaises(gcs_helpers.UnsupportedMimeType):
+ gcs_helpers.CheckMimeTypeResizable('not_resizable_mime_type')
+
+ def testStoreLogoInGCS(self):
+ file_name = 'test_file.png'
+ mime_type = 'image/png'
+ content = 'test content'
+ project_id = 100
+ object_id = 123
+
+ self.mox.StubOutWithMock(filecontent, 'GuessContentTypeFromFilename')
+ filecontent.GuessContentTypeFromFilename(file_name).AndReturn(mime_type)
+
+ self.mox.StubOutWithMock(gcs_helpers, 'StoreObjectInGCS')
+ gcs_helpers.StoreObjectInGCS(
+ content, mime_type, project_id,
+ thumb_width=gcs_helpers.LOGO_THUMB_WIDTH,
+ thumb_height=gcs_helpers.LOGO_THUMB_HEIGHT).AndReturn(object_id)
+
+ self.mox.ReplayAll()
+
+ ret_id = gcs_helpers.StoreLogoInGCS(file_name, content, project_id)
+ self.mox.VerifyAll()
+ self.assertEquals(object_id, ret_id)
diff --git a/appengine/monorail/framework/test/grid_view_helpers_test.py b/appengine/monorail/framework/test/grid_view_helpers_test.py
new file mode 100644
index 0000000..8e22138
--- /dev/null
+++ b/appengine/monorail/framework/test/grid_view_helpers_test.py
@@ -0,0 +1,174 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for grid_view_helpers classes and functions."""
+
+import unittest
+
+from framework import framework_constants
+from framework import framework_views
+from framework import grid_view_helpers
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class GridViewHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.default_cols = 'a b c'
+ self.builtin_cols = 'a b x y z'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ self.art1 = fake.MakeTestIssue(
+ 789, 1, 'a summary', '', 0, derived_owner_id=111L, star_count=12,
+ derived_labels='Priority-Medium Hot Mstone-1 Mstone-2',
+ derived_status='Overdue')
+ self.art2 = fake.MakeTestIssue(
+ 789, 1, 'a summary', 'New', 111L, star_count=12, merged_into=200001,
+ labels='Priority-Medium Type-DEFECT Hot Mstone-1 Mstone-2')
+ self.users_by_id = {
+ 111L: framework_views.UserView(111, 'foo@example.com', True),
+ }
+
+ def testSortGridHeadings(self):
+ config = fake.MakeTestConfig(
+ 789, labels=('Priority-High Priority-Medium Priority-Low Hot Cold '
+ 'Milestone-Near Milestone-Far'),
+ statuses=('New Accepted Started Fixed WontFix Invalid Duplicate'))
+ asc_accessors = {
+ 'id': 'some function that is not called',
+ 'reporter': 'some function that is not called',
+ 'opened': 'some function that is not called',
+ 'modified': 'some function that is not called',
+ }
+
+    # Verify that status headings are sorted according to the status
+    # values defined in the config.
+ col_name = 'status'
+ headings = ['Duplicate', 'Limbo', 'New', 'OnHold', 'Accepted', 'Fixed']
+ sorted_headings = grid_view_helpers.SortGridHeadings(
+ col_name, headings, self.users_by_id, config, asc_accessors)
+ self.assertEqual(
+ sorted_headings,
+ ['New', 'Accepted', 'Fixed', 'Duplicate', 'Limbo', 'OnHold'])
+
+    # Verify that special columns are sorted by their natural value order.
+ col_name = 'id'
+ headings = [1, 2, 5, 3, 4]
+ sorted_headings = grid_view_helpers.SortGridHeadings(
+ col_name, headings, self.users_by_id, config, asc_accessors)
+ self.assertEqual(sorted_headings,
+ [1, 2, 3, 4, 5])
+
+    # Verify that label value headings are sorted according to the label
+    # values defined in the config.
+ col_name = 'priority'
+ headings = ['Medium', 'High', 'Low', 'dont-care']
+ sorted_headings = grid_view_helpers.SortGridHeadings(
+ col_name, headings, self.users_by_id, config, asc_accessors)
+ self.assertEqual(sorted_headings,
+ ['High', 'Medium', 'Low', 'dont-care'])
+
+ def testGetArtifactAttr_Explicit(self):
+ label_values = grid_view_helpers.MakeLabelValuesDict(self.art2)
+
+ id_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'id', self.users_by_id, label_values, self.config)
+ self.assertEqual([1], id_vals)
+ summary_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'summary', self.users_by_id, label_values, self.config)
+ self.assertEqual(['a summary'], summary_vals)
+ status_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'status', self.users_by_id, label_values, self.config)
+ self.assertEqual(['New'], status_vals)
+ stars_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'stars', self.users_by_id, label_values, self.config)
+ self.assertEqual([12], stars_vals)
+ owner_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'owner', self.users_by_id, label_values, self.config)
+ self.assertEqual(['f...@example.com'], owner_vals)
+ priority_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'priority', self.users_by_id, label_values, self.config)
+ self.assertEqual(['Medium'], priority_vals)
+ mstone_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'mstone', self.users_by_id, label_values, self.config)
+ self.assertEqual(['1', '2'], mstone_vals)
+ foo_vals = grid_view_helpers.GetArtifactAttr(
+ self.art2, 'foo', self.users_by_id, label_values, self.config)
+ self.assertEqual([framework_constants.NO_VALUES], foo_vals)
+ # merged_into_vals = grid_view_helpers.GetArtifactAttr(
+ # self.art2, 'mergedinto', self.users_by_id, label_values,
+ # self.config)
+ # self.assertEqual(['other-project:1'], merged_into_vals)
+
+ def testGetArtifactAttr_Dervied(self):
+ label_values = grid_view_helpers.MakeLabelValuesDict(self.art1)
+ status_vals = grid_view_helpers.GetArtifactAttr(
+ self.art1, 'status', self.users_by_id, label_values, self.config)
+ self.assertEqual(['Overdue'], status_vals)
+ owner_vals = grid_view_helpers.GetArtifactAttr(
+ self.art1, 'owner', self.users_by_id, label_values, self.config)
+ self.assertEqual(['f...@example.com'], owner_vals)
+ priority_vals = grid_view_helpers.GetArtifactAttr(
+ self.art1, 'priority', self.users_by_id, label_values, self.config)
+ self.assertEqual(['Medium'], priority_vals)
+ mstone_vals = grid_view_helpers.GetArtifactAttr(
+ self.art1, 'mstone', self.users_by_id, label_values, self.config)
+ self.assertEqual(['1', '2'], mstone_vals)
+
+ def testMakeLabelValuesDict_Empty(self):
+ art = fake.MakeTestIssue(
+ 789, 1, 'a summary', '', 0, derived_owner_id=111L, star_count=12)
+ label_values = grid_view_helpers.MakeLabelValuesDict(art)
+ self.assertEqual({}, label_values)
+
+ def testMakeLabelValuesDict(self):
+ art = fake.MakeTestIssue(
+ 789, 1, 'a summary', '', 0, derived_owner_id=111L, star_count=12,
+ labels=['Priority-Medium', 'Hot', 'Mstone-1', 'Mstone-2'])
+ label_values = grid_view_helpers.MakeLabelValuesDict(art)
+ self.assertEqual(
+ {'priority': ['Medium'], 'mstone': ['1', '2']},
+ label_values)
+
+ art = fake.MakeTestIssue(
+ 789, 1, 'a summary', '', 0, derived_owner_id=111L, star_count=12,
+ labels='Priority-Medium Hot Mstone-1'.split(),
+ derived_labels=['Mstone-2'])
+ label_values = grid_view_helpers.MakeLabelValuesDict(art)
+ self.assertEqual(
+ {'priority': ['Medium'], 'mstone': ['1', '2']},
+ label_values)
+
+ def testMakeDrillDownSearch(self):
+ self.assertEqual('-has:milestone ',
+ grid_view_helpers.MakeDrillDownSearch('milestone', '----'))
+ self.assertEqual('milestone=22 ',
+ grid_view_helpers.MakeDrillDownSearch('milestone', '22'))
+ self.assertEqual(
+ 'owner=a@example.com ',
+ grid_view_helpers.MakeDrillDownSearch('owner', 'a@example.com'))
+
+ def testAnyArtifactHasNoAttr_Empty(self):
+ artifacts = []
+ all_label_values = {}
+ self.assertFalse(grid_view_helpers.AnyArtifactHasNoAttr(
+ artifacts, 'milestone', self.users_by_id, all_label_values,
+ self.config))
+
+ def testAnyArtifactHasNoAttr(self):
+ artifacts = [self.art1]
+ all_label_values = {
+ self.art1.local_id: grid_view_helpers.MakeLabelValuesDict(self.art1),
+ }
+ self.assertFalse(grid_view_helpers.AnyArtifactHasNoAttr(
+ artifacts, 'mstone', self.users_by_id, all_label_values, self.config))
+ self.assertTrue(grid_view_helpers.AnyArtifactHasNoAttr(
+ artifacts, 'milestone', self.users_by_id, all_label_values,
+ self.config))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/jsonfeed_test.py b/appengine/monorail/framework/test/jsonfeed_test.py
new file mode 100644
index 0000000..2ebe0dc
--- /dev/null
+++ b/appengine/monorail/framework/test/jsonfeed_test.py
@@ -0,0 +1,142 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for jsonfeed module."""
+
+import httplib
+import logging
+import unittest
+
+from google.appengine.api import app_identity
+
+from framework import jsonfeed
+from framework import servlet
+from framework import xsrf
+from services import service_manager
+from testing import testing_helpers
+
+
+class JsonFeedTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+
+ def testGet(self):
+ """Tests handling of GET requests."""
+ feed = TestableJsonFeed()
+
+ # all expected args are present + a bonus arg that should be ignored
+ feed.mr = testing_helpers.MakeMonorailRequest(
+ path='/foo/bar/wee?sna=foo', method='POST',
+ params={'a': '123', 'z': 'zebra'})
+ self.assertRaises(servlet.AlreadySentResponseException, feed.get)
+
+ self.assertEqual(True, feed.handle_request_called)
+ self.assertEqual(1, len(feed.json_data))
+
+ def testPost(self):
+ """Tests handling of POST requests."""
+ feed = TestableJsonFeed()
+ feed.mr = testing_helpers.MakeMonorailRequest(
+ path='/foo/bar/wee?sna=foo', method='POST',
+ params={'a': '123', 'z': 'zebra'})
+
+ self.assertRaises(servlet.AlreadySentResponseException, feed.post)
+
+ self.assertEqual(True, feed.handle_request_called)
+ self.assertEqual(1, len(feed.json_data))
+
+ def testSecurityTokenChecked_BadToken(self):
+ feed = TestableJsonFeed()
+ feed.mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 555})
+ # Note that feed.mr has no token set.
+ self.assertRaises(xsrf.TokenIncorrect, feed.get)
+ self.assertRaises(xsrf.TokenIncorrect, feed.post)
+
+ feed.mr.token = 'bad token'
+ self.assertRaises(xsrf.TokenIncorrect, feed.get)
+ self.assertRaises(xsrf.TokenIncorrect, feed.post)
+
+ def testSecurityTokenChecked_HandlerDoesNotNeedToken(self):
+ feed = TestableJsonFeed()
+ feed.mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 555})
+ # Note that feed.mr has no token set.
+ feed.CHECK_SECURITY_TOKEN = False
+ self.assertRaises(servlet.AlreadySentResponseException, feed.get)
+ self.assertRaises(servlet.AlreadySentResponseException, feed.post)
+
+ def testSecurityTokenChecked_AnonUserDoesNotNeedToken(self):
+ feed = TestableJsonFeed()
+ feed.mr = testing_helpers.MakeMonorailRequest()
+ # Note that feed.mr has no token set, but also no auth.user_id.
+ self.assertRaises(servlet.AlreadySentResponseException, feed.get)
+ self.assertRaises(servlet.AlreadySentResponseException, feed.post)
+
+ def testSameAppOnly_ExternallyAccessible(self):
+ feed = TestableJsonFeed()
+ feed.mr = testing_helpers.MakeMonorailRequest()
+ # Note that request has no X-Appengine-Inbound-Appid set.
+ self.assertRaises(servlet.AlreadySentResponseException, feed.get)
+ self.assertRaises(servlet.AlreadySentResponseException, feed.post)
+
+ def testSameAppOnly_InternalOnlyCalledFromSameApp(self):
+ feed = TestableJsonFeed()
+ feed.CHECK_SAME_APP = True
+ feed.mr = testing_helpers.MakeMonorailRequest()
+ app_id = app_identity.get_application_id()
+ feed.mr.request.headers['X-Appengine-Inbound-Appid'] = app_id
+ self.assertRaises(servlet.AlreadySentResponseException, feed.get)
+ self.assertRaises(servlet.AlreadySentResponseException, feed.post)
+
+ def testSameAppOnly_InternalOnlyCalledExternally(self):
+ feed = TestableJsonFeed()
+ feed.CHECK_SAME_APP = True
+ feed.mr = testing_helpers.MakeMonorailRequest()
+ # Note that request has no X-Appengine-Inbound-Appid set.
+ self.assertIsNone(feed.get())
+ self.assertFalse(feed.handle_request_called)
+ self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+ self.assertIsNone(feed.post())
+ self.assertFalse(feed.handle_request_called)
+ self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+
+ def testSameAppOnly_InternalOnlyCalledFromWrongApp(self):
+ feed = TestableJsonFeed()
+ feed.CHECK_SAME_APP = True
+ feed.mr = testing_helpers.MakeMonorailRequest()
+ feed.mr.request.headers['X-Appengine-Inbound-Appid'] = 'wrong'
+ self.assertIsNone(feed.get())
+ self.assertFalse(feed.handle_request_called)
+ self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+ self.assertIsNone(feed.post())
+ self.assertFalse(feed.handle_request_called)
+ self.assertEqual(httplib.FORBIDDEN, feed.response.status)
+
+
+class TestableJsonFeed(jsonfeed.JsonFeed):
+
+ def __init__(self, request=None):
+ response = testing_helpers.Blank()
+ super(TestableJsonFeed, self).__init__(
+ request or 'req', response, services=service_manager.Services())
+
+ self.response_data = None
+ self.handle_request_called = False
+ self.json_data = None
+
+ def HandleRequest(self, mr):
+ self.handle_request_called = True
+ return {'a': mr.GetParam('a')}
+
+  # The output chain is hard to test-double, so we skip that phase
+  # but save the response data for inspection.
+ def _RenderJsonResponse(self, json_data):
+ self.json_data = json_data
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/monorailrequest_test.py b/appengine/monorail/framework/test/monorailrequest_test.py
new file mode 100644
index 0000000..6a94c93
--- /dev/null
+++ b/appengine/monorail/framework/test/monorailrequest_test.py
@@ -0,0 +1,413 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the monorailrequest module."""
+
+import re
+import unittest
+
+import mox
+
+from google.appengine.api import users
+
+import webapp2
+
+from framework import monorailrequest
+from framework import permissions
+from framework import profiler
+from proto import project_pb2
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_constants
+
+
+class HostportReTest(unittest.TestCase):
+
+ def testGood(self):
+ test_data = [
+ 'localhost:8080',
+ 'app.appspot.com',
+ 'bugs-staging.chromium.org',
+ 'vers10n-h3x-dot-app-id.appspot.com',
+ ]
+ for hostport in test_data:
+ self.assertTrue(monorailrequest._HOSTPORT_RE.match(hostport),
+ msg='Incorrectly rejected %r' % hostport)
+
+ def testBad(self):
+ test_data = [
+ '',
+ ' ',
+ '\t',
+ '\n',
+ '\'',
+ '"',
+ 'version"cruft-dot-app-id.appspot.com',
+ '\nother header',
+ 'version&cruft-dot-app-id.appspot.com',
+ ]
+ for hostport in test_data:
+ self.assertFalse(monorailrequest._HOSTPORT_RE.match(hostport),
+ msg='Incorrectly accepted %r' % hostport)
+
+class AuthDataTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def testGetUserID(self):
+    pass  # TODO(jrobbins): re-implement
+
+ def testExamineRequestUserID(self):
+ pass # TODO(jrobbins): re-implement
+
+
+class MonorailRequestUnitTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.project = self.services.project.TestAddProject('proj')
+ self.services.user.TestAddUser('jrobbins@example.com', 111)
+
+ self.profiler = profiler.Profiler()
+ self.mox = mox.Mox()
+ self.mox.StubOutWithMock(users, 'get_current_user')
+ users.get_current_user().AndReturn(None)
+ self.mox.ReplayAll()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def testGetIntParamConvertsQueryParamToInt(self):
+ notice_id = 12345
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/foo?notice=%s' % notice_id)
+
+ value = mr.GetIntParam('notice')
+ self.assert_(isinstance(value, int))
+ self.assertEqual(notice_id, value)
+
+ def testGetIntParamConvertsQueryParamToLong(self):
+ notice_id = 12345678901234567890
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/foo?notice=%s' % notice_id)
+
+ value = mr.GetIntParam('notice')
+ self.assertTrue(isinstance(value, long))
+ self.assertEqual(notice_id, value)
+
+ def testGetIntListParamNoParam(self):
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet'), self.services, self.profiler)
+ self.assertEquals(mr.GetIntListParam('ids'), None)
+ self.assertEquals(mr.GetIntListParam('ids', default_value=['test']),
+ ['test'])
+
+ def testGetIntListParamOneValue(self):
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet?ids=11'), self.services, self.profiler)
+ self.assertEquals(mr.GetIntListParam('ids'), [11])
+ self.assertEquals(mr.GetIntListParam('ids', default_value=['test']),
+ [11])
+
+ def testGetIntListParamMultiValue(self):
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet?ids=21,22,23'), self.services,
+ self.profiler)
+ self.assertEquals(mr.GetIntListParam('ids'), [21, 22, 23])
+ self.assertEquals(mr.GetIntListParam('ids', default_value=['test']),
+ [21, 22, 23])
+
+ def testGetIntListParamBogusValue(self):
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet?ids=not_an_int'), self.services,
+ self.profiler)
+ self.assertEquals(mr.GetIntListParam('ids'), None)
+ self.assertEquals(mr.GetIntListParam('ids', default_value=['test']),
+ ['test'])
+
+ def testGetIntListParamMalformed(self):
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet?ids=31,32,,'), self.services,
+ self.profiler)
+ self.assertEquals(mr.GetIntListParam('ids'), None)
+ self.assertEquals(mr.GetIntListParam('ids', default_value=['test']),
+ ['test'])
+
+ def testDefaultValuesNoUrl(self):
+ """If request has no param, default param values should be used."""
+ mr = monorailrequest.MonorailRequest()
+ mr.ParseRequest(
+ webapp2.Request.blank('servlet'), self.services, self.profiler)
+ self.assertEquals(mr.GetParam('r', 3), 3)
+ self.assertEquals(mr.GetIntParam('r', 3), 3)
+ self.assertEquals(mr.GetPositiveIntParam('r', 3), 3)
+ self.assertEquals(mr.GetIntListParam('r', [3, 4]), [3, 4])
+
+ def _MRWithMockRequest(
+ self, path, headers=None, *mr_args, **mr_kwargs):
+ request = webapp2.Request.blank(path, headers=headers)
+ mr = monorailrequest.MonorailRequest(*mr_args, **mr_kwargs)
+ mr.ParseRequest(request, self.services, self.profiler)
+ return mr
+
+ def testParseQueryParameters(self):
+ mr = self._MRWithMockRequest(
+ '/p/proj/issues/list?q=foo+OR+bar&num=50')
+ self.assertEquals('foo OR bar', mr.query)
+ self.assertEquals(50, mr.num)
+
+ def testParseRequest_Scheme(self):
+ mr = self._MRWithMockRequest('/p/proj/')
+ self.assertEquals('http', mr.request.scheme)
+
+ def testParseRequest_HostportAndCurrentPageURL(self):
+ mr = self._MRWithMockRequest('/p/proj/', headers={
+ 'Host': 'example.com',
+ 'Cookie': 'asdf',
+ })
+ self.assertEquals('http', mr.request.scheme)
+ self.assertEquals('example.com', mr.request.host)
+ self.assertEquals('http://example.com/p/proj/', mr.current_page_url)
+
+ def testViewedUser_WithEmail(self):
+ mr = self._MRWithMockRequest('/u/jrobbins@example.com/')
+ self.assertEquals('jrobbins@example.com', mr.viewed_username)
+ self.assertEquals(111, mr.viewed_user_auth.user_id)
+ self.assertEquals(
+ self.services.user.GetUser('fake cnxn', 111),
+ mr.viewed_user_auth.user_pb)
+
+ def testViewedUser_WithUserID(self):
+ mr = self._MRWithMockRequest('/u/111/')
+ self.assertEquals('jrobbins@example.com', mr.viewed_username)
+ self.assertEquals(111, mr.viewed_user_auth.user_id)
+ self.assertEquals(
+ self.services.user.GetUser('fake cnxn', 111),
+ mr.viewed_user_auth.user_pb)
+
+ def testViewedUser_NoSuchEmail(self):
+ try:
+ self._MRWithMockRequest('/u/unknownuser@example.com/')
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testViewedUser_NoSuchUserID(self):
+ with self.assertRaises(webapp2.HTTPException) as e:
+ self._MRWithMockRequest('/u/234521111/')
+ self.assertEquals(404, e.code)
+
+ def testGetParam(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/foo?syn=error!&a=a&empty=',
+ params=dict(over1='over_value1', over2='over_value2'))
+
+ # test tampering
+ self.assertRaises(monorailrequest.InputException, mr.GetParam, 'a',
+ antitamper_re=re.compile(r'^$'))
+ self.assertRaises(monorailrequest.InputException, mr.GetParam,
+ 'undefined', default_value='default',
+ antitamper_re=re.compile(r'^$'))
+
+ # test empty value
+ self.assertEquals('', mr.GetParam(
+ 'empty', default_value='default', antitamper_re=re.compile(r'^$')))
+
+ # test default
+ self.assertEquals('default', mr.GetParam(
+ 'undefined', default_value='default'))
+
+ def testComputeColSpec(self):
+ # No config passed, and nothing in URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123')
+ mr.ComputeColSpec(None)
+ self.assertEquals(tracker_constants.DEFAULT_COL_SPEC, mr.col_spec)
+
+ # No config passed, but set in URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&colspec=a b C')
+ mr.ComputeColSpec(None)
+ self.assertEquals('a b C', mr.col_spec)
+
+ config = tracker_pb2.ProjectIssueConfig()
+
+ # No default in the config, and nothing in URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123')
+ mr.ComputeColSpec(config)
+ self.assertEquals(tracker_constants.DEFAULT_COL_SPEC, mr.col_spec)
+
+ # No default in the config, but set in URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&colspec=a b C')
+ mr.ComputeColSpec(config)
+ self.assertEquals('a b C', mr.col_spec)
+
+ config.default_col_spec = 'd e f'
+
+ # Default in the config, and nothing in URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123')
+ mr.ComputeColSpec(config)
+ self.assertEquals('d e f', mr.col_spec)
+
+    # Default in the config, but overridden via URL
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&colspec=a b C')
+ mr.ComputeColSpec(config)
+ self.assertEquals('a b C', mr.col_spec)
+
+ def testComputeColSpec_XSS(self):
+ config_1 = tracker_pb2.ProjectIssueConfig()
+ config_2 = tracker_pb2.ProjectIssueConfig()
+ config_2.default_col_spec = "id '+alert(1)+'"
+ mr_1 = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123')
+ mr_2 = testing_helpers.MakeMonorailRequest(
+ path="/p/proj/issues/detail?id=123&colspec=id '+alert(1)+'")
+
+ # Normal colspec in config but malicious request
+ self.assertRaises(
+ monorailrequest.InputException,
+ mr_2.ComputeColSpec, config_1)
+
+ # Malicious colspec in config but normal request
+ self.assertRaises(
+ monorailrequest.InputException,
+ mr_1.ComputeColSpec, config_2)
+
+ # Malicious colspec in config and malicious request
+ self.assertRaises(
+ monorailrequest.InputException,
+ mr_2.ComputeColSpec, config_2)
+
+
+class TestMonorailRequestFunctions(unittest.TestCase):
+
+ def testExtractPathIdenifiers_ProjectOnly(self):
+ username, project_name = monorailrequest._ParsePathIdentifiers(
+ '/p/proj/issues/list?q=foo+OR+bar&ts=1234')
+ self.assertIsNone(username)
+ self.assertEquals('proj', project_name)
+
+ def testExtractPathIdenifiers_ViewedUserOnly(self):
+ username, project_name = monorailrequest._ParsePathIdentifiers(
+ '/u/jrobbins@example.com/')
+ self.assertEquals('jrobbins@example.com', username)
+ self.assertIsNone(project_name)
+
+ def testExtractPathIdenifiers_ViewedUserURLSpace(self):
+ username, project_name = monorailrequest._ParsePathIdentifiers(
+ '/u/jrobbins@example.com/updates')
+ self.assertEquals('jrobbins@example.com', username)
+ self.assertIsNone(project_name)
+
+ def testExtractPathIdenifiers_ViewedGroupURLSpace(self):
+ username, project_name = monorailrequest._ParsePathIdentifiers(
+ '/g/user-group@example.com/updates')
+ self.assertEquals('user-group@example.com', username)
+ self.assertIsNone(project_name)
+
+ def testParseColSpec(self):
+ parse = monorailrequest.ParseColSpec
+ self.assertEqual(['PageName', 'Summary', 'Changed', 'ChangedBy'],
+ parse(u'PageName Summary Changed ChangedBy'))
+ self.assertEqual(['Foo-Bar', 'Foo-Bar-Baz', 'Release-1.2', 'Hey', 'There'],
+ parse('Foo-Bar Foo-Bar-Baz Release-1.2 Hey!There'))
+ self.assertEqual(
+ ['\xe7\xaa\xbf\xe8\x8b\xa5\xe7\xb9\xb9'.decode('utf-8'),
+ '\xe5\x9f\xba\xe5\x9c\xb0\xe3\x81\xaf'.decode('utf-8')],
+ parse('\xe7\xaa\xbf\xe8\x8b\xa5\xe7\xb9\xb9 '
+ '\xe5\x9f\xba\xe5\x9c\xb0\xe3\x81\xaf'.decode('utf-8')))
+
+
+class TestPermissionLookup(unittest.TestCase):
+ OWNER_ID = 1
+ OTHER_USER_ID = 2
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.services.user.TestAddUser('owner@gmail.com', self.OWNER_ID)
+ self.services.user.TestAddUser('user@gmail.com', self.OTHER_USER_ID)
+ self.live_project = self.services.project.TestAddProject(
+ 'live', owner_ids=[self.OWNER_ID])
+ self.archived_project = self.services.project.TestAddProject(
+ 'archived', owner_ids=[self.OWNER_ID],
+ state=project_pb2.ProjectState.ARCHIVED)
+ self.members_only_project = self.services.project.TestAddProject(
+ 'members-only', owner_ids=[self.OWNER_ID],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY)
+
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def CheckPermissions(self, perms, expect_view, expect_commit, expect_edit):
+ may_view = perms.HasPerm(permissions.VIEW, None, None)
+ self.assertEqual(expect_view, may_view)
+ may_commit = perms.HasPerm(permissions.COMMIT, None, None)
+ self.assertEqual(expect_commit, may_commit)
+ may_edit = perms.HasPerm(permissions.EDIT_PROJECT, None, None)
+ self.assertEqual(expect_edit, may_edit)
+
+ def MakeRequestAsUser(self, project_name, email):
+ self.mox.StubOutWithMock(users, 'get_current_user')
+ users.get_current_user().AndReturn(testing_helpers.Blank(
+ email=lambda: email))
+ self.mox.ReplayAll()
+
+ request = webapp2.Request.blank('/p/' + project_name)
+ mr = monorailrequest.MonorailRequest()
+ prof = profiler.Profiler()
+ with prof.Phase('parse user info'):
+ mr.ParseRequest(request, self.services, prof)
+ return mr
+
+ def testOwnerPermissions_Live(self):
+ mr = self.MakeRequestAsUser('live', 'owner@gmail.com')
+ self.CheckPermissions(mr.perms, True, True, True)
+
+ def testOwnerPermissions_Archived(self):
+ mr = self.MakeRequestAsUser('archived', 'owner@gmail.com')
+ self.CheckPermissions(mr.perms, True, False, True)
+
+ def testOwnerPermissions_MembersOnly(self):
+ mr = self.MakeRequestAsUser('members-only', 'owner@gmail.com')
+ self.CheckPermissions(mr.perms, True, True, True)
+
+ def testExternalUserPermissions_Live(self):
+ mr = self.MakeRequestAsUser('live', 'user@gmail.com')
+ self.CheckPermissions(mr.perms, True, False, False)
+
+ def testExternalUserPermissions_Archived(self):
+ mr = self.MakeRequestAsUser('archived', 'user@gmail.com')
+ self.CheckPermissions(mr.perms, False, False, False)
+
+ def testExternalUserPermissions_MembersOnly(self):
+ mr = self.MakeRequestAsUser('members-only', 'user@gmail.com')
+ self.CheckPermissions(mr.perms, False, False, False)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/paginate_test.py b/appengine/monorail/framework/test/paginate_test.py
new file mode 100644
index 0000000..a05d1d5
--- /dev/null
+++ b/appengine/monorail/framework/test/paginate_test.py
@@ -0,0 +1,76 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for pagination classes."""
+
+import unittest
+
+from framework import paginate
+from testing import testing_helpers
+
+
+class PaginateTest(unittest.TestCase):
+
+ def testVirtualPagination(self):
+ # Paginating 0 results on a page that can hold 100.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list')
+ vp = paginate.VirtualPagination(mr, 0, 100)
+ self.assertEquals(vp.num, 100)
+ self.assertEquals(vp.start, 1)
+ self.assertEquals(vp.last, 0)
+ self.assertFalse(vp.visible)
+
+ # Paginating 12 results on a page that can hold 100.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list')
+ vp = paginate.VirtualPagination(mr, 12, 100)
+ self.assertEquals(vp.num, 100)
+ self.assertEquals(vp.start, 1)
+ self.assertEquals(vp.last, 12)
+ self.assertTrue(vp.visible)
+
+ # Paginating 12 results on a page that can hold 10.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list?num=10')
+ vp = paginate.VirtualPagination(mr, 12, 100)
+ self.assertEquals(vp.num, 10)
+ self.assertEquals(vp.start, 1)
+ self.assertEquals(vp.last, 10)
+ self.assertTrue(vp.visible)
+
+ # Paginating 12 results starting at 5 on page that can hold 10.
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/issues/list?start=5&num=10')
+ vp = paginate.VirtualPagination(mr, 12, 100)
+ self.assertEquals(vp.num, 10)
+ self.assertEquals(vp.start, 6)
+ self.assertEquals(vp.last, 12)
+ self.assertTrue(vp.visible)
+
+ # Paginating 123 results on a page that can hold 100.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list')
+ vp = paginate.VirtualPagination(mr, 123, 100)
+ self.assertEquals(vp.num, 100)
+ self.assertEquals(vp.start, 1)
+ self.assertEquals(vp.last, 100)
+ self.assertTrue(vp.visible)
+
+ # Paginating 123 results on second page that can hold 100.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list?start=100')
+ vp = paginate.VirtualPagination(mr, 123, 100)
+ self.assertEquals(vp.num, 100)
+ self.assertEquals(vp.start, 101)
+ self.assertEquals(vp.last, 123)
+ self.assertTrue(vp.visible)
+
+ # Paginating a huge number of objects will show at most 1000 per page.
+ mr = testing_helpers.MakeMonorailRequest(path='/issues/list?num=9999')
+ vp = paginate.VirtualPagination(mr, 12345, 100)
+ self.assertEquals(vp.num, 1000)
+ self.assertEquals(vp.start, 1)
+ self.assertEquals(vp.last, 1000)
+ self.assertTrue(vp.visible)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/permissions_test.py b/appengine/monorail/framework/test/permissions_test.py
new file mode 100644
index 0000000..f872536
--- /dev/null
+++ b/appengine/monorail/framework/test/permissions_test.py
@@ -0,0 +1,1181 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for permissions.py."""
+
+import time
+import unittest
+
+import mox
+
+import settings
+from framework import framework_constants
+from framework import framework_views
+from framework import monorailrequest
+from framework import permissions
+from proto import project_pb2
+from proto import site_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from proto import usergroup_pb2
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+class PermissionSetTest(unittest.TestCase):
+
+ def setUp(self):
+ self.perms = permissions.PermissionSet(['A', 'b', 'Cc'])
+ self.proj = project_pb2.Project()
+ self.proj.contributor_ids.append(111L)
+ self.proj.contributor_ids.append(222L)
+ self.proj.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=111L, perms=['Cc', 'D', 'e', 'Ff']))
+ self.proj.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=222L, perms=['G', 'H']))
+ # user 3 used to be a member and had extra perms, but no longer in project.
+ self.proj.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=333L, perms=['G', 'H']))
+
+ def testGetAttr(self):
+ self.assertTrue(self.perms.a)
+ self.assertTrue(self.perms.A)
+ self.assertTrue(self.perms.b)
+ self.assertTrue(self.perms.Cc)
+ self.assertTrue(self.perms.CC)
+
+ self.assertFalse(self.perms.z)
+ self.assertFalse(self.perms.Z)
+
+ def testCanUsePerm_Anonymous(self):
+ effective_ids = set()
+ self.assertTrue(self.perms.CanUsePerm('A', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('D', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('Z', effective_ids, self.proj, []))
+
+ def testCanUsePerm_SignedInNoGroups(self):
+ effective_ids = {111L}
+ self.assertTrue(self.perms.CanUsePerm('A', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm('D', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'D', effective_ids, self.proj, ['Restrict-D-A']))
+ self.assertFalse(self.perms.CanUsePerm('G', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('Z', effective_ids, self.proj, []))
+
+ effective_ids = {222L}
+ self.assertTrue(self.perms.CanUsePerm('A', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('D', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm('G', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('Z', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm(
+ 'Z', effective_ids, self.proj, ['Restrict-Z-A']))
+
+ def testCanUsePerm_SignedInWithGroups(self):
+ effective_ids = {111L, 222L, 333L}
+ self.assertTrue(self.perms.CanUsePerm('A', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm('D', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm('G', effective_ids, self.proj, []))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'G', effective_ids, self.proj, ['Restrict-G-D']))
+ self.assertFalse(self.perms.CanUsePerm('Z', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm(
+ 'G', effective_ids, self.proj, ['Restrict-G-Z']))
+
+ def testCanUsePerm_FormerMember(self):
+ effective_ids = {333L}
+ self.assertTrue(self.perms.CanUsePerm('A', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('D', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('G', effective_ids, self.proj, []))
+ self.assertFalse(self.perms.CanUsePerm('Z', effective_ids, self.proj, []))
+
+ def testHasPerm_InPermSet(self):
+ self.assertTrue(self.perms.HasPerm('a', 0, None))
+ self.assertTrue(self.perms.HasPerm('a', 0, self.proj))
+ self.assertTrue(self.perms.HasPerm('A', 0, None))
+ self.assertTrue(self.perms.HasPerm('A', 0, self.proj))
+ self.assertFalse(self.perms.HasPerm('Z', 0, None))
+ self.assertFalse(self.perms.HasPerm('Z', 0, self.proj))
+
+ def testHasPerm_InExtraPerms(self):
+ self.assertTrue(self.perms.HasPerm('d', 111L, self.proj))
+ self.assertTrue(self.perms.HasPerm('D', 111L, self.proj))
+ self.assertTrue(self.perms.HasPerm('Cc', 111L, self.proj))
+ self.assertTrue(self.perms.HasPerm('CC', 111L, self.proj))
+ self.assertFalse(self.perms.HasPerm('Z', 111L, self.proj))
+
+ self.assertFalse(self.perms.HasPerm('d', 222L, self.proj))
+ self.assertFalse(self.perms.HasPerm('D', 222L, self.proj))
+
+ # Only current members can have extra permissions
+ self.proj.contributor_ids = []
+ self.assertFalse(self.perms.HasPerm('d', 111L, self.proj))
+
+ # TODO(jrobbins): also test consider_restrictions=False and
+ # restriction labels directly in this class.
+
+ def testHasPerm_GrantedPerms(self):
+ self.assertTrue(self.perms.CanUsePerm(
+ 'A', {111L}, self.proj, [], granted_perms=['z']))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'a', {111L}, self.proj, [], granted_perms=['z']))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'a', {111L}, self.proj, [], granted_perms=['a']))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'Z', {111L}, self.proj, [], granted_perms=['y', 'z']))
+ self.assertTrue(self.perms.CanUsePerm(
+ 'z', {111L}, self.proj, [], granted_perms=['y', 'z']))
+ self.assertFalse(self.perms.CanUsePerm(
+ 'z', {111L}, self.proj, [], granted_perms=['y']))
+
+ def testDebugString(self):
+ self.assertEqual('PermissionSet()',
+ permissions.PermissionSet([]).DebugString())
+ self.assertEqual('PermissionSet(a)',
+ permissions.PermissionSet(['A']).DebugString())
+ self.assertEqual('PermissionSet(a, b, cc)', self.perms.DebugString())
+
+ def testRepr(self):
+ self.assertEqual('PermissionSet(frozenset([]))',
+ permissions.PermissionSet([]).__repr__())
+ self.assertEqual('PermissionSet(frozenset([\'a\']))',
+ permissions.PermissionSet(['A']).__repr__())
+
+
+class PermissionsTest(unittest.TestCase):
+
+ NOW = 1277762224 # Any timestamp will do, we only compare it to itself +/- 1
+ COMMITTER_USER_ID = 111L
+ OWNER_USER_ID = 222L
+ CONTRIB_USER_ID = 333L
+ SITE_ADMIN_USER_ID = 444L
+
+ def MakeProject(self, project_name, state, add_members=True, access=None):
+ args = dict(project_name=project_name, state=state)
+ if add_members:
+ args.update(owner_ids=[self.OWNER_USER_ID],
+ committer_ids=[self.COMMITTER_USER_ID],
+ contributor_ids=[self.CONTRIB_USER_ID])
+
+ if access:
+ args.update(access=access)
+
+ return fake.Project(**args)
+
+ def setUp(self):
+ self.live_project = self.MakeProject('live', project_pb2.ProjectState.LIVE)
+ self.archived_project = self.MakeProject(
+ 'archived', project_pb2.ProjectState.ARCHIVED)
+ self.other_live_project = self.MakeProject(
+ 'other_live', project_pb2.ProjectState.LIVE, add_members=False)
+ self.members_only_project = self.MakeProject(
+ 's3kr3t', project_pb2.ProjectState.LIVE,
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY)
+
+ self.nonmember = user_pb2.User()
+ self.member = user_pb2.User()
+ self.owner = user_pb2.User()
+ self.contrib = user_pb2.User()
+ self.site_admin = user_pb2.User()
+ self.site_admin.is_site_admin = True
+ self.borg_user = user_pb2.User(email=settings.borg_service_account)
+
+ self.normal_artifact = tracker_pb2.Issue()
+ self.normal_artifact.labels.extend(['hot', 'Key-Value'])
+ self.normal_artifact.reporter_id = 111L
+
+ # Two PermissionSets w/ permissions outside of any project.
+ self.normal_user_perms = permissions.GetPermissions(
+ None, {111L}, None)
+ self.admin_perms = permissions.PermissionSet(
+ [permissions.ADMINISTER_SITE,
+ permissions.CREATE_PROJECT])
+
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def testGetPermissions_Admin(self):
+ self.assertEqual(
+ permissions.ADMIN_PERMISSIONSET,
+ permissions.GetPermissions(self.site_admin, None, None))
+
+ def testGetPermissions_BorgServiceAccount(self):
+ self.assertEqual(
+ permissions.GROUP_IMPORT_BORG_PERMISSIONSET,
+ permissions.GetPermissions(self.borg_user, None, None))
+
+ def CheckPermissions(self, perms, expected_list):
+ expect_view, expect_commit, expect_edit_project = expected_list
+ self.assertEqual(
+ expect_view, perms.HasPerm(permissions.VIEW, None, None))
+ self.assertEqual(
+ expect_commit, perms.HasPerm(permissions.COMMIT, None, None))
+ self.assertEqual(
+ expect_edit_project,
+ perms.HasPerm(permissions.EDIT_PROJECT, None, None))
+
+ def testAnonPermissions(self):
+ perms = permissions.GetPermissions(None, set(), self.live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(None, set(), self.members_only_project)
+ self.CheckPermissions(perms, [False, False, False])
+
+ def testNonmemberPermissions(self):
+ perms = permissions.GetPermissions(
+ self.nonmember, {123}, self.live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(
+ self.nonmember, {123}, self.members_only_project)
+ self.CheckPermissions(perms, [False, False, False])
+
+ def testMemberPermissions(self):
+ perms = permissions.GetPermissions(
+ self.member, {self.COMMITTER_USER_ID}, self.live_project)
+ self.CheckPermissions(perms, [True, True, False])
+
+ perms = permissions.GetPermissions(
+ self.member, {self.COMMITTER_USER_ID}, self.other_live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(
+ self.member, {self.COMMITTER_USER_ID}, self.members_only_project)
+ self.CheckPermissions(perms, [True, True, False])
+
+ def testOwnerPermissions(self):
+ perms = permissions.GetPermissions(
+ self.owner, {self.OWNER_USER_ID}, self.live_project)
+ self.CheckPermissions(perms, [True, True, True])
+
+ perms = permissions.GetPermissions(
+ self.owner, {self.OWNER_USER_ID}, self.other_live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(
+ self.owner, {self.OWNER_USER_ID}, self.members_only_project)
+ self.CheckPermissions(perms, [True, True, True])
+
+ def testContributorPermissions(self):
+ perms = permissions.GetPermissions(
+ self.contrib, {self.CONTRIB_USER_ID}, self.live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(
+ self.contrib, {self.CONTRIB_USER_ID}, self.other_live_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ perms = permissions.GetPermissions(
+ self.contrib, {self.CONTRIB_USER_ID}, self.members_only_project)
+ self.CheckPermissions(perms, [True, False, False])
+
+ def testLookupPermset_ExactMatch(self):
+ self.assertEqual(
+ permissions.USER_PERMISSIONSET,
+ permissions._LookupPermset(
+ permissions.USER_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE))
+
+ def testLookupPermset_WildcardAccess(self):
+ self.assertEqual(
+ permissions.OWNER_ACTIVE_PERMISSIONSET,
+ permissions._LookupPermset(
+ permissions.OWNER_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.MEMBERS_ONLY))
+
+ def testGetPermissionKey_AnonUser(self):
+ self.assertEqual(
+ (permissions.ANON_ROLE, permissions.UNDEFINED_STATUS,
+ permissions.UNDEFINED_ACCESS),
+ permissions._GetPermissionKey(None, None))
+ self.assertEqual(
+ (permissions.ANON_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(None, self.live_project))
+
+ def testGetPermissionKey_ExpiredProject(self):
+ self.archived_project.delete_time = self.NOW
+ # In an expired project, the user's committer role does not count.
+ self.assertEqual(
+ (permissions.USER_ROLE, project_pb2.ProjectState.ARCHIVED,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ self.COMMITTER_USER_ID, self.archived_project,
+ expired_before=self.NOW + 1))
+ # If not expired yet, the user's committer role still counts.
+ self.assertEqual(
+ (permissions.COMMITTER_ROLE, project_pb2.ProjectState.ARCHIVED,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ self.COMMITTER_USER_ID, self.archived_project,
+ expired_before=self.NOW - 1))
+
+ def testGetPermissionKey_DefinedRoles(self):
+ self.assertEqual(
+ (permissions.OWNER_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ self.OWNER_USER_ID, self.live_project))
+ self.assertEqual(
+ (permissions.COMMITTER_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ self.COMMITTER_USER_ID, self.live_project))
+ self.assertEqual(
+ (permissions.CONTRIBUTOR_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ self.CONTRIB_USER_ID, self.live_project))
+
+ def testGetPermissionKey_Nonmember(self):
+ self.assertEqual(
+ (permissions.USER_ROLE, project_pb2.ProjectState.LIVE,
+ project_pb2.ProjectAccess.ANYONE),
+ permissions._GetPermissionKey(
+ 999L, self.live_project))
+
+ def testPermissionsImmutable(self):
+ self.assertTrue(isinstance(
+ permissions.EMPTY_PERMISSIONSET.perm_names, frozenset))
+ self.assertTrue(isinstance(
+ permissions.READ_ONLY_PERMISSIONSET.perm_names, frozenset))
+ self.assertTrue(isinstance(
+ permissions.COMMITTER_ACTIVE_PERMISSIONSET.perm_names, frozenset))
+ self.assertTrue(isinstance(
+ permissions.OWNER_ACTIVE_PERMISSIONSET.perm_names, frozenset))
+
+ def testGetExtraPerms(self):
+ project = project_pb2.Project()
+ project.committer_ids.append(222L)
+ project.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=222L, perms=['a', 'b', 'c']))
+ # User 1 is a former member with left-over extra perms that don't count.
+ project.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=111L, perms=['a', 'b', 'c']))
+
+ self.assertListEqual(
+ [],
+ permissions.GetExtraPerms(project, 111L))
+ self.assertListEqual(
+ ['a', 'b', 'c'],
+ permissions.GetExtraPerms(project, 222L))
+ self.assertListEqual(
+ [],
+ permissions.GetExtraPerms(project, 333L))
+
+ def testAnonUsersCannotDelete(self):
+ perms = permissions.PermissionSet([permissions.DELETE_ANY])
+ # No logged in user, no perms specified.
+ self.assertFalse(permissions.CanDelete(
+ framework_constants.NO_USER_SPECIFIED, set(), None, 0, 0, None, []))
+ # No logged in user, even with perms from somewhere.
+ self.assertFalse(permissions.CanDelete(
+ framework_constants.NO_USER_SPECIFIED, set(), perms, 0, 0, None, []))
+ # No logged in user, even if artifact was already deleted.
+ self.assertFalse(permissions.CanDelete(
+ framework_constants.NO_USER_SPECIFIED, set(), perms,
+ 111L, 111L, None, []))
+ # No logged in user, even if artifact was already deleted by project owner.
+ self.assertFalse(permissions.CanDelete(
+ framework_constants.NO_USER_SPECIFIED, set(), perms,
+ 111L, 222L, None, []))
+
+ def testProjectOwnerCanDeleteAnyArtifact(self):
+ perms = permissions.PermissionSet([permissions.DELETE_ANY])
+ # No artifact owner, and not already deleted.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 0, 0, None, []))
+ # I already deleted, can undelete.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 111L, 0, None, []))
+ # I can delete my own thing.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 0, 111L, None, []))
+ # I can also delete another user's artifacts, because I have DELETE_ANY.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 0, 222L, None, []))
+ # I can always undelete, even if another PO deleted it.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 333L, 222L, None, []))
+
+ def testUserCanDeleteTheirOwnStuff(self):
+ perms = permissions.PermissionSet([permissions.DELETE_OWN])
+ # I can delete/withdraw my artifact or comment.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 0, 111L, None, []))
+ # I can undelete what I deleted.
+ self.assertTrue(permissions.CanDelete(
+ 111L, {111L}, perms, 111L, 111L, None, []))
+ # I cannot undelete if someone else deleted my spam.
+ self.assertFalse(permissions.CanDelete(
+ 111L, {111L}, perms, 222L, 111L, None, []))
+ # I cannot delete other people's stuff.
+ self.assertFalse(permissions.CanDelete(
+ 111L, {111L}, perms, 0, 222L, None, []))
+ # I cannot undelete what other people withdrew.
+ self.assertFalse(permissions.CanDelete(
+ 111L, {111L}, perms, 222L, 222L, None, []))
+
+ def testCanViewNormalArifact(self):
+ # Anyone can view a non-restricted artifact.
+ self.assertTrue(permissions.CanView(
+ {111L}, permissions.READ_ONLY_PERMISSIONSET,
+ self.live_project, []))
+
+ def testCanCreateProject_NoPerms(self):
+ """Signed out users cannot create projects."""
+ self.assertFalse(permissions.CanCreateProject(
+ permissions.EMPTY_PERMISSIONSET))
+
+ self.assertFalse(permissions.CanCreateProject(
+ permissions.READ_ONLY_PERMISSIONSET))
+
+ def testCanCreateProject_Admin(self):
+ """Site admins can create projects."""
+ self.assertTrue(permissions.CanCreateProject(
+ permissions.ADMIN_PERMISSIONSET))
+
+ def testCanCreateProject_RegularUser(self):
+ """Signed in non-admins can create a project if settings allow ANYONE."""
+ try:
+ orig_restriction = settings.project_creation_restriction
+ ANYONE = site_pb2.UserTypeRestriction.ANYONE
+ ADMIN_ONLY = site_pb2.UserTypeRestriction.ADMIN_ONLY
+ NO_ONE = site_pb2.UserTypeRestriction.NO_ONE
+ perms = permissions.PermissionSet([permissions.CREATE_PROJECT])
+
+ settings.project_creation_restriction = ANYONE
+ self.assertTrue(permissions.CanCreateProject(perms))
+
+ settings.project_creation_restriction = ADMIN_ONLY
+ self.assertFalse(permissions.CanCreateProject(perms))
+
+ settings.project_creation_restriction = NO_ONE
+ self.assertFalse(permissions.CanCreateProject(perms))
+ self.assertFalse(permissions.CanCreateProject(
+ permissions.ADMIN_PERMISSIONSET))
+ finally:
+ settings.project_creation_restriction = orig_restriction
+
+ def testCanCreateGroup_AnyoneWithCreateGroup(self):
+ orig_setting = settings.group_creation_restriction
+ try:
+ settings.group_creation_restriction = site_pb2.UserTypeRestriction.ANYONE
+ self.assertTrue(permissions.CanCreateGroup(
+ permissions.PermissionSet([permissions.CREATE_GROUP])))
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([])))
+ finally:
+ settings.group_creation_restriction = orig_setting
+
+ def testCanCreateGroup_AdminOnly(self):
+ orig_setting = settings.group_creation_restriction
+ try:
+ ADMIN_ONLY = site_pb2.UserTypeRestriction.ADMIN_ONLY
+ settings.group_creation_restriction = ADMIN_ONLY
+ self.assertTrue(permissions.CanCreateGroup(
+ permissions.PermissionSet([permissions.ADMINISTER_SITE])))
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([permissions.CREATE_GROUP])))
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([])))
+ finally:
+ settings.group_creation_restriction = orig_setting
+
+ def testCanCreateGroup_UnspecifiedSetting(self):
+ orig_setting = settings.group_creation_restriction
+ try:
+ settings.group_creation_restriction = None
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([permissions.ADMINISTER_SITE])))
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([permissions.CREATE_GROUP])))
+ self.assertFalse(permissions.CanCreateGroup(
+ permissions.PermissionSet([])))
+ finally:
+ settings.group_creation_restriction = orig_setting
+
+ def testCanEditGroup_HasPerm(self):
+ self.assertTrue(permissions.CanEditGroup(
+ permissions.PermissionSet([permissions.EDIT_GROUP]), None, None))
+
+ def testCanEditGroup_IsOwner(self):
+ self.assertTrue(permissions.CanEditGroup(
+ permissions.PermissionSet([]), {111L}, {111L}))
+
+ def testCanEditGroup_Otherwise(self):
+ self.assertFalse(permissions.CanEditGroup(
+ permissions.PermissionSet([]), {111L}, {222L}))
+
+ def testCanViewGroup_HasPerm(self):
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([permissions.VIEW_GROUP]),
+ None, None, None, None, None))
+
+ def testCanViewGroup_IsMemberOfFriendProject(self):
+ group_settings = usergroup_pb2.MakeSettings('owners', friend_projects=[890])
+ self.assertFalse(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {111L}, group_settings, {222L}, {333L}, {789}))
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {111L}, group_settings, {222L}, {333L}, {789, 890}))
+
+ def testCanViewGroup_VisibleToOwner(self):
+ group_settings = usergroup_pb2.MakeSettings('owners')
+ self.assertFalse(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {111L}, group_settings, {222L}, {333L}, {789}))
+ self.assertFalse(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {222L}, group_settings, {222L}, {333L}, {789}))
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {333L}, group_settings, {222L}, {333L}, {789}))
+
+ def testCanViewGroup_IsVisibleToMember(self):
+ group_settings = usergroup_pb2.MakeSettings('members')
+ self.assertFalse(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {111L}, group_settings, {222L}, {333L}, {789}))
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {222L}, group_settings, {222L}, {333L}, {789}))
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {333L}, group_settings, {222L}, {333L}, {789}))
+
+ def testCanViewGroup_AnyoneCanView(self):
+ group_settings = usergroup_pb2.MakeSettings('anyone')
+ self.assertTrue(permissions.CanViewGroup(
+ permissions.PermissionSet([]),
+ {111L}, group_settings, {222L}, {333L}, {789}))
+
+ def testIsBanned_AnonUser(self):
+ user_view = framework_views.UserView(None, None, True)
+ self.assertFalse(permissions.IsBanned(None, user_view))
+
+ def testIsBanned_NormalUser(self):
+ user = user_pb2.User()
+ user_view = framework_views.UserView(None, None, True)
+ self.assertFalse(permissions.IsBanned(user, user_view))
+
+ def testIsBanned_BannedUser(self):
+ user = user_pb2.User()
+ user.banned = 'spammer'
+ user_view = framework_views.UserView(None, None, True)
+ self.assertTrue(permissions.IsBanned(user, user_view))
+
+ def testIsBanned_BadDomainUser(self):
+ settings.banned_user_domains = ['spammer.com', 'phisher.com']
+ user = user_pb2.User()
+ user_view = framework_views.UserView(None, None, True)
+ user_view.domain = 'spammer.com'
+ self.assertTrue(permissions.IsBanned(user, user_view))
+
+ def testGetCustomPermissions(self):
+ project = project_pb2.Project()
+ self.assertListEqual([], permissions.GetCustomPermissions(project))
+
+ project.extra_perms.append(project_pb2.Project.ExtraPerms(
+ perms=['Core', 'Elite', 'Gold']))
+ self.assertListEqual(['Core', 'Elite', 'Gold'],
+ permissions.GetCustomPermissions(project))
+
+ project.extra_perms.append(project_pb2.Project.ExtraPerms(
+ perms=['Silver', 'Gold', 'Bronze']))
+ self.assertListEqual(['Bronze', 'Core', 'Elite', 'Gold', 'Silver'],
+ permissions.GetCustomPermissions(project))
+
+ def testUserCanViewProject(self):
+ self.mox.StubOutWithMock(time, 'time')
+ for _ in range(8):
+ time.time().AndReturn(self.NOW)
+ self.mox.ReplayAll()
+
+ self.assertTrue(permissions.UserCanViewProject(
+ self.member, {self.COMMITTER_USER_ID}, self.live_project))
+ self.assertTrue(permissions.UserCanViewProject(
+ None, None, self.live_project))
+
+ self.archived_project.delete_time = self.NOW + 1
+ self.assertFalse(permissions.UserCanViewProject(
+ None, None, self.archived_project))
+ self.assertTrue(permissions.UserCanViewProject(
+ self.owner, {self.OWNER_USER_ID}, self.archived_project))
+ self.assertTrue(permissions.UserCanViewProject(
+ self.site_admin, {self.SITE_ADMIN_USER_ID},
+ self.archived_project))
+
+ self.archived_project.delete_time = self.NOW - 1
+ self.assertFalse(permissions.UserCanViewProject(
+ None, None, self.archived_project))
+ self.assertFalse(permissions.UserCanViewProject(
+ self.owner, {self.OWNER_USER_ID}, self.archived_project))
+ self.assertTrue(permissions.UserCanViewProject(
+ self.site_admin, {self.SITE_ADMIN_USER_ID},
+ self.archived_project))
+
+ self.mox.VerifyAll()
+
+ def CheckExpired(self, state, expected_to_be_reapable):
+ proj = project_pb2.Project()
+ proj.state = state
+ proj.delete_time = self.NOW + 1
+ self.assertFalse(permissions.IsExpired(proj))
+
+ proj.delete_time = self.NOW - 1
+ self.assertEqual(expected_to_be_reapable, permissions.IsExpired(proj))
+
+ proj.delete_time = self.NOW - 1
+ self.assertFalse(permissions.IsExpired(proj, expired_before=self.NOW - 2))
+
+ def testIsExpired_Live(self):
+ self.CheckExpired(project_pb2.ProjectState.LIVE, False)
+
+ def testIsExpired_Archived(self):
+ self.mox.StubOutWithMock(time, 'time')
+ for _ in range(2):
+ time.time().AndReturn(self.NOW)
+ self.mox.ReplayAll()
+
+ self.CheckExpired(project_pb2.ProjectState.ARCHIVED, True)
+
+ self.mox.VerifyAll()
+
+
+class PermissionsCheckTest(unittest.TestCase):
+
+ def setUp(self):
+ self.perms = permissions.PermissionSet(['a', 'b', 'c'])
+
+ self.proj = project_pb2.Project()
+ self.proj.committer_ids.append(111L)
+ self.proj.extra_perms.append(project_pb2.Project.ExtraPerms(
+ member_id=111L, perms=['d']))
+
+ # Note: z is an example of a perm that the user does not have.
+ # Note: q is an example of an irrelevant perm that the user does not have.
+
+ def DoCanUsePerm(self, perm, project='default', user_id=None, restrict=''):
+ """Wrapper function to call CanUsePerm()."""
+ if project == 'default':
+ project = self.proj
+ return self.perms.CanUsePerm(
+ perm, {user_id or 111L}, project, restrict.split())
+
+ def testHasPermNoRestrictions(self):
+ self.assertTrue(self.DoCanUsePerm('a'))
+ self.assertTrue(self.DoCanUsePerm('A'))
+ self.assertFalse(self.DoCanUsePerm('z'))
+ self.assertTrue(self.DoCanUsePerm('d'))
+ self.assertFalse(self.DoCanUsePerm('d', user_id=222L))
+ self.assertFalse(self.DoCanUsePerm('d', project=project_pb2.Project()))
+
+ def testHasPermOperationRestrictions(self):
+ self.assertTrue(self.DoCanUsePerm('a', restrict='Restrict-a-b'))
+ self.assertTrue(self.DoCanUsePerm('a', restrict='Restrict-b-z'))
+ self.assertTrue(self.DoCanUsePerm('a', restrict='Restrict-a-d'))
+ self.assertTrue(self.DoCanUsePerm('d', restrict='Restrict-d-a'))
+ self.assertTrue(self.DoCanUsePerm(
+ 'd', restrict='Restrict-q-z Restrict-q-d Restrict-d-a'))
+
+ self.assertFalse(self.DoCanUsePerm('a', restrict='Restrict-a-z'))
+ self.assertFalse(self.DoCanUsePerm('d', restrict='Restrict-d-z'))
+ self.assertFalse(self.DoCanUsePerm(
+ 'd', restrict='Restrict-d-a Restrict-d-z'))
+
+ def testHasPermOutsideProjectScope(self):
+ self.assertTrue(self.DoCanUsePerm('a', project=None))
+ self.assertTrue(self.DoCanUsePerm(
+ 'a', project=None, restrict='Restrict-a-c'))
+ self.assertTrue(self.DoCanUsePerm(
+ 'a', project=None, restrict='Restrict-q-z'))
+
+ self.assertFalse(self.DoCanUsePerm('z', project=None))
+ self.assertFalse(self.DoCanUsePerm(
+ 'a', project=None, restrict='Restrict-a-d'))
+
+
+class CanViewProjectContributorListTest(unittest.TestCase):
+
+ def testCanViewProjectContributorList_NoProject(self):
+ mr = testing_helpers.MakeMonorailRequest(path='/')
+ self.assertFalse(permissions.CanViewContributorList(mr))
+
+ def testCanViewProjectContributorList_NormalProject(self):
+ project = project_pb2.Project()
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/', project=project)
+ self.assertTrue(permissions.CanViewContributorList(mr))
+
+ def testCanViewProjectContributorList_ProjectWithOptionSet(self):
+ project = project_pb2.Project()
+ project.only_owners_see_contributors = True
+
+ for perms in [permissions.READ_ONLY_PERMISSIONSET,
+ permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ permissions.CONTRIBUTOR_INACTIVE_PERMISSIONSET]:
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/', project=project, perms=perms)
+ self.assertFalse(permissions.CanViewContributorList(mr))
+
+ for perms in [permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ permissions.COMMITTER_INACTIVE_PERMISSIONSET,
+ permissions.OWNER_ACTIVE_PERMISSIONSET,
+ permissions.OWNER_INACTIVE_PERMISSIONSET,
+ permissions.ADMIN_PERMISSIONSET]:
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/', project=project, perms=perms)
+ self.assertTrue(permissions.CanViewContributorList(mr))
+
+
+class ShouldCheckForAbandonmentTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mr = testing_helpers.Blank(
+ project=project_pb2.Project(),
+ auth=monorailrequest.AuthData())
+
+ def testOwner(self):
+ self.mr.auth.effective_ids = {111L}
+ self.mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ self.assertTrue(permissions.ShouldCheckForAbandonment(self.mr))
+
+ def testNonOwner(self):
+ self.mr.auth.effective_ids = {222L}
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.assertFalse(permissions.ShouldCheckForAbandonment(self.mr))
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.assertFalse(permissions.ShouldCheckForAbandonment(self.mr))
+ self.mr.perms = permissions.USER_PERMISSIONSET
+ self.assertFalse(permissions.ShouldCheckForAbandonment(self.mr))
+ self.mr.perms = permissions.EMPTY_PERMISSIONSET
+ self.assertFalse(permissions.ShouldCheckForAbandonment(self.mr))
+
+ def testSiteAdmin(self):
+ self.mr.auth.effective_ids = {111L}
+ self.mr.perms = permissions.ADMIN_PERMISSIONSET
+ self.assertFalse(permissions.ShouldCheckForAbandonment(self.mr))
+
+
+class RestrictionLabelsTest(unittest.TestCase):
+
+ ORIG_SUMMARY = 'this is the orginal summary'
+ ORIG_LABELS = ['one', 'two']
+
+ def testGetRestrictions_NoIssue(self):
+ self.assertEqual([], permissions.GetRestrictions(None))
+
+ def testGetRestrictions(self):
+ art = fake.MakeTestIssue(
+ 789, 1, self.ORIG_SUMMARY, 'New', 0L, labels=self.ORIG_LABELS)
+ self.assertEquals([], permissions.GetRestrictions(art))
+
+ art = fake.MakeTestIssue(
+ 789, 1, self.ORIG_SUMMARY, 'New', 0L,
+ labels=['Restrict-MissingThirdPart', 'Hot'])
+ self.assertEquals([], permissions.GetRestrictions(art))
+
+ art = fake.MakeTestIssue(
+ 789, 1, self.ORIG_SUMMARY, 'New', 0L,
+ labels=['Restrict-View-Core', 'Hot'])
+ self.assertEquals(['restrict-view-core'], permissions.GetRestrictions(art))
+
+ art = fake.MakeTestIssue(
+ 789, 1, self.ORIG_SUMMARY, 'New', 0L,
+ labels=['Restrict-View-Core', 'Hot'],
+ derived_labels=['Color-Red', 'Restrict-EditIssue-GoldMembers'])
+ self.assertEquals(
+ ['restrict-view-core', 'restrict-editissue-goldmembers'],
+ permissions.GetRestrictions(art))
+
+ art = fake.MakeTestIssue(
+ 789, 1, self.ORIG_SUMMARY, 'New', 0L,
+ labels=['restrict-view-core', 'hot'],
+ derived_labels=['Color-Red', 'RESTRICT-EDITISSUE-GOLDMEMBERS'])
+ self.assertEquals(
+ ['restrict-view-core', 'restrict-editissue-goldmembers'],
+ permissions.GetRestrictions(art))
+
+
+REPORTER_ID = 111L
+OWNER_ID = 222L
+CC_ID = 333L
+OTHER_ID = 444L
+
+
+class IssuePermissionsTest(unittest.TestCase):
+
+ REGULAR_ISSUE = tracker_pb2.Issue()
+ REGULAR_ISSUE.reporter_id = REPORTER_ID
+
+ DELETED_ISSUE = tracker_pb2.Issue()
+ DELETED_ISSUE.deleted = True
+ DELETED_ISSUE.reporter_id = REPORTER_ID
+
+ RESTRICTED_ISSUE = tracker_pb2.Issue()
+ RESTRICTED_ISSUE.reporter_id = REPORTER_ID
+ RESTRICTED_ISSUE.owner_id = OWNER_ID
+ RESTRICTED_ISSUE.cc_ids.append(CC_ID)
+ RESTRICTED_ISSUE.labels.append('Restrict-View-Commit')
+
+ RESTRICTED_ISSUE2 = tracker_pb2.Issue()
+ RESTRICTED_ISSUE2.reporter_id = REPORTER_ID
+ # RESTRICTED_ISSUE2 has no owner
+ RESTRICTED_ISSUE2.cc_ids.append(CC_ID)
+ RESTRICTED_ISSUE2.labels.append('Restrict-View-Commit')
+
+ RESTRICTED_ISSUE3 = tracker_pb2.Issue()
+ RESTRICTED_ISSUE3.reporter_id = REPORTER_ID
+ RESTRICTED_ISSUE3.owner_id = OWNER_ID
+ # Restrict to a permission that no one has.
+ RESTRICTED_ISSUE3.labels.append('Restrict-EditIssue-Foo')
+
+ PROJECT = project_pb2.Project()
+
+ def testCanViewIssue_Deleted(self):
+ self.assertFalse(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.DELETED_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.DELETED_ISSUE, allow_viewing_deleted=True))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+
+ def testCanViewIssue_Regular(self):
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID},
+ permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.USER_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanViewIssue(
+ set(), permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+
+ def testCanViewIssue_Restricted(self):
+ # Project owner can always view issue.
+ self.assertTrue(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # Member can view because they have Commit perm.
+ self.assertTrue(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # Contributors normally do not have Commit perm.
+ self.assertFalse(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # Non-members do not have Commit perm.
+ self.assertFalse(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.USER_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+    # Anon users do not have Commit perm.
+ self.assertFalse(permissions.CanViewIssue(
+ set(), permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+
+ def testCanViewIssue_RestrictedParticipants(self):
+ # Reporter can always view issue
+ self.assertTrue(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # Issue owner can always view issue
+ self.assertTrue(permissions.CanViewIssue(
+ {OWNER_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # CC'd user can always view issue
+ self.assertTrue(permissions.CanViewIssue(
+ {CC_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+ # Non-participants cannot view issue if they don't have the needed perm.
+ self.assertFalse(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+    # Anon users do not have Commit perm.
+ self.assertFalse(permissions.CanViewIssue(
+ set(), permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE))
+    # Anon users cannot match owner 0.
+ self.assertFalse(permissions.CanViewIssue(
+ set(), permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE2))
+
+ def testCannotViewIssueIfCannotViewProject(self):
+ """Cross-project search should not be a backdoor to viewing issues."""
+    # Reporter cannot view issue if they no longer have access to the project.
+ self.assertFalse(permissions.CanViewIssue(
+ {REPORTER_ID}, permissions.EMPTY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ # Issue owner cannot always view issue
+ self.assertFalse(permissions.CanViewIssue(
+ {OWNER_ID}, permissions.EMPTY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ # CC'd user cannot always view issue
+ self.assertFalse(permissions.CanViewIssue(
+ {CC_ID}, permissions.EMPTY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ # Non-participants cannot view issue if they don't have the needed perm.
+ self.assertFalse(permissions.CanViewIssue(
+ {OTHER_ID}, permissions.EMPTY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+    # Anon users do not have Commit perm.
+ self.assertFalse(permissions.CanViewIssue(
+ set(), permissions.EMPTY_PERMISSIONSET, self.PROJECT,
+ self.REGULAR_ISSUE))
+    # Anon users cannot match owner 0.
+ self.assertFalse(permissions.CanViewIssue(
+ set(), permissions.EMPTY_PERMISSIONSET, self.PROJECT,
+ self.REGULAR_ISSUE))
+
+ def testCanEditIssue(self):
+ # Non-members and contributors cannot edit issues,
+ # even if they reported them.
+ self.assertFalse(permissions.CanEditIssue(
+ {REPORTER_ID}, permissions.READ_ONLY_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertFalse(permissions.CanEditIssue(
+ {REPORTER_ID}, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+
+ # Project committers and project owners can edit issues, regardless
+ # of their role in the issue.
+ self.assertTrue(permissions.CanEditIssue(
+ {REPORTER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanEditIssue(
+ {REPORTER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanEditIssue(
+ {OWNER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanEditIssue(
+ {OWNER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanEditIssue(
+ {OTHER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+ self.assertTrue(permissions.CanEditIssue(
+ {OTHER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.REGULAR_ISSUE))
+
+ def testCanEditIssue_Restricted(self):
+ # Project committers cannot edit issues with a restriction to a custom
+ # permission that they don't have.
+ self.assertFalse(permissions.CanEditIssue(
+ {OTHER_ID}, permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE3))
+
+ # *Issue* owners can always edit the issues that they own, even if
+ # those issues are restricted to perms that they don't have.
+ self.assertTrue(permissions.CanEditIssue(
+ {OWNER_ID}, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE3))
+
+ # Project owners can always edit, they cannot lock themselves out.
+ self.assertTrue(permissions.CanEditIssue(
+ {OTHER_ID}, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE3))
+
+ # A committer with edit permission but not view permission
+ # should not be able to edit the issue.
+ self.assertFalse(permissions.CanEditIssue(
+ {OTHER_ID}, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ self.PROJECT, self.RESTRICTED_ISSUE2))
+
+ def testCanCommentIssue_HasPerm(self):
+ self.assertTrue(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([permissions.ADD_ISSUE_COMMENT]),
+ None, None))
+ self.assertFalse(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ None, None))
+
+ def testCanCommentIssue_HasExtraPerm(self):
+ project = project_pb2.Project()
+ project.committer_ids.append(111L)
+ extra_perm = project_pb2.Project.ExtraPerms(
+ member_id=111L, perms=[permissions.ADD_ISSUE_COMMENT])
+ project.extra_perms.append(extra_perm)
+ self.assertTrue(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ project, None))
+ self.assertFalse(permissions.CanCommentIssue(
+ {222L}, permissions.PermissionSet([]),
+ project, None))
+
+ def testCanCommentIssue_Restricted(self):
+ issue = tracker_pb2.Issue(labels=['Restrict-AddIssueComment-CoreTeam'])
+ # User is granted exactly the perm they need specifically in this issue.
+ self.assertTrue(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ None, issue, granted_perms=['addissuecomment']))
+    # User is granted CoreTeam, which satisfies the restriction, and allows
+ # them to use the AddIssueComment permission that they have and would
+ # normally be able to use in an unrestricted issue.
+ self.assertTrue(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([permissions.ADD_ISSUE_COMMENT]),
+ None, issue, granted_perms=['coreteam']))
+ # User was granted CoreTeam, but never had AddIssueComment.
+ self.assertFalse(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ None, issue, granted_perms=['coreteam']))
+ # User has AddIssueComment, but cannot satisfy restriction.
+ self.assertFalse(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([permissions.ADD_ISSUE_COMMENT]),
+ None, issue))
+
+ def testCanCommentIssue_Granted(self):
+ self.assertTrue(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ None, None, granted_perms=['addissuecomment']))
+ self.assertFalse(permissions.CanCommentIssue(
+ {111L}, permissions.PermissionSet([]),
+ None, None))
+
+ def testCanViewComponentDef_ComponentAdmin(self):
+ cd = tracker_pb2.ComponentDef(admin_ids=[111L])
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanViewComponentDef(
+ {111L}, perms, None, cd))
+ self.assertFalse(permissions.CanViewComponentDef(
+ {999L}, perms, None, cd))
+
+ def testCanViewComponentDef_NormalUser(self):
+ cd = tracker_pb2.ComponentDef()
+ self.assertTrue(permissions.CanViewComponentDef(
+ {111L}, permissions.PermissionSet([permissions.VIEW]),
+ None, cd))
+ self.assertFalse(permissions.CanViewComponentDef(
+ {111L}, permissions.PermissionSet([]),
+ None, cd))
+
+ def testCanEditComponentDef_ComponentAdmin(self):
+ cd = tracker_pb2.ComponentDef(admin_ids=[111L], path='Whole')
+ sub_cd = tracker_pb2.ComponentDef(admin_ids=[222L], path='Whole>Part')
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.component_defs.append(cd)
+ config.component_defs.append(sub_cd)
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanEditComponentDef(
+ {111L}, perms, None, cd, config))
+ self.assertFalse(permissions.CanEditComponentDef(
+ {222L}, perms, None, cd, config))
+ self.assertFalse(permissions.CanEditComponentDef(
+ {999L}, perms, None, cd, config))
+ self.assertTrue(permissions.CanEditComponentDef(
+ {111L}, perms, None, sub_cd, config))
+ self.assertTrue(permissions.CanEditComponentDef(
+ {222L}, perms, None, sub_cd, config))
+ self.assertFalse(permissions.CanEditComponentDef(
+ {999L}, perms, None, sub_cd, config))
+
+ def testCanEditComponentDef_ProjectOwners(self):
+ cd = tracker_pb2.ComponentDef(path='Whole')
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.component_defs.append(cd)
+ self.assertTrue(permissions.CanEditComponentDef(
+ {111L}, permissions.PermissionSet([permissions.EDIT_PROJECT]),
+ None, cd, config))
+ self.assertFalse(permissions.CanEditComponentDef(
+ {111L}, permissions.PermissionSet([]),
+ None, cd, config))
+
+ def testCanViewFieldDef_FieldAdmin(self):
+ fd = tracker_pb2.FieldDef(admin_ids=[111L])
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanViewFieldDef(
+ {111L}, perms, None, fd))
+ self.assertFalse(permissions.CanViewFieldDef(
+ {999L}, perms, None, fd))
+
+ def testCanViewFieldDef_NormalUser(self):
+ fd = tracker_pb2.FieldDef()
+ self.assertTrue(permissions.CanViewFieldDef(
+ {111L}, permissions.PermissionSet([permissions.VIEW]),
+ None, fd))
+ self.assertFalse(permissions.CanViewFieldDef(
+ {111L}, permissions.PermissionSet([]),
+ None, fd))
+
+ def testCanEditFieldDef_FieldAdmin(self):
+ fd = tracker_pb2.FieldDef(admin_ids=[111L])
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanEditFieldDef(
+ {111L}, perms, None, fd))
+ self.assertFalse(permissions.CanEditFieldDef(
+ {999L}, perms, None, fd))
+
+ def testCanEditFieldDef_ProjectOwners(self):
+ fd = tracker_pb2.FieldDef()
+ self.assertTrue(permissions.CanEditFieldDef(
+ {111L}, permissions.PermissionSet([permissions.EDIT_PROJECT]),
+ None, fd))
+ self.assertFalse(permissions.CanEditFieldDef(
+ {111L}, permissions.PermissionSet([]),
+ None, fd))
+
+ def testCanViewTemplate_TemplateAdmin(self):
+ td = tracker_pb2.TemplateDef(admin_ids=[111L])
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanViewTemplate(
+ {111L}, perms, None, td))
+ self.assertFalse(permissions.CanViewTemplate(
+ {999L}, perms, None, td))
+
+ def testCanViewTemplate_MembersOnly(self):
+ td = tracker_pb2.TemplateDef(members_only=True)
+ project = project_pb2.Project(committer_ids=[111L])
+ self.assertTrue(permissions.CanViewTemplate(
+ {111L}, permissions.PermissionSet([]),
+ project, td))
+ self.assertFalse(permissions.CanViewTemplate(
+ {999L}, permissions.PermissionSet([]),
+ project, td))
+
+ def testCanViewTemplate_AnyoneWhoCanViewProject(self):
+ td = tracker_pb2.TemplateDef()
+ self.assertTrue(permissions.CanViewTemplate(
+ {111L}, permissions.PermissionSet([permissions.VIEW]),
+ None, td))
+ self.assertFalse(permissions.CanViewTemplate(
+ {111L}, permissions.PermissionSet([]),
+ None, td))
+
+ def testCanEditTemplate_TemplateAdmin(self):
+ td = tracker_pb2.TemplateDef(admin_ids=[111L])
+ perms = permissions.PermissionSet([])
+ self.assertTrue(permissions.CanEditTemplate(
+ {111L}, perms, None, td))
+ self.assertFalse(permissions.CanEditTemplate(
+ {999L}, perms, None, td))
+
+ def testCanEditTemplate_ProjectOwners(self):
+ td = tracker_pb2.TemplateDef()
+ self.assertTrue(permissions.CanEditTemplate(
+ {111L}, permissions.PermissionSet([permissions.EDIT_PROJECT]),
+ None, td))
+ self.assertFalse(permissions.CanEditTemplate(
+ {111L}, permissions.PermissionSet([]),
+ None, td))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/profiler_test.py b/appengine/monorail/framework/test/profiler_test.py
new file mode 100644
index 0000000..cd016cd
--- /dev/null
+++ b/appengine/monorail/framework/test/profiler_test.py
@@ -0,0 +1,48 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Test for monorail.framework.profiler."""
+
+import unittest
+
+from framework import profiler
+
+
+class ProfilerTest(unittest.TestCase):
+
+ def testTopLevelPhase(self):
+ prof = profiler.Profiler()
+ self.assertEquals(prof.current_phase.name, 'overall profile')
+ self.assertEquals(prof.current_phase.parent, None)
+ self.assertEquals(prof.current_phase, prof.top_phase)
+ self.assertEquals(prof.next_color, 0)
+
+ def testSinglePhase(self):
+ prof = profiler.Profiler()
+ self.assertEquals(prof.current_phase.name, 'overall profile')
+ with prof.Phase('test'):
+ self.assertEquals(prof.current_phase.name, 'test')
+ self.assertEquals(prof.current_phase.parent.name, 'overall profile')
+ self.assertEquals(prof.current_phase.name, 'overall profile')
+ self.assertEquals(prof.next_color, 1)
+
+ def testSubphaseExecption(self):
+ prof = profiler.Profiler()
+ try:
+ with prof.Phase('foo'):
+ with prof.Phase('bar'):
+ pass
+ with prof.Phase('baz'):
+ raise Exception('whoops')
+ except Exception as e:
+ self.assertEquals(e.message, 'whoops')
+ finally:
+ self.assertEquals(prof.current_phase.name, 'overall profile')
+ self.assertEquals(
+ prof.top_phase.subphases[0].subphases[1].name, 'baz')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/ratelimiter_test.py b/appengine/monorail/framework/test/ratelimiter_test.py
new file mode 100644
index 0000000..82537cc
--- /dev/null
+++ b/appengine/monorail/framework/test/ratelimiter_test.py
@@ -0,0 +1,330 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for RateLimiter.
+"""
+import unittest
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+import mox
+import os
+import settings
+
+from framework import ratelimiter
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class RateLimiterTest(unittest.TestCase):
+ def setUp(self):
+ settings.ratelimiting_enabled = True
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_user_stub()
+
+ self.mox = mox.Mox()
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ )
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+
+ self.ratelimiter = ratelimiter.RateLimiter()
+ ratelimiter.COUNTRY_LIMITS = {}
+ os.environ['USER_EMAIL'] = ''
+ settings.ratelimiting_enabled = True
+ settings.ratelimiting_cost_enabled = True
+ ratelimiter.DEFAULT_LIMIT = 10
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+ # settings.ratelimiting_enabled = True
+
+ def testCheckStart_pass(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ self.ratelimiter.CheckStart(request)
+ # Should not throw an exception.
+
+ def testCheckStart_fail(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ now = 0.0
+ cachekeysets, _, _, _ = ratelimiter._CacheKeys(request, now)
+ values = [{key: ratelimiter.DEFAULT_LIMIT for key in cachekeys} for
+ cachekeys in cachekeysets]
+ for value in values:
+ memcache.add_multi(value)
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ self.ratelimiter.CheckStart(request, now)
+
+ def testCheckStart_expiredEntries(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ now = 0.0
+ cachekeysets, _, _, _ = ratelimiter._CacheKeys(request, now)
+ values = [{key: ratelimiter.DEFAULT_LIMIT for key in cachekeys} for
+ cachekeys in cachekeysets]
+ for value in values:
+ memcache.add_multi(value)
+
+ now = now + 2 * ratelimiter.EXPIRE_AFTER_SECS
+ self.ratelimiter.CheckStart(request, now)
+ # Should not throw an exception.
+
+ def testCheckStart_repeatedCalls(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ now = 0.0
+
+    # Call CheckStart once every two minutes. Should be ok.
+ for _ in range(ratelimiter.N_MINUTES):
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 120.0
+
+ # Call CheckStart more than DEFAULT_LIMIT times in the same minute.
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ for _ in range(ratelimiter.DEFAULT_LIMIT + 2):
+ now = now + 0.001
+ self.ratelimiter.CheckStart(request, now)
+
+ def testCheckStart_differentIPs(self):
+ now = 0.0
+
+ ratelimiter.COUNTRY_LIMITS = {}
+ # Exceed DEFAULT_LIMIT calls, but vary remote_addr so different
+ # remote addresses aren't ratelimited together.
+ for m in range(ratelimiter.DEFAULT_LIMIT * 2):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.%d' % (m % 16)
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ # Exceed the limit, but only for one IP address. The
+ # others should be fine.
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ for m in range(ratelimiter.DEFAULT_LIMIT):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ # Now proceed to make requests for all of the other IP
+ # addresses besides .0.
+ for m in range(ratelimiter.DEFAULT_LIMIT * 2):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ # Skip .0 since it's already exceeded the limit.
+ request.remote_addr = '192.168.1.%d' % (m + 1)
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ def testCheckStart_sameIPDifferentUserIDs(self):
+ # Behind a NAT, e.g.
+ now = 0.0
+
+ # Exceed DEFAULT_LIMIT calls, but vary user_id so different
+ # users behind the same IP aren't ratelimited together.
+ for m in range(ratelimiter.DEFAULT_LIMIT * 2):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.remote_addr = '192.168.1.0'
+ os.environ['USER_EMAIL'] = '%s@example.com' % m
+ request.headers['X-AppEngine-Country'] = 'US'
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ # Exceed the limit, but only for one userID+IP address. The
+ # others should be fine.
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ for m in range(ratelimiter.DEFAULT_LIMIT + 2):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ os.environ['USER_EMAIL'] = '42@example.com'
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ # Now proceed to make requests for other user IDs
+ # besides 42.
+ for m in range(ratelimiter.DEFAULT_LIMIT * 2):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ # Skip .0 since it's already exceeded the limit.
+ request.remote_addr = '192.168.1.0'
+ os.environ['USER_EMAIL'] = '%s@example.com' % (43 + m)
+ ratelimiter._CacheKeys(request, now)
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ def testCheckStart_ratelimitingDisabled(self):
+ settings.ratelimiting_enabled = False
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers['X-AppEngine-Country'] = 'US'
+ request.remote_addr = '192.168.1.0'
+ now = 0.0
+
+ # Call CheckStart a lot. Should be ok.
+ for _ in range(ratelimiter.DEFAULT_LIMIT):
+ self.ratelimiter.CheckStart(request, now)
+ now = now + 0.001
+
+ def testCheckStart_perCountryLoggedOutLimit(self):
+ ratelimiter.COUNTRY_LIMITS['US'] = 10
+
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers[ratelimiter.COUNTRY_HEADER] = 'US'
+ request.remote_addr = '192.168.1.1'
+ now = 0.0
+
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ for m in range(ratelimiter.DEFAULT_LIMIT + 2):
+ self.ratelimiter.CheckStart(request, now)
+ # Vary remote address to make sure the limit covers
+ # the whole country, regardless of IP.
+ request.remote_addr = '192.168.1.%d' % m
+ now = now + 0.001
+
+ # CheckStart for a country that isn't covered by a country-specific limit.
+ request.headers['X-AppEngine-Country'] = 'UK'
+ for m in range(11):
+ self.ratelimiter.CheckStart(request, now)
+ # Vary remote address to make sure the limit covers
+ # the whole country, regardless of IP.
+ request.remote_addr = '192.168.1.%d' % m
+ now = now + 0.001
+
+ # And regular rate limits work per-IP.
+ request.remote_addr = '192.168.1.1'
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ for m in range(ratelimiter.DEFAULT_LIMIT):
+ self.ratelimiter.CheckStart(request, now)
+        # Keep the same remote address so this single
+        # client exceeds the per-IP limit on its own.
+ now = now + 0.001
+
+ def testCheckEnd_overCostThresh(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers[ratelimiter.COUNTRY_HEADER] = 'US'
+ request.remote_addr = '192.168.1.1'
+ start_time = 0.0
+
+ # Send some requests, all under the limit.
+ for _ in range(ratelimiter.DEFAULT_LIMIT-1):
+ start_time = start_time + 0.001
+ self.ratelimiter.CheckStart(request, start_time)
+ now = start_time + 0.010
+ self.ratelimiter.CheckEnd(request, now, start_time)
+
+ # Now issue some more request, this time taking long
+ # enough to get the cost threshold penalty.
+ # Fast forward enough to impact a later bucket than the
+ # previous requests.
+ start_time = now + 120.0
+ self.ratelimiter.CheckStart(request, start_time)
+
+ # Take longer than the threshold to process the request.
+ now = start_time + (settings.ratelimiting_cost_thresh_ms + 1) / 1000
+
+ # The request finished, taking longer than the cost
+ # threshold.
+ self.ratelimiter.CheckEnd(request, now, start_time)
+
+ with self.assertRaises(ratelimiter.RateLimitExceeded):
+ # One more request after the expensive query should
+      # throw an exception.
+ self.ratelimiter.CheckStart(request, start_time)
+
+ def testCheckEnd_overCostThreshButDisabled(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers[ratelimiter.COUNTRY_HEADER] = 'US'
+ request.remote_addr = '192.168.1.1'
+ start_time = 0.0
+ settings.ratelimiting_cost_enabled = False
+
+ # Send some requests, all under the limit.
+ for _ in range(ratelimiter.DEFAULT_LIMIT-1):
+ start_time = start_time + 0.001
+ self.ratelimiter.CheckStart(request, start_time)
+ now = start_time + 0.010
+ self.ratelimiter.CheckEnd(request, now, start_time)
+
+ # Now issue some more request, this time taking long
+ # enough to get the cost threshold penalty.
+ # Fast forward enough to impact a later bucket than the
+ # previous requests.
+ start_time = now + 120.0
+ self.ratelimiter.CheckStart(request, start_time)
+
+ # Take longer than the threshold to process the request.
+ now = start_time + (settings.ratelimiting_cost_thresh_ms + 10)/1000
+
+ # The request finished, taking longer than the cost
+ # threshold.
+ self.ratelimiter.CheckEnd(request, now, start_time)
+
+ # One more request after the expensive query should
+    # throw an exception, but cost thresholds are disabled.
+ self.ratelimiter.CheckStart(request, start_time)
+
+ def testChekcEnd_underCostThresh(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers[ratelimiter.COUNTRY_HEADER] = 'asdasd'
+ request.remote_addr = '192.168.1.1'
+ start_time = 0.0
+
+ # Send some requests, all under the limit.
+ for _ in range(ratelimiter.DEFAULT_LIMIT):
+ self.ratelimiter.CheckStart(request, start_time)
+ now = start_time + 0.010
+ self.ratelimiter.CheckEnd(request, now, start_time)
+ start_time = now + 0.010
+
+ def testChekcEnd_underCostThresh(self):
+ request, _ = testing_helpers.GetRequestObjects(
+ project=self.project)
+ request.headers[ratelimiter.COUNTRY_HEADER] = 'asdasd'
+ request.remote_addr = '192.168.1.1'
+ start_time = 0.0
+
+ # Send some requests, all under the limit.
+ for _ in range(ratelimiter.DEFAULT_LIMIT):
+ self.ratelimiter.CheckStart(request, start_time)
+ now = start_time + 0.01
+ self.ratelimiter.CheckEnd(request, now, start_time)
+ start_time = now + 0.01
diff --git a/appengine/monorail/framework/test/reap_test.py b/appengine/monorail/framework/test/reap_test.py
new file mode 100644
index 0000000..dc71506
--- /dev/null
+++ b/appengine/monorail/framework/test/reap_test.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the reap module."""
+
+import unittest
+
+import mox
+
+from framework import reap
+from framework import sql
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ReapTest(unittest.TestCase):
+
+  def setUp(self):
+    self.project_service = fake.ProjectService()
+    self.issue_service = fake.IssueService()
+    self.issue_star_service = fake.IssueStarService()
+    self.config_service = fake.ConfigService()
+    self.features_service = fake.FeaturesService()
+    self.project_star_service = fake.ProjectStarService()
+    self.services = service_manager.Services(
+        project=self.project_service,
+        issue=self.issue_service,
+        issue_star=self.issue_star_service,
+        config=self.config_service,
+        features=self.features_service,
+        project_star=self.project_star_service,
+        user=fake.UserService(),
+        usergroup=fake.UserGroupService())
+
+    self.proj1_id = 1001
+    self.proj1_issue_id = 111
+    self.proj1 = self.project_service.TestAddProject(
+        name='proj1', project_id=self.proj1_id)
+    self.proj2_id = 1002
+    self.proj2_issue_id = 112
+    self.proj2 = self.project_service.TestAddProject(
+        name='proj2', project_id=self.proj2_id)
+
+    self.mox = mox.Mox()
+    self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+    self.project_service.project_tbl = self.mox.CreateMock(sql.SQLTableManager)
+    self.issue_service.issue_tbl = self.mox.CreateMock(sql.SQLTableManager)
+
+  def tearDown(self):
+    self.mox.UnsetStubs()
+    self.mox.ResetAll()
+
+  def setUpMarkDoomedProjects(self):  # expect one archived-projects query
+    self.project_service.project_tbl.Select(
+        self.cnxn, cols=['project_id'], limit=1000, state='archived',
+        where=mox.IgnoreArg()).AndReturn([[self.proj1_id]])
+
+  def testMarkDoomedProjects(self):
+    self.setUpMarkDoomedProjects()
+    reaper = reap.Reap('req', 'resp', services=self.services)
+
+    self.mox.ReplayAll()
+    doomed_project_ids = reaper._MarkDoomedProjects(self.cnxn)
+    self.mox.VerifyAll()
+
+    self.assertEqual([self.proj1_id], doomed_project_ids)
+    self.assertEqual(project_pb2.ProjectState.DELETABLE, self.proj1.state)
+    self.assertEqual('DELETABLE_%s' % self.proj1_id, self.proj1.project_name)
+
+  def setUpExpungeParts(self):  # expect deletable-project and issue queries
+    self.project_service.project_tbl.Select(
+        self.cnxn, cols=['project_id'], limit=100,
+        state='deletable').AndReturn([[self.proj1_id], [self.proj2_id]])
+    self.issue_service.issue_tbl.Select(
+        self.cnxn, cols=['id'], limit=1000,
+        project_id=self.proj1_id).AndReturn([[self.proj1_issue_id]])
+    self.issue_service.issue_tbl.Select(
+        self.cnxn, cols=['id'], limit=1000,
+        project_id=self.proj2_id).AndReturn([[self.proj2_issue_id]])
+
+  def testExpungeDeletableProjects(self):
+    self.setUpExpungeParts()
+    reaper = reap.Reap('req', 'resp', services=self.services)
+
+    self.mox.ReplayAll()
+    expunged_project_ids = reaper._ExpungeDeletableProjects(self.cnxn)
+    self.mox.VerifyAll()
+
+    self.assertEqual([self.proj1_id, self.proj2_id], expunged_project_ids)
+    # Verify all expected expunge methods were called.
+    self.assertEqual([self.proj1_issue_id, self.proj2_issue_id],
+                     self.services.issue_star.expunged_item_ids)
+    self.assertEqual([self.proj1_issue_id, self.proj2_issue_id],
+                     self.services.issue.expunged_issues)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.config.expunged_configs)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.features.expunged_saved_queries)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.features.expunged_filter_rules)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.issue.expunged_former_locations)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.issue.expunged_local_ids)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.features.expunged_quick_edit)
+    self.assertEqual([self.proj1_id, self.proj2_id],
+                     self.services.project_star.expunged_item_ids)
+    self.assertEqual(0, len(self.services.project.test_projects))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/registerpages_helpers_test.py b/appengine/monorail/framework/test/registerpages_helpers_test.py
new file mode 100644
index 0000000..8b2daee
--- /dev/null
+++ b/appengine/monorail/framework/test/registerpages_helpers_test.py
@@ -0,0 +1,49 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for URL handler registration helper functions."""
+
+import unittest
+
+import webapp2
+
+from framework import registerpages_helpers
+
+
+class SendRedirectInScopeTest(unittest.TestCase):
+
+  def testMakeRedirectInScope_Error(self):  # paths must start with '/'
+    self.assertRaises(
+        AssertionError,
+        registerpages_helpers.MakeRedirectInScope, 'no/initial/slash', 'p')
+    self.assertRaises(
+        AssertionError,
+        registerpages_helpers.MakeRedirectInScope, '', 'p')
+
+  def testMakeRedirectInScope_Normal(self):
+    factory = registerpages_helpers.MakeRedirectInScope('/', 'p')  # default 301
+    # Non-dasher, normal case
+    request = webapp2.Request.blank(
+        path='/p/foo', headers={'Host': 'example.com'})
+    response = webapp2.Response()
+    redirector = factory(request, response)
+    redirector.get()
+    self.assertEqual(response.location, '//example.com/p/foo/')
+    self.assertEqual(response.status, '301 Moved Permanently')
+
+  def testMakeRedirectInScope_Temporary(self):  # permanent=False => 302
+    factory = registerpages_helpers.MakeRedirectInScope(
+        '/', 'p', permanent=False)
+    request = webapp2.Request.blank(
+        path='/p/foo', headers={'Host': 'example.com'})
+    response = webapp2.Response()
+    redirector = factory(request, response)
+    redirector.get()
+    self.assertEqual(response.location, '//example.com/p/foo/')
+    self.assertEqual(response.status, '302 Moved Temporarily')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/servlet_helpers_test.py b/appengine/monorail/framework/test/servlet_helpers_test.py
new file mode 100644
index 0000000..34c9a89
--- /dev/null
+++ b/appengine/monorail/framework/test/servlet_helpers_test.py
@@ -0,0 +1,74 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for servlet base class helper functions."""
+
+import unittest
+
+from framework import permissions
+from framework import servlet_helpers
+from proto import project_pb2
+from testing import testing_helpers
+
+
+class EztDataTest(unittest.TestCase):
+
+  def testGetBannerTime(self):
+    """Tests GetBannerTime method."""
+    timestamp = ['2009', '3', '13', '21', '24', '5']  # y, m, d, h, min, s
+
+    banner_time = servlet_helpers.GetBannerTime(timestamp)
+
+    # Ensure that the banner timestamp falls in a timestamp range to account for
+    # the test being run in different timezones.
+    # Using "Sun, 12 Mar 2009 00:00:00 GMT" and "Sun, 15 Mar 2009 00:00:00 GMT".
+    self.assertTrue(1236816000000 <= banner_time.ts <= 1237075200000)
+    self.assertEqual(2009, banner_time.year)
+    self.assertEqual(3, banner_time.month)
+    self.assertEqual(13, banner_time.day)
+    self.assertEqual(21, banner_time.hour)
+    self.assertEqual(24, banner_time.minute)
+    self.assertEqual(5, banner_time.second)
+    self.assertEqual('Friday', banner_time.weekday)
+    self.assertEqual('09:24PM', banner_time.hour_min)
+
+
+class AssertBasePermissionTest(unittest.TestCase):
+
+  def testAccessGranted(self):
+    _, mr = testing_helpers.GetRequestObjects(path='/hosting')
+    # No exceptions should be raised.
+    servlet_helpers.AssertBasePermission(mr)
+
+    mr.auth.user_id = 123L  # now signed in
+    # No exceptions should be raised.
+    servlet_helpers.AssertBasePermission(mr)
+    servlet_helpers.AssertBasePermissionForUser(
+        mr.auth.user_pb, mr.auth.user_view)
+
+  def testBanned(self):
+    _, mr = testing_helpers.GetRequestObjects(path='/hosting')
+    mr.auth.user_pb.banned = 'spammer'  # non-empty banned reason
+    self.assertRaises(
+        permissions.BannedUserException,
+        servlet_helpers.AssertBasePermissionForUser,
+        mr.auth.user_pb, mr.auth.user_view)
+    self.assertRaises(
+        permissions.BannedUserException,
+        servlet_helpers.AssertBasePermission, mr)
+
+  def testNoAccessToProject(self):
+    project = project_pb2.Project()
+    project.project_name = 'proj'
+    project.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+    _, mr = testing_helpers.GetRequestObjects(path='/p/proj/', project=project)
+    mr.perms = permissions.EMPTY_PERMISSIONSET  # no permissions at all
+    self.assertRaises(
+        permissions.PermissionException,
+        servlet_helpers.AssertBasePermission, mr)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/servlet_test.py b/appengine/monorail/framework/test/servlet_test.py
new file mode 100644
index 0000000..5b01b06
--- /dev/null
+++ b/appengine/monorail/framework/test/servlet_test.py
@@ -0,0 +1,298 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for servlet base class module."""
+
+import time
+import unittest
+
+from google.appengine.ext import testbed
+
+import webapp2
+
+from framework import framework_constants
+from framework import servlet
+from framework import xsrf
+from proto import project_pb2
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class TestableServlet(servlet.Servlet):
+  """A tiny concrete subclass of abstract class Servlet."""
+
+  def __init__(self, request, response, services=None, do_post_redirect=True):
+    super(TestableServlet, self).__init__(request, response, services=services)
+    self.do_post_redirect = do_post_redirect  # True => redirect after POST
+    self.seen_post_data = None  # records the last POST data for assertions
+
+  def ProcessFormData(self, _mr, post_data):
+    self.seen_post_data = post_data
+    if self.do_post_redirect:
+      return '/This/Is?The=Next#Page'
+    else:
+      self.response.write('sending raw data to browser')
+
+
+class ServletTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services(project=fake.ProjectService())
+ self.page_class = TestableServlet(
+ webapp2.Request.blank('/'), webapp2.Response(), services=services)
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_datastore_v3_stub()
+
+ def testDefaultValues(self):
+ self.assertEqual(None, self.page_class._MAIN_TAB_MODE)
+ self.assertTrue(self.page_class._TEMPLATE_PATH.endswith('/templates/'))
+ self.assertEqual(None, self.page_class._PAGE_TEMPLATE)
+
+ def testGatherBaseData(self):
+ project = fake.Project(
+ project_name='testproj', cached_content_timestamp=12345,
+ state=project_pb2.ProjectState.LIVE)
+
+ (_request, mr) = testing_helpers.GetRequestObjects(
+ path='/p/testproj/feeds', project=project)
+ nonce = '1a2b3c4d5e6f7g'
+
+ base_data = self.page_class.GatherBaseData(mr, nonce)
+
+ self.assertEqual(base_data['nonce'], nonce)
+ self.assertEqual(base_data['projectname'], 'testproj')
+ self.assertEqual(base_data['project'].cached_content_timestamp, 12345)
+ self.assertEqual(base_data['project_alert'], None)
+
+ self.assert_(
+ base_data['currentPageURL'].endswith('/p/testproj/feeds'))
+ self.assert_(
+ base_data['currentPageURLEncoded'].endswith('%2Fp%2Ftestproj%2Ffeeds'))
+
+ def testFormHandlerURL(self):
+ self.assertEqual('/edit.do', self.page_class._FormHandlerURL('/'))
+ self.assertEqual(
+ '/something/edit.do',
+ self.page_class._FormHandlerURL('/something/'))
+ self.assertEqual(
+ '/something/edit.do',
+ self.page_class._FormHandlerURL('/something/edit.do'))
+ self.assertEqual(
+ '/something/detail.do',
+ self.page_class._FormHandlerURL('/something/detail'))
+
+ def testProcessForm_NoToken(self):
+ user_id = 111L
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/we/we/we?so=excited',
+ params={'yesterday': 'thursday', 'today': 'friday'},
+ user_info={'user_id': user_id},
+ method='POST',
+ )
+ # Normally, every form needs a security token.
+ self.assertRaises(
+ xsrf.TokenIncorrect, self.page_class._DoFormProcessing, request, mr)
+ self.assertEqual(None, self.page_class.seen_post_data)
+
+ # We can make an explicit exception to that.
+ self.page_class.CHECK_SECURITY_TOKEN = False
+ try:
+ self.page_class._DoFormProcessing(request, mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+      self.assertEqual(302, e.code)  # forms redirect on success
+
+ self.assertDictEqual(
+ {'yesterday': 'thursday', 'today': 'friday'},
+ dict(self.page_class.seen_post_data))
+
+ def testProcessForm_BadToken(self):
+
+ user_id = 111L
+ token = 'no soup for you'
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/we/we/we?so=excited',
+ params={'yesterday': 'thursday', 'today': 'friday', 'token': token},
+ user_info={'user_id': user_id},
+ method='POST',
+ )
+ self.assertRaises(
+ xsrf.TokenIncorrect, self.page_class._DoFormProcessing, request, mr)
+ self.assertEqual(None, self.page_class.seen_post_data)
+
+ def testProcessForm_Normal(self):
+ user_id = 111L
+ token = xsrf.GenerateToken(user_id, '/we/we/we')
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/we/we/we?so=excited',
+ params={'yesterday': 'thursday', 'today': 'friday', 'token': token},
+ user_info={'user_id': user_id},
+ method='POST',
+ )
+ try:
+ self.page_class._DoFormProcessing(request, mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+      self.assertEqual(302, e.code)  # forms redirect on success
+
+ self.assertDictEqual(
+ {'yesterday': 'thursday', 'today': 'friday', 'token': token},
+ dict(self.page_class.seen_post_data))
+
+ def testCalcProjectAlert(self):
+ project = fake.Project(
+ project_name='alerttest', state=project_pb2.ProjectState.LIVE)
+
+ project_alert = servlet._CalcProjectAlert(project)
+ self.assertEqual(project_alert, None)
+
+ project.state = project_pb2.ProjectState.ARCHIVED
+ project_alert = servlet._CalcProjectAlert(project)
+ self.assertEqual(
+ project_alert,
+ 'Project is archived: read-only by members only.')
+
+ delete_time = int(time.time() + framework_constants.SECS_PER_DAY * 1.5)
+ project.delete_time = delete_time
+ project_alert = servlet._CalcProjectAlert(project)
+ self.assertEqual(project_alert, 'Scheduled for deletion in 1 day.')
+
+ delete_time = int(time.time() + framework_constants.SECS_PER_DAY * 2.5)
+ project.delete_time = delete_time
+ project_alert = servlet._CalcProjectAlert(project)
+ self.assertEqual(project_alert, 'Scheduled for deletion in 2 days.')
+
+ def testCheckForMovedProject_NoRedirect(self):
+ project = fake.Project(
+ project_name='proj', state=project_pb2.ProjectState.LIVE)
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj', project=project)
+ self.page_class._CheckForMovedProject(mr, request)
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/source/browse/p/adminAdvanced', project=project)
+ self.page_class._CheckForMovedProject(mr, request)
+
+ def testCheckForMovedProject_Redirect(self):
+ project = fake.Project(project_name='proj', moved_to='http://example.com')
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj', project=project)
+ try:
+ self.page_class._CheckForMovedProject(mr, request)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEqual(302, e.code) # redirect because project moved
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/source/browse/p/adminAdvanced', project=project)
+ try:
+ self.page_class._CheckForMovedProject(mr, request)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEqual(302, e.code) # redirect because project moved
+
+ def testCheckForMovedProject_AdminAdvanced(self):
+ """We do not redirect away from the page that edits project state."""
+ project = fake.Project(project_name='proj', moved_to='http://example.com')
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/adminAdvanced', project=project)
+ self.page_class._CheckForMovedProject(mr, request)
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/adminAdvanced?ts=123234', project=project)
+ self.page_class._CheckForMovedProject(mr, request)
+
+ request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/adminAdvanced.do', project=project)
+ self.page_class._CheckForMovedProject(mr, request)
+
+ def testGatherHelpData(self):
+ help_data = self.page_class.GatherHelpData('fake mr', {})
+ self.assertEqual(None, help_data['cue'])
+
+ def testGatherDebugData_Visibility(self):
+ project = fake.Project(
+ project_name='testtest', state=project_pb2.ProjectState.LIVE)
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/foo/servlet_path', project=project)
+ debug_data = self.page_class.GatherDebugData(mr, {})
+ self.assertEqual('off', debug_data['dbg'])
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/foo/servlet_path?debug=1', project=project)
+ debug_data = self.page_class.GatherDebugData(mr, {})
+ self.assertEqual('on', debug_data['dbg'])
+
+
+class ProjectIsRestrictedTest(unittest.TestCase):
+
+  def testNonRestrictedProject(self):
+    proj = project_pb2.Project()
+    mr = testing_helpers.MakeMonorailRequest()
+    mr.project = proj
+
+    proj.access = project_pb2.ProjectAccess.ANYONE  # public access
+    proj.state = project_pb2.ProjectState.LIVE
+    self.assertFalse(servlet._ProjectIsRestricted(mr))
+
+    proj.state = project_pb2.ProjectState.ARCHIVED  # archived but still public
+    self.assertFalse(servlet._ProjectIsRestricted(mr))
+
+  def testRestrictedProject(self):
+    proj = project_pb2.Project()
+    mr = testing_helpers.MakeMonorailRequest()
+    mr.project = proj
+
+    proj.state = project_pb2.ProjectState.LIVE
+    proj.access = project_pb2.ProjectAccess.MEMBERS_ONLY  # => restricted
+    self.assertTrue(servlet._ProjectIsRestricted(mr))
+
+
+FORM_URL = 'http://example.com/issues/form.php'
+
+
+class ComputeIssueEntryURLTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project = project_pb2.Project()
+ self.project.project_name = 'proj'
+ self.config = tracker_pb2.ProjectIssueConfig()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_datastore_v3_stub()
+
+ def testComputeIssueEntryURL_Normal(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/detail?id=123&q=term',
+ project=self.project)
+
+ url = servlet._ComputeIssueEntryURL(mr, self.config)
+ self.assertEqual('entry', url)
+
+ def testComputeIssueEntryURL_Customized(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/detail?id=123&q=term',
+ project=self.project)
+ mr.auth.user_id = 111L
+ self.config.custom_issue_entry_url = FORM_URL
+
+ url = servlet._ComputeIssueEntryURL(mr, self.config)
+ self.assertTrue(url.startswith(FORM_URL))
+ self.assertIn('token=', url)
+ self.assertIn('role=', url)
+ self.assertIn('continue=', url)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/sorting_test.py b/appengine/monorail/framework/test/sorting_test.py
new file mode 100644
index 0000000..2fe1176
--- /dev/null
+++ b/appengine/monorail/framework/test/sorting_test.py
@@ -0,0 +1,220 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for sorting.py functions."""
+
+import unittest
+
+import mox
+
+from framework import sorting
+from proto import tracker_pb2
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+def MakeDescending(accessor):
+ return sorting._MaybeMakeDescending(accessor, True)
+
+
+class SortingTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.default_cols = 'a b c'
+ self.builtin_cols = 'a b x y z'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 11, 789, 'Database', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 22, 789, 'User Interface', 'doc', True, [], [], 0, 0))
+ self.config.component_defs.append(tracker_bizobj.MakeComponentDef(
+ 33, 789, 'Installer', 'doc', False, [], [], 0, 0))
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testMakeSingleSortKeyAccessor_Status(self):
+ """Sorting by status should create an accessor for that column."""
+ self.mox.StubOutWithMock(sorting, '_IndexOrLexical')
+ status_names = [wks.status for wks in self.config.well_known_statuses]
+ sorting._IndexOrLexical(status_names, 'status accessor')
+ self.mox.ReplayAll()
+
+ sorting._MakeSingleSortKeyAccessor(
+ 'status', self.config, {'status': 'status accessor'}, [], {}, [])
+ self.mox.VerifyAll()
+
+ def testMakeSingleSortKeyAccessor_Component(self):
+ """Sorting by component should create an accessor for that column."""
+ self.mox.StubOutWithMock(sorting, '_IndexListAccessor')
+ component_ids = [11, 33, 22]
+ sorting._IndexListAccessor(component_ids, 'component accessor')
+ self.mox.ReplayAll()
+
+ sorting._MakeSingleSortKeyAccessor(
+ 'component', self.config, {'component': 'component accessor'}, [], {}, [])
+ self.mox.VerifyAll()
+
+  def testMakeSingleSortKeyAccessor_OtherBuiltInColumns(self):
+    """Sorting a built-in column should create an accessor for that column."""
+    accessor = sorting._MakeSingleSortKeyAccessor(
+        'buildincol', self.config, {'buildincol': 'accessor'}, [], {}, [])
+    self.assertEqual('accessor', accessor)
+
+  def testMakeSingleSortKeyAccessor_OtherBuiltInUserColumns(self):
+    """Sorting a built-in user column should create a user accessor."""
+    self.mox.StubOutWithMock(sorting, '_UserEditNameAccessor')
+    users_by_id = {111L: 'fake user'}
+    sorting._UserEditNameAccessor(users_by_id, 'owner accessor')
+    self.mox.ReplayAll()
+
+    sorting._MakeSingleSortKeyAccessor(
+        'owner', self.config, {'owner': 'owner accessor'},
+        ['owner'], users_by_id, [])
+    self.mox.VerifyAll()
+
+ def testIndexOrLexical(self):
+ well_known_values = ['x-a', 'x-b', 'x-c', 'x-d']
+ art = 'this is a fake artifact'
+
+ # Case 1: accessor generates no values.
+ base_accessor = lambda art: None
+ accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
+ self.assertEqual(sorting.MAX_STRING, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(sorting.DescendingValue(sorting.MAX_STRING),
+ neg_accessor(art))
+
+ # Case 2: accessor generates a value, but it is an empty value.
+ base_accessor = lambda art: ''
+ accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
+ self.assertEqual(sorting.MAX_STRING, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(sorting.DescendingValue(sorting.MAX_STRING),
+ neg_accessor(art))
+
+ # Case 3: A single well-known value
+ base_accessor = lambda art: 'x-c'
+ accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
+ self.assertEqual(2, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(-2, neg_accessor(art))
+
+ # Case 4: A single odd-ball value
+ base_accessor = lambda art: 'x-zzz'
+ accessor = sorting._IndexOrLexical(well_known_values, base_accessor)
+ self.assertEqual('x-zzz', accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(
+ sorting.DescendingValue('x-zzz'), neg_accessor(art))
+
+ def testIndexListAccessor_SomeWellKnownValues(self):
+ """Values sort according to their position in the well-known list."""
+ well_known_values = [11, 33, 22] # These represent component IDs.
+ art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111L)
+ base_accessor = lambda issue: issue.component_ids
+ accessor = sorting._IndexListAccessor(well_known_values, base_accessor)
+
+ # Case 1: accessor generates no values.
+ self.assertEqual(sorting.MAX_STRING, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
+
+ # Case 2: A single well-known value
+ art.component_ids = [33]
+ self.assertEqual([1], accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual([-1], neg_accessor(art))
+
+ # Case 3: Multiple well-known and odd-ball values
+ art.component_ids = [33, 11, 99]
+ self.assertEqual([0, 1, sorting.MAX_STRING], accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual([sorting.MAX_STRING, -1, 0],
+ neg_accessor(art))
+
+ def testIndexListAccessor_NoWellKnownValues(self):
+ """When there are no well-known values, all values sort last."""
+ well_known_values = [] # Nothing pre-defined, so everything is oddball
+ art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111L)
+ base_accessor = lambda issue: issue.component_ids
+ accessor = sorting._IndexListAccessor(well_known_values, base_accessor)
+
+ # Case 1: accessor generates no values.
+ self.assertEqual(sorting.MAX_STRING, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
+
+ # Case 2: A single oddball value
+ art.component_ids = [33]
+ self.assertEqual([sorting.MAX_STRING], accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual([sorting.MAX_STRING], neg_accessor(art))
+
+ # Case 3: Multiple odd-ball values
+ art.component_ids = [33, 11, 99]
+ self.assertEqual(
+ [sorting.MAX_STRING, sorting.MAX_STRING, sorting.MAX_STRING],
+ accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(
+ [sorting.MAX_STRING, sorting.MAX_STRING, sorting.MAX_STRING],
+ neg_accessor(art))
+
+ def testIndexOrLexicalList(self):
+ well_known_values = ['Pri-High', 'Pri-Med', 'Pri-Low']
+ art = fake.MakeTestIssue(789, 1, 'sum 1', 'New', 111L, merged_into=200001)
+
+ # Case 1: accessor generates no values.
+ accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
+ self.assertEqual(sorting.MAX_STRING, accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual(sorting.MAX_STRING, neg_accessor(art))
+
+ # Case 2: A single well-known value
+ art.labels = ['Pri-Med']
+ accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
+ self.assertEqual([1], accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual([-1], neg_accessor(art))
+
+ # Case 3: Multiple well-known and odd-ball values
+ art.labels = ['Pri-zzz', 'Pri-Med', 'yyy', 'Pri-High']
+ accessor = sorting._IndexOrLexicalList(well_known_values, [], 'pri', {})
+ self.assertEqual([0, 1, 'zzz'], accessor(art))
+ neg_accessor = MakeDescending(accessor)
+ self.assertEqual([sorting.DescendingValue('zzz'), -1, 0],
+ neg_accessor(art))
+
+  def testComputeSortDirectives(self):
+    config = tracker_pb2.ProjectIssueConfig()
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/proj/issues/detail?id=123')
+    self.assertEqual(['project', 'id'],
+                     sorting.ComputeSortDirectives(mr, config))
+
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/proj/issues/detail?id=123&sort=a b C')
+    self.assertEqual(['a', 'b', 'c', 'project', 'id'],
+                     sorting.ComputeSortDirectives(mr, config))
+
+    config.default_sort_spec = 'id -reporter Owner'
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/proj/issues/detail?id=123')
+    self.assertEqual(['id', '-reporter', 'owner', 'project'],
+                     sorting.ComputeSortDirectives(mr, config))
+
+    mr = testing_helpers.MakeMonorailRequest(
+        path='/p/proj/issues/detail?id=123&sort=A -b c -owner')
+    self.assertEqual(
+        ['a', '-b', 'c', '-owner', 'id', '-reporter', 'project'],
+        sorting.ComputeSortDirectives(mr, config))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/framework/test/sql_test.py b/appengine/monorail/framework/test/sql_test.py
new file mode 100644
index 0000000..bc3ce64
--- /dev/null
+++ b/appengine/monorail/framework/test/sql_test.py
@@ -0,0 +1,471 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the sql module."""
+
+import unittest
+
+import settings
+from framework import sql
+
+
class MockSQLCnxn(object):
  """Stand-in for the MySQL connection/cursor pair used by the sql module.

  It records the most recent statement so that tests can assert on the
  exact SQL text, and it plays back canned result rows from result_rows.
  """

  def __init__(self, instance, database):
    self.instance = instance
    self.database = database
    # The last statement run, with %s placeholders already interpolated.
    self.last_executed = None
    # For executemany(): a tuple holding one argument list per row.
    self.last_executed_args = None
    # Rows that fetchall() should return; tests set this directly.
    self.result_rows = None
    self.rowcount = 0
    self.lastrowid = None

  def execute(self, stmt_str, args=None):
    self.last_executed = stmt_str % tuple(args if args else [])

  def executemany(self, stmt_str, args):
    # The statement cannot be interpolated here: args holds many values
    # for each %s, so record the raw statement and the args separately.
    self.last_executed = stmt_str
    self.last_executed_args = tuple(args)

    if stmt_str.startswith('INSERT'):
      self.lastrowid = 123

  def cursor(self):
    # The mock serves as its own cursor object.
    return self

  def fetchall(self):
    return self.result_rows

  def commit(self):
    pass

  def close(self):
    pass
+
+
+# Monkey-patch the sql module so that the tests below never open a real
+# MySQL connection; every connection request yields a MockSQLCnxn.
+sql.MakeConnection = MockSQLCnxn
+
+
class MonorailConnectionTest(unittest.TestCase):
  """Tests connection selection and caching in MonorailConnection."""

  def setUp(self):
    self.cnxn = sql.MonorailConnection()
    # Save the settings that these tests touch so tearDown can restore them.
    self.saved_dev_mode = settings.dev_mode
    self.saved_num_logical_shards = settings.num_logical_shards
    settings.dev_mode = False

  def tearDown(self):
    settings.dev_mode = self.saved_dev_mode
    settings.num_logical_shards = self.saved_num_logical_shards

  def testGetMasterConnection(self):
    """The master connection targets the master instance and is cached."""
    sql_cnxn = self.cnxn.GetMasterConnection()
    self.assertEqual(settings.db_instance, sql_cnxn.instance)
    self.assertEqual(settings.db_database_name, sql_cnxn.database)
    # Asking again must return the very same cached connection object.
    self.assertIs(sql_cnxn, self.cnxn.GetMasterConnection())

  def testGetConnectionForShard(self):
    """A shard connection targets the shard instance and is cached per shard."""
    sql_cnxn = self.cnxn.GetConnectionForShard(1)
    self.assertEqual(settings.physical_db_name_format % 1,
                     sql_cnxn.instance)
    self.assertEqual(settings.db_database_name, sql_cnxn.database)
    self.assertIs(sql_cnxn, self.cnxn.GetConnectionForShard(1))
+
+
+class TableManagerTest(unittest.TestCase):
+  """Tests the SQL text that SQLTableManager builds and executes.
+
+  sql.MakeConnection is monkey-patched to MockSQLCnxn above, so each
+  test asserts on the exact statement text left in last_executed as
+  well as on the value returned to the caller.
+  """
+
+  def setUp(self):
+    self.emp_tbl = sql.SQLTableManager('Employee')
+    self.cnxn = sql.MonorailConnection()
+    # Because of the monkey-patch, this is really a MockSQLCnxn.
+    self.master_cnxn = self.cnxn.GetMasterConnection()
+
+  def testSelect_Trivial(self):
+    """Select() with no kwargs produces SELECT * and returns all rows."""
+    self.master_cnxn.result_rows = [(111, True), (222, False)]
+    rows = self.emp_tbl.Select(self.cnxn)
+    self.assertEqual('SELECT * FROM Employee', self.master_cnxn.last_executed)
+    self.assertEqual([(111, True), (222, False)], rows)
+
+  def testSelect_Conditions(self):
+    """Keyword args become WHERE terms; list values become IN clauses."""
+    self.master_cnxn.result_rows = [(111,)]
+    rows = self.emp_tbl.Select(
+        self.cnxn, cols=['emp_id'], fulltime=True, dept_id=[10, 20])
+    self.assertEqual(
+        'SELECT emp_id FROM Employee'
+        '\nWHERE dept_id IN (10,20)'
+        '\n  AND fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual([(111,)], rows)
+
+  def testSelectRow(self):
+    """SelectRow() does a SELECT DISTINCT and returns a single row."""
+    self.master_cnxn.result_rows = [(111,)]
+    row = self.emp_tbl.SelectRow(
+        self.cnxn, cols=['emp_id'], fulltime=True, dept_id=[10, 20])
+    self.assertEqual(
+        'SELECT DISTINCT emp_id FROM Employee'
+        '\nWHERE dept_id IN (10,20)'
+        '\n  AND fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual((111,), row)
+
+  def testSelectRow_NoMatches(self):
+    """SelectRow() returns None, or the given default, when nothing matches."""
+    self.master_cnxn.result_rows = []
+    row = self.emp_tbl.SelectRow(
+        self.cnxn, cols=['emp_id'], fulltime=True, dept_id=[99])
+    self.assertEqual(
+        'SELECT DISTINCT emp_id FROM Employee'
+        '\nWHERE dept_id IN (99)'
+        '\n  AND fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual(None, row)
+
+    row = self.emp_tbl.SelectRow(
+        self.cnxn, cols=['emp_id'], fulltime=True, dept_id=[99],
+        default=(-1,))
+    self.assertEqual((-1,), row)
+
+  def testSelectValue(self):
+    """SelectValue() unwraps the single value of a single-column row."""
+    self.master_cnxn.result_rows = [(111,)]
+    val = self.emp_tbl.SelectValue(
+        self.cnxn, 'emp_id', fulltime=True, dept_id=[10, 20])
+    self.assertEqual(
+        'SELECT DISTINCT emp_id FROM Employee'
+        '\nWHERE dept_id IN (10,20)'
+        '\n  AND fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual(111, val)
+
+  def testSelectValue_NoMatches(self):
+    """SelectValue() returns None, or the given default, on no match."""
+    self.master_cnxn.result_rows = []
+    val = self.emp_tbl.SelectValue(
+        self.cnxn, 'emp_id', fulltime=True, dept_id=[99])
+    self.assertEqual(
+        'SELECT DISTINCT emp_id FROM Employee'
+        '\nWHERE dept_id IN (99)'
+        '\n  AND fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual(None, val)
+
+    val = self.emp_tbl.SelectValue(
+        self.cnxn, 'emp_id', fulltime=True, dept_id=[99],
+        default=-1)
+    self.assertEqual(-1, val)
+
+  def testInsertRow(self):
+    """InsertRow() runs a parameterized INSERT and returns the new row ID."""
+    self.master_cnxn.rowcount = 1
+    generated_id = self.emp_tbl.InsertRow(self.cnxn, emp_id=111, fulltime=True)
+    self.assertEqual(
+        'INSERT INTO Employee (emp_id, fulltime)'
+        '\nVALUES (%s,%s)',
+        self.master_cnxn.last_executed)
+    # True is converted to 1 before being sent to MySQL.
+    self.assertEqual(
+        ([111, 1],),
+        self.master_cnxn.last_executed_args)
+    # 123 is the lastrowid that MockSQLCnxn cans for INSERT statements.
+    self.assertEqual(123, generated_id)
+
+  def testInsertRows_Empty(self):
+    """Inserting an empty list of rows executes no statement at all."""
+    generated_id = self.emp_tbl.InsertRows(
+        self.cnxn, ['emp_id', 'fulltime'], [])
+    self.assertIsNone(self.master_cnxn.last_executed)
+    self.assertIsNone(self.master_cnxn.last_executed_args)
+    self.assertEqual(None, generated_id)
+
+  def testInsertRows(self):
+    """InsertRows() uses executemany; no per-row IDs are returned."""
+    self.master_cnxn.rowcount = 2
+    generated_ids = self.emp_tbl.InsertRows(
+        self.cnxn, ['emp_id', 'fulltime'], [(111, True), (222, False)])
+    self.assertEqual(
+        'INSERT INTO Employee (emp_id, fulltime)'
+        '\nVALUES (%s,%s)',
+        self.master_cnxn.last_executed)
+    self.assertEqual(
+        ([111, 1], [222, 0]),
+        self.master_cnxn.last_executed_args)
+    self.assertEqual([], generated_ids)
+
+  def testUpdate(self):
+    """Update() builds UPDATE ... SET ... WHERE and returns the rowcount."""
+    self.master_cnxn.rowcount = 2
+    rowcount = self.emp_tbl.Update(
+        self.cnxn, {'fulltime': True}, emp_id=[111, 222])
+    self.assertEqual(
+        'UPDATE Employee SET fulltime=1'
+        '\nWHERE emp_id IN (111,222)',
+        self.master_cnxn.last_executed)
+    self.assertEqual(2, rowcount)
+
+  def testIncrementCounterValue(self):
+    """IncrementCounterValue() reads the new value via LAST_INSERT_ID()."""
+    self.master_cnxn.rowcount = 1
+    self.master_cnxn.lastrowid = 9
+    new_counter_val = self.emp_tbl.IncrementCounterValue(
+        self.cnxn, 'years_worked', emp_id=111)
+    self.assertEqual(
+        'UPDATE Employee SET years_worked = LAST_INSERT_ID(years_worked + 1)'
+        '\nWHERE emp_id = 111',
+        self.master_cnxn.last_executed)
+    self.assertEqual(9, new_counter_val)
+
+  def testDelete(self):
+    """Delete() builds DELETE ... WHERE and returns the rowcount."""
+    self.master_cnxn.rowcount = 1
+    rowcount = self.emp_tbl.Delete(self.cnxn, fulltime=True)
+    self.assertEqual(
+        'DELETE FROM Employee'
+        '\nWHERE fulltime = 1',
+        self.master_cnxn.last_executed)
+    self.assertEqual(1, rowcount)
+
+
class StatementTest(unittest.TestCase):
  """Tests the SQL text and argument lists produced by sql.Statement."""

  def testMakeSelect(self):
    """MakeSelect() lists the requested columns; distinct=True adds DISTINCT."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

    stmt = sql.Statement.MakeSelect(
        'Employee', ['emp_id', 'fulltime'], distinct=True)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT DISTINCT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testMakeInsert(self):
    """MakeInsert() parameterizes values and supports replace/ignore forms."""
    stmt = sql.Statement.MakeInsert(
        'Employee', ['emp_id', 'fulltime'], [(111, True), (222, False)])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'INSERT INTO Employee (emp_id, fulltime)'
        '\nVALUES (%s,%s)',
        stmt_str)
    # Booleans are converted to ints before being sent to MySQL.
    self.assertEqual([[111, 1], [222, 0]], args)

    stmt = sql.Statement.MakeInsert(
        'Employee', ['emp_id', 'fulltime'], [(111, False)], replace=True)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'INSERT INTO Employee (emp_id, fulltime)'
        '\nVALUES (%s,%s)'
        '\nON DUPLICATE KEY UPDATE '
        'emp_id=VALUES(emp_id), fulltime=VALUES(fulltime)',
        stmt_str)
    self.assertEqual([[111, 0]], args)

    stmt = sql.Statement.MakeInsert(
        'Employee', ['emp_id', 'fulltime'], [(111, False)], ignore=True)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'INSERT IGNORE INTO Employee (emp_id, fulltime)'
        '\nVALUES (%s,%s)',
        stmt_str)
    self.assertEqual([[111, 0]], args)

  def testMakeUpdate(self):
    """MakeUpdate() builds UPDATE ... SET with parameterized new values."""
    stmt = sql.Statement.MakeUpdate('Employee', {'fulltime': True})
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'UPDATE Employee SET fulltime=%s',
        stmt_str)
    self.assertEqual([1], args)

  def testMakeIncrement(self):
    """MakeIncrement() bumps a counter and exposes it via LAST_INSERT_ID()."""
    stmt = sql.Statement.MakeIncrement('Employee', 'years_worked')
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'UPDATE Employee SET years_worked = LAST_INSERT_ID(years_worked + %s)',
        stmt_str)
    self.assertEqual([1], args)

    stmt = sql.Statement.MakeIncrement('Employee', 'years_worked', step=5)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'UPDATE Employee SET years_worked = LAST_INSERT_ID(years_worked + %s)',
        stmt_str)
    self.assertEqual([5], args)

  def testMakeDelete(self):
    """MakeDelete() builds a bare DELETE; conditions are added separately."""
    stmt = sql.Statement.MakeDelete('Employee')
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'DELETE FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testAddUseClause(self):
    """AddUseClause() places index hints between FROM and ORDER BY."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddUseClause('USE INDEX (emp_id) USE INDEX FOR ORDER BY (emp_id)')
    stmt.AddOrderByTerms([('emp_id', [])])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nUSE INDEX (emp_id) USE INDEX FOR ORDER BY (emp_id)'
        '\nORDER BY emp_id',
        stmt_str)
    self.assertEqual([], args)

  def testAddJoinClause_Empty(self):
    """Adding no join clauses leaves the statement unchanged."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddJoinClauses([])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testAddJoinClause(self):
    """AddJoinClauses() appends JOINs; left=True makes them LEFT JOINs."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddJoinClauses([('CorporateHoliday', [])])
    stmt.AddJoinClauses(
        [('Product ON Project.inventor_id = emp_id', [])], left=True)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\n  JOIN CorporateHoliday'
        '\n  LEFT JOIN Product ON Project.inventor_id = emp_id',
        stmt_str)
    self.assertEqual([], args)

  def testAddGroupByTerms_Empty(self):
    """Adding no GROUP BY terms leaves the statement unchanged."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddGroupByTerms([])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testAddGroupByTerms(self):
    """AddGroupByTerms() appends a GROUP BY clause in the given order."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddGroupByTerms(['dept_id', 'location_id'])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nGROUP BY dept_id, location_id',
        stmt_str)
    self.assertEqual([], args)

  def testAddOrderByTerms_Empty(self):
    """Adding no ORDER BY terms leaves the statement unchanged."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddOrderByTerms([])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testAddOrderByTerms(self):
    """AddOrderByTerms() appends ORDER BY, preserving any DESC modifier."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddOrderByTerms([('dept_id', []), ('emp_id DESC', [])])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nORDER BY dept_id, emp_id DESC',
        stmt_str)
    self.assertEqual([], args)

  def testSetLimitAndOffset(self):
    """SetLimitAndOffset() emits LIMIT, plus OFFSET only when nonzero."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.SetLimitAndOffset(100, 0)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nLIMIT 100',
        stmt_str)
    self.assertEqual([], args)

    stmt.SetLimitAndOffset(100, 500)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nLIMIT 100 OFFSET 500',
        stmt_str)
    self.assertEqual([], args)

  def testAddWhereTerms_Select(self):
    """WHERE terms on a SELECT contribute both SQL text and args."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddWhereTerms([], emp_id=[111, 222])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nWHERE emp_id IN (%s,%s)',
        stmt_str)
    self.assertEqual([111, 222], args)

  def testAddWhereTerms_Update(self):
    """On an UPDATE, the SET args come before the WHERE args."""
    stmt = sql.Statement.MakeUpdate('Employee', {'fulltime': True})
    stmt.AddWhereTerms([], emp_id=[111, 222])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'UPDATE Employee SET fulltime=%s'
        '\nWHERE emp_id IN (%s,%s)',
        stmt_str)
    self.assertEqual([1, 111, 222], args)

  def testAddWhereTerms_Delete(self):
    """WHERE terms on a DELETE contribute both SQL text and args."""
    stmt = sql.Statement.MakeDelete('Employee')
    stmt.AddWhereTerms([], emp_id=[111, 222])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'DELETE FROM Employee'
        '\nWHERE emp_id IN (%s,%s)',
        stmt_str)
    self.assertEqual([111, 222], args)

  def testAddWhereTerms_Empty(self):
    """Adding empty terms should have no effect."""
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddWhereTerms([])
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee',
        stmt_str)
    self.assertEqual([], args)

  def testAddWhereTerms_MultipleTerms(self):
    """Raw cond/args pairs combine with keyword conditions.

    The raw term is emitted first; a _not suffix generates a != condition.
    (Method name fixed from the earlier misspelling 'MulitpleTerms'.)
    """
    stmt = sql.Statement.MakeSelect('Employee', ['emp_id', 'fulltime'])
    stmt.AddWhereTerms(
        [('emp_id %% %s = %s', [2, 0])], fulltime=True, emp_id_not=222)
    stmt_str, args = stmt.Generate()
    self.assertEqual(
        'SELECT emp_id, fulltime FROM Employee'
        '\nWHERE emp_id %% %s = %s'
        '\n  AND emp_id != %s'
        '\n  AND fulltime = %s',
        stmt_str)
    self.assertEqual([2, 0, 222, 1], args)
+
+
+
class FunctionsTest(unittest.TestCase):
  """Tests for the module-level _BoolsToInts() helper in sql."""

  def testBoolsToInts_NoChanges(self):
    """Non-bool values pass through unchanged (inner tuples become lists)."""
    for expected, arg in [
        (['hello'], ['hello']),
        ([['hello']], [['hello']]),
        ([['hello']], [('hello',)]),
        ([12], [12]),
        ([[12]], [[12]]),
        ([[12]], [(12,)]),
        ([12, 13, 'hi', [99, 'yo']], [12, 13, 'hi', [99, 'yo']]),
        ]:
      self.assertEqual(expected, sql._BoolsToInts(arg))

  def testBoolsToInts_WithChanges(self):
    """True and False become 1 and 0, even inside nested sequences."""
    for expected, arg in [
        ([1, 0], [True, False]),
        ([[1, 0]], [[True, False]]),
        ([[1, 0]], [(True, False)]),
        ([12, 1, 'hi', [0, 'yo']], [12, True, 'hi', [False, 'yo']]),
        ]:
      self.assertEqual(expected, sql._BoolsToInts(arg))
+
+
+# Support running this test file directly from the command line.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/test/table_view_helpers_test.py b/appengine/monorail/framework/test/table_view_helpers_test.py
new file mode 100644
index 0000000..c92d623
--- /dev/null
+++ b/appengine/monorail/framework/test/table_view_helpers_test.py
@@ -0,0 +1,487 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for table_view_helpers classes and functions."""
+
+import collections
+import unittest
+
+from framework import framework_views
+from framework import table_view_helpers
+from proto import tracker_pb2
+from testing import fake
+from tracker import tracker_bizobj
+
+
+EMPTY_SEARCH_RESULTS = []
+
+# Shared fixture: four issues whose labels exercise the label-to-column
+# logic (well-known Key-Value labels plus a multi-dash oddball).
+SEARCH_RESULTS_WITH_LABELS = [
+    fake.MakeTestIssue(
+        789, 1, 'sum 1', 'New', 111L, labels='Priority-High Mstone-1',
+        merged_into=200001, star_count=1),
+    fake.MakeTestIssue(
+        789, 2, 'sum 2', 'New', 111L, labels='Priority-High Mstone-1',
+        merged_into=1, star_count=1),
+    fake.MakeTestIssue(
+        789, 3, 'sum 3', 'New', 111L, labels='Priority-Low Mstone-1.1',
+        merged_into=1, star_count=1),
+    # 'Visibility-Super-High' tests that only first dash counts
+    fake.MakeTestIssue(
+        789, 4, 'sum 4', 'New', 111L, labels='Visibility-Super-High',
+        star_count=1),
+    ]
+
+
def MakeTestIssue(local_id, issue_id, summary):
  """Return a minimal Issue protobuf with just the given identifying fields."""
  result = tracker_pb2.Issue()
  result.local_id = local_id
  result.issue_id = issue_id
  result.summary = summary
  return result
+
+
class TableCellTest(unittest.TestCase):
  """Tests for the TableCell* classes used to render list-view cells."""

  USERS_BY_ID = {}

  def setUp(self):
    self.issue1 = MakeTestIssue(local_id=1, issue_id=100001, summary='One')
    self.issue2 = MakeTestIssue(local_id=2, issue_id=100002, summary='Two')
    self.issue3 = MakeTestIssue(local_id=3, issue_id=100003, summary='Three')

  def testTableCellSummary(self):
    """TableCellSummary stores the data given to it."""
    issue = MakeTestIssue(4, 4, 'Lame default summary.')
    cell = table_view_helpers.TableCellSummary(
        issue, None, self.USERS_BY_ID, [('lab', False)], {}, {}, 'fake config')
    self.assertEqual(table_view_helpers.CELL_TYPE_SUMMARY, cell.type)
    self.assertEqual('Lame default summary.', cell.values[0].item)
    self.assertEqual('lab', cell.non_column_labels[0].value)

  def testTableCellSummary_NoPythonEscaping(self):
    """TableCellSummary stores the summary without escaping it in python."""
    issue = MakeTestIssue(4, 4, '<b>bold</b> "summary".')
    cell = table_view_helpers.TableCellSummary(
        issue, None, self.USERS_BY_ID, [('lab', False)], {}, {}, 'fake config')
    self.assertEqual('<b>bold</b> "summary".', cell.values[0].item)

  # TODO(jrobbins): TableCellProject, TableCellStars
+
+
+class TableViewHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.default_cols = 'a b c'
+ self.builtin_cols = ['a', 'b', 'x', 'y', 'z']
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ def testComputeUnshownColumns_CommonCase(self):
+ shown_cols = ['a', 'b', 'c']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['x', 'y', 'z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(
+ unshown, ['Mstone', 'Priority', 'Visibility', 'x', 'y', 'z'])
+
+ def testComputeUnshownColumns_MoreBuiltins(self):
+ shown_cols = ['a', 'b', 'c', 'x', 'y']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['Mstone', 'Priority', 'Visibility', 'z'])
+
+ def testComputeUnshownColumns_NotAllDefaults(self):
+ shown_cols = ['a', 'b']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['c', 'x', 'y', 'z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(
+ unshown, ['Mstone', 'Priority', 'Visibility', 'c', 'x', 'y', 'z'])
+
+ def testComputeUnshownColumns_ExtraNonDefaults(self):
+ shown_cols = ['a', 'b', 'c', 'd', 'e', 'f']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['x', 'y', 'z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(
+ unshown, ['Mstone', 'Priority', 'Visibility', 'x', 'y', 'z'])
+
+ def testComputeUnshownColumns_UserColumnsShown(self):
+ shown_cols = ['a', 'b', 'c', 'Priority']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['x', 'y', 'z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['Mstone', 'Visibility', 'x', 'y', 'z'])
+
+ def testComputeUnshownColumns_EverythingShown(self):
+ shown_cols = [
+ 'a', 'b', 'c', 'x', 'y', 'z', 'Priority', 'Mstone', 'Visibility']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, [])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, [])
+
+ def testComputeUnshownColumns_NothingShown(self):
+ shown_cols = []
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = self.default_cols
+ config.well_known_labels = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(unshown, ['a', 'b', 'c', 'x', 'y', 'z'])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, self.builtin_cols)
+ self.assertEquals(
+ unshown,
+ ['Mstone', 'Priority', 'Visibility', 'a', 'b', 'c', 'x', 'y', 'z'])
+
+ def testComputeUnshownColumns_NoBuiltins(self):
+ shown_cols = ['a', 'b', 'c']
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_col_spec = 'a b c'
+ config.well_known_labels = []
+ builtin_cols = []
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ EMPTY_SEARCH_RESULTS, shown_cols, config, builtin_cols)
+ self.assertEquals(unshown, [])
+
+ unshown = table_view_helpers.ComputeUnshownColumns(
+ SEARCH_RESULTS_WITH_LABELS, shown_cols, config, builtin_cols)
+ self.assertEquals(unshown, ['Mstone', 'Priority', 'Visibility'])
+
+ def testExtractUniqueValues_NoColumns(self):
+ column_values = table_view_helpers.ExtractUniqueValues(
+ [], SEARCH_RESULTS_WITH_LABELS, {}, self.config)
+ self.assertEquals([], column_values)
+
+ def testExtractUniqueValues_NoResults(self):
+ cols = ['type', 'priority', 'owner', 'status', 'stars', 'attachments']
+ column_values = table_view_helpers.ExtractUniqueValues(
+ cols, EMPTY_SEARCH_RESULTS, {}, self.config)
+ self.assertEquals(6, len(column_values))
+ for index, col in enumerate(cols):
+ self.assertEquals(index, column_values[index].col_index)
+ self.assertEquals(col, column_values[index].column_name)
+ self.assertEquals([], column_values[index].filter_values)
+
+ def testExtractUniqueValues_ExplicitResults(self):
+ cols = ['priority', 'owner', 'status', 'stars', 'mstone', 'foo']
+ users_by_id = {
+ 111L: framework_views.UserView(111, 'foo@example.com', True),
+ }
+ column_values = table_view_helpers.ExtractUniqueValues(
+ cols, SEARCH_RESULTS_WITH_LABELS, users_by_id, self.config)
+ self.assertEquals(len(cols), len(column_values))
+
+ self.assertEquals('priority', column_values[0].column_name)
+ self.assertEquals(['High', 'Low'], column_values[0].filter_values)
+
+ self.assertEquals('owner', column_values[1].column_name)
+ self.assertEquals(['f...@example.com'], column_values[1].filter_values)
+
+ self.assertEquals('status', column_values[2].column_name)
+ self.assertEquals(['New'], column_values[2].filter_values)
+
+ self.assertEquals('stars', column_values[3].column_name)
+ self.assertEquals([1], column_values[3].filter_values)
+
+ self.assertEquals('mstone', column_values[4].column_name)
+ self.assertEquals(['1', '1.1'], column_values[4].filter_values)
+
+ self.assertEquals('foo', column_values[5].column_name)
+ self.assertEquals([], column_values[5].filter_values)
+
+ # self.assertEquals('mergedinto', column_values[6].column_name)
+ # self.assertEquals(
+ # ['1', 'other-project:1'], column_values[6].filter_values)
+
+ def testExtractUniqueValues_CombinedColumns(self):
+ cols = ['priority/pri', 'owner', 'status', 'stars', 'mstone/milestone']
+ users_by_id = {
+ 111L: framework_views.UserView(111, 'foo@example.com', True),
+ }
+ issue = fake.MakeTestIssue(
+ 789, 5, 'sum 5', 'New', 111L, merged_into=200001,
+ labels='Priority-High Pri-0 Milestone-1.0 mstone-1',
+ star_count=15)
+
+ column_values = table_view_helpers.ExtractUniqueValues(
+ cols, SEARCH_RESULTS_WITH_LABELS + [issue], users_by_id,
+ self.config)
+ self.assertEquals(5, len(column_values))
+
+ self.assertEquals('priority/pri', column_values[0].column_name)
+ self.assertEquals(['0', 'High', 'Low'], column_values[0].filter_values)
+
+ self.assertEquals('owner', column_values[1].column_name)
+ self.assertEquals(['f...@example.com'], column_values[1].filter_values)
+
+ self.assertEquals('status', column_values[2].column_name)
+ self.assertEquals(['New'], column_values[2].filter_values)
+
+ self.assertEquals('stars', column_values[3].column_name)
+ self.assertEquals([1, 15], column_values[3].filter_values)
+
+ self.assertEquals('mstone/milestone', column_values[4].column_name)
+ self.assertEquals(['1', '1.0', '1.1'], column_values[4].filter_values)
+
+ def testExtractUniqueValues_DerivedValues(self):
+ cols = ['priority', 'milestone', 'owner', 'status']
+ users_by_id = {
+ 111L: framework_views.UserView(111, 'foo@example.com', True),
+ 222L: framework_views.UserView(222, 'bar@example.com', True),
+ 333L: framework_views.UserView(333, 'lol@example.com', True),
+ }
+ search_results = [
+ fake.MakeTestIssue(
+ 789, 1, 'sum 1', '', 111L, labels='Priority-High Milestone-1.0',
+ derived_labels='Milestone-2.0 Foo', derived_status='Started'),
+ fake.MakeTestIssue(
+ 789, 2, 'sum 2', 'New', 111L, labels='Priority-High Milestone-1.0',
+ derived_owner_id=333L), # Not seen because of owner_id
+ fake.MakeTestIssue(
+ 789, 3, 'sum 3', 'New', 0, labels='Priority-Low Milestone-1.1',
+ derived_owner_id=222L),
+ ]
+
+ column_values = table_view_helpers.ExtractUniqueValues(
+ cols, search_results, users_by_id, self.config)
+ self.assertEquals(4, len(column_values))
+
+ self.assertEquals('priority', column_values[0].column_name)
+ self.assertEquals(['High', 'Low'], column_values[0].filter_values)
+
+ self.assertEquals('milestone', column_values[1].column_name)
+ self.assertEquals(['1.0', '1.1', '2.0'],
+ column_values[1].filter_values)
+
+ self.assertEquals('owner', column_values[2].column_name)
+ self.assertEquals(['b...@example.com', 'f...@example.com'],
+ column_values[2].filter_values)
+
+ self.assertEquals('status', column_values[3].column_name)
+ self.assertEquals(['New', 'Started'], column_values[3].filter_values)
+
+ def testExtractUniqueValues_ColumnsRobustness(self):
+ cols = ['reporter', 'cc', 'owner', 'status', 'attachments']
+ search_results = [
+ tracker_pb2.Issue(),
+ ]
+ column_values = table_view_helpers.ExtractUniqueValues(
+ cols, search_results, {}, self.config)
+
+ self.assertEquals(5, len(column_values))
+ for col_val in column_values:
+ if col_val.column_name == 'attachments':
+ self.assertEquals([0], col_val.filter_values)
+ else:
+ self.assertEquals([], col_val.filter_values)
+
+ def testMakeTableData_Empty(self):
+ visible_results = []
+ lower_columns = []
+ cell_factories = {}
+ table_data = table_view_helpers.MakeTableData(
+ visible_results, None, [], lower_columns, lower_columns,
+ cell_factories, [], lambda art: 'id', {}, self.config)
+ self.assertEqual([], table_data)
+
+ lower_columns = ['type', 'priority', 'summary', 'stars']
+ cell_factories = {
+ 'summary': table_view_helpers.TableCellSummary,
+ 'stars': table_view_helpers.TableCellStars,
+ }
+
+ table_data = table_view_helpers.MakeTableData(
+ visible_results, None, [], lower_columns, [], {},
+ cell_factories, lambda art: 'id', {}, self.config)
+ self.assertEqual([], table_data)
+
+ def testMakeTableData_Normal(self):
+ art = fake.MakeTestIssue(
+ 789, 1, 'sum 1', 'New', 111L, labels='Type-Defect Priority-Medium')
+ visible_results = [art]
+ lower_columns = ['type', 'priority', 'summary', 'stars']
+ cell_factories = {
+ 'summary': table_view_helpers.TableCellSummary,
+ 'stars': table_view_helpers.TableCellStars,
+ }
+
+ table_data = table_view_helpers.MakeTableData(
+ visible_results, None, [], lower_columns, lower_columns, {},
+ cell_factories, lambda art: 'id', {}, self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(4, len(row.cells))
+ self.assertEqual('Defect', row.cells[0].values[0].item)
+
+ def testMakeTableData_Groups(self):
+ art = fake.MakeTestIssue(
+ 789, 1, 'sum 1', 'New', 111L, labels='Type-Defect Priority-Medium')
+ visible_results = [art]
+ lower_columns = ['type', 'priority', 'summary', 'stars']
+ lower_group_by = ['priority']
+ cell_factories = {
+ 'summary': table_view_helpers.TableCellSummary,
+ 'stars': table_view_helpers.TableCellStars,
+ }
+
+ table_data = table_view_helpers.MakeTableData(
+ visible_results, None, [], lower_columns, lower_group_by, {},
+ cell_factories, lambda art: 'id', {}, self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(1, len(row.group.cells))
+ self.assertEqual('Medium', row.group.cells[0].values[0].item)
+
+ def testMakeRowData(self):
+ art = fake.MakeTestIssue(
+ 789, 1, 'sum 1', 'New', 111L, labels='Type-Defect Priority-Medium',
+ star_count=1)
+ columns = ['type', 'priority', 'summary', 'stars']
+
+ cell_factories = [table_view_helpers.TableCellKeyLabels,
+ table_view_helpers.TableCellKeyLabels,
+ table_view_helpers.TableCellSummary,
+ table_view_helpers.TableCellStars]
+
+ # a result is an table_view_helpers.TableRow object with a "cells" field
+ # containing a list of table_view_helpers.TableCell objects.
+ result = table_view_helpers.MakeRowData(
+ art, columns, True, {}, cell_factories, {}, self.config)
+
+ self.assertEqual(len(columns), len(result.cells))
+
+ for i in range(len(columns)):
+ cell = result.cells[i]
+ self.assertEqual(i, cell.col_index)
+
+ self.assertEqual(table_view_helpers.CELL_TYPE_ATTR, result.cells[0].type)
+ self.assertEqual('Defect', result.cells[0].values[0].item)
+ self.assertFalse(result.cells[0].values[0].is_derived)
+
+ self.assertEqual(table_view_helpers.CELL_TYPE_ATTR, result.cells[1].type)
+ self.assertEqual('Medium', result.cells[1].values[0].item)
+ self.assertFalse(result.cells[1].values[0].is_derived)
+
+ self.assertEqual(
+ table_view_helpers.CELL_TYPE_SUMMARY, result.cells[2].type)
+ self.assertEqual('sum 1', result.cells[2].values[0].item)
+ self.assertFalse(result.cells[2].values[0].is_derived)
+
+ self.assertEqual(table_view_helpers.CELL_TYPE_ATTR, result.cells[3].type)
+ self.assertEqual(1, result.cells[3].values[0].item)
+ self.assertFalse(result.cells[3].values[0].is_derived)
+
+ def testAccumulateLabelValues_Empty(self):
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ [], [], label_values, non_col_labels)
+ self.assertEqual({}, label_values)
+ self.assertEqual([], non_col_labels)
+
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ [], ['Type', 'Priority'], label_values, non_col_labels)
+ self.assertEqual({}, label_values)
+ self.assertEqual([], non_col_labels)
+
+ def testAccumulateLabelValues_OneWordLabels(self):
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ ['HelloThere'], [], label_values, non_col_labels)
+ self.assertEqual({}, label_values)
+ self.assertEqual([('HelloThere', False)], non_col_labels)
+
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ ['HelloThere'], [], label_values, non_col_labels, is_derived=True)
+ self.assertEqual({}, label_values)
+ self.assertEqual([('HelloThere', True)], non_col_labels)
+
+ def testAccumulateLabelValues_KeyValueLabels(self):
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ ['Type-Defect', 'Milestone-Soon'], ['type', 'milestone'],
+ label_values, non_col_labels)
+ self.assertEqual(
+ {'type': [('Defect', False)],
+ 'milestone': [('Soon', False)]},
+ label_values)
+ self.assertEqual([], non_col_labels)
+
+ def testAccumulateLabelValues_MultiValueLabels(self):
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ ['OS-Mac', 'OS-Linux'], ['os', 'arch'],
+ label_values, non_col_labels)
+ self.assertEqual(
+ {'os': [('Mac', False), ('Linux', False)]},
+ label_values)
+ self.assertEqual([], non_col_labels)
+
+ def testAccumulateLabelValues_MultiPartLabels(self):
+ label_values, non_col_labels = collections.defaultdict(list), []
+ table_view_helpers._AccumulateLabelValues(
+ ['OS-Mac-Server', 'OS-Mac-Laptop'], ['os', 'os-mac'],
+ label_values, non_col_labels)
+ self.assertEqual(
+ {'os': [('Mac-Server', False), ('Mac-Laptop', False)],
+ 'os-mac': [('Server', False), ('Laptop', False)],
+ },
+ label_values)
+ self.assertEqual([], non_col_labels)
+
+
+# Support running this test file directly from the command line.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/test/template_helpers_test.py b/appengine/monorail/framework/test/template_helpers_test.py
new file mode 100644
index 0000000..c6b8a32
--- /dev/null
+++ b/appengine/monorail/framework/test/template_helpers_test.py
@@ -0,0 +1,193 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for template_helpers module."""
+
+import unittest
+
+from framework import pbproxy_test_pb2
+from framework import template_helpers
+
+
+class HelpersUnitTest(unittest.TestCase):
+  """Unit tests for template_helpers: EZTItem, PBProxy, and text fitting."""
+
+  def testDictionaryProxy(self):
+
+    # basic in 'n out test
+    item = template_helpers.EZTItem(label='foo', group_name='bar')
+
+    self.assertEquals('foo', item.label)
+    self.assertEquals('bar', item.group_name)
+
+    # be sure the __str__ returns the fields
+    self.assertEquals("EZTItem({'group_name': 'bar', 'label': 'foo'})",
+                      str(item))
+
+  def testPBProxy(self):
+    """Checks that PBProxy wraps protobuf objects as expected."""
+    # check that protobuf fields are accessible in ".attribute" form
+    pbe = pbproxy_test_pb2.PBProxyExample()
+    pbe.foo = 'foo'
+    pbe.bar = False
+    pbep = template_helpers.PBProxy(pbe)
+    self.assertEqual(pbep.foo, 'foo')
+    # _bool suffix converts protobuf field 'bar' to None (EZT boolean false)
+    self.assertEqual(pbep.bar_bool, None)
+
+    # check that a new field can be added to the PBProxy
+    pbep.baz = 'bif'
+    self.assertEqual(pbep.baz, 'bif')
+
+    # check that a PBProxy-local field can hide a protobuf field
+    pbep.foo = 'local foo'
+    self.assertEqual(pbep.foo, 'local foo')
+
+    # check that a nested protobuf is recursively wrapped with a PBProxy
+    pbn = pbproxy_test_pb2.PBProxyNested()
+    pbn.nested = pbproxy_test_pb2.PBProxyExample()
+    pbn.nested.foo = 'bar'
+    pbn.nested.bar = True
+    pbnp = template_helpers.PBProxy(pbn)
+    self.assertEqual(pbnp.nested.foo, 'bar')
+    # _bool suffix converts protobuf field 'bar' to 'yes' (EZT boolean true)
+    self.assertEqual(pbnp.nested.bar_bool, 'yes')
+
+    # check that 'repeated' lists of items produce a list of strings
+    pbn.multiple_strings.append('1')
+    pbn.multiple_strings.append('2')
+    self.assertEqual(pbnp.multiple_strings, ['1', '2'])
+
+    # check that 'repeated' messages produce lists of PBProxy instances
+    pbe1 = pbproxy_test_pb2.PBProxyExample()
+    pbn.multiple_pbes.append(pbe1)
+    pbe1.foo = '1'
+    pbe1.bar = True
+    pbe2 = pbproxy_test_pb2.PBProxyExample()
+    pbn.multiple_pbes.append(pbe2)
+    pbe2.foo = '2'
+    pbe2.bar = False
+    self.assertEqual(pbnp.multiple_pbes[0].foo, '1')
+    self.assertEqual(pbnp.multiple_pbes[0].bar_bool, 'yes')
+    self.assertEqual(pbnp.multiple_pbes[1].foo, '2')
+    self.assertEqual(pbnp.multiple_pbes[1].bar_bool, None)
+
+  def testFitTextMethods(self):
+    """Tests FitUnsafeText, with an eye on i18n."""
+    # pylint: disable=anomalous-unicode-escape-in-string
+    test_data = (
+        u'This is a short string.',
+
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. '
+        u'This is a much longer string. ',
+
+        # This is a short escaped i18n string
+        '\xd5\xa1\xd5\xba\xd5\xa1\xd5\xaf\xd5\xab'.decode('utf-8'),
+
+        # This is a longer i18n string
+        '\xd5\xa1\xd5\xba\xd5\xa1\xd5\xaf\xd5\xab '
+        '\xe6\x88\x91\xe8\x83\xbd\xe5\x90\x9e '
+        '\xd5\xa1\xd5\xba\xd5\xa1\xd5\xaf\xd5\xab '
+        '\xe6\x88\x91\xe8\x83\xbd\xe5\x90\x9e '
+        '\xd5\xa1\xd5\xba\xd5\xa1\xd5\xaf\xd5\xab '
+        '\xe6\x88\x91\xe8\x83\xbd\xe5\x90\x9e '
+        '\xd5\xa1\xd5\xba\xd5\xa1\xd5\xaf\xd5\xab '
+        '\xe6\x88\x91\xe8\x83\xbd\xe5\x90\x9e '.decode('utf-8'),
+
+        # This is a longer i18n string that was causing trouble.
+        # NOTE(review): in a non-unicode literal these '\uXXXX' escapes are
+        # literal backslash text, not code points -- confirm intended.
+        '\u041d\u0430 \u0431\u0435\u0440\u0435\u0433\u0443'
+        ' \u043f\u0443\u0441\u0442\u044b\u043d\u043d\u044b\u0445'
+        ' \u0432\u043e\u043b\u043d \u0421\u0442\u043e\u044f\u043b'
+        ' \u043e\u043d, \u0434\u0443\u043c'
+        ' \u0432\u0435\u043b\u0438\u043a\u0438\u0445'
+        ' \u043f\u043e\u043b\u043d, \u0418'
+        ' \u0432\u0434\u0430\u043b\u044c'
+        ' \u0433\u043b\u044f\u0434\u0435\u043b.'
+        ' \u041f\u0440\u0435\u0434 \u043d\u0438\u043c'
+        ' \u0448\u0438\u0440\u043e\u043a\u043e'
+        ' \u0420\u0435\u043a\u0430'
+        ' \u043d\u0435\u0441\u043b\u0430\u0441\u044f;'
+        ' \u0431\u0435\u0434\u043d\u044b\u0439'
+        ' \u0447\u0451\u043b\u043d \u041f\u043e'
+        ' \u043d\u0435\u0439'
+        ' \u0441\u0442\u0440\u0435\u043c\u0438\u043b\u0441\u044f'
+        ' \u043e\u0434\u0438\u043d\u043e\u043a\u043e.'
+        ' \u041f\u043e \u043c\u0448\u0438\u0441\u0442\u044b\u043c,'
+        ' \u0442\u043e\u043f\u043a\u0438\u043c'
+        ' \u0431\u0435\u0440\u0435\u0433\u0430\u043c'
+        ' \u0427\u0435\u0440\u043d\u0435\u043b\u0438'
+        ' \u0438\u0437\u0431\u044b \u0437\u0434\u0435\u0441\u044c'
+        ' \u0438 \u0442\u0430\u043c, \u041f\u0440\u0438\u044e\u0442'
+        ' \u0443\u0431\u043e\u0433\u043e\u0433\u043e'
+        ' \u0447\u0443\u0445\u043e\u043d\u0446\u0430;'
+        ' \u0418 \u043b\u0435\u0441,'
+        ' \u043d\u0435\u0432\u0435\u0434\u043e\u043c\u044b\u0439'
+        ' \u043b\u0443\u0447\u0430\u043c \u0412'
+        ' \u0442\u0443\u043c\u0430\u043d\u0435'
+        ' \u0441\u043f\u0440\u044f\u0442\u0430\u043d\u043d\u043e'
+        '\u0433\u043e \u0441\u043e\u043b\u043d\u0446\u0430,'
+        ' \u041a\u0440\u0443\u0433\u043e\u043c'
+        ' \u0448\u0443\u043c\u0435\u043b.'.decode('utf-8'))
+
+    for unicode_s in test_data:
+      # Get the length in characters, not bytes.
+      length = len(unicode_s)
+
+      # Test the FitUnsafeText method at the length boundary.
+      fitted_unsafe_text = template_helpers.FitUnsafeText(unicode_s, length)
+      self.assertEqual(fitted_unsafe_text, unicode_s)
+
+      # Set some values that test FitString well.
+      # Python 2 '/' on ints floors, so these stay whole character counts.
+      available_space = length / 2
+      max_trailing = length / 4
+      # Break the string at various places - symmetric range around 0
+      for i in range(1-max_trailing, max_trailing):
+        # Test the FitUnsafeText method.
+        fitted_unsafe_text = template_helpers.FitUnsafeText(
+            unicode_s, available_space - i)
+        self.assertEqual(fitted_unsafe_text[:available_space - i],
+                         unicode_s[:available_space - i])
+
+    # Test a string that is already unicode
+    u_string = u'This is already unicode'
+    fitted_unsafe_text = template_helpers.FitUnsafeText(u_string, 100)
+    self.assertEqual(u_string, fitted_unsafe_text)
+
+    # Test a string that is already unicode, and has non-ascii in it.
+    # NOTE(review): '\\u0301' is an escaped backslash sequence, so this
+    # string is actually all-ASCII text -- confirm intended.
+    u_string = u'This is already unicode este\\u0301tico'
+    fitted_unsafe_text = template_helpers.FitUnsafeText(u_string, 100)
+    self.assertEqual(u_string, fitted_unsafe_text)
+
+  def testEZTError(self):
+    errors = template_helpers.EZTError()
+    self.assertFalse(errors.AnyErrors())
+
+    errors.error_a = 'A'
+    self.assertTrue(errors.AnyErrors())
+    self.assertEquals('A', errors.error_a)
+
+    errors.SetError('error_b', 'B')
+    self.assertTrue(errors.AnyErrors())
+    self.assertEquals('A', errors.error_a)
+    self.assertEquals('B', errors.error_b)
+
+  def testBytesKbOrMb(self):
+    self.assertEqual('1023 bytes', template_helpers.BytesKbOrMb(1023))
+    self.assertEqual('1.0 KB', template_helpers.BytesKbOrMb(1024))
+    self.assertEqual('1023 KB', template_helpers.BytesKbOrMb(1024 * 1023))
+    self.assertEqual('1.0 MB', template_helpers.BytesKbOrMb(1024 * 1024))
+    self.assertEqual('98.0 MB', template_helpers.BytesKbOrMb(98 * 1024 * 1024))
+    self.assertEqual('99 MB', template_helpers.BytesKbOrMb(99 * 1024 * 1024))
+
+
+# Support running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/test/timestr_test.py b/appengine/monorail/framework/test/timestr_test.py
new file mode 100644
index 0000000..6827213
--- /dev/null
+++ b/appengine/monorail/framework/test/timestr_test.py
@@ -0,0 +1,104 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for timestr module."""
+
+import datetime
+import time
+import unittest
+
+from framework import timestr
+
+
+class TimeStrTest(unittest.TestCase):
+  """Unit tests for timestr routines."""
+
+  def testFormatAbsoluteDate(self):
+    now = datetime.datetime(2008, 1, 1)
+
+    # GetDate reads the enclosing 'now' at call time (via the lambda), so
+    # reassigning 'now' below changes the reference date for later asserts.
+    def GetDate(*args):
+      date = datetime.datetime(*args)
+      return timestr.FormatAbsoluteDate(
+          time.mktime(date.timetuple()), clock=lambda: now)
+
+    self.assertEquals(GetDate(2008, 1, 1), 'Today')
+    self.assertEquals(GetDate(2007, 12, 31), 'Yesterday')
+    self.assertEquals(GetDate(2007, 12, 30), 'Dec 30')
+    self.assertEquals(GetDate(2007, 1, 1), 'Jan 2007')
+    self.assertEquals(GetDate(2007, 1, 2), 'Jan 2007')
+    self.assertEquals(GetDate(2007, 12, 31), 'Yesterday')
+    self.assertEquals(GetDate(2006, 12, 31), 'Dec 2006')
+    self.assertEquals(GetDate(2007, 7, 1), 'Jul 1')
+    self.assertEquals(GetDate(2007, 6, 30), 'Jun 2007')
+    self.assertEquals(GetDate(2008, 1, 3), 'Jan 2008')
+
+    # Leap year fun
+    now = datetime.datetime(2008, 3, 1)
+    self.assertEquals(GetDate(2008, 2, 29), 'Yesterday')
+
+    # Clock skew
+    now = datetime.datetime(2008, 1, 1, 23, 59, 59)
+    self.assertEquals(GetDate(2008, 1, 2), 'Today')
+    now = datetime.datetime(2007, 12, 31, 23, 59, 59)
+    self.assertEquals(GetDate(2008, 1, 1), 'Today')
+    self.assertEquals(GetDate(2008, 1, 2), 'Jan 2008')
+
+  def testFormatRelativeDate(self):
+    now = time.mktime(datetime.datetime(2008, 1, 1).timetuple())
+
+    # Checks both the default and the recent_only=True result for a
+    # timestamp secs_ago seconds before 'now'.
+    def TestSecsAgo(secs_ago, expected, expected_recent_only):
+      test_time = now - secs_ago
+      actual = timestr.FormatRelativeDate(
+          test_time, clock=lambda: now)
+      self.assertEquals(actual, expected)
+      actual_recent_only = timestr.FormatRelativeDate(
+          test_time, clock=lambda: now, recent_only=True)
+      self.assertEquals(actual_recent_only, expected_recent_only)
+
+    TestSecsAgo(10 * 24 * 60 * 60, ', 2007', '')
+    TestSecsAgo(5 * 24 * 60 * 60 - 1, ' (4 days ago)', '4 days ago')
+    TestSecsAgo(5 * 60 * 60 - 1, ' (4 hours ago)', '4 hours ago')
+    TestSecsAgo(5 * 60 - 1, ' (4 minutes ago)', '4 minutes ago')
+    TestSecsAgo(2 * 60 - 1, ' (1 minute ago)', '1 minute ago')
+    TestSecsAgo(60 - 1, ' (moments ago)', 'moments ago')
+    TestSecsAgo(0, ' (moments ago)', 'moments ago')
+    TestSecsAgo(-10, ' (moments ago)', 'moments ago')
+    TestSecsAgo(-100, ' (in the future)', '')
+
+  def testGetHumanScaleDate(self):
+    """Tests GetHumanScaleDate()."""
+    now = time.mktime(datetime.datetime(2008, 4, 10, 20, 50, 30).timetuple())
+
+    def GetDate(*args):
+      date = datetime.datetime(*args)
+      timestamp = time.mktime(date.timetuple())
+      return timestr.GetHumanScaleDate(timestamp, now=now)
+
+    self.assertEquals(
+        GetDate(2008, 4, 10, 15), ('Today', '5 hours ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 10, 19, 55), ('Today', '55 min ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 10, 20, 48, 35), ('Today', '1 min ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 10, 20, 49, 35), ('Today', 'moments ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 10, 20, 50, 55), ('Today', 'moments ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 9, 15), ('Yesterday', '29 hours ago'))
+    self.assertEquals(
+        GetDate(2008, 4, 5, 15), ('Last 7 days', 'Apr 05, 2008'))
+    self.assertEquals(
+        GetDate(2008, 3, 22, 15), ('Last 30 days', 'Mar 22, 2008'))
+    self.assertEquals(
+        GetDate(2008, 1, 2, 15), ('Earlier this year', 'Jan 02, 2008'))
+    self.assertEquals(
+        GetDate(2007, 12, 31, 15), ('Older', 'Dec 31, 2007'))
+    self.assertEquals(
+        GetDate(2008, 4, 11, 20, 49, 35), ('Future', 'Later'))
+
+
+# Support running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/test/validate_test.py b/appengine/monorail/framework/test/validate_test.py
new file mode 100644
index 0000000..f8b6901
--- /dev/null
+++ b/appengine/monorail/framework/test/validate_test.py
@@ -0,0 +1,127 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file provides unit tests for Validate functions."""
+
+import unittest
+
+from framework import validate
+
+
+class ValidateUnitTest(unittest.TestCase):
+  """Set of unit tests for validation functions."""
+
+  # Addresses that the email validators must accept.
+  GOOD_EMAIL_ADDRESSES = [
+      'user@example.com',
+      'user@e.com',
+      'user+tag@example.com',
+      'u.ser@example.com',
+      'us.er@example.com',
+      'u.s.e.r@example.com',
+      'user@ex-ample.com',
+      'user@ex.ample.com',
+      'user@e.x.ample.com',
+      'user@exampl.e.com',
+      'user@e-x-ample.com',
+      'user@e-x-a-m-p-l-e.com',
+      'user@e-x.am-ple.com',
+      'user@e--xample.com',
+  ]
+
+  # Addresses that the email validators must reject.
+  BAD_EMAIL_ADDRESSES = [
+      ' leading.whitespace@example.com',
+      'trailing.whitespace@example.com ',
+      '(paren.quoted@example.com)',
+      '<angle.quoted@example.com>',
+      'trailing.@example.com',
+      'trailing.dot.@example.com',
+      '.leading@example.com',
+      '.leading.dot@example.com',
+      'user@example.com.',
+      'us..er@example.com',
+      'user@ex..ample.com',
+      'user@example..com',
+      'user@ex-.ample.com',
+      'user@-example.com',
+      'user@.example.com',
+      'user@example-.com',
+      'user@example',
+      'user@example.',
+      'user@example.c',
+      'user@example.comcomcomc',
+      'user@example.co-m',
+      'user@exa_mple.com',
+      'user@exa-_mple.com',
+      'user@example.c0m',
+  ]
+
+  def testIsValidEmail(self):
+    """Tests the Email validator class."""
+    for email in self.GOOD_EMAIL_ADDRESSES:
+      self.assertTrue(validate.IsValidEmail(email), msg='Rejected:%r' % email)
+
+    for email in self.BAD_EMAIL_ADDRESSES:
+      self.assertFalse(validate.IsValidEmail(email), msg='Accepted:%r' % email)
+
+  def testIsValidMailTo(self):
+    """mailto: links wrapping each address must validate the same way."""
+    for email in self.GOOD_EMAIL_ADDRESSES:
+      self.assertTrue(
+          validate.IsValidMailTo('mailto:' + email),
+          msg='Rejected:%r' % ('mailto:' + email))
+
+    for email in self.BAD_EMAIL_ADDRESSES:
+      self.assertFalse(
+          validate.IsValidMailTo('mailto:' + email),
+          msg='Accepted:%r' % ('mailto:' + email))
+
+  # URLs that IsValidURL must accept.
+  GOOD_URLS = [
+      'http://google.com',
+      'http://maps.google.com/',
+      'https://secure.protocol.com',
+      'https://dash-domain.com',
+      'http://www.google.com/search?q=foo&hl=en',
+      'https://a.very.long.domain.name.net/with/a/long/path/inf0/too',
+      'http://funny.ws/',
+      'http://we.love.anchors.info/page.html#anchor',
+      'http://redundant-slashes.com//in/path//info',
+      'http://trailingslashe.com/in/path/info/',
+      'http://domain.with.port.com:8080',
+      'http://domain.with.port.com:8080/path/info',
+      'ftp://ftp.gnu.org',
+      'ftp://some.server.some.place.com'
+  ]
+
+  # URLs that IsValidURL must reject.
+  BAD_URLS = [
+      ' http://leading.whitespace.com',
+      'http://trailing.domain.whitespace.com ',
+      'http://trailing.whitespace.com/after/path/info ',
+      'http://no-tld/',
+      'http://underscore_domain.com/',
+      'http://space in domain.com',
+      'http://user@example.com',  # standard, but we purposely don't accept it.
+      'http://user:pass@ex.com',  # standard, but we purposely don't accept it.
+      'http://:password@ex.com',  # standard, but we purposely don't accept it.
+      'missing-http.com',
+      'http:missing-slashes.com',
+      'http:/only-one-slash.com',
+      'http://bad-tld.c0m',
+      'http://bad-tld.c',
+      'http://trailing.dot.',
+      'mailto:bad.scheme',
+      'javascript:attempt-to-inject',
+      'http://:8080/',
+      'http://badport.com:808a0/ ',
+  ]
+
+  def testURL(self):
+    """Tests IsValidURL against the accept and reject lists."""
+    for url in self.GOOD_URLS:
+      self.assertTrue(validate.IsValidURL(url), msg='Rejected:%r' % url)
+
+    for url in self.BAD_URLS:
+      self.assertFalse(validate.IsValidURL(url), msg='Accepted:%r' % url)
+
+
+# Support running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/test/xsrf_test.py b/appengine/monorail/framework/test/xsrf_test.py
new file mode 100644
index 0000000..2a4b96d
--- /dev/null
+++ b/appengine/monorail/framework/test/xsrf_test.py
@@ -0,0 +1,88 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for XSRF utility functions."""
+
+import time
+import unittest
+
+from google.appengine.ext import testbed
+
+import settings
+from framework import xsrf
+
+
+class XsrfTest(unittest.TestCase):
+  """Set of unit tests for blocking XSRF attacks."""
+
+  def setUp(self):
+    # Provide GAE service stubs so the code under test can use memcache
+    # and the datastore without a real App Engine environment.
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_memcache_stub()
+    self.testbed.init_datastore_v3_stub()
+
+  def testGenerateToken_AnonUserGetsNoToken(self):
+    self.assertEqual('', xsrf.GenerateToken(0L, '/path'))
+
+  def testGenerateToken_DifferentUsersGetDifferentTokens(self):
+    self.assertNotEqual(
+        xsrf.GenerateToken(111L, '/path'),
+        xsrf.GenerateToken(222L, '/path'))
+
+  def testGenerateToken_DifferentPathsGetDifferentTokens(self):
+    self.assertNotEqual(
+        xsrf.GenerateToken(111L, '/path/one'),
+        xsrf.GenerateToken(111L, '/path/two'))
+
+  def testGenerateToken_DifferentTimesGetDifferentTokens(self):
+    test_time = int(time.time())
+    self.assertNotEqual(
+        xsrf.GenerateToken(111L, '/path', token_time=test_time),
+        xsrf.GenerateToken(111L, '/path', token_time=test_time + 1))
+
+  def testValidToken(self):
+    token = xsrf.GenerateToken(111L, '/path')
+    xsrf.ValidateToken(token, 111L, '/path')  # no exception raised
+
+  def testMalformedToken(self):
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, 'bad', 111L, '/path')
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, '', 111L, '/path')
+
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, '098a08fe08b08c08a05e:9721973123', 111L, '/path')
+
+  def testWrongUser(self):
+    token = xsrf.GenerateToken(111L, '/path')
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, token, 222L, '/path')
+
+  def testWrongPath(self):
+    token = xsrf.GenerateToken(111L, '/path/one')
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, token, 111L, '/path/two')
+
+  def testValidateToken_Expiration(self):
+    test_time = int(time.time())
+    token = xsrf.GenerateToken(111L, '/path', token_time=test_time)
+    xsrf.ValidateToken(token, 111L, '/path', now=test_time)
+    xsrf.ValidateToken(token, 111L, '/path', now=test_time + 1)
+    xsrf.ValidateToken(
+        token, 111L, '/path', now=test_time + xsrf.TOKEN_TIMEOUT_SEC)
+
+    # NOTE(review): this uses user 11L (not 111L), so the token is wrong for
+    # two reasons (user and expiry) -- confirm the user id is intended.
+    self.assertRaises(
+        xsrf.TokenIncorrect,
+        xsrf.ValidateToken, token, 11L, '/path',
+        now=test_time + xsrf.TOKEN_TIMEOUT_SEC + 1)
+
+
+# Support running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/framework/timestr.py b/appengine/monorail/framework/timestr.py
new file mode 100644
index 0000000..5ec1ee8
--- /dev/null
+++ b/appengine/monorail/framework/timestr.py
@@ -0,0 +1,184 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Time-to-string and time-from-string routines."""
+
+import datetime
+import time
+
+
+class Error(Exception):
+  """Exception used to indicate problems with time routines."""
+  pass
+
+
+# Format for HTTP date headers, e.g. 'Mon, 01 Jan 2008 00:00:00 GMT'.
+HTML_TIME_FMT = '%a, %d %b %Y %H:%M:%S GMT'
+
+# strftime formats for abbreviated date displays.
+MONTH_YEAR_FMT = '%b %Y'
+MONTH_DAY_FMT = '%b %d'
+MONTH_DAY_YEAR_FMT = '%b %d %Y'
+
+# We assume that all server clocks are synchronized within this amount.
+MAX_CLOCK_SKEW_SEC = 30
+
+
+def TimeForHTMLHeader(when=None):
+  """Return the given time (or now) in HTML header format.
+
+  Args:
+    when: optional seconds since the epoch in UTC; defaults to now.
+
+  Returns:
+    An RFC-1123-style GMT time string, per HTML_TIME_FMT.
+  """
+  if when is None:
+    when = int(time.time())
+  return time.strftime(HTML_TIME_FMT, time.gmtime(when))
+
+
+def FormatAbsoluteDate(
+    timestamp, clock=datetime.datetime.utcnow,
+    recent_format=MONTH_DAY_FMT, old_format=MONTH_YEAR_FMT):
+  """Format timestamp like 'Sep 5', or 'Yesterday', or 'Today'.
+
+  Args:
+    timestamp: Seconds since the epoch in UTC.
+    clock: callable that returns a datetime.datetime object when called with no
+      arguments, giving the current time to use when computing what to display.
+    recent_format: Format string to pass to strftime to present dates between
+      six months ago and yesterday.
+    old_format: Format string to pass to strftime to present dates older than
+      six months or more than skew_tolerance in the future.
+
+  Returns:
+    If timestamp's date is today, "Today". If timestamp's date is yesterday,
+    "Yesterday". If timestamp is within six months before today, return the
+    time as formatted by recent_format. Otherwise, return the time as formatted
+    by old_format.
+  """
+  # NOTE(review): fromtimestamp() converts to *local* time while the default
+  # clock returns utcnow(); these only agree on UTC-configured servers --
+  # confirm the deployment assumption.
+  ts = datetime.datetime.fromtimestamp(timestamp)
+  now = clock()
+  # Whole calendar months between ts and now (can be negative for future ts).
+  month_delta = 12 * now.year + now.month - (12 * ts.year + ts.month)
+  delta = now - ts
+
+  if ts > now:
+    # If the time is slightly in the future due to clock skew, treat as today.
+    skew_tolerance = datetime.timedelta(seconds=MAX_CLOCK_SKEW_SEC)
+    if -delta <= skew_tolerance:
+      return 'Today'
+    # Otherwise treat it like an old date.
+    else:
+      fmt = old_format
+  elif month_delta > 6 or delta.days >= 365:
+    fmt = old_format
+  elif delta.days == 1:
+    return 'Yesterday'
+  elif delta.days == 0:
+    return 'Today'
+  else:
+    fmt = recent_format
+
+  # Strip the leading zero from day numbers, e.g. 'Sep 05' -> 'Sep 5'.
+  return time.strftime(fmt, time.localtime(timestamp)).replace(' 0', ' ')
+
+
+def FormatRelativeDate(timestamp, recent_only=False, clock=None):
+  """Return a short string that makes timestamp more meaningful to the user.
+
+  Describe the timestamp relative to the current time, e.g., '4
+  hours ago'. In cases where the timestamp is more than 6 days ago,
+  we simply show the year, so that the combined absolute and
+  relative parts look like 'Sep 05, 2005'.
+
+  Args:
+    timestamp: Seconds since the epoch in UTC.
+    recent_only: If True, only return a description of recent relative
+        dates. Do not return the year, and do not put results inside parentheses.
+    clock: optional function to return an int time, like int(time.time()).
+
+  Returns:
+    String describing relative time.
+  """
+  if clock:
+    now = clock()
+  else:
+    now = int(time.time())
+
+  # TODO(jrobbins): i18n of date strings
+  delta = int(now - timestamp)
+  d_minutes = delta // 60
+  d_hours = d_minutes // 60
+  d_days = d_hours // 24
+  if recent_only:
+    if d_days > 6:
+      return ''
+    if d_days > 1:
+      return '%s days ago' % d_days # starts at 2 days
+    if d_hours > 1:
+      return '%s hours ago' % d_hours # starts at 2 hours
+    if d_minutes > 1:
+      return '%s minutes ago' % d_minutes
+    if d_minutes > 0:
+      return '1 minute ago'
+    if delta > -MAX_CLOCK_SKEW_SEC:
+      return 'moments ago'
+    return ''
+  else:
+    if d_days > 6:
+      # struct_time[0] is the year of the timestamp.
+      return ', %s' % (time.localtime(timestamp))[0]
+    if d_days > 1:
+      return ' (%s days ago)' % d_days # starts at 2 days
+    if d_hours > 1:
+      return ' (%s hours ago)' % d_hours # starts at 2 hours
+    if d_minutes > 1:
+      return ' (%s minutes ago)' % d_minutes
+    if d_minutes > 0:
+      return ' (1 minute ago)'
+    if delta > -MAX_CLOCK_SKEW_SEC:
+      return ' (moments ago)'
+    # Only say something is in the future if it is more than just clock skew.
+    return ' (in the future)'
+
+
+def GetHumanScaleDate(timestamp, now=None):
+  """Formats a timestamp to a coarse-grained and fine-grained time phrase.
+
+  Args:
+    timestamp: Seconds since the epoch in UTC.
+    now: Current time in seconds since the epoch in UTC.
+
+  Returns:
+    A pair (coarse_grain, fine_grain) where coarse_grain is a string
+    such as 'Today', 'Yesterday', etc.; and fine_grain is a string describing
+    relative hours for Today and Yesterday, or an exact date for longer ago.
+  """
+  if now is None:
+    now = int(time.time())
+
+  now_year = datetime.datetime.fromtimestamp(now).year
+  then_year = datetime.datetime.fromtimestamp(timestamp).year
+  delta = int(now - timestamp)
+  delta_minutes = delta // 60
+  delta_hours = delta_minutes // 60
+  delta_days = delta_hours // 24
+
+  if 0 <= delta_hours < 24:
+    if delta_hours > 1:
+      return 'Today', '%s hours ago' % delta_hours
+    if delta_minutes > 1:
+      return 'Today', '%s min ago' % delta_minutes
+    if delta_minutes > 0:
+      return 'Today', '1 min ago'
+    if delta > 0:
+      return 'Today', 'moments ago'
+  # NOTE(review): delta == 0 falls through the block above and is labeled
+  # ('Yesterday', '0 hours ago') here -- confirm that is intended.
+  if 0 <= delta_hours < 48:
+    return 'Yesterday', '%s hours ago' % delta_hours
+  if 0 <= delta_days < 7:
+    return 'Last 7 days', time.strftime(
+        '%b %d, %Y', (time.localtime(timestamp)))
+  if 0 <= delta_days < 30:
+    return 'Last 30 days', time.strftime(
+        '%b %d, %Y', (time.localtime(timestamp)))
+  if delta > 0:
+    if now_year == then_year:
+      return 'Earlier this year', time.strftime(
+          '%b %d, %Y', (time.localtime(timestamp)))
+    return 'Older', time.strftime('%b %d, %Y', (time.localtime(timestamp)))
+  if delta > -MAX_CLOCK_SKEW_SEC:
+    return 'Today', 'moments ago'
+  # Only say something is in the future if it is more than just clock skew.
+  return 'Future', 'Later'
diff --git a/appengine/monorail/framework/tokenrefresh.py b/appengine/monorail/framework/tokenrefresh.py
new file mode 100644
index 0000000..7793e70
--- /dev/null
+++ b/appengine/monorail/framework/tokenrefresh.py
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet for XSRF token refresh.
+
+Our XSRF tokens expire in 2 hours (as defined in xsrf.py), which would
+mean that users who open an issue page and take a long lunch would see
+an error if they try to submit a comment when they get back.
+"""
+
+import logging
+
+from framework import framework_constants
+from framework import jsonfeed
+from framework import xsrf
+
+
+# TODO(jrobbins): Make this also work with xhr tokens by checking expiration
+# time in CS_doPost().
+
+
+class TokenRefresh(jsonfeed.JsonFeed):
+  """JSON feed to give the user a new XSRF token."""
+
+  # Setting this class variable tells servlet.py to not check the XHR
+  # token for the token refresh request itself.  It will always be
+  # expired, otherwise we would not need a new one.  Instead, we check
+  # the form_token with a longer expiration.
+  CHECK_SECURITY_TOKEN = False
+
+  def HandleRequest(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    # Anonymous users get no token; see GenerateToken's anon behavior.
+    if not mr.auth.user_id:
+      return {}
+
+    post_data = mr.request.POST
+    form_token_path = post_data.get('form_token_path')
+    # Raises xsrf.TokenIncorrect if the long-lived form token is invalid,
+    # which aborts the refresh rather than minting a new token.
+    xsrf.ValidateToken(
+        post_data.get('form_token'),
+        mr.auth.user_id,
+        form_token_path,
+        timeout=xsrf.REFRESH_TOKEN_TIMEOUT_SEC)
+
+    return {
+        'form_token': xsrf.GenerateToken(mr.auth.user_id, form_token_path),
+        'token_expires_sec': xsrf.TokenExpiresSec(),
+        }
+
+
diff --git a/appengine/monorail/framework/urls.py b/appengine/monorail/framework/urls.py
new file mode 100644
index 0000000..9502669
--- /dev/null
+++ b/appengine/monorail/framework/urls.py
@@ -0,0 +1,123 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Constants that define the Monorail URL space."""
+
+# URLs of site-wide Monorail pages
+HOSTING_HOME = '/hosting/' # the big search box w/ popular labels
+PROJECT_CREATE = '/hosting/createProject'
+USER_SETTINGS = '/hosting/settings'
+PROJECT_MOVED = '/hosting/moved'
+CHECK_PROJECT_NAME_JSON = '/hosting/createProject/checkProjectName'
+GROUP_LIST = '/g/'
+GROUP_CREATE = '/hosting/createGroup'
+GROUP_DELETE = '/hosting/deleteGroup'
+
+# URLs of project pages
+SUMMARY = '/' # Now just a redirect to /issues/list
+UPDATES_LIST = '/updates/list'
+PEOPLE_LIST = '/people/list'
+PEOPLE_DETAIL = '/people/detail'
+PEOPLE_DETAIL_PREFS_JSON = '/people/detailPrefs'
+ADMIN_META = '/admin'
+ADMIN_ADVANCED = '/adminAdvanced'
+
+# URLs for stars
+STARS_JSON = '/hosting/stars'
+
+# URLs for cue cards (dismissible on-page help)
+CUES_JSON = '/hosting/cues'
+
+# URLs of user pages, relative to either /u/userid or /u/username
+# TODO(jrobbins): Add /u/userid as the canonical URL in metadata.
+USER_PROFILE = '/'
+
+# URLs for User Updates pages
+USER_UPDATES_PROJECTS = '/updates/projects'
+USER_UPDATES_DEVELOPERS = '/updates/developers'
+USER_UPDATES_MINE = '/updates'
+
+# URLs of user group pages, relative to /g/groupname.
+GROUP_DETAIL = '/'
+GROUP_ADMIN = '/groupadmin'
+
+# URL of JSON feed for the "My projects" menu
+USER_PROJECTS_JSON = '/hosting/projects'
+
+# URLs of issue tracker backend request handlers.  Called from the frontends.
+BACKEND_SEARCH = '/_backend/search'
+BACKEND_NONVIEWABLE = '/_backend/nonviewable'
+
+# URLs of task queue request handlers.  Called asynchronously from frontends.
+RECOMPUTE_DERIVED_FIELDS_TASK = '/_task/recomputeDerivedFields'
+NOTIFY_ISSUE_CHANGE_TASK = '/_task/notifyIssueChange'
+NOTIFY_BLOCKING_CHANGE_TASK = '/_task/notifyBlockingChange'
+NOTIFY_BULK_CHANGE_TASK = '/_task/notifyBulkEdit'
+OUTBOUND_EMAIL_TASK = '/_task/outboundEmail'
+SPAM_DATA_EXPORT_TASK = '/_task/spamDataExport'
+
+# URLs of cron job request handlers.  Called from GAE via cron.yaml.
+REINDEX_QUEUE_CRON = '/_cron/reindexQueue'
+RAMCACHE_CONSOLIDATE_CRON = '/_cron/ramCacheConsolidate'
+REAP_CRON = '/_cron/reap'
+SPAM_DATA_EXPORT_CRON = '/_cron/spamDataExport'
+
+# URLs of User pages
+SAVED_QUERIES = '/queries'
+DASHBOARD = '/dashboard'
+
+# URLs of issue tracker project pages
+ISSUE_LIST = '/issues/list'
+ISSUE_DETAIL = '/issues/detail'
+ISSUE_PEEK = '/issues/peek' # not served, only used in issuepeek.py
+ISSUE_COMMENT_DELETION_JSON = '/issues/delComment'
+ISSUE_ATTACHMENT_DELETION_JSON = '/issues/delAttachment'
+ISSUE_FLAGSPAM_JSON = '/issues/flagspam'
+ISSUE_SETSTAR_JSON = '/issues/setstar'
+ISSUE_DELETE_JSON = '/issues/delete'
+ISSUE_ENTRY = '/issues/entry'
+ISSUE_OPTIONS_JSON = '/feeds/issueOptions'
+ISSUE_BULK_EDIT = '/issues/bulkedit'
+ISSUE_ADVSEARCH = '/issues/advsearch'
+ISSUE_TIPS = '/issues/searchtips'
+ISSUE_ATTACHMENT = '/issues/attachment'
+ISSUE_ATTACHMENT_TEXT = '/issues/attachmentText'
+ISSUE_LIST_CSV = '/issues/csv'
+COMPONENT_CHECKNAME_JSON = '/components/checkName'
+COMPONENT_CREATE = '/components/create'
+COMPONENT_DETAIL = '/components/detail'
+FIELD_CHECKNAME_JSON = '/fields/checkName'
+FIELD_CREATE = '/fields/create'
+FIELD_DETAIL = '/fields/detail'
+WIKI_LIST = '/w/list' # Wiki urls are just redirects to project.docs_url
+WIKI_PAGE = '/wiki/<wiki_page:.*>'
+ADMIN_INTRO = '/adminIntro'
+# TODO(jrobbins): move some editing from /admin to /adminIntro.
+ADMIN_COMPONENTS = '/adminComponents'
+ADMIN_LABELS = '/adminLabels'
+ADMIN_RULES = '/adminRules'
+ADMIN_TEMPLATES = '/adminTemplates'
+ADMIN_STATUSES = '/adminStatuses'
+ADMIN_VIEWS = '/adminViews'
+ADMIN_EXPORT = '/projectExport'
+ADMIN_EXPORT_JSON = '/projectExport/json'
+ISSUE_ORIGINAL = '/issues/original'
+ISSUE_REINDEX = '/issues/reindex'
+ISSUE_EXPORT = '/issues/export'
+ISSUE_EXPORT_JSON = '/issues/export/json'
+ISSUE_IMPORT = '/issues/import'
+
+# URLs of site-wide pages referenced from the framework directory.
+CAPTCHA_QUESTION = '/hosting/captcha'
+EXCESSIVE_ACTIVITY = '/hosting/excessiveActivity'
+BANNED = '/hosting/noAccess'
+NONPROJECT_COLLISION = '/hosting/collision'
+# This is for collisions that happen within a project, based at /p/projectname
+ARTIFACT_COLLISION = '/collision'
+
+CSP_REPORT = '/csp'
+TOKEN_REFRESH = '/hosting/tokenRefresh'
+
+SPAM_MODERATION_QUEUE = '/spamqueue'
diff --git a/appengine/monorail/framework/validate.py b/appengine/monorail/framework/validate.py
new file mode 100644
index 0000000..4aa40ca
--- /dev/null
+++ b/appengine/monorail/framework/validate.py
@@ -0,0 +1,102 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of Python input field validators."""
+
+import re
+
+# RFC 2821-compliant email address regex
+#
+# Please see sections "4.1.2 Command Argument Syntax" and
+# "4.1.3 Address Literals" of: http://www.faqs.org/rfcs/rfc2821.html
+#
+# The following implementation is still a subset of RFC 2821. Fully
+# double-quoted <user> parts are not supported (since the RFC discourages
+# their use anyway), and using the backslash to escape other characters
+# that are normally invalid, such as commas, is not supported.
+#
+# The groups in this regular expression are:
+#
+# <user>: all of the valid non-quoted portion of the email address before
+# the @ sign (not including the @ sign)
+#
+# <domain>: all of the domain name between the @ sign (but not including it)
+# and the dot before the TLD (but not including that final dot)
+#
+# <tld>: the top-level domain after the last dot (but not including that
+# final dot)
+#
_RFC_2821_EMAIL_REGEX = r"""(?x)
  (?P<user>
    # Part of the username that comes before any dots that may occur in it.
    # At least one of the listed non-dot characters is required before the
    # first dot.
    [-a-zA-Z0-9!#$%&'*+/=?^_`{|}~]+

    # Remaining part of the username that starts with the dot and
    # which may have other dots, if such a part exists. Only one dot
    # is permitted between each "Atom", and a trailing dot is not permitted.
    (?:[.][-a-zA-Z0-9!#$%&'*+/=?^_`{|}~]+)*
  )

  # Domain name, where subdomains are allowed. Also, dashes are allowed
  # given that they are preceded and followed by at least one character.
  @(?P<domain>
    (?:[0-9a-zA-Z] # at least one non-dash
     (?:[-]* # plus zero or more dashes
      [0-9a-zA-Z]+ # plus at least one non-dash
     )* # zero or more of dashes followed by non-dashes
    ) # one required domain part (may be a sub-domain)

    (?:\. # dot separator before additional sub-domain part
     [0-9a-zA-Z] # at least one non-dash
     (?:[-]* # plus zero or more dashes
      [0-9a-zA-Z]+ # plus at least one non-dash
     )* # zero or more of dashes followed by non-dashes
    )* # at least one sub-domain part and a dot
  )
  \. # dot separator before TLD

  # TLD, the part after 'usernames@domain.' which can consist of 2-9
  # letters.
  (?P<tld>[a-zA-Z]{2,9})
  """

# NOTE(review): the {2,9} TLD limit above (and in _URL_HOST_PATTERN below)
# rejects modern long TLDs such as ".technology"; confirm whether that
# restriction is intentional.

# object used with <re>.search() or <re>.sub() to find email addresses
# within a string (or with <re>.match() to find email addresses at the
# beginning of a string that may be followed by trailing characters,
# since <re>.match() implicitly anchors at the beginning of the string)
RE_EMAIL_SEARCH = re.compile(_RFC_2821_EMAIL_REGEX)

# object used with <re>.match to find strings that contain *only* a single
# email address (by adding the end-of-string anchor $)
RE_EMAIL_ONLY = re.compile('^%s$' % _RFC_2821_EMAIL_REGEX)

# Host portion of a URL: scheme, hostname, and an optional port number.
_URL_HOST_PATTERN = (
    r'(?:https?|ftp)://' # http(s) and ftp protocols
    r'[-a-zA-Z0-9.]+\.[a-zA-Z]{2,9}(:[0-9]+)?' # ascii host values
)
# Host pattern optionally followed by a path component.
_URL_REGEX = r'%s(/[^\s]*)?' % _URL_HOST_PATTERN

# A more complete URL regular expression based on a combination of the
# existing _URL_REGEX and the pattern found for URI regular expressions
# found in the URL RFC document. It's detailed here:
# http://www.ietf.org/rfc/rfc2396.txt
RE_COMPLEX_URL = re.compile(r'^%s(\?([^# ]*))?(#(.*))?$' % _URL_REGEX)
+
+
def IsValidEmail(s):
  """Return a truthy value iff the string is a properly formatted email address.

  Note: returns the re match object (truthy) rather than True, and None
  (falsy) on no match; callers use it in a boolean context.
  """
  return RE_EMAIL_ONLY.match(s)
+
+
def IsValidMailTo(s):
  """Return a truthy value iff the string is a properly formatted mailto:."""
  prefix = 'mailto:'
  return s.startswith(prefix) and RE_EMAIL_ONLY.match(s[len(prefix):])
+
+
def IsValidURL(s):
  """Return a truthy value iff the string is a properly formatted web or ftp URL.

  Note: returns the re match object (truthy) rather than True, and None
  (falsy) on no match; callers use it in a boolean context.
  """
  return RE_COMPLEX_URL.match(s)
diff --git a/appengine/monorail/framework/xsrf.py b/appengine/monorail/framework/xsrf.py
new file mode 100644
index 0000000..283b8ed
--- /dev/null
+++ b/appengine/monorail/framework/xsrf.py
@@ -0,0 +1,132 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Utility routines for avoiding cross-site-request-forgery."""
+
+import base64
+import hmac
+import logging
+import time
+
+# This is a file in the top-level directory that you must edit before deploying
+import settings
+from framework import framework_constants
+from services import secrets_svc
+
# This is how long tokens are valid.
TOKEN_TIMEOUT_SEC = 2 * framework_constants.SECS_PER_HOUR

# The token refresh servlet accepts old tokens to generate new ones, but
# we still impose a limit on how old they can be.
REFRESH_TOKEN_TIMEOUT_SEC = 10 * framework_constants.SECS_PER_DAY

# When the JS on a page decides whether or not it needs to refresh the
# XSRF token before submitting a form, there could be some clock skew,
# so we subtract a little time to avoid having the JS use an existing
# token that the server might consider expired already.
TOKEN_TIMEOUT_MARGIN_SEC = 5 * framework_constants.SECS_PER_MINUTE

# Form tokens and issue stars are limited to only work with the specific
# servlet path for the servlet that processes them. There are several
# XHR handlers that mainly read data without making changes, so we just
# use 'xhr' with all of them.
XHR_SERVLET_PATH = 'xhr'

# Separates the user_id/servlet_path/time fields that are hashed together,
# and also separates the digest from the plain-text timestamp in a token.
DELIMITER = ':'
+
+
def GenerateToken(user_id, servlet_path, token_time=None):
  """Return a security token specifically for the given user.

  Args:
    user_id: int user ID of the user viewing an HTML form.
    servlet_path: string URI path to limit the use of the token.
    token_time: Time at which the token is generated in seconds since the
        epoch. This is used in validation and testing. Defaults to the
        current time.

  Returns:
    A url-safe security token. The token is a string with the digest of
    the user_id and time, followed by plain-text copy of the time that is
    used in validation.

  Raises:
    ValueError: if the XSRF secret was not configured.
  """
  if not user_id:
    # Anonymous visitors never get a token.
    return ''

  when = token_time or int(time.time())
  # Feeding the delimiter-joined fields to HMAC in one update() call is
  # equivalent to the same bytes fed across several update() calls.
  mac = hmac.new(secrets_svc.GetXSRFKey())
  mac.update(DELIMITER.join([str(user_id), servlet_path, str(when)]))
  # The token carries the digest plus a plain-text timestamp for validation.
  return base64.urlsafe_b64encode(
      '%s%s%d' % (mac.digest(), DELIMITER, when))
+
+
def ValidateToken(
    token, user_id, servlet_path, now=None, timeout=TOKEN_TIMEOUT_SEC):
  """Return True if the given token is valid for the given scope.

  Args:
    token: String token that was presented by the user.
    user_id: int user ID.
    servlet_path: string URI path to limit the use of the token.
    now: Time in seconds since the epoch. Defaults to the current time.
        It is explicitly specified only in tests.
    timeout: Max allowed token age in seconds; the refresh servlet passes
        the longer REFRESH_TOKEN_TIMEOUT_SEC.

  Returns:
    None; the token is valid if no exception was raised.

  Raises:
    TokenIncorrect: if the token is missing or invalid.
  """
  if not token:
    raise TokenIncorrect('missing token')

  # The trailing delimiter-separated field of the decoded token is the
  # plain-text timestamp that was embedded by GenerateToken().
  try:
    decoded = base64.urlsafe_b64decode(str(token))
    token_time = long(decoded.split(DELIMITER)[-1])
  except (TypeError, ValueError):
    raise TokenIncorrect('could not decode token')
  now = now or int(time.time())

  # The given token should match the generated one with the same time.
  expected_token = GenerateToken(user_id, servlet_path, token_time=token_time)
  if len(token) != len(expected_token):
    raise TokenIncorrect('presented token is wrong size')

  # Perform constant time comparison to avoid timing attacks
  different = 0
  for x, y in zip(token, expected_token):
    different |= ord(x) ^ ord(y)
  if different:
    # NOTE(review): this message embeds the expected (valid) token; confirm
    # it can only reach logs, never a user-visible response.
    raise TokenIncorrect(
        'presented token does not match expected token: %r != %r' % (
            token, expected_token))

  # We check expiration last so that we only raise the expiration error
  # if the token would have otherwise been valid.
  if now - token_time > timeout:
    raise TokenIncorrect('token has expired')
+
+
def TokenExpiresSec(now=None):
  """Return timestamp when current tokens will expire, minus a safety margin."""
  base = now or int(time.time())
  # Subtract the margin so JS refreshes tokens before the server would
  # consider them expired, despite possible clock skew.
  return base + (TOKEN_TIMEOUT_SEC - TOKEN_TIMEOUT_MARGIN_SEC)
+
+
class Error(Exception):
  """Base class for errors from this module."""
+
+
# Caught separately in servlet.py
class TokenIncorrect(Error):
  """The POST body has an incorrect URL Command Attack token."""
diff --git a/appengine/monorail/gae.py b/appengine/monorail/gae.py
new file mode 120000
index 0000000..92451c2
--- /dev/null
+++ b/appengine/monorail/gae.py
@@ -0,0 +1 @@
+../../../infra/luci/appengine/components/tools/gae.py
\ No newline at end of file
diff --git a/appengine/monorail/gae_ts_mon b/appengine/monorail/gae_ts_mon
new file mode 120000
index 0000000..822e4ad
--- /dev/null
+++ b/appengine/monorail/gae_ts_mon
@@ -0,0 +1 @@
+../../../infra/appengine_module/gae_ts_mon
\ No newline at end of file
diff --git a/appengine/monorail/module-besearch.yaml b/appengine/monorail/module-besearch.yaml
new file mode 100644
index 0000000..77235c3
--- /dev/null
+++ b/appengine/monorail/module-besearch.yaml
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+application: monorail-staging
+module: besearch
+version: 2013-03-18
+runtime: python27
+api_version: 1
+threadsafe: no
+
+instance_class: F4
+automatic_scaling:
+ min_idle_instances: 20
+ max_pending_latency: 0.2s
+
+handlers:
+- url: /_backend/.*
+ script: monorailapp.app
+
+- url: /_cron/.*
+ script: monorailapp.app
+ login: admin
+
+- url: /_task/.*
+ script: monorailapp.app
+ login: admin
+
+libraries:
+- name: endpoints
+ version: 1.0
+- name: MySQLdb
+ version: "latest"
+- name: django
+ version: 1.4
diff --git a/appengine/monorail/monorailapp.py b/appengine/monorail/monorailapp.py
new file mode 100644
index 0000000..06644bb
--- /dev/null
+++ b/appengine/monorail/monorailapp.py
@@ -0,0 +1,33 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Main program for Monorail.
+
+Monorail is an issue tracking tool that is based on the code.google.com
+issue tracker, but it has been ported to Google AppEngine and Google Cloud SQL.
+"""
+
+import endpoints
+import logging
+import webapp2
+
+import gae_ts_mon
+
+import registerpages
+from framework import sorting
+from services import api_svc_v1
+from services import service_manager
+
+
# Module-level setup: App Engine imports this module and serves the WSGI
# app and Endpoints server defined below.
services = service_manager.set_up_services()
sorting.InitializeArtValues(services)
registry = registerpages.ServletRegistry()
app_routes = registry.Register(services)
app = webapp2.WSGIApplication(
    app_routes, config={'services': services})
# Wrap the app so timeseries monitoring sees every request.
gae_ts_mon.initialize(app)

# NOTE(review): this rebinding shadows the imported `endpoints` module;
# it still works because api_server() is called before the rebinding, but
# consider a distinct name (e.g. `endpoints_app`) — confirm no .yaml file
# references `monorailapp.endpoints` before renaming.
endpoints = endpoints.api_server(
    [api_svc_v1.MonorailApi, api_svc_v1.ClientConfigApi])
diff --git a/appengine/monorail/project/__init__.py b/appengine/monorail/project/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/project/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/project/peopledetail.py b/appengine/monorail/project/peopledetail.py
new file mode 100644
index 0000000..ecdf013
--- /dev/null
+++ b/appengine/monorail/project/peopledetail.py
@@ -0,0 +1,262 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display details about each project member."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_bizobj
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import monorailrequest
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from framework import urls
+from project import project_helpers
+from project import project_views
+from services import user_svc
+
# Permissions offered as individual checkboxes on the people detail page.
# GatherPageData() below sets one EZT attribute per entry; any extra_perms
# not in this list are shown separately as free-form text.
CHECKBOX_PERMS = [
    permissions.VIEW,
    permissions.COMMIT,
    permissions.CREATE_ISSUE,
    permissions.ADD_ISSUE_COMMENT,
    permissions.EDIT_ISSUE,
    permissions.EDIT_ISSUE_OWNER,
    permissions.EDIT_ISSUE_SUMMARY,
    permissions.EDIT_ISSUE_STATUS,
    permissions.EDIT_ISSUE_CC,
    permissions.DELETE_ISSUE,
    permissions.DELETE_OWN,
    permissions.DELETE_ANY,
    permissions.EDIT_ANY_MEMBER_NOTES,
    permissions.MODERATE_SPAM,
    ]
+
+
class PeopleDetail(servlet.Servlet):
  """People detail page documents one participant's involvement in a project."""

  _PAGE_TEMPLATE = 'project/people-detail-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PEOPLE

  def AssertBasePermission(self, mr):
    """Check that the user is allowed to access this servlet.

    Args:
      mr: common information parsed from the user's request.

    Raises:
      PermissionException: if the visitor may not view this member's details.
    """
    super(PeopleDetail, self).AssertBasePermission(mr)
    member_id = self.ValidateMemberID(mr.cnxn, mr.specified_user_id, mr.project)
    # For now, contributors who cannot view other contributors are further
    # restricted from viewing any part of the member list or detail pages.
    # Viewing one's own detail page is always allowed.
    if (not permissions.CanViewContributorList(mr) and
        member_id != mr.auth.user_id):
      raise permissions.PermissionException(
          'User is not allowed to view other people\'s details')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""

    member_id = self.ValidateMemberID(mr.cnxn, mr.specified_user_id, mr.project)
    users_by_id = framework_views.MakeAllUserViews(
        mr.cnxn, self.services.user, [member_id])
    framework_views.RevealAllEmailsToMembers(mr, users_by_id)

    project_commitments = self.services.project.GetProjectCommitments(
        mr.cnxn, mr.project_id)
    member_view = project_views.MemberView(
        mr.auth.user_id, member_id, users_by_id[member_id], mr.project,
        project_commitments)

    member_user = self.services.user.GetUser(mr.cnxn, member_id)
    # This ignores indirect memberships, which is ok because we are viewing
    # the page for a member directly involved in the project
    role_perms = permissions.GetPermissions(
        member_user, {member_id}, mr.project)

    # TODO(jrobbins): clarify in the UI which permissions are built-in to
    # the user's direct role, vs. which are granted via a group membership,
    # vs. which ones are extra_perms that have been added specifically for
    # this user.
    member_perms = template_helpers.EZTItem()
    for perm in CHECKBOX_PERMS:
      setattr(member_perms, perm,
              ezt.boolean(role_perms.HasPerm(perm, member_id, mr.project)))

    # Extra perms that have no checkbox are displayed as plain text.
    displayed_extra_perms = [perm for perm in member_view.extra_perms
                             if perm not in CHECKBOX_PERMS]

    viewing_self = mr.auth.user_id == member_id
    warn_abandonment = (viewing_self and
                        permissions.ShouldCheckForAbandonment(mr))

    return {
        'subtab_mode': None,
        'member': member_view,
        'role_perms': role_perms,
        'member_perms': member_perms,
        'displayed_extra_perms': displayed_extra_perms,
        'offer_edit_perms': ezt.boolean(self.CanEditPerms(mr)),
        'offer_edit_member_notes': ezt.boolean(
            self.CanEditMemberNotes(mr, member_id)),
        'offer_remove_role': ezt.boolean(self.CanRemoveRole(mr, member_id)),
        'expand_perms': ezt.boolean(mr.auth.user_pb.keep_people_perms_open),
        'warn_abandonment': ezt.boolean(warn_abandonment),
        'total_num_owners': len(mr.project.owner_ids),
        }

  def ValidateMemberID(self, cnxn, member_id, project):
    """Lookup a project member by user_id.

    Args:
      cnxn: connection to SQL database.
      member_id: int user_id, same format as user profile page.
      project: the current Project PB.

    Returns:
      The user ID of the project member. Raises an exception if the username
      cannot be looked up, or if that user is not in the project.
    """
    if not member_id:
      self.abort(404, 'project member not specified')

    member_username = None
    try:
      member_username = self.services.user.LookupUserEmail(cnxn, member_id)
    except user_svc.NoSuchUserException:
      logging.info('user_id %s not found', member_id)

    if not member_username:
      logging.info('There is no such user id %r', member_id)
      self.abort(404, 'project member not found')

    if not framework_bizobj.UserIsInProject(project, {member_id}):
      logging.info('User %r is not a member of %r',
                   member_username, project.project_name)
      self.abort(404, 'project member not found')

    return member_id

  def ProcessFormData(self, mr, post_data):
    """Process the posted form.

    Args:
      mr: common information parsed from the user's request.
      post_data: dictionary of HTML form data.

    Returns:
      String URL to redirect the user to after processing.
    """
    # 1. Parse and validate user input.
    user_id, role, extra_perms, notes = self.ParsePersonData(mr, post_data)
    member_id = self.ValidateMemberID(mr.cnxn, user_id, mr.project)

    # 2. Call services layer to save changes.
    if 'remove' in post_data:
      self.ProcessRemove(mr, member_id)
    else:
      self.ProcessSave(mr, role, extra_perms, notes, member_id)

    # 3. Determine the next page in the UI flow.
    if 'remove' in post_data:
      # The member page no longer exists for a removed member, so go to
      # the people list instead.
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.PEOPLE_LIST, saved=1, ts=int(time.time()))
    else:
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.PEOPLE_DETAIL, u=user_id, saved=1, ts=int(time.time()))

  def ProcessRemove(self, mr, member_id):
    """Process the posted form when the user pressed 'Remove'."""
    if not self.CanRemoveRole(mr, member_id):
      raise permissions.PermissionException(
          'User is not allowed to remove this member from the project')

    self.RemoveRole(mr.cnxn, mr.project, member_id)

  def ProcessSave(self, mr, role, extra_perms, notes, member_id):
    """Process the posted form when the user pressed 'Save'."""
    if not self.CanEditPerms(mr) and not self.CanEditMemberNotes(mr, member_id):
      raise permissions.PermissionException(
          'User is not allowed to edit people in this project')

    # Each sub-edit is applied only if the user holds that specific
    # permission; a user may be able to edit notes but not perms.
    if self.CanEditPerms(mr):
      self.services.project.UpdateExtraPerms(
          mr.cnxn, mr.project_id, member_id, extra_perms)
      self.UpdateRole(mr.cnxn, mr.project, role, member_id)

    if self.CanEditMemberNotes(mr, member_id):
      self.services.project.UpdateCommitments(
          mr.cnxn, mr.project_id, member_id, notes)

  def CanEditMemberNotes(self, mr, member_id):
    """Return true if the logged in user can edit the current user's notes."""
    return (self.CheckPerm(mr, permissions.EDIT_ANY_MEMBER_NOTES) or
            member_id == mr.auth.user_id)

  def CanEditPerms(self, mr):
    """Return true if the logged in user can edit the current user's perms."""
    return self.CheckPerm(mr, permissions.EDIT_PROJECT)

  def CanRemoveRole(self, mr, member_id):
    """Return true if the logged in user can remove the current user's role."""
    # Members may always remove themselves from a project.
    return (self.CheckPerm(mr, permissions.EDIT_PROJECT) or
            member_id == mr.auth.user_id)

  def ParsePersonData(self, mr, post_data):
    """Parse the POST data for a project member.

    Args:
      mr: common information parsed from the user's request.
      post_data: dictionary of lists of values for each HTML
          form field.

    Returns:
      A tuple with user_id, role, extra_perms, and notes.

    Raises:
      InputException: if no user ID was specified in the request.
    """
    if not mr.specified_user_id:
      raise monorailrequest.InputException('Field user_id is missing')

    role = post_data.get('role', '').lower()
    extra_perms = []
    for ep in post_data.getall('extra_perms'):
      perm = framework_bizobj.CanonicalizeLabel(ep)
      # Perms with leading underscores are reserved.
      perm = perm.strip('_')
      if perm:
        extra_perms.append(perm)

    notes = post_data.get('notes', '').strip()
    return mr.specified_user_id, role, extra_perms, notes

  def RemoveRole(self, cnxn, project, member_id):
    """Remove the given member from the project."""
    owner_ids, committer_ids, contributor_ids = project_helpers.MembersWithout(
        project, {member_id})
    self.services.project.UpdateProjectRoles(
        cnxn, project.project_id, owner_ids, committer_ids, contributor_ids)

  def UpdateRole(self, cnxn, project, role, member_id):
    """If the user's role was changed, update that in the Project."""
    if not role:
      return  # Role was not in the form data

    if role == framework_helpers.GetRoleName({member_id}, project).lower():
      return  # No change needed

    owner_ids, committer_ids, contributor_ids = project_helpers.MembersWith(
        project, {member_id}, role)

    self.services.project.UpdateProjectRoles(
        cnxn, project.project_id, owner_ids, committer_ids, contributor_ids)
+
+
class PagePrefs(jsonfeed.JsonFeed):
  """Remember a user pref for hide/show state of people permissions."""

  def HandleRequest(self, mr):
    """Store the logged in user's preference for the people detail page."""
    perms_expanded = bool(mr.GetIntParam('perms_expanded'))
    logging.info('setting expanded: %r', perms_expanded)
    if mr.auth.user_id:
      # Anonymous visitors have no stored user settings to update.
      self.services.user.UpdateUserSettings(
          mr.cnxn, mr.auth.user_id, mr.auth.user_pb,
          keep_people_perms_open=perms_expanded)
    return {'expanded': perms_expanded}
diff --git a/appengine/monorail/project/peoplelist.py b/appengine/monorail/project/peoplelist.py
new file mode 100644
index 0000000..bc6dac2
--- /dev/null
+++ b/appengine/monorail/project/peoplelist.py
@@ -0,0 +1,203 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display a paginated list of project members.
+
+This page lists owners, members, and contributors. For each
+member, we display their username, permission system role + extra
+perms, and notes on their involvement in the project.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import paginate
+from framework import permissions
+from framework import servlet
+from framework import urls
+from project import project_helpers
+from project import project_views
+
# Number of project members shown on each page of the people list.
MEMBERS_PER_PAGE = 50
+
+
class PeopleList(servlet.Servlet):
  """People list page shows a paginated list of project members."""

  _PAGE_TEMPLATE = 'project/people-list-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PEOPLE

  def AssertBasePermission(self, mr):
    """Raise PermissionException unless the visitor may view the member list."""
    super(PeopleList, self).AssertBasePermission(mr)
    # For now, contributors who cannot view other contributors are further
    # restricted from viewing any part of the member list or detail pages.
    if not permissions.CanViewContributorList(mr):
      raise permissions.PermissionException(
          'User is not allowed to view the project people list')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    all_members = (mr.project.owner_ids +
                   mr.project.committer_ids +
                   mr.project.contributor_ids)

    with self.profiler.Phase('gathering members on this page'):
      users_by_id = framework_views.MakeAllUserViews(
          mr.cnxn, self.services.user, all_members)
      framework_views.RevealAllEmailsToMembers(mr, users_by_id)

    # TODO(jrobbins): re-implement FindUntrustedGroups()
    untrusted_user_group_proxies = []

    with self.profiler.Phase('gathering commitments (notes)'):
      project_commitments = self.services.project.GetProjectCommitments(
          mr.cnxn, mr.project_id)

    with self.profiler.Phase('making member views'):
      owner_views = self._MakeMemberViews(
          mr.auth.user_id, users_by_id, mr.project.owner_ids, mr.project,
          project_commitments)
      committer_views = self._MakeMemberViews(
          mr.auth.user_id, users_by_id, mr.project.committer_ids, mr.project,
          project_commitments)
      contributor_views = self._MakeMemberViews(
          mr.auth.user_id, users_by_id, mr.project.contributor_ids, mr.project,
          project_commitments)
      all_member_views = owner_views + committer_views + contributor_views

    pagination = paginate.ArtifactPagination(
        mr, all_member_views, MEMBERS_PER_PAGE, urls.PEOPLE_LIST)

    offer_membership_editing = mr.perms.HasPerm(
        permissions.EDIT_PROJECT, mr.auth.user_id, mr.project)

    check_abandonment = permissions.ShouldCheckForAbandonment(mr)

    return {
        'pagination': pagination,
        'subtab_mode': None,
        'offer_membership_editing': ezt.boolean(offer_membership_editing),
        'initial_add_members': '',
        'initially_expand_form': ezt.boolean(False),
        'untrusted_user_groups': untrusted_user_group_proxies,
        'check_abandonment': ezt.boolean(check_abandonment),
        'total_num_owners': len(mr.project.owner_ids),
        }

  def GatherHelpData(self, mr, _page_data):
    """Return a dict of values to drive on-page user help.

    Args:
      mr: common information parsed from the HTTP request.
      _page_data: Dictionary of base and page template data.

    Returns:
      A dict of values to drive on-page user help, to be added to page_data.
    """
    # Show the "how to join" cue only to signed-in non-members who have
    # not already dismissed it.
    cue = None
    if (mr.auth.user_id and
        not framework_bizobj.UserIsInProject(
            mr.project, mr.auth.effective_ids) and
        'how_to_join_project' not in mr.auth.user_pb.dismissed_cues):
      cue = 'how_to_join_project'

    return {'cue': cue}

  def _MakeMemberViews(
      self, logged_in_user_id, users_by_id, member_ids, project,
      project_commitments):
    """Return a sorted list of MemberViews for display by EZT."""
    member_views = [
        project_views.MemberView(
            logged_in_user_id, member_id, users_by_id[member_id], project,
            project_commitments)
        for member_id in member_ids]
    member_views.sort(key=lambda mv: mv.user.email)
    return member_views

  def ProcessFormData(self, mr, post_data):
    """Process the posted form.

    Returns:
      String URL to redirect to, or None if neither button was in post_data.
    """
    permit_edit = mr.perms.HasPerm(
        permissions.EDIT_PROJECT, mr.auth.user_id, mr.project)
    if not permit_edit:
      raise permissions.PermissionException(
          'User is not permitted to edit project membership')

    if 'addbtn' in post_data:
      return self.ProcessAddMembers(mr, post_data)
    elif 'removebtn' in post_data:
      return self.ProcessRemoveMembers(mr, post_data)

  def ProcessAddMembers(self, mr, post_data):
    """Process the user's request to add members.

    Args:
      mr: common information parsed from the HTTP request.
      post_data: dictionary of form data.

    Returns:
      String URL to redirect the user to after processing.
    """
    # 1. Parse and validate user input.
    new_member_ids = project_helpers.ParseUsernames(
        mr.cnxn, self.services.user, post_data.get('addmembers'))
    # NOTE(review): this raises KeyError if 'role' is absent from the form;
    # confirm the form always posts it, or use post_data.get('role').
    role = post_data['role']

    owner_ids, committer_ids, contributor_ids = project_helpers.MembersWith(
        mr.project, new_member_ids, role)

    total_people = len(owner_ids) + len(committer_ids) + len(contributor_ids)
    if total_people > framework_constants.MAX_PROJECT_PEOPLE:
      mr.errors.addmembers = (
          'Too many project members. The combined limit is %d.' %
          framework_constants.MAX_PROJECT_PEOPLE)

    # 2. Call services layer to save changes.
    if not mr.errors.AnyErrors():
      self.services.project.UpdateProjectRoles(
          mr.cnxn, mr.project.project_id,
          owner_ids, committer_ids, contributor_ids)

    # 3. Determine the next page in the UI flow.
    if mr.errors.AnyErrors():
      add_members_str = post_data.get('addmembers', '')
      self.PleaseCorrect(
          mr, initial_add_members=add_members_str, initially_expand_form=True)
    else:
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.PEOPLE_LIST, saved=1, ts=int(time.time()))

  def ProcessRemoveMembers(self, mr, post_data):
    """Process the user's request to remove members.

    Args:
      mr: common information parsed from the HTTP request.
      post_data: dictionary of form data.

    Returns:
      String URL to redirect the user to after processing.
    """
    # 1. Parse and validate user input.
    remove_strs = post_data.getall('remove')
    logging.info('remove_strs = %r', remove_strs)
    remove_ids = set(
        self.services.user.LookupUserIDs(mr.cnxn, remove_strs).values())
    owner_ids, committer_ids, contributor_ids = project_helpers.MembersWithout(
        mr.project, remove_ids)

    # 2. Call services layer to save changes.
    self.services.project.UpdateProjectRoles(
        mr.cnxn, mr.project.project_id, owner_ids, committer_ids,
        contributor_ids)

    # 3. Determine the next page in the UI flow.
    return framework_helpers.FormatAbsoluteURL(
        mr, urls.PEOPLE_LIST, saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/project/project_helpers.py b/appengine/monorail/project/project_helpers.py
new file mode 100644
index 0000000..203b0f2
--- /dev/null
+++ b/appengine/monorail/project/project_helpers.py
@@ -0,0 +1,174 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used by the project pages."""
+
+import logging
+import re
+
+import settings
+from framework import framework_bizobj
+from framework import framework_views
+from project import project_views
+from proto import project_pb2
+
+
# Matches the characters that may separate email addresses typed into a
# form field: any whitespace character, comma, or semicolon.
_RE_EMAIL_SEPARATORS = re.compile(r'\s|,|;')
+
+
def BuildProjectMembers(cnxn, project, user_service):
  """Gather data for the members section of a project page.

  Args:
    cnxn: connection to SQL database.
    project: Project PB of current project.
    user_service: an instance of UserService for user persistence.

  Returns:
    A dictionary suitable for use with EZT.
  """
  # Fetch display info for every project member in one batch of requests.
  users_by_id = framework_views.MakeAllUserViews(
      cnxn, user_service, framework_bizobj.AllProjectMembers(project))

  def _ViewsFor(member_ids):
    # Look up the pre-built view for each member of one role.
    return [users_by_id[member_id] for member_id in member_ids]

  # Group the user views by role for display.
  return {
      'owners': _ViewsFor(project.owner_ids),
      'committers': _ViewsFor(project.committer_ids),
      'contributors': _ViewsFor(project.contributor_ids),
      'all_members': users_by_id.values(),
      }
+
+
def BuildProjectAccessOptions(project):
  """Return a list of project access values for use in an HTML menu.

  Args:
    project: current Project PB, or None when creating a new project.

  Returns:
    A list of ProjectAccessView objects that can be used in EZT.
  """
  candidate_levels = (project_pb2.ProjectAccess.ANYONE,
                      project_pb2.ProjectAccess.MEMBERS_ONLY)
  access_views = []
  for level in candidate_levels:
    # Offer every allowed access level. When editing an existing project,
    # also offer its current level so it may always be kept, even if it is
    # no longer allowed for new projects.
    currently_allowed = level in settings.allowed_access_levels
    kept_from_project = project and level == project.access
    if currently_allowed or kept_from_project:
      access_views.append(project_views.ProjectAccessView(level))

  return access_views
+
+
def ParseUsernames(cnxn, user_service, usernames_text):
  """Parse all usernames from a text field and return a list of user IDs.

  Args:
    cnxn: connection to SQL database.
    user_service: an instance of UserService for user persistence.
    usernames_text: string that the user entered into a form field for a list
        of email addresses. Or, None if the browser did not send that value.

  Returns:
    A set of user IDs for the users named. Or, an empty set if the
    usernames_field was not in post_data.
  """
  if not usernames_text:
    # The user did not enter any addresses.
    return set()

  # Split on whitespace/comma/semicolon and drop the empty fragments that
  # appear between consecutive separators.
  emails = [frag for frag in _RE_EMAIL_SEPARATORS.split(usernames_text)
            if frag]
  id_dict = user_service.LookupUserIDs(cnxn, emails, autocreate=True)
  return set(id_dict.values())
+
+
def ParseProjectAccess(project, access_num_str):
  """Parse and validate the "access" field out of post_data.

  Args:
    project: Project PB for the project that was edited, or None if the
        user is creating a new project.
    access_num_str: string of digits from the users POST that identifies
        the desired project access level. Or, None if that widget was not
        offered to the user.

  Returns:
    An enum project access level, or None if the user did not specify
    any value or if the value specified was invalid.
  """
  if not access_num_str:
    return None

  # BUGFIX: the docstring promises None for an invalid value, but a
  # non-numeric string used to raise an uncaught ValueError here.
  try:
    access_number = int(access_num_str)
  except ValueError:
    return None

  available_access_levels = BuildProjectAccessOptions(project)
  allowed_access_choices = [access_view.key for access_view
                            in available_access_levels]
  if access_number in allowed_access_choices:
    return project_pb2.ProjectAccess(access_number)

  return None
+
+
def MembersWithout(project, exclude_ids):
  """Return three lists of member user IDs, with member_ids not in them.

  Args:
    project: Project PB to read the current role lists from.
    exclude_ids: collection of user IDs to drop from every role list.

  Returns:
    Tuple of (owner_ids, committer_ids, contributor_ids) lists with the
    excluded IDs removed; the order of the remaining IDs is preserved.
  """
  def _Keep(member_ids):
    return [user_id for user_id in member_ids
            if user_id not in exclude_ids]

  return (_Keep(project.owner_ids),
          _Keep(project.committer_ids),
          _Keep(project.contributor_ids))
+
+
def MembersWith(project, new_member_ids, role):
  """Return three lists of member IDs with the new IDs in the right one.

  Args:
    project: Project PB for the project to get current members from.
    new_member_ids: set of user IDs for members being added.
    role: string name of the role that new_member_ids should be granted.

  Returns:
    Three lists of member IDs with new_member_ids added to the appropriate
    list and removed from any other role.

  Raises:
    ValueError: if the role is not one of owner, committer, or contributor.
  """
  # Drop the new members from every role first so each ends up in only one.
  owner_ids, committer_ids, contributor_ids = MembersWithout(
      project, new_member_ids)

  if role == 'owner':
    owner_ids.extend(new_member_ids)
  elif role == 'committer':
    committer_ids.extend(new_member_ids)
  elif role == 'contributor':
    contributor_ids.extend(new_member_ids)
  else:
    # BUGFIX: include the offending value instead of a bare ValueError().
    raise ValueError('Unknown project role: %r' % (role,))

  return owner_ids, committer_ids, contributor_ids
+
+
def UsersInvolvedInProject(project):
  """Return a set of all user IDs referenced in the Project."""
  involved = set(project.owner_ids)
  involved.update(project.committer_ids)
  involved.update(project.contributor_ids)
  # Users granted extra perms may not hold any role, but are still involved.
  involved.update(perm.member_id for perm in project.extra_perms)
  return involved
diff --git a/appengine/monorail/project/project_views.py b/appengine/monorail/project/project_views.py
new file mode 100644
index 0000000..4b6904b
--- /dev/null
+++ b/appengine/monorail/project/project_views.py
@@ -0,0 +1,109 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""View objects to help display projects in EZT."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import permissions
+from framework import template_helpers
+from framework import timestr
+from framework import urls
+from proto import project_pb2
+
+
class ProjectAccessView(object):
  """Object for project access information that can be easily used in EZT."""

  # Maps ProjectAccess enum values to the strings displayed in the UI.
  ACCESS_NAMES = {
      project_pb2.ProjectAccess.ANYONE: 'Anyone on the Internet',
      project_pb2.ProjectAccess.MEMBERS_ONLY: 'Project Members',
      }

  def __init__(self, project_access_enum):
    # key is the int value of the enum; templates compare against it.
    self.key = int(project_access_enum)
    self.name = self.ACCESS_NAMES[project_access_enum]
+
+
class ProjectView(template_helpers.PBProxy):
  """View object to make it easy to display a search result in EZT."""

  _MAX_SUMMARY_CHARS = 70
  _LIMITED_DESCRIPTION_CHARS = 500

  def __init__(self, pb, starred=False, now=None, num_stars=None,
               membership_desc=None):
    super(ProjectView, self).__init__(pb)

    # Truncated text for compact list displays.
    self.limited_summary = template_helpers.FitUnsafeText(
        pb.summary, self._MAX_SUMMARY_CHARS)
    self.limited_description = template_helpers.FitUnsafeText(
        pb.description, self._LIMITED_DESCRIPTION_CHARS)

    self.state_name = str(pb.state)  # Gives the enum name
    self.relative_home_url = '/p/%s' % pb.project_name

    if now is None:
      now = time.time()
    # Round down to the last full hour so cached renderings can be reused
    # for up to an hour.
    last_full_hour = now - (now % framework_constants.SECS_PER_HOUR)
    self.cached_content_timestamp = max(
        pb.cached_content_timestamp, last_full_hour)

    self.last_updated_exists = ezt.boolean(pb.recent_activity)
    coarse_grain, fine_grain = timestr.GetHumanScaleDate(pb.recent_activity)
    # Show the coarse-grained phrase unless the activity is simply "Older".
    self.recent_activity = (
        fine_grain if coarse_grain == 'Older' else coarse_grain)

    self.starred = ezt.boolean(starred)
    self.num_stars = num_stars
    self.plural = '' if num_stars == 1 else 's'
    self.membership_desc = membership_desc
+
+
class MemberView(object):
  """EZT-view of details of how a person is participating in a project."""

  def __init__(self, logged_in_user_id, member_id, user_view, project,
               project_commitments, effective_ids=None):
    """Initialize a MemberView with the given information.

    Args:
      logged_in_user_id: int user ID of the viewing user, or 0 for anon.
      member_id: int user ID of the project member being viewed.
      user_view: UserView object for this member.
      project: Project PB for the currently viewed project.
      project_commitments: ProjectCommitments PB for the currently viewed
          project, or None if commitments are not to be displayed.
      effective_ids: optional set of user IDs for this user, if supplied
          we show the highest role that they have via any group membership.
    """
    self.viewing_self = ezt.boolean(logged_in_user_id == member_id)

    self.user = user_view
    self.detail_url = '/p/%s%s?u=%s' % (
        project.project_name, urls.PEOPLE_DETAIL, user_view.user_id)
    self.role = framework_helpers.GetRoleName(
        effective_ids or {member_id}, project)
    self.extra_perms = permissions.GetExtraPerms(project, member_id)

    # Pick out this member's commitment notes, if commitments are shown.
    self.notes = None
    if project_commitments is not None:
      self.notes = next(
          (commitment.notes
           for commitment in project_commitments.commitments
           if commitment.member_id == member_id),
          None)

    # Attributes needed by table_view_helpers.py
    self.labels = []
    self.derived_labels = []
diff --git a/appengine/monorail/project/projectadmin.py b/appengine/monorail/project/projectadmin.py
new file mode 100644
index 0000000..c289271
--- /dev/null
+++ b/appengine/monorail/project/projectadmin.py
@@ -0,0 +1,157 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlets for project administration main subtab."""
+
+import logging
+import time
+
+from third_party import cloudstorage
+from third_party import ezt
+
+from framework import emailfmt
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import gcs_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+from framework import validate
+from project import project_helpers
+from project import project_views
+from tracker import tracker_views
+
+
# User-visible validation messages for the project admin form.
_MSG_INVALID_EMAIL_ADDRESS = 'Invalid email address'
_MSG_DESCRIPTION_MISSING = 'Description is missing'
_MSG_SUMMARY_MISSING = 'Summary is missing'
+
+
class ProjectAdmin(servlet.Servlet):
  """A page with project configuration options for the Project Owner(s)."""

  _PAGE_TEMPLATE = 'project/project-admin-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ADMIN

  def AssertBasePermission(self, mr):
    """Reject users who lack EDIT_PROJECT permission in this project.

    Raises:
      PermissionException: if the user may not administer this project.
    """
    super(ProjectAdmin, self).AssertBasePermission(mr)
    if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
      raise permissions.PermissionException(
          'User is not allowed to administer this project')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    # Only offer an access-level choice when there is more than one option.
    available_access_levels = project_helpers.BuildProjectAccessOptions(
        mr.project)
    offer_access_level = len(available_access_levels) > 1
    access_view = project_views.ProjectAccessView(mr.project.access)

    return {
        'admin_tab_mode': self.ADMIN_TAB_META,
        'initial_summary': mr.project.summary,
        'initial_project_home': mr.project.home_page,
        'initial_docs_url': mr.project.docs_url,
        'initial_logo_gcs_id': mr.project.logo_gcs_id,
        'initial_logo_file_name': mr.project.logo_file_name,
        'logo_view': tracker_views.LogoView(mr.project),
        'initial_description': mr.project.description,
        'issue_notify': mr.project.issue_notify_address,
        'process_inbound_email': ezt.boolean(
            mr.project.process_inbound_email),
        'email_from_addr': emailfmt.FormatFromAddr(mr.project),
        'only_owners_remove_restrictions': ezt.boolean(
            mr.project.only_owners_remove_restrictions),
        'only_owners_see_contributors': ezt.boolean(
            mr.project.only_owners_see_contributors),
        'offer_access_level': ezt.boolean(offer_access_level),
        'initial_access': access_view,
        'available_access_levels': available_access_levels,
        }

  def ProcessFormData(self, mr, post_data):
    """Process the posted form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: dictionary of HTML form data.

    Returns:
      String URL to redirect to after a successful save; None when there
      were validation errors (the form is redisplayed via PleaseCorrect).
    """
    # 1. Parse and validate user input.
    summary, description = self._ParseMeta(post_data, mr.errors)
    access = project_helpers.ParseProjectAccess(
        mr.project, post_data.get('access'))

    # Checkbox fields are present in post_data only when checked.
    only_owners_remove_restrictions = (
        'only_owners_remove_restrictions' in post_data)
    only_owners_see_contributors = 'only_owners_see_contributors' in post_data

    issue_notify = post_data['issue_notify']
    if issue_notify and not validate.IsValidEmail(issue_notify):
      mr.errors.issue_notify = _MSG_INVALID_EMAIL_ADDRESS

    process_inbound_email = 'process_inbound_email' in post_data
    home_page = post_data.get('project_home')
    if home_page and not (
        home_page.startswith('http:') or home_page.startswith('https:')):
      mr.errors.project_home = 'Home page link must start with http: or https:'
    docs_url = post_data.get('docs_url')
    if docs_url and not (
        docs_url.startswith('http:') or docs_url.startswith('https:')):
      mr.errors.docs_url = 'Documentation link must start with http: or https:'

    logo_gcs_id = ''
    logo_file_name = ''
    # A newly uploaded logo arrives as a file item; a plain string value
    # means no new file was chosen.  NOTE(review): presumably a cgi
    # FieldStorage-like object with .filename and .value — confirm.
    if 'logo' in post_data and not isinstance(post_data['logo'], basestring):
      item = post_data['logo']
      logo_file_name = item.filename
      try:
        logo_gcs_id = gcs_helpers.StoreLogoInGCS(
            logo_file_name, item.value, mr.project.project_id)
      except gcs_helpers.UnsupportedMimeType, e:
        mr.errors.logo = e.message
    elif mr.project.logo_gcs_id and mr.project.logo_file_name:
      # No new upload: keep the existing logo unless deletion was requested.
      logo_gcs_id = mr.project.logo_gcs_id
      logo_file_name = mr.project.logo_file_name
      if post_data.get('delete_logo'):
        try:
          gcs_helpers.DeleteObjectFromGCS(logo_gcs_id)
        except cloudstorage.NotFoundError:
          pass  # Already gone from GCS; still clear our reference below.
        # Reset the GCS ID and file name.
        logo_gcs_id = ''
        logo_file_name = ''

    # 2. Call services layer to save changes.
    if not mr.errors.AnyErrors():
      self.services.project.UpdateProject(
          mr.cnxn, mr.project.project_id, issue_notify_address=issue_notify,
          summary=summary, description=description,
          only_owners_remove_restrictions=only_owners_remove_restrictions,
          only_owners_see_contributors=only_owners_see_contributors,
          process_inbound_email=process_inbound_email, access=access,
          home_page=home_page, docs_url=docs_url,
          logo_gcs_id=logo_gcs_id, logo_file_name=logo_file_name,
          )

    # 3. Determine the next page in the UI flow.
    if mr.errors.AnyErrors():
      access_view = project_views.ProjectAccessView(access)
      self.PleaseCorrect(
          mr, initial_summary=summary, initial_description=description,
          initial_access=access_view)
    else:
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.ADMIN_META, saved=1, ts=int(time.time()))

  def _ParseMeta(self, post_data, errors):
    """Process a POST on the project metadata section of the admin page."""
    # None means the field was absent from the form, which is distinct from
    # present-but-empty (only the latter is a validation error).
    summary = None
    description = None

    if 'summary' in post_data:
      summary = post_data['summary']
      if not summary:
        errors.summary = _MSG_SUMMARY_MISSING
    if 'description' in post_data:
      description = post_data['description']
      if not description:
        errors.description = _MSG_DESCRIPTION_MISSING

    return summary, description
diff --git a/appengine/monorail/project/projectadminadvanced.py b/appengine/monorail/project/projectadminadvanced.py
new file mode 100644
index 0000000..069d191
--- /dev/null
+++ b/appengine/monorail/project/projectadminadvanced.py
@@ -0,0 +1,212 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Page and form handlers for project administration "advanced" subtab.
+
+The advanced subtab allows the project to be archived, unarchived, deleted, or
+marked as moved. Site admins can use this page to "doom" a project, which is
+basically archiving it in a way that cannot be reversed by the project owners.
+
+The page also shows project data storage quota and usage values, and
+site admins can edit those quotas.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from framework import urls
+from proto import project_pb2
+from tracker import tracker_constants
+
+
class ProjectAdminAdvanced(servlet.Servlet):
  """A page with project state options for the Project Owner(s)."""

  _PAGE_TEMPLATE = 'project/project-admin-advanced-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ADMIN

  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      PermissionException: if the user may not administer this project.
    """
    super(ProjectAdminAdvanced, self).AssertBasePermission(mr)
    if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
      raise permissions.PermissionException(
          'User is not allowed to administer this project')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the "Advanced" subtab.
    """
    page_data = {
        'admin_tab_mode': self.ADMIN_TAB_ADVANCED,
        }
    page_data.update(self._GatherPublishingOptions(mr))
    page_data.update(self._GatherQuotaData(mr))

    return page_data

  def _GatherPublishingOptions(self, mr):
    """Gather booleans to control the publishing buttons to show in EZT."""
    state = mr.project.state
    offer_archive = state != project_pb2.ProjectState.ARCHIVED
    offer_delete = state == project_pb2.ProjectState.ARCHIVED
    # An archived project may be re-published unless it was doomed (which
    # records a state_reason); then PUBLISH_PROJECT is required to revive it.
    offer_publish = (
        state == project_pb2.ProjectState.ARCHIVED and
        (self.CheckPerm(mr, permissions.PUBLISH_PROJECT) or
         not mr.project.state_reason))
    offer_move = state == project_pb2.ProjectState.LIVE
    offer_doom = self.CheckPerm(mr, permissions.ADMINISTER_SITE)
    # Pre-fill the "moved to" input with a scheme to hint at the format.
    moved_to = mr.project.moved_to or 'http://'

    publishing_data = {
        'offer_archive': ezt.boolean(offer_archive),
        'offer_publish': ezt.boolean(offer_publish),
        'offer_delete': ezt.boolean(offer_delete),
        'offer_move': ezt.boolean(offer_move),
        'moved_to': moved_to,
        'offer_doom': ezt.boolean(offer_doom),
        'default_doom_reason': framework_constants.DEFAULT_DOOM_REASON,
        }

    return publishing_data

  def _GatherQuotaData(self, mr):
    """Gather quota info from backends so that it can be passed to EZT."""
    offer_quota_editing = self.CheckPerm(mr, permissions.EDIT_QUOTA)

    quota_data = {
        'offer_quota_editing': ezt.boolean(offer_quota_editing),
        'attachment_quota': self._BuildAttachmentQuotaData(mr.project),
        }

    return quota_data

  def _BuildComponentQuota(self, used_bytes, quota_bytes, field_name):
    """Return an object to easily display quota info in EZT.

    Args:
      used_bytes: int number of bytes currently used.
      quota_bytes: int quota limit in bytes; falsy means usage shows as 0%.
      field_name: string name of the HTML form field for this quota.

    Returns:
      A template_helpers.EZTItem with used, quota_mb, used_percent,
      avail_percent, and field_name attributes.
    """
    if quota_bytes:
      # NOTE(review): integer division (Python 2) yields whole-number
      # percents here — presumably intentional for display.
      used_percent = 100 * used_bytes / quota_bytes
    else:
      used_percent = 0

    quota_mb = quota_bytes / 1024 / 1024

    return template_helpers.EZTItem(
        used=template_helpers.BytesKbOrMb(used_bytes),
        quota_mb=quota_mb,
        used_percent=used_percent,
        avail_percent=100 - used_percent,
        field_name=field_name)

  def _BuildAttachmentQuotaData(self, project):
    # Fall back to the site-wide hard limit when the project has no
    # explicit attachment quota.
    return self._BuildComponentQuota(
        project.attachment_bytes_used,
        project.attachment_quota or
        tracker_constants.ISSUE_ATTACHMENTS_QUOTA_HARD,
        'attachment_quota_mb')

  def ProcessFormData(self, mr, post_data):
    """Process the posted form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: dictionary of HTML form data.

    Returns:
      String URL to redirect to after processing is completed.
    """
    # 'savechanges' is the quota-edit submit button; every other submit
    # button on this page changes the project's publishing state.
    if 'savechanges' in post_data:
      self._ProcessQuota(mr, post_data)
    else:
      self._ProcessPublishingOptions(mr, post_data)

    # A deleted project has no pages left to visit, so go to the site home.
    if 'deletebtn' in post_data:
      url = framework_helpers.FormatAbsoluteURL(
          mr, urls.HOSTING_HOME, include_project=False)
    else:
      url = framework_helpers.FormatAbsoluteURL(
          mr, urls.ADMIN_ADVANCED, saved=1, ts=int(time.time()))

    return url

  def _ProcessQuota(self, mr, post_data):
    """Process form data to update project quotas.

    Raises:
      PermissionException: if the user lacks EDIT_QUOTA permission.
    """
    if not self.CheckPerm(mr, permissions.EDIT_QUOTA):
      raise permissions.PermissionException(
          'User is not allowed to change project quotas')

    try:
      new_attachment_quota = int(post_data['attachment_quota_mb'])
      # The form field is in MB; quotas are stored in bytes.
      new_attachment_quota *= 1024 * 1024
    except ValueError:
      mr.errors.attachment_quota = 'Invalid value'
      self.PleaseCorrect(mr)  # Don't echo back the bad input, just start over.
      return

    self.services.project.UpdateProject(
        mr.cnxn, mr.project.project_id,
        attachment_quota=new_attachment_quota)

  def _ProcessPublishingOptions(self, mr, post_data):
    """Process form data to update project state."""
    # Note that EDIT_PROJECT is the base permission for this servlet, but
    # dooming and undooming projects also requires PUBLISH_PROJECT.

    state = mr.project.state

    # Owners may archive, but not a project a site admin has doomed
    # (dooming sets delete_time; see the doombtn branch below).
    if 'archivebtn' in post_data and not mr.project.delete_time:
      self.services.project.UpdateProject(
          mr.cnxn, mr.project.project_id,
          state=project_pb2.ProjectState.ARCHIVED)

    elif 'deletebtn' in post_data:  # Mark the project for immediate deletion.
      if state != project_pb2.ProjectState.ARCHIVED:
        raise permissions.PermissionException(
            'Projects must be archived before being deleted')
      self.services.project.MarkProjectDeletable(
          mr.cnxn, mr.project_id, self.services.config)

    elif 'doombtn' in post_data:  # Go from any state to forced ARCHIVED.
      if not self.CheckPerm(mr, permissions.PUBLISH_PROJECT):
        raise permissions.PermissionException(
            'User is not allowed to doom projects')
      reason = post_data.get('reason')
      # Schedule eventual deletion after the doom grace period.
      delete_time = time.time() + framework_constants.DEFAULT_DOOM_PERIOD
      self.services.project.UpdateProject(
          mr.cnxn, mr.project.project_id,
          state=project_pb2.ProjectState.ARCHIVED, state_reason=reason,
          delete_time=delete_time)

    elif 'publishbtn' in post_data:  # Go from any state to LIVE
      if (mr.project.delete_time and
          not self.CheckPerm(mr, permissions.PUBLISH_PROJECT)):
        raise permissions.PermissionException(
            'User is not allowed to unarchive doomed projects')
      # Publishing clears any doom state and the read-only flag.
      self.services.project.UpdateProject(
          mr.cnxn, mr.project.project_id,
          state=project_pb2.ProjectState.LIVE, state_reason='', delete_time=0,
          read_only_reason='')

    elif 'movedbtn' in post_data:  # Record the moved_to location.
      if state != project_pb2.ProjectState.LIVE:
        raise permissions.PermissionException(
            'This project is not live, no user can move it')
      moved_to = post_data.get('moved_to', '')
      self.services.project.UpdateProject(
          mr.cnxn, mr.project.project_id, moved_to=moved_to)
diff --git a/appengine/monorail/project/projectexport.py b/appengine/monorail/project/projectexport.py
new file mode 100644
index 0000000..f253b02
--- /dev/null
+++ b/appengine/monorail/project/projectexport.py
@@ -0,0 +1,196 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet to export a project's config in JSON format.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import permissions
+from framework import jsonfeed
+from framework import servlet
+from project import project_helpers
+from tracker import tracker_bizobj
+
+
class ProjectExport(servlet.Servlet):
  """Only site admins can export a project"""

  _PAGE_TEMPLATE = 'project/project-export-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ADMIN

  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page."""
    super(ProjectExport, self).AssertBasePermission(mr)
    if mr.auth.user_pb.is_site_admin:
      return
    raise permissions.PermissionException(
        'Only site admins may export project configuration')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    page_perms = self.MakePagePerms(mr, None, permissions.CREATE_ISSUE)
    return {
        'admin_tab_mode': None,
        'page_perms': page_perms,
        }
+
+
class ProjectExportJSON(jsonfeed.JsonFeed):
  """ProjectExportJSON shows all configuration for a Project in JSON form."""

  # Pretty-print the JSON output.
  JSON_INDENT = 4

  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page.

    Raises:
      PermissionException: if the user is not a site admin.
    """
    super(ProjectExportJSON, self).AssertBasePermission(mr)
    if not mr.auth.user_pb.is_site_admin:
      raise permissions.PermissionException(
          'Only site admins may export project configuration')

  def HandleRequest(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    project = self.services.project.GetProject(mr.cnxn, mr.project.project_id)
    user_id_set = project_helpers.UsersInvolvedInProject(project)

    config = self.services.config.GetProjectConfig(
        mr.cnxn, mr.project.project_id)
    user_id_set.update(tracker_bizobj.UsersInvolvedInConfig(config))

    # The value 0 indicates "no user", e.g., that an issue has no owner.
    # We don't need to create a User row to represent that.
    user_id_set.discard(0)
    email_dict = self.services.user.LookupUserEmails(mr.cnxn, user_id_set)

    project_json = self._MakeProjectJSON(project, email_dict)
    config_json = self._MakeConfigJSON(config, email_dict)

    json_data = {
        'metadata': {
            'version': 1,
            'when': int(time.time()),
            'who': mr.auth.email,
            },
        'project': project_json,
        'config': config_json,
        # This list could be derived from the others, but we provide it for
        # ease of processing.
        'emails': email_dict.values(),
        }
    return json_data

  def _MakeProjectJSON(self, project, email_dict):
    """Return a JSON-serializable dict describing the Project PB."""
    project_json = {
        'name': project.project_name,
        'summary': project.summary,
        'description': project.description,
        'state': project.state.name,
        'access': project.access.name,
        'owners': [email_dict.get(user) for user in project.owner_ids],
        'committers': [email_dict.get(user) for user in project.committer_ids],
        'contributors': [
            email_dict.get(user) for user in project.contributor_ids],
        'perms': [self._MakePermJSON(perm, email_dict)
                  for perm in project.extra_perms],
        'issue_notify_address': project.issue_notify_address,
        'attachment_bytes': project.attachment_bytes_used,
        'attachment_quota': project.attachment_quota,
        'recent_activity': project.recent_activity,
        'process_inbound_email': project.process_inbound_email,
        'only_owners_remove_restrictions':
            project.only_owners_remove_restrictions,
        'only_owners_see_contributors': project.only_owners_see_contributors,
        'revision_url_format': project.revision_url_format,
        'read_only_reason': project.read_only_reason,
        }
    return project_json

  def _MakePermJSON(self, perm, email_dict):
    """Return a JSON-serializable dict for one member's extra permissions."""
    perm_json = {
        'member': email_dict.get(perm.member_id),
        'perms': list(perm.perms),
        }
    return perm_json

  def _MakeConfigJSON(self, config, email_dict):
    """Return a JSON-serializable dict describing the issue config."""
    config_json = {
        'statuses':
            [self._MakeStatusJSON(status)
             for status in config.well_known_statuses],
        'statuses_offer_merge': list(config.statuses_offer_merge),
        'labels':
            [self._MakeLabelJSON(label) for label in config.well_known_labels],
        'exclusive_label_prefixes': list(config.exclusive_label_prefixes),
        # TODO(agable): Export the projects FieldDefs (not yet used).
        'components':
            [self._MakeComponentJSON(component, email_dict)
             for component in config.component_defs],
        'templates':
            [self._MakeTemplateJSON(template, email_dict)
             for template in config.templates],
        'developer_template': config.default_template_for_developers,
        'user_template': config.default_template_for_users,
        'list_cols': config.default_col_spec,
        'list_spec': config.default_sort_spec,
        'grid_x': config.default_x_attr,
        'grid_y': config.default_y_attr,
        'only_known_values': config.restrict_to_known,
        }
    # Only include the issue entry URL when the project customized it.
    if config.custom_issue_entry_url:
      config_json.update({'issue_entry_url': config.custom_issue_entry_url})
    return config_json

  def _MakeTemplateJSON(self, template, email_dict):
    """Return a JSON-serializable dict for one issue template."""
    template_json = {
        'name': template.name,
        'summary': template.summary,
        'content': template.content,
        'summary_must_be_edited': template.summary_must_be_edited,
        'owner': email_dict.get(template.owner_id),
        'status': template.status,
        'labels': list(template.labels),
        # TODO(agable): Export the template's default Fields (not yet used).
        'members_only': template.members_only,
        'owner_defaults_to_member': template.owner_defaults_to_member,
        'component_required': template.component_required,
        # Bug fix: email_dict is a dict; the previous email_dict(user) call
        # raised TypeError.  Use .get() like every other method here.
        'admins': [email_dict.get(user) for user in template.admin_ids],
        }
    return template_json

  def _MakeStatusJSON(self, status):
    """Return a JSON-serializable dict for one well-known status."""
    status_json = {
        'status': status.status,
        'open': status.means_open,
        'docstring': status.status_docstring,
        }
    return status_json

  def _MakeLabelJSON(self, label):
    """Return a JSON-serializable dict for one well-known label."""
    label_json = {
        'label': label.label,
        'docstring': label.label_docstring,
        }
    return label_json

  def _MakeComponentJSON(self, component, email_dict):
    """Return a JSON-serializable dict for one component definition."""
    component_json = {
        'path': component.path,
        'docstring': component.docstring,
        'admins': [email_dict.get(user) for user in component.admin_ids],
        'ccs': [email_dict.get(user) for user in component.cc_ids],
        }
    return component_json
diff --git a/appengine/monorail/project/projectsummary.py b/appengine/monorail/project/projectsummary.py
new file mode 100644
index 0000000..7e9a266
--- /dev/null
+++ b/appengine/monorail/project/projectsummary.py
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display the project summary page."""
+
+import logging
+
+from framework import framework_bizobj
+from framework import permissions
+from framework import servlet
+from project import project_views
+
+from third_party import markdown
+
+
class ProjectSummary(servlet.Servlet):
  """Page to show brief project description and process documentation."""

  _PAGE_TEMPLATE = 'project/project-summary-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    with self.profiler.Phase('getting project star count'):
      num_stars = self.services.project_star.CountItemStars(
          mr.cnxn, mr.project_id)
      plural = '' if num_stars == 1 else 's'

    return {
        'admin_tab_mode': self.PROCESS_TAB_SUMMARY,
        'formatted_project_description':
            markdown.Markdown(mr.project.description),
        'access_level': project_views.ProjectAccessView(mr.project.access),
        'num_stars': num_stars,
        'plural': plural,
        'home_page': mr.project.home_page,
        }

  def GatherHelpData(self, mr, _page_data):
    """Return a dict of values to drive on-page user help.

    Args:
      mr: common information parsed from the HTTP request.
      _page_data: Dictionary of base and page template data.

    Returns:
      A dict of values to drive on-page user help, to be added to page_data.
    """
    cue = None
    dismissed = mr.auth.user_pb.dismissed_cues
    project = mr.project

    # Cue cards for project owners.  Guards are nested so that the
    # commitments lookup only happens when the cheaper checks pass.
    if self.CheckPerm(mr, permissions.EDIT_PROJECT):
      if 'document_team_duties' not in dismissed:
        if len(framework_bizobj.AllProjectMembers(project)) > 1:
          commitments = self.services.project.GetProjectCommitments(
              mr.cnxn, mr.project_id)
          if not commitments.commitments:
            cue = 'document_team_duties'

    return {
        'cue': cue,
        }
diff --git a/appengine/monorail/project/projectupdates.py b/appengine/monorail/project/projectupdates.py
new file mode 100644
index 0000000..6a48a01
--- /dev/null
+++ b/appengine/monorail/project/projectupdates.py
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display a paginated list of activity stream updates."""
+
+import logging
+
+from third_party import ezt
+
+from features import activities
+from framework import servlet
+from framework import urls
+
+
class ProjectUpdates(servlet.Servlet):
  """ProjectUpdates page shows a list of past activities."""

  _PAGE_TEMPLATE = 'project/project-updates-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_UPDATES

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    page_data = self._GatherUpdates(mr)
    # Neither user-centric subtab applies on the project updates page.
    page_data['subtab_mode'] = None
    page_data['user_updates_tab_mode'] = None
    logging.info('project updates data is %r', page_data)
    return page_data

  def _GatherUpdates(self, mr):
    """Gathers and returns activity streams data."""
    updates_page_url = '/p/%s%s' % (mr.project_name, urls.UPDATES_LIST)
    return activities.GatherUpdatesData(
        self.services, mr, self.profiler, project_ids=[mr.project_id],
        ending='by_user', updates_page_url=updates_page_url,
        autolink=self.services.autolink)
diff --git a/appengine/monorail/project/test/__init__.py b/appengine/monorail/project/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/project/test/__init__.py
diff --git a/appengine/monorail/project/test/peopledetail_test.py b/appengine/monorail/project/test/peopledetail_test.py
new file mode 100644
index 0000000..adebcc6
--- /dev/null
+++ b/appengine/monorail/project/test/peopledetail_test.py
@@ -0,0 +1,273 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the people detail page."""
+
+import logging
+
+import unittest
+
+import webapp2
+
+from framework import monorailrequest
+from framework import permissions
+from project import peopledetail
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class PeopleDetailTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ services.user.TestAddUser('jrobbins', 111L)
+ services.user.TestAddUser('jrobbins@jrobbins.org', 333L)
+ services.user.TestAddUser('jrobbins@chromium.org', 555L)
+ services.user.TestAddUser('imso31337@gmail.com', 999L)
+ self.project = services.project.TestAddProject('proj')
+ self.project.owner_ids.extend([111L, 222L])
+ self.project.committer_ids.extend([333L, 444L])
+ self.project.contributor_ids.extend([555L])
+ self.servlet = peopledetail.PeopleDetail('req', 'res', services=services)
+
+ def VerifyAccess(self, exception_expected):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.servlet.AssertBasePermission(mr)
+ # Owner never raises PermissionException.
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=333',
+ project=self.project,
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ self.servlet.AssertBasePermission(mr)
+ # Committer never raises PermissionException.
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=555',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+ # No PermissionException raised
+
+    # Signed-out users
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=555',
+ project=self.project,
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+
+    # Non-member users
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=555',
+ project=self.project,
+ perms=permissions.USER_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+
+ def testAssertBasePermission_Normal(self):
+ self.VerifyAccess(False)
+
+ def testAssertBasePermission_HubSpoke(self):
+ self.project.only_owners_see_contributors = True
+ self.VerifyAccess(True)
+
+ def testAssertBasePermission_HubSpokeViewingSelf(self):
+ self.project.only_owners_see_contributors = True
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=333',
+ project=self.project,
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ mr.auth.user_id = 333L
+ self.servlet.AssertBasePermission(mr)
+ # No PermissionException raised
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.auth = monorailrequest.AuthData()
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertFalse(page_data['warn_abandonment'])
+ self.assertEquals(2, page_data['total_num_owners'])
+ # TODO(jrobbins): fill in tests for all other aspects.
+
+ def testValidateMemberID(self):
+ # We can validate owners
+ self.assertEquals(
+ 111L,
+ self.servlet.ValidateMemberID('fake cnxn', 111, self.project))
+
+ # We can parse members
+ self.assertEquals(
+ 333L,
+ self.servlet.ValidateMemberID(
+ 'fake cnxn', 333, self.project))
+
+ # 404 for user that does not exist
+ try:
+ self.servlet.ValidateMemberID('fake cnxn', 8933, self.project)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ # 404 for valid user that is not in this project
+ try:
+ self.servlet.ValidateMemberID('fake cnxn', 999, self.project)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testParsePersonData_BadPost(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail',
+ project=self.project)
+ post_data = fake.PostData()
+ try:
+ _result = self.servlet.ParsePersonData(mr, post_data)
+ self.fail()
+ except monorailrequest.InputException:
+ pass
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=',
+ project=self.project)
+ post_data = fake.PostData()
+ try:
+ _result = self.servlet.ParsePersonData(mr, post_data)
+ self.fail()
+ except monorailrequest.InputException:
+ pass
+
+ def testParsePersonData_NoDetails(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project)
+ post_data = fake.PostData(role=['owner'])
+ u, r, ac, n = self.servlet.ParsePersonData(mr, post_data)
+ self.assertEquals(111, u)
+ self.assertEquals('owner', r)
+ self.assertEquals([], ac)
+ self.assertEquals('', n)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=333',
+ project=self.project)
+ post_data = fake.PostData(role=['owner'])
+ u, r, ac, n = self.servlet.ParsePersonData(mr, post_data)
+ self.assertEquals(333, u)
+
+ def testParsePersonData(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project)
+ post_data = fake.PostData(
+ role=['owner'], extra_perms=['ViewQuota', 'EditIssue'])
+ u, r, ac, n = self.servlet.ParsePersonData(mr, post_data)
+ self.assertEquals(111, u)
+ self.assertEquals('owner', r)
+ self.assertEquals(['ViewQuota', 'EditIssue'], ac)
+ self.assertEquals('', n)
+
+ post_data = fake.PostData({
+ 'role': ['owner'],
+ 'extra_perms': [' ', ' \t'],
+ 'notes': [''],
+ })
+ u, r, ac, n = self.servlet.ParsePersonData(mr, post_data)
+ self.assertEquals(111, u)
+ self.assertEquals('owner', r)
+ self.assertEquals([], ac)
+ self.assertEquals('', n)
+
+ post_data = fake.PostData({
+ 'username': ['jrobbins'],
+ 'role': ['owner'],
+ 'extra_perms': ['_ViewQuota', ' __EditIssue'],
+ 'notes': [' Our local Python expert '],
+ })
+ u, r, ac, n = self.servlet.ParsePersonData(mr, post_data)
+ self.assertEquals(111, u)
+ self.assertEquals('owner', r)
+ self.assertEquals(['ViewQuota', 'EditIssue'], ac)
+ self.assertEquals('Our local Python expert', n)
+
+ def testCanEditMemberNotes(self):
+ """Only owners can edit member notes."""
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanEditMemberNotes(mr, 222)
+ self.assertFalse(result)
+
+ mr.auth.user_id = 222
+ result = self.servlet.CanEditMemberNotes(mr, 222)
+ self.assertTrue(result)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanEditMemberNotes(mr, 222)
+ self.assertTrue(result)
+
+ def testCanEditPerms(self):
+ """Only owners can edit member perms."""
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanEditPerms(mr)
+ self.assertFalse(result)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanEditPerms(mr)
+ self.assertTrue(result)
+
+ def testCanRemoveRole(self):
+ """Owners can remove members. Users could also remove themselves."""
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanRemoveRole(mr, 222)
+ self.assertFalse(result)
+
+ mr.auth.user_id = 111
+ result = self.servlet.CanRemoveRole(mr, 111)
+ self.assertTrue(result)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=111',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ result = self.servlet.CanRemoveRole(mr, 222)
+ self.assertTrue(result)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/peoplelist_test.py b/appengine/monorail/project/test/peoplelist_test.py
new file mode 100644
index 0000000..72c5275
--- /dev/null
+++ b/appengine/monorail/project/test/peoplelist_test.py
@@ -0,0 +1,119 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for People List servlet."""
+
+import unittest
+
+from framework import monorailrequest
+from framework import permissions
+from project import peoplelist
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class PeopleListTest(unittest.TestCase):
+ """Tests for the PeopleList servlet."""
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ services.user.TestAddUser('jrobbins@gmail.com', 111L)
+ services.user.TestAddUser('jrobbins@jrobbins.org', 222L)
+ services.user.TestAddUser('jrobbins@chromium.org', 333L)
+ services.user.TestAddUser('imso31337@gmail.com', 999L)
+ self.project = services.project.TestAddProject('proj')
+ self.project.owner_ids.extend([111L])
+ self.project.committer_ids.extend([222L])
+ self.project.contributor_ids.extend([333L])
+ self.servlet = peoplelist.PeopleList('req', 'res', services=services)
+
+ def VerifyAccess(self, exception_expected):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.servlet.AssertBasePermission(mr)
+ # Owner never raises PermissionException.
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ self.servlet.AssertBasePermission(mr)
+ # Committer never raises PermissionException.
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+ # No PermissionException raised
+
+    # Signed-out users
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=555',
+ project=self.project,
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+
+    # Non-member users
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/detail?u=555',
+ project=self.project,
+ perms=permissions.USER_PERMISSIONSET)
+ if exception_expected:
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ else:
+ self.servlet.AssertBasePermission(mr)
+
+ def testAssertBasePermission_Normal(self):
+ self.VerifyAccess(False)
+
+ def testAssertBasePermission_HideMembers(self):
+ self.project.only_owners_see_contributors = True
+ self.VerifyAccess(True)
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.auth = monorailrequest.AuthData()
+ page_data = self.servlet.GatherPageData(mr)
+
+ self.assertEqual(1, page_data['total_num_owners'])
+ # TODO(jrobbins): fill in tests for all other aspects.
+
+ def testProcessFormData_Permission(self):
+ """Only owners could add/remove members."""
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, {})
+
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/people/list',
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.servlet.ProcessFormData(mr, {})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/project_helpers_test.py b/appengine/monorail/project/test/project_helpers_test.py
new file mode 100644
index 0000000..00a728b
--- /dev/null
+++ b/appengine/monorail/project/test/project_helpers_test.py
@@ -0,0 +1,74 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for helpers module."""
+
+import unittest
+
+from project import project_helpers
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+
+
+class HelpersUnitTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake sql connection'
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.services.user.TestAddUser('b@example.com', 222L)
+ self.services.user.TestAddUser('c@example.com', 333L)
+
+ def testBuildProjectMembers(self):
+ project = project_pb2.MakeProject(
+ 'proj', owner_ids=[111L], committer_ids=[222L],
+ contributor_ids=[333L])
+ page_data = project_helpers.BuildProjectMembers(
+ self.cnxn, project, self.services.user)
+ self.assertEqual(111L, page_data['owners'][0].user_id)
+ self.assertEqual(222L, page_data['committers'][0].user_id)
+ self.assertEqual(333L, page_data['contributors'][0].user_id)
+ self.assertEqual(3, len(page_data['all_members']))
+
+ def testParseUsernames(self):
+ # Form field was not present in post data.
+ id_set = project_helpers.ParseUsernames(
+ self.cnxn, self.services.user, None)
+ self.assertEqual(set(), id_set)
+
+ # Form field was present, but empty.
+ id_set = project_helpers.ParseUsernames(
+ self.cnxn, self.services.user, '')
+ self.assertEqual(set(), id_set)
+
+ # Parsing valid user names.
+ id_set = project_helpers.ParseUsernames(
+ self.cnxn, self.services.user, 'a@example.com, c@example.com')
+ self.assertEqual({111L, 333L}, id_set)
+
+ def testParseProjectAccess_NotOffered(self):
+ project = project_pb2.MakeProject('proj')
+ access = project_helpers.ParseProjectAccess(project, None)
+ self.assertEqual(None, access)
+
+ def testParseProjectAccess_AllowedChoice(self):
+ project = project_pb2.MakeProject('proj')
+ access = project_helpers.ParseProjectAccess(project, '1')
+ self.assertEqual(project_pb2.ProjectAccess.ANYONE, access)
+
+ access = project_helpers.ParseProjectAccess(project, '3')
+ self.assertEqual(project_pb2.ProjectAccess.MEMBERS_ONLY, access)
+
+ def testParseProjectAccess_BogusChoice(self):
+ project = project_pb2.MakeProject('proj')
+ access = project_helpers.ParseProjectAccess(project, '9')
+ self.assertEqual(None, access)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/project_views_test.py b/appengine/monorail/project/test/project_views_test.py
new file mode 100644
index 0000000..d4ee4dd
--- /dev/null
+++ b/appengine/monorail/project/test/project_views_test.py
@@ -0,0 +1,113 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for project_views module."""
+
+import unittest
+
+from framework import framework_views
+from project import project_views
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+
+
+class ProjectAccessViewTest(unittest.TestCase):
+
+ def testAccessViews(self):
+ anyone_view = project_views.ProjectAccessView(
+ project_pb2.ProjectAccess.ANYONE)
+ self.assertEqual(anyone_view.key, int(project_pb2.ProjectAccess.ANYONE))
+
+ members_only_view = project_views.ProjectAccessView(
+ project_pb2.ProjectAccess.MEMBERS_ONLY)
+ self.assertEqual(members_only_view.key,
+ int(project_pb2.ProjectAccess.MEMBERS_ONLY))
+
+
+class ProjectViewTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.services.project.TestAddProject('test')
+
+ def testNormalProject(self):
+ project = self.services.project.GetProjectByName('fake cnxn', 'test')
+ project_view = project_views.ProjectView(project)
+ self.assertEqual('test', project_view.project_name)
+ self.assertEqual('/p/test', project_view.relative_home_url)
+ self.assertEqual('LIVE', project_view.state_name)
+
+ def testCachedContentTimestamp(self):
+ project = self.services.project.GetProjectByName('fake cnxn', 'test')
+
+ # Project was never updated since we added cached_content_timestamp.
+ project.cached_content_timestamp = 0
+ view = project_views.ProjectView(project, now=1 * 60 * 60 + 234)
+ self.assertEqual(1 * 60 * 60, view.cached_content_timestamp)
+
+ # Project was updated within the last hour, use that timestamp.
+ project.cached_content_timestamp = 1 * 60 * 60 + 123
+ view = project_views.ProjectView(project, now=1 * 60 * 60 + 234)
+ self.assertEqual(1 * 60 * 60 + 123, view.cached_content_timestamp)
+
+ # Project was not updated within the last hour, but user groups
+ # could have been updated on groups.google.com without any
+ # notification to us, so the client will ask for an updated feed
+ # at least once an hour.
+ project.cached_content_timestamp = 1 * 60 * 60 + 123
+ view = project_views.ProjectView(project, now=2 * 60 * 60 + 234)
+ self.assertEqual(2 * 60 * 60, view.cached_content_timestamp)
+
+
+class MemberViewTest(unittest.TestCase):
+
+ def setUp(self):
+ self.alice_view = framework_views.UserView(111L, 'alice', True)
+ self.bob_view = framework_views.UserView(222L, 'bob', True)
+ self.carol_view = framework_views.UserView(333L, 'carol', True)
+
+ self.project = project_pb2.Project()
+ self.project.project_name = 'proj'
+ self.project.owner_ids.append(111L)
+ self.project.committer_ids.append(222L)
+ self.project.contributor_ids.append(333L)
+
+ def testViewingSelf(self):
+ member_view = project_views.MemberView(
+ 0, 111L, self.alice_view, self.project, None)
+ self.assertFalse(member_view.viewing_self)
+ member_view = project_views.MemberView(
+ 222L, 111L, self.alice_view, self.project, None)
+ self.assertFalse(member_view.viewing_self)
+
+ member_view = project_views.MemberView(
+ 111L, 111L, self.alice_view, self.project, None)
+ self.assertTrue(member_view.viewing_self)
+
+ def testRoles(self):
+ member_view = project_views.MemberView(
+ 0, 111L, self.alice_view, self.project, None)
+ self.assertEqual('Owner', member_view.role)
+ self.assertEqual('/p/proj/people/detail?u=111',
+ member_view.detail_url)
+
+ member_view = project_views.MemberView(
+ 0, 222L, self.bob_view, self.project, None)
+ self.assertEqual('Committer', member_view.role)
+ self.assertEqual('/p/proj/people/detail?u=222',
+ member_view.detail_url)
+
+ member_view = project_views.MemberView(
+ 0, 333L, self.carol_view, self.project, None)
+ self.assertEqual('Contributor', member_view.role)
+ self.assertEqual('/p/proj/people/detail?u=333',
+ member_view.detail_url)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/projectadmin_test.py b/appengine/monorail/project/test/projectadmin_test.py
new file mode 100644
index 0000000..8f81642
--- /dev/null
+++ b/appengine/monorail/project/test/projectadmin_test.py
@@ -0,0 +1,76 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for projectadmin module."""
+
+import unittest
+
+from framework import permissions
+from project import projectadmin
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectAdminTest(unittest.TestCase):
+ """Unit tests for the ProjectAdmin servlet class."""
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.servlet = projectadmin.ProjectAdmin('req', 'res', services=services)
+ self.project = services.project.TestAddProject(
+ 'proj', summary='a summary', description='a description')
+ self.request, self.mr = testing_helpers.GetRequestObjects(
+ project=self.project)
+
+ def testAssertBasePermission(self):
+ # Contributors cannot edit the project
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ # Signed-out users cannot edit the project
+ mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ # Non-member users cannot edit the project
+ mr.perms = permissions.USER_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ # Owners can edit the project
+ mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(mr)
+
+ def testGatherPageData(self):
+ # Project has all default values.
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual('a summary', page_data['initial_summary'])
+ self.assertEqual('a description', page_data['initial_description'])
+ self.assertEqual(
+ int(project_pb2.ProjectAccess.ANYONE), page_data['initial_access'].key)
+
+ self.assertFalse(page_data['process_inbound_email'])
+ self.assertFalse(page_data['only_owners_remove_restrictions'])
+ self.assertFalse(page_data['only_owners_see_contributors'])
+
+ # Now try some alternate Project field values.
+ self.project.only_owners_remove_restrictions = True
+ self.project.only_owners_see_contributors = True
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertTrue(page_data['only_owners_remove_restrictions'])
+ self.assertTrue(page_data['only_owners_see_contributors'])
+
+ # TODO(jrobbins): many more tests needed.
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/projectadminadvanced_test.py b/appengine/monorail/project/test/projectadminadvanced_test.py
new file mode 100644
index 0000000..fa38a89
--- /dev/null
+++ b/appengine/monorail/project/test/projectadminadvanced_test.py
@@ -0,0 +1,123 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for projectadminadvanced module."""
+
+import time
+import unittest
+
+from framework import permissions
+from project import projectadminadvanced
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectAdminAdvancedTest(unittest.TestCase):
+ """Unit tests for the ProjectAdminAdvanced servlet class."""
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService())
+ self.servlet = projectadminadvanced.ProjectAdminAdvanced(
+ 'req', 'res', services=services)
+ self.project = services.project.TestAddProject('proj')
+ self.mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+
+ def testAssertBasePermission(self):
+ # Signed-out users cannot edit the project
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ # Non-member users cannot edit the project
+ self.mr.perms = permissions.USER_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ # Contributors cannot edit the project
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ self.mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+
+ def testGatherPageData(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.ADMIN_TAB_ADVANCED,
+ page_data['admin_tab_mode'])
+
+ def testGatherPublishingOptions_Live(self):
+ pub_data = self.servlet._GatherPublishingOptions(self.mr)
+ self.assertTrue(pub_data['offer_archive'])
+ self.assertTrue(pub_data['offer_move'])
+ self.assertFalse(pub_data['offer_publish'])
+ self.assertFalse(pub_data['offer_delete'])
+ self.assertEqual('http://', pub_data['moved_to'])
+
+ def testGatherPublishingOptions_Moved(self):
+ self.project.moved_to = 'other location'
+ pub_data = self.servlet._GatherPublishingOptions(self.mr)
+ self.assertTrue(pub_data['offer_archive'])
+ self.assertTrue(pub_data['offer_move'])
+ self.assertFalse(pub_data['offer_publish'])
+ self.assertFalse(pub_data['offer_delete'])
+ self.assertEqual('other location', pub_data['moved_to'])
+
+ def testGatherPublishingOptions_Archived(self):
+ self.project.state = project_pb2.ProjectState.ARCHIVED
+ pub_data = self.servlet._GatherPublishingOptions(self.mr)
+ self.assertFalse(pub_data['offer_archive'])
+ self.assertFalse(pub_data['offer_move'])
+ self.assertTrue(pub_data['offer_publish'])
+ self.assertTrue(pub_data['offer_delete'])
+
+ def testGatherPublishingOptions_Doomed(self):
+ self.project.state = project_pb2.ProjectState.ARCHIVED
+ self.project.state_reason = 'you are a spammer'
+ pub_data = self.servlet._GatherPublishingOptions(self.mr)
+ self.assertFalse(pub_data['offer_archive'])
+ self.assertFalse(pub_data['offer_move'])
+ self.assertFalse(pub_data['offer_publish'])
+ self.assertTrue(pub_data['offer_delete'])
+
+ def testGatherQuotaData(self):
+ self.mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ quota_data = self.servlet._GatherQuotaData(self.mr)
+ self.assertFalse(quota_data['offer_quota_editing'])
+
+ self.mr.perms = permissions.ADMIN_PERMISSIONSET
+ quota_data = self.servlet._GatherQuotaData(self.mr)
+ self.assertTrue(quota_data['offer_quota_editing'])
+
+ def testBuildComponentQuota(self):
+ ezt_item = self.servlet._BuildComponentQuota(
+ 5000, 10000, 'attachments')
+ self.assertEqual(50, ezt_item.used_percent)
+ self.assertEqual('attachments', ezt_item.field_name)
+
+ def testProcessFormData_NotDeleted(self):
+ self.mr.project_name = 'proj'
+ post_data = fake.PostData()
+ next_url = self.servlet.ProcessFormData(self.mr, post_data)
+ now = int(time.time())
+ self.assertEqual(
+ 'http://127.0.0.1/p/proj/adminAdvanced?saved=1&ts=%s' % now,
+ next_url)
+
+ def testProcessFormData_AfterDeletion(self):
+ self.mr.project_name = 'proj'
+ self.project.state = project_pb2.ProjectState.ARCHIVED
+ post_data = fake.PostData(deletebtn='1')
+ next_url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertEqual('http://127.0.0.1/hosting/', next_url)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/projectexport_test.py b/appengine/monorail/project/test/projectexport_test.py
new file mode 100644
index 0000000..56e4c63
--- /dev/null
+++ b/appengine/monorail/project/test/projectexport_test.py
@@ -0,0 +1,29 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the projectexport servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from testing import testing_helpers
+from project import projectexport
+
+
+class ProjectExportTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services()
+ self.servlet = projectexport.ProjectExport(
+ 'req', 'res', services=self.services)
+
+ def testAssertBasePermission(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ mr.auth.user_pb.is_site_admin = True
+ self.servlet.AssertBasePermission(mr)
diff --git a/appengine/monorail/project/test/projectsummary_test.py b/appengine/monorail/project/test/projectsummary_test.py
new file mode 100644
index 0000000..398babc
--- /dev/null
+++ b/appengine/monorail/project/test/projectsummary_test.py
@@ -0,0 +1,74 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for Project Summary servlet."""
+
+import unittest
+
+from framework import permissions
+from project import projectsummary
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectSummaryTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ project_star=fake.ProjectStarService())
+ self.project = services.project.TestAddProject(
+ 'proj', project_id=123, summary='sum',
+ description='desc')
+ self.servlet = projectsummary.ProjectSummary(
+ 'req', 'res', services=services)
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(
+ '<p>desc</p>', page_data['formatted_project_description'])
+ self.assertEqual(
+ int(project_pb2.ProjectAccess.ANYONE), page_data['access_level'].key)
+ self.assertEqual(0, page_data['num_stars'])
+ self.assertEqual('s', page_data['plural'])
+
+ def testGatherHelpData(self):
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+
+ # Non-members cannot edit project, so cue is not relevant.
+ mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue'])
+
+ # Members (not owners) cannot edit project, so cue is not relevant.
+ mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue'])
+
+ # This is a project member who has set up mailing lists and added
+ # members, but has not noted any duties.
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ self.project.issue_notify_address = 'example@domain.com'
+ self.project.committer_ids.extend([111L, 222L])
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual('document_team_duties', help_data['cue'])
+
+ # Now he set up notes too.
+ project_commitments = project_pb2.ProjectCommitments()
+ project_commitments.project_id = self.project.project_id
+ project_commitments.commitments.append(
+ project_pb2.ProjectCommitments.MemberCommitment())
+ self.servlet.services.project.TestStoreProjectCommitments(
+ project_commitments)
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/project/test/projectupdates_test.py b/appengine/monorail/project/test/projectupdates_test.py
new file mode 100644
index 0000000..42f1d1d
--- /dev/null
+++ b/appengine/monorail/project/test/projectupdates_test.py
@@ -0,0 +1,55 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.project.projectupdates."""
+
+import unittest
+
+import mox
+
+from features import activities
+from project import projectupdates
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectUpdatesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(project=fake.ProjectService())
+
+ self.project_name = 'proj'
+ self.project_id = 987
+ self.project = self.services.project.TestAddProject(
+ self.project_name, project_id=self.project_id,
+ process_inbound_email=True)
+
+ self.mr = testing_helpers.MakeMonorailRequest(
+ services=self.services, project=self.project)
+ self.mr.project_name = self.project_name
+ self.project_updates = projectupdates.ProjectUpdates(
+ None, None, self.services)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testGatherPageData(self):
+ self.mox.StubOutWithMock(activities, 'GatherUpdatesData')
+ activities.GatherUpdatesData(
+ self.services, self.mr, mox.IgnoreArg(), project_ids=[self.project_id],
+ ending='by_user',
+ updates_page_url='/p/%s/updates/list' % self.project_name,
+ autolink=self.services.autolink).AndReturn({'test': 'testing'})
+ self.mox.ReplayAll()
+
+ page_data = self.project_updates.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+ self.assertEquals(
+ {'subtab_mode': None, 'user_updates_tab_mode': None, 'test': 'testing'},
+ page_data)
+
diff --git a/appengine/monorail/project/wikiredirect.py b/appengine/monorail/project/wikiredirect.py
new file mode 100644
index 0000000..0c323eb
--- /dev/null
+++ b/appengine/monorail/project/wikiredirect.py
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to forward requests to the provided documentation url.
+
+This page handles the /wiki urls which are forwarded from Codesite.
+If a project has defined a docs_url, then the requests are forwarded there.
+If not, they are redirected to adminIntro.
+"""
+
+import httplib
+
+from framework import framework_helpers
+from framework import servlet
+from framework import urls
+
+
+class WikiRedirect(servlet.Servlet):
+ """Redirect to the wiki documentation, if provided."""
+
+ def get(self, **kwargs):
+ """Construct a 302 pointing at project.docs_url, or at adminIntro."""
+ docs_url = self.mr.project.docs_url
+ if not docs_url:
+ docs_url = framework_helpers.FormatAbsoluteURL(
+ self.mr, urls.ADMIN_INTRO, include_project=True)
+ self.response.location = docs_url
+ self.response.status = httplib.MOVED_PERMANENTLY
diff --git a/appengine/monorail/proto/__init__.py b/appengine/monorail/proto/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/proto/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/proto/api_clients_config.proto b/appengine/monorail/proto/api_clients_config.proto
new file mode 100644
index 0000000..fd30ee9
--- /dev/null
+++ b/appengine/monorail/proto/api_clients_config.proto
@@ -0,0 +1,38 @@
+// Copyright 2016 The Chromium Authors. All Rights Reserved.
+// Use of this source code is governed by the Apache v2.0 license that can be
+// found in the LICENSE file.
+
+// Schemas for monorail api client configs.
+// Command to generate api_clients_config_pb2.py: in monorail/ directory:
+// protoc ./proto/api_clients_config.proto --proto_path=./proto/ --python_out=./proto
+
+
+syntax = "proto2";
+
+package monorail;
+
+message ProjectPermission {
+ enum Role {
+ committer = 1;
+ contributor = 2;
+ }
+
+ optional string project = 1;
+ optional Role role = 2 [default = contributor];
+ repeated string extra_permissions = 3;
+}
+
+message Client {
+ optional string client_email = 1;
+ optional string display_name = 2;
+ optional string client_id = 3;
+ optional string description = 4;
+ repeated ProjectPermission project_permissions = 5;
+ optional int32 period_limit = 6 [default = 100000];
+ optional int32 lifetime_limit = 7 [default = 1000000];
+ repeated string contacts = 8;
+}
+
+message ClientCfg {
+ repeated Client clients = 1;
+}
diff --git a/appengine/monorail/proto/api_clients_config_pb2.py b/appengine/monorail/proto/api_clients_config_pb2.py
new file mode 100644
index 0000000..d244ab3
--- /dev/null
+++ b/appengine/monorail/proto/api_clients_config_pb2.py
@@ -0,0 +1,221 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: api_clients_config.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='api_clients_config.proto',
+ package='monorail',
+ serialized_pb='\n\x18\x61pi_clients_config.proto\x12\x08monorail\"\xa4\x01\n\x11ProjectPermission\x12\x0f\n\x07project\x18\x01 \x01(\t\x12;\n\x04role\x18\x02 \x01(\x0e\x32 .monorail.ProjectPermission.Role:\x0b\x63ontributor\x12\x19\n\x11\x65xtra_permissions\x18\x03 \x03(\t\"&\n\x04Role\x12\r\n\tcommitter\x10\x01\x12\x0f\n\x0b\x63ontributor\x10\x02\"\xe7\x01\n\x06\x43lient\x12\x14\n\x0c\x63lient_email\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x11\n\tclient_id\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x13project_permissions\x18\x05 \x03(\x0b\x32\x1b.monorail.ProjectPermission\x12\x1c\n\x0cperiod_limit\x18\x06 \x01(\x05:\x06\x31\x30\x30\x30\x30\x30\x12\x1f\n\x0elifetime_limit\x18\x07 \x01(\x05:\x07\x31\x30\x30\x30\x30\x30\x30\x12\x10\n\x08\x63ontacts\x18\x08 \x03(\t\".\n\tClientCfg\x12!\n\x07\x63lients\x18\x01 \x03(\x0b\x32\x10.monorail.Client')
+
+
+
+_PROJECTPERMISSION_ROLE = _descriptor.EnumDescriptor(
+ name='Role',
+ full_name='monorail.ProjectPermission.Role',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='committer', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='contributor', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=165,
+ serialized_end=203,
+)
+
+
+_PROJECTPERMISSION = _descriptor.Descriptor(
+ name='ProjectPermission',
+ full_name='monorail.ProjectPermission',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project', full_name='monorail.ProjectPermission.project', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=unicode("", "utf-8"),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='role', full_name='monorail.ProjectPermission.role', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=2,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='extra_permissions', full_name='monorail.ProjectPermission.extra_permissions', index=2,
+ number=3, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _PROJECTPERMISSION_ROLE,
+ ],
+ options=None,
+ is_extendable=False,
+ extension_ranges=[],
+ serialized_start=39,
+ serialized_end=203,
+)
+
+
+_CLIENT = _descriptor.Descriptor(
+ name='Client',
+ full_name='monorail.Client',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='client_email', full_name='monorail.Client.client_email', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=unicode("", "utf-8"),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='display_name', full_name='monorail.Client.display_name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=unicode("", "utf-8"),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='client_id', full_name='monorail.Client.client_id', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=unicode("", "utf-8"),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='description', full_name='monorail.Client.description', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=unicode("", "utf-8"),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='project_permissions', full_name='monorail.Client.project_permissions', index=4,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='period_limit', full_name='monorail.Client.period_limit', index=5,
+ number=6, type=5, cpp_type=1, label=1,
+ has_default_value=True, default_value=100000,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='lifetime_limit', full_name='monorail.Client.lifetime_limit', index=6,
+ number=7, type=5, cpp_type=1, label=1,
+ has_default_value=True, default_value=1000000,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='contacts', full_name='monorail.Client.contacts', index=7,
+ number=8, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ extension_ranges=[],
+ serialized_start=206,
+ serialized_end=437,
+)
+
+
+_CLIENTCFG = _descriptor.Descriptor(
+ name='ClientCfg',
+ full_name='monorail.ClientCfg',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='clients', full_name='monorail.ClientCfg.clients', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ extension_ranges=[],
+ serialized_start=439,
+ serialized_end=485,
+)
+
+_PROJECTPERMISSION.fields_by_name['role'].enum_type = _PROJECTPERMISSION_ROLE
+_PROJECTPERMISSION_ROLE.containing_type = _PROJECTPERMISSION;
+_CLIENT.fields_by_name['project_permissions'].message_type = _PROJECTPERMISSION
+_CLIENTCFG.fields_by_name['clients'].message_type = _CLIENT
+DESCRIPTOR.message_types_by_name['ProjectPermission'] = _PROJECTPERMISSION
+DESCRIPTOR.message_types_by_name['Client'] = _CLIENT
+DESCRIPTOR.message_types_by_name['ClientCfg'] = _CLIENTCFG
+
+class ProjectPermission(_message.Message):
+ __metaclass__ = _reflection.GeneratedProtocolMessageType
+ DESCRIPTOR = _PROJECTPERMISSION
+
+ # @@protoc_insertion_point(class_scope:monorail.ProjectPermission)
+
+class Client(_message.Message):
+ __metaclass__ = _reflection.GeneratedProtocolMessageType
+ DESCRIPTOR = _CLIENT
+
+ # @@protoc_insertion_point(class_scope:monorail.Client)
+
+class ClientCfg(_message.Message):
+ __metaclass__ = _reflection.GeneratedProtocolMessageType
+ DESCRIPTOR = _CLIENTCFG
+
+ # @@protoc_insertion_point(class_scope:monorail.ClientCfg)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/appengine/monorail/proto/api_pb2_v1.py b/appengine/monorail/proto/api_pb2_v1.py
new file mode 100644
index 0000000..b61b9d8
--- /dev/null
+++ b/appengine/monorail/proto/api_pb2_v1.py
@@ -0,0 +1,534 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for Monorail API."""
+
+from endpoints import ResourceContainer
+from protorpc import messages
+from protorpc import message_types
+
+from proto import usergroup_pb2
+
+
+########################## Helper Message ##########################
+
+
+class ErrorMessage(messages.Message):
+ """Request error."""
+ code = messages.IntegerField(
+ 1, required=True, variant=messages.Variant.INT32)
+ reason = messages.StringField(2, required=True)
+ message = messages.StringField(3, required=True)
+
+
+class Status(messages.Message):
+ """Issue status."""
+ status = messages.StringField(1, required=True)
+ meansOpen = messages.BooleanField(2, required=True)
+ description = messages.StringField(3)
+
+
+class Label(messages.Message):
+ """Issue label."""
+ label = messages.StringField(1, required=True)
+ description = messages.StringField(2)
+
+
+class Prompt(messages.Message):
+ """Default issue template values."""
+ name = messages.StringField(1, required=True)
+ title = messages.StringField(2)
+ description = messages.StringField(3)
+ titleMustBeEdited = messages.BooleanField(4)
+ status = messages.StringField(5)
+ labels = messages.StringField(6, repeated=True)
+ membersOnly = messages.BooleanField(7)
+ defaultToMember = messages.BooleanField(8)
+ componentRequired = messages.BooleanField(9)
+
+
+class Role(messages.Enum):
+ """User role."""
+ owner = 1
+ member = 2
+ contributor = 3
+
+
+class IssueState(messages.Enum):
+ """Issue state."""
+ closed = 0
+ open = 1
+
+
+class CannedQuery(messages.Enum):
+ """Canned query to search issues."""
+ all = 0
+ new = 1
+ open = 2
+ owned = 3
+ reported = 4
+ starred = 5
+ to_verify = 6
+
+
+class AtomPerson(messages.Message):
+ """Atomic person."""
+ name = messages.StringField(1, required=True)
+ htmlLink = messages.StringField(2)
+ kind = messages.StringField(3)
+
+
+class Attachment(messages.Message):
+ """Issue attachment."""
+ attachmentId = messages.IntegerField(
+ 1, variant=messages.Variant.INT64, required=True)
+ fileName = messages.StringField(2, required=True)
+ fileSize = messages.IntegerField(
+ 3, required=True, variant=messages.Variant.INT32)
+ mimetype = messages.StringField(4, required=True)
+ isDeleted = messages.BooleanField(5)
+
+
+class IssueRef(messages.Message):
+ "Issue reference."
+ issueId = messages.IntegerField(
+ 1, required=True, variant=messages.Variant.INT32)
+ projectId = messages.StringField(2)
+ kind = messages.StringField(3)
+
+
+class FieldValueOperator(messages.Enum):
+ """Operator of field values."""
+ add = 1
+ remove = 2
+ clear = 3
+
+
+class FieldValue(messages.Message):
+ """Custom field values."""
+ fieldName = messages.StringField(1, required=True)
+ fieldValue = messages.StringField(2)
+ derived = messages.BooleanField(3, default=False)
+ operator = messages.EnumField(FieldValueOperator, 4, default='add')
+
+
+class Update(messages.Message):
+ """Issue update."""
+ summary = messages.StringField(1)
+ status = messages.StringField(2)
+ owner = messages.StringField(3)
+ labels = messages.StringField(4, repeated=True)
+ cc = messages.StringField(5, repeated=True)
+ blockedOn = messages.StringField(6, repeated=True)
+ blocking = messages.StringField(7, repeated=True)
+ mergedInto = messages.StringField(8)
+ kind = messages.StringField(9)
+ components = messages.StringField(10, repeated=True)
+ moveToProject = messages.StringField(11)
+ fieldValues = messages.MessageField(FieldValue, 12, repeated=True)
+
+
+class ProjectIssueConfig(messages.Message):
+ """Issue configuration of project."""
+ kind = messages.StringField(1)
+ restrictToKnown = messages.BooleanField(2)
+ defaultColumns = messages.StringField(3, repeated=True)
+ defaultSorting = messages.StringField(4, repeated=True)
+ statuses = messages.MessageField(Status, 5, repeated=True)
+ labels = messages.MessageField(Label, 6, repeated=True)
+ prompts = messages.MessageField(Prompt, 7, repeated=True)
+ defaultPromptForMembers = messages.IntegerField(
+ 8, variant=messages.Variant.INT32)
+ defaultPromptForNonMembers = messages.IntegerField(
+ 9, variant=messages.Variant.INT32)
+ usersCanSetLabels = messages.BooleanField(10)
+
+
+class IssueCommentWrapper(messages.Message):
+ """Issue comment details."""
+ attachments = messages.MessageField(Attachment, 1, repeated=True)
+ author = messages.MessageField(AtomPerson, 2)
+ canDelete = messages.BooleanField(3)
+ content = messages.StringField(4)
+ deletedBy = messages.MessageField(AtomPerson, 5)
+ id = messages.IntegerField(6, variant=messages.Variant.INT32)
+ published = message_types.DateTimeField(7)
+ updates = messages.MessageField(Update, 8)
+ kind = messages.StringField(9)
+
+
+class IssueWrapper(messages.Message):
+ """Issue details."""
+ author = messages.MessageField(AtomPerson, 1)
+ blockedOn = messages.MessageField(IssueRef, 2, repeated=True)
+ blocking = messages.MessageField(IssueRef, 3, repeated=True)
+ canComment = messages.BooleanField(4)
+ canEdit = messages.BooleanField(5)
+ cc = messages.MessageField(AtomPerson, 6, repeated=True)
+ closed = message_types.DateTimeField(7)
+ description = messages.StringField(8)
+ id = messages.IntegerField(9, variant=messages.Variant.INT32)
+ kind = messages.StringField(10)
+ labels = messages.StringField(11, repeated=True)
+ owner = messages.MessageField(AtomPerson, 12)
+ published = message_types.DateTimeField(13)
+ starred = messages.BooleanField(14)
+ stars = messages.IntegerField(15, variant=messages.Variant.INT32)
+ state = messages.EnumField(IssueState, 16)
+ status = messages.StringField(17, required=True)
+ summary = messages.StringField(18, required=True)
+ title = messages.StringField(19)
+ updated = message_types.DateTimeField(20)
+ components = messages.StringField(21, repeated=True)
+ projectId = messages.StringField(22, required=True)
+ mergedInto = messages.MessageField(IssueRef, 23)
+ fieldValues = messages.MessageField(FieldValue, 24, repeated=True)
+
+
+class ProjectWrapper(messages.Message):
+ """Project details."""
+ kind = messages.StringField(1)
+ name = messages.StringField(2)
+ externalId = messages.StringField(3, required=True)
+ htmlLink = messages.StringField(4, required=True)
+ summary = messages.StringField(5)
+ description = messages.StringField(6)
+ versionControlSystem = messages.StringField(7)
+ repositoryUrls = messages.StringField(8, repeated=True)
+ issuesConfig = messages.MessageField(ProjectIssueConfig, 9)
+ role = messages.EnumField(Role, 10)
+ members = messages.MessageField(AtomPerson, 11, repeated=True)
+
+
+class UserGroupSettingsWrapper(messages.Message):
+ """User group settings."""
+ groupName = messages.StringField(1, required=True)
+ who_can_view_members = messages.EnumField(
+ usergroup_pb2.MemberVisibility, 2,
+ default=usergroup_pb2.MemberVisibility.MEMBERS)
+ ext_group_type = messages.EnumField(usergroup_pb2.GroupType, 3)
+ last_sync_time = messages.IntegerField(
+ 4, default=0, variant=messages.Variant.INT32)
+
+
+class GroupCitizens(messages.Message):
+ """Group members and owners."""
+ groupOwners = messages.StringField(1, repeated=True)
+ groupMembers = messages.StringField(2, repeated=True)
+
+
+########################## Comments Message ##########################
+
+# pylint: disable=pointless-string-statement
+
+"""Request to delete/undelete an issue's comments."""
+ISSUES_COMMENTS_DELETE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+ issueId=messages.IntegerField(
+ 2, required=True, variant=messages.Variant.INT32),
+ commentId=messages.IntegerField(
+ 3, required=True, variant=messages.Variant.INT32)
+)
+
+
+class IssuesCommentsDeleteResponse(messages.Message):
+ """Response message of request to delete/undelete an issue's comments."""
+ error = messages.MessageField(ErrorMessage, 1)
+
+
+"""Request to insert an issue's comments."""
+ISSUES_COMMENTS_INSERT_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ IssueCommentWrapper,
+ projectId=messages.StringField(1, required=True),
+ issueId=messages.IntegerField(
+ 2, required=True, variant=messages.Variant.INT32),
+ sendEmail=messages.BooleanField(3)
+)
+
+
+class IssuesCommentsInsertResponse(messages.Message):
+ """Response message of request to insert an issue's comments."""
+ error = messages.MessageField(ErrorMessage, 1)
+ id = messages.IntegerField(2, variant=messages.Variant.INT32)
+ kind = messages.StringField(3)
+ author = messages.MessageField(AtomPerson, 4)
+ content = messages.StringField(5)
+ published = message_types.DateTimeField(6)
+ updates = messages.MessageField(Update, 7)
+ canDelete = messages.BooleanField(8)
+
+
+"""Request to list an issue's comments."""
+ISSUES_COMMENTS_LIST_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+ issueId=messages.IntegerField(
+ 2, required=True, variant=messages.Variant.INT32),
+ maxResults=messages.IntegerField(
+ 3, default=100, variant=messages.Variant.INT32),
+ startIndex=messages.IntegerField(
+ 4, default=0, variant=messages.Variant.INT32)
+)
+
+
+class IssuesCommentsListResponse(messages.Message):
+ """Response message of request to list an issue's comments."""
+ error = messages.MessageField(ErrorMessage, 1)
+ items = messages.MessageField(IssueCommentWrapper, 2, repeated=True)
+ totalResults = messages.IntegerField(3, variant=messages.Variant.INT32)
+ kind = messages.StringField(4)
+
+
+########################## Users Message ##########################
+
+"""Request to get a user."""
+USERS_GET_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ userId=messages.StringField(1, required=True),
+ ownerProjectsOnly=messages.BooleanField(2, default=False)
+)
+
+
+class UsersGetResponse(messages.Message):
+ """Response message of request to get a user."""
+ error = messages.MessageField(ErrorMessage, 1)
+ id = messages.StringField(2)
+ kind = messages.StringField(3)
+ projects = messages.MessageField(ProjectWrapper, 4, repeated=True)
+
+
+########################## Issues Message ##########################
+
+"""Request to get an issue."""
+ISSUES_GET_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+ issueId=messages.IntegerField(
+ 2, required=True, variant=messages.Variant.INT32)
+)
+
+
+"""Request to insert an issue."""
+ISSUES_INSERT_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ IssueWrapper,
+ projectId=messages.StringField(1, required=True),
+ sendEmail=messages.BooleanField(2, default=True)
+)
+
+
+class IssuesGetInsertResponse(messages.Message):
+ """Response message of request to get/insert an issue."""
+ error = messages.MessageField(ErrorMessage, 1)
+ kind = messages.StringField(2)
+ id = messages.IntegerField(3, variant=messages.Variant.INT32)
+ title = messages.StringField(4)
+ summary = messages.StringField(5)
+ stars = messages.IntegerField(6, variant=messages.Variant.INT32)
+ starred = messages.BooleanField(7)
+ status = messages.StringField(8)
+ state = messages.EnumField(IssueState, 9)
+ labels = messages.StringField(10, repeated=True)
+ author = messages.MessageField(AtomPerson, 11)
+ owner = messages.MessageField(AtomPerson, 12)
+ cc = messages.MessageField(AtomPerson, 13, repeated=True)
+ updated = message_types.DateTimeField(14)
+ published = message_types.DateTimeField(15)
+ closed = message_types.DateTimeField(16)
+ blockedOn = messages.MessageField(IssueRef, 17, repeated=True)
+ blocking = messages.MessageField(IssueRef, 18, repeated=True)
+ projectId = messages.StringField(19)
+ canComment = messages.BooleanField(20)
+ canEdit = messages.BooleanField(21)
+ components = messages.StringField(22, repeated=True)
+ mergedInto = messages.MessageField(IssueRef, 23)
+ fieldValues = messages.MessageField(FieldValue, 24, repeated=True)
+
+
+"""Request to list issues."""
+ISSUES_LIST_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+ additionalProject=messages.StringField(2, repeated=True),
+ can=messages.EnumField(CannedQuery, 3, default='all'),
+ label=messages.StringField(4),
+ maxResults=messages.IntegerField(
+ 5, default=100, variant=messages.Variant.INT32),
+ owner=messages.StringField(6),
+ publishedMax=messages.IntegerField(7, variant=messages.Variant.INT64),
+ publishedMin=messages.IntegerField(8, variant=messages.Variant.INT64),
+ q=messages.StringField(9),
+ sort=messages.StringField(10),
+ startIndex=messages.IntegerField(
+ 11, default=0, variant=messages.Variant.INT32),
+ status=messages.StringField(12),
+ updatedMax=messages.IntegerField(13, variant=messages.Variant.INT64),
+ updatedMin=messages.IntegerField(14, variant=messages.Variant.INT64)
+)
+
+
+class IssuesListResponse(messages.Message):
+ """Response message of request to list issues."""
+ error = messages.MessageField(ErrorMessage, 1)
+ items = messages.MessageField(IssueWrapper, 2, repeated=True)
+ totalResults = messages.IntegerField(3, variant=messages.Variant.INT32)
+ kind = messages.StringField(4)
+
+
+"""Request to list group settings."""
+GROUPS_SETTINGS_LIST_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ importedGroupsOnly=messages.BooleanField(1, default=False)
+)
+
+
+class GroupsSettingsListResponse(messages.Message):
+ """Response message of request to list group settings."""
+ error = messages.MessageField(ErrorMessage, 1)
+ groupSettings = messages.MessageField(
+ UserGroupSettingsWrapper, 2, repeated=True)
+
+
+"""Request to create a group."""
+GROUPS_CREATE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ groupName = messages.StringField(1, required=True),
+ who_can_view_members = messages.EnumField(
+ usergroup_pb2.MemberVisibility, 2,
+ default=usergroup_pb2.MemberVisibility.MEMBERS, required=True),
+ ext_group_type = messages.EnumField(usergroup_pb2.GroupType, 3)
+)
+
+
+class GroupsCreateResponse(messages.Message):
+ """Response message of request to create a group."""
+ error = messages.MessageField(ErrorMessage, 1)
+ groupID = messages.IntegerField(
+ 2, variant=messages.Variant.INT32)
+
+
+"""Request to get a group."""
+GROUPS_GET_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ groupName = messages.StringField(1, required=True)
+)
+
+
+class GroupsGetResponse(messages.Message):
+ """Response message of request to create a group."""
+ error = messages.MessageField(ErrorMessage, 1)
+ groupID = messages.IntegerField(
+ 2, variant=messages.Variant.INT32)
+ groupSettings = messages.MessageField(
+ UserGroupSettingsWrapper, 3)
+ groupOwners = messages.StringField(4, repeated=True)
+ groupMembers = messages.StringField(5, repeated=True)
+
+
+"""Request to update a group."""
+GROUPS_UPDATE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ GroupCitizens,
+ groupName = messages.StringField(1, required=True),
+ who_can_view_members = messages.EnumField(
+ usergroup_pb2.MemberVisibility, 2),
+ ext_group_type = messages.EnumField(usergroup_pb2.GroupType, 3),
+ last_sync_time = messages.IntegerField(
+ 4, default=0, variant=messages.Variant.INT32),
+ friend_projects = messages.StringField(5, repeated=True),
+)
+
+
+class GroupsUpdateResponse(messages.Message):
+ """Response message of request to update a group."""
+ error = messages.MessageField(ErrorMessage, 1)
+
+
+########################## Component Message ##########################
+
+class Component(messages.Message):
+ """Component PB."""
+ componentId = messages.IntegerField(
+ 1, required=True, variant=messages.Variant.INT32)
+ projectName = messages.StringField(2, required=True)
+ componentPath = messages.StringField(3, required=True)
+ description = messages.StringField(4)
+ admin = messages.StringField(5, repeated=True)
+ cc = messages.StringField(6, repeated=True)
+ deprecated = messages.BooleanField(7, default=False)
+ created = message_types.DateTimeField(8)
+ creator = messages.StringField(9)
+ modified = message_types.DateTimeField(10)
+ modifier = messages.StringField(11)
+
+
+"""Request to get components of a project."""
+COMPONENTS_LIST_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+)
+
+
+class ComponentsListResponse(messages.Message):
+ """Response to list components."""
+ components = messages.MessageField(
+ Component, 1, repeated=True)
+
+
+class ComponentCreateRequestBody(messages.Message):
+ """Request body to create a component."""
+ parentPath = messages.StringField(1)
+ description = messages.StringField(2)
+ admin = messages.StringField(3, repeated=True)
+ cc = messages.StringField(4, repeated=True)
+ deprecated = messages.BooleanField(5, default=False)
+
+
+"""Request to create component of a project."""
+COMPONENTS_CREATE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ ComponentCreateRequestBody,
+ projectId=messages.StringField(1, required=True),
+ componentName=messages.StringField(2, required=True),
+)
+
+
+"""Request to delete a component."""
+COMPONENTS_DELETE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ message_types.VoidMessage,
+ projectId=messages.StringField(1, required=True),
+ componentPath=messages.StringField(2, required=True),
+)
+
+
+class ComponentUpdateFieldID(messages.Enum):
+ """Possible fields that can be updated in a component."""
+ LEAF_NAME = 1
+ DESCRIPTION = 2
+ ADMIN = 3
+ CC = 4
+ DEPRECATED = 5
+
+
+class ComponentUpdate(messages.Message):
+ """Component update."""
+ # 'field' allows a field to be cleared
+ field = messages.EnumField(ComponentUpdateFieldID, 1, required=True)
+ leafName = messages.StringField(2)
+ description = messages.StringField(3)
+ admin = messages.StringField(4, repeated=True)
+ cc = messages.StringField(5, repeated=True)
+ deprecated = messages.BooleanField(6)
+
+
+class ComponentUpdateRequestBody(messages.Message):
+ """Request body to update a component."""
+ updates = messages.MessageField(ComponentUpdate, 1, repeated=True)
+
+
+"""Request to update a component."""
+COMPONENTS_UPDATE_REQUEST_RESOURCE_CONTAINER = ResourceContainer(
+ ComponentUpdateRequestBody,
+ projectId=messages.StringField(1, required=True),
+ componentPath=messages.StringField(2, required=True),
+)
diff --git a/appengine/monorail/proto/ast_pb2.py b/appengine/monorail/proto/ast_pb2.py
new file mode 100644
index 0000000..db91127
--- /dev/null
+++ b/appengine/monorail/proto/ast_pb2.py
@@ -0,0 +1,87 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for user queries parsed into abstract syntax trees.
+
+A user issue query can look like [Type=Defect owner:jrobbins "memory leak"].
+In that simple form, all the individual search conditions are simply ANDed
+together. In the code, a list of conditions to be ANDed is called a
+conjunction.
+
+Monorail also supports a quick-or feature: [Type=Defect,Enhancement]. That
+will match any issue that has labels Type-Defect or Type-Enhancement, or both.
+
+TODO(jrobbins): Monorail should also support a top-level "OR" keyword that can
+be used to logically OR a series of conjunctions. For example:
+[Type=Defect stars>10 OR Type=Enhancement stars>50]. Right now, these queries
+are parsed and represented in the AST, but not fully processed.
+
+There are no parenthesis and no "AND" keyword. So, the AST is always exactly
+two levels: the overall tree consists of a list of conjunctions, and each
+conjunction consists of a list of conditions.
+
+A condition can look like [stars>10] or [summary:memory] or
+[Type=Defect,Enhancement]. Each condition has a single comparison operator.
+Most conditions refer to a single field definition, but in the case of
+cross-project search a single condition can have a list of field definitions
+from the different projects being searched. Each condition can have a list
+of constant values to compare against. The values may be all strings or all
+integers.
+
+Some conditions are processed by the SQL database and others by the GAE
+search API. All conditions are passed to each module and it is up to
+the module to decide which conditions to handle and which to ignore.
+"""
+
+from protorpc import messages
+
+from proto import tracker_pb2
+
+
+# This is a special field_name for a FieldDef that means to do a fulltext
+# search for words that occur in any part of the issue.
+ANY_FIELD = 'any_field'
+
+
+class QueryOp(messages.Enum):
+ """Enumeration of possible query condition operators."""
+ EQ = 1
+ NE = 2
+ LT = 3
+ GT = 4
+ LE = 5
+ GE = 6
+ TEXT_HAS = 7
+ NOT_TEXT_HAS = 8
+ TEXT_MATCHES = 9
+ NOT_TEXT_MATCHES = 10
+ IS_DEFINED = 11
+ IS_NOT_DEFINED = 12
+ KEY_HAS = 13
+
+
+class Condition(messages.Message):
+ """Representation of one query condition. E.g., [Type=Defect,Task]."""
+ op = messages.EnumField(QueryOp, 1, required=True)
+ field_defs = messages.MessageField(tracker_pb2.FieldDef, 2, repeated=True)
+ str_values = messages.StringField(3, repeated=True)
+ int_values = messages.IntegerField(4, repeated=True)
+
+
+class Conjunction(messages.Message):
+ """A list of conditions that are implicitly ANDed together."""
+ conds = messages.MessageField(Condition, 1, repeated=True)
+
+
+class QueryAST(messages.Message):
+ """Abstract syntax tree for the user's query."""
+ conjunctions = messages.MessageField(Conjunction, 1, repeated=True)
+
+
+def MakeCond(op, field_defs, str_values, int_values):
+ """Shorthand function to construct a Condition PB."""
+ return Condition(
+ op=op, field_defs=field_defs, str_values=str_values,
+ int_values=int_values)
diff --git a/appengine/monorail/proto/project_pb2.py b/appengine/monorail/proto/project_pb2.py
new file mode 100644
index 0000000..c4c757f
--- /dev/null
+++ b/appengine/monorail/proto/project_pb2.py
@@ -0,0 +1,229 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for Monorail projects."""
+
+from protorpc import messages
+
+# Project state affects permissions in that project, and project deletion.
+# It is edited on the project admin page. If it is anything other than LIVE
+# it triggers a notice at the top of every project page.
+# For more info, see the "Project deletion in Monorail" design doc.
+class ProjectState(messages.Enum):
+ """Enum for states in the project lifecycle."""
+ # Project is visible and indexed. This is the typical state.
+ #
+ # If moved_to is set, this project is live but has been moved
+ # to another location, so redirects will be used or links shown.
+ LIVE = 1
+
+ # Project owner has requested the project be archived. Project is
+ # read-only to members only, off-limits to non-members. Issues
+ # can be searched when in the project, but should not appear in
+ # site-wide searches. The project name is still in-use by this
+ # project.
+ #
+ # If a delete_time is set, then the project is doomed: (1) the
+ # state can only be changed by a site admin, and (2) the project
+ # will automatically transition to DELETABLE after that time is
+ # reached.
+ ARCHIVED = 2
+
+ # Project can be deleted at any time. The project name should
+ # have already been changed to a generated string, so it's
+ # impossible to navigate to this project, and the original name
+ # can be reused by a new project.
+ DELETABLE = 3
+
+
+# Project access affects permissions in that project.
+# It is edited on the project admin page.
+class ProjectAccess(messages.Enum):
+ """Enum for possible project access levels."""
+ # Anyone may view this project, even anonymous users.
+ ANYONE = 1
+
+ # Only project members may view the project.
+ MEMBERS_ONLY = 3
+
+
+# A Project PB represents a project in Monorail, which is a workspace for
+# project members to collaborate on issues.
+# A project is created on the project creation page, searched on the project
+# list page, and edited on the project admin page.
+class Project(messages.Message):
+ """This protocol buffer holds all the metadata associated with a project."""
+ state = messages.EnumField(ProjectState, 1, required=True)
+ access = messages.EnumField(ProjectAccess, 18, default=ProjectAccess.ANYONE)
+
+ # The short identifier for this project. This value is lower-cased,
+ # and must be between 3 and 20 characters (inclusive). Alphanumeric
+ # and dashes are allowed, and it must start with an alpha character.
+ # Project names must be unique.
+ project_name = messages.StringField(2, required=True)
+
+ # A numeric identifier for this project.
+ project_id = messages.IntegerField(3, required=True)
+
+ # A one-line summary (human-readable) name of the project.
+ summary = messages.StringField(4, default='')
+
+ # A detailed description of the project.
+ description = messages.StringField(5, default='')
+
+ # Description of why this project has the state set as it is.
+ # This is used for administrative purposes to notify Owners that we
+ # are going to delete their project unless they can provide a good
+ # reason to not do so.
+ state_reason = messages.StringField(9)
+
+ # Time (in seconds) at which an ARCHIVED project may automatically
+ # be changed to state DELETABLE. The state change is done by a
+ # cron job.
+ delete_time = messages.IntegerField(10)
+
+ # Note that these lists are disjoint (a user ID will not appear twice).
+ owner_ids = messages.IntegerField(11, repeated=True)
+ committer_ids = messages.IntegerField(12, repeated=True)
+ contributor_ids = messages.IntegerField(15, repeated=True)
+
+ class ExtraPerms(messages.Message):
+ """Nested message for each member's extra permissions in a project."""
+ member_id = messages.IntegerField(1, required=True)
+ # Each custom perm is a single word [a-zA-Z0-9].
+ perms = messages.StringField(2, repeated=True)
+
+ extra_perms = messages.MessageField(ExtraPerms, 16, repeated=True)
+
+ # Project owners may choose to have ALL issue change notifications go to a
+ # mailing list (in addition to going directly to the users interested
+ # in that issue).
+ issue_notify_address = messages.StringField(14)
+
+ # These fields keep track of the cumulative size of all issue attachments
+ # in a given project. Normally, the number of bytes used is compared
+ # to a constant defined in the web application. However, if a custom
+ # quota is specified here, it will be used instead. An issue attachment
+ # will fail if its size would put the project over its quota. Not all
+ # projects have these fields: they are only set when the first attachment
+ # is uploaded.
+ attachment_bytes_used = messages.IntegerField(38, default=0)
+ # If quota is not set, default from tracker_constants.py is used.
+ attachment_quota = messages.IntegerField(39)
+
+ # NOTE: open slots 40, 41
+
+ # Recent_activity is a timestamp (in seconds since the Epoch) of the
+ # last time that an issue was entered, updated, or commented on.
+ recent_activity = messages.IntegerField(42, default=0)
+
+ # NOTE: open slots 43...
+
+ # Timestamp (in seconds since the Epoch) of the most recent change
+ # to this project that would invalidate cached content. It is set
+ # whenever project membership is edited, or any component config PB
+ # is edited. HTTP requests for auto-complete feeds include this
+ # value in the URL.
+ cached_content_timestamp = messages.IntegerField(53, default=0)
+
+ # If set, this project has been moved elsewhere. This can
+ # be an absolute URL, the name of another project on the same site.
+ moved_to = messages.StringField(60)
+
+ # Enable inbound email processing for issues.
+ process_inbound_email = messages.BooleanField(63, default=False)
+
+ # Limit removal of Restrict-* labels to project owners.
+ only_owners_remove_restrictions = messages.BooleanField(64, default=False)
+
+ # A per-project read-only lock. This lock (1) is meant to be
+ # long-lived (lasting as long as migration operations, project
+ # deletion, or anything else might take and (2) is meant to only
+ # limit user mutations; whether or not it limits automated actions
+ # that would change project data (such as workflow items) is
+ # determined based on the action.
+ #
+ # This lock is implemented as a user-visible string describing the
+ # reason for the project being in a read-only state. An absent or empty
+ # value indicates that the project is read-write; a present and
+ # non-empty value indicates that the project is read-only for the
+ # reason described.
+ read_only_reason = messages.StringField(65)
+
+ # This option is rarely used, but it makes sense for projects that aim for
+# hub-and-spoke collaboration between a vendor organization (like Google)
+ # and representatives of partner companies who are not supposed to know
+ # about each other.
+ # When true, it prevents project committers, contributors, and visitors
+ # from seeing the list of project members on the project summary page,
+ # on the People list page, and in autocomplete for issue owner and Cc.
+ # Project owners can always see the complete list of project members.
+ only_owners_see_contributors = messages.BooleanField(66, default=False)
+
+ # This configures the URLs generated when autolinking revision numbers.
+ # E.g., gitiles, viewvc, or crrev.com.
+ revision_url_format = messages.StringField(67)
+
+ # The home page of the Project.
+ home_page = messages.StringField(68)
+ # The url to redirect to for wiki/documentation links.
+ docs_url = messages.StringField(71)
+ # The GCS object ID of the Project's logo.
+ logo_gcs_id = messages.StringField(69)
+ # The uploaded file name of the Project's logo.
+ logo_file_name = messages.StringField(70)
+
+
+# This PB documents some of the duties of some of the members
+# in a given project. This info is displayed on the project People page.
+class ProjectCommitments(messages.Message):
+ project_id = messages.IntegerField(50)
+
+ # TODO(agable): Does it still make sense to call it a 'Commitment' when
+ # it doesn't contain duties anymore?
+ class MemberCommitment(messages.Message):
+ member_id = messages.IntegerField(11, required=True)
+ notes = messages.StringField(13)
+
+ commitments = messages.MessageField(MemberCommitment, 2, repeated=True)
+
+
+def MakeProject(
+ project_name, project_id=None, state=ProjectState.LIVE,
+ access=ProjectAccess.ANYONE, summary=None, description=None,
+ moved_to=None, cached_content_timestamp=None,
+ owner_ids=None, committer_ids=None, contributor_ids=None,
+ read_only=None, home_page=None, docs_url=None,
+ logo_gcs_id=None, logo_file_name=None):
+ """Returns a project protocol buffer with the given attributes."""
+ project = Project(
+ project_name=project_name, access=access, state=state)
+ if project_id:
+ project.project_id = project_id
+ if moved_to:
+ project.moved_to = moved_to
+ if cached_content_timestamp:
+ project.cached_content_timestamp = cached_content_timestamp
+ if summary:
+ project.summary = summary
+ if description:
+ project.description = description
+ if home_page:
+ project.home_page = home_page
+ if docs_url:
+ project.docs_url = docs_url
+ if logo_gcs_id:
+ project.logo_gcs_id = logo_gcs_id
+ if logo_file_name:
+ project.logo_file_name = logo_file_name
+
+ project.owner_ids.extend(owner_ids or [])
+ project.committer_ids.extend(committer_ids or [])
+ project.contributor_ids.extend(contributor_ids or [])
+
+ if read_only is not None:
+    project.read_only_reason = read_only
+
+ return project
diff --git a/appengine/monorail/proto/site_pb2.py b/appengine/monorail/proto/site_pb2.py
new file mode 100644
index 0000000..a153b92
--- /dev/null
+++ b/appengine/monorail/proto/site_pb2.py
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for Monorail site-wide features."""
+
+from protorpc import messages
+
+
+class UserTypeRestriction(messages.Enum):
+ """An enum for site-wide settings about who can take an action."""
+ # Anyone may do it.
+ ANYONE = 1
+
+ # Only domain admins may do it.
+ ADMIN_ONLY = 2
+
+ # No one may do it, the feature is basically disabled.
+ NO_ONE = 3
+
+ # TODO(jrobbins): implement same-domain users
diff --git a/appengine/monorail/proto/tracker_pb2.py b/appengine/monorail/proto/tracker_pb2.py
new file mode 100644
index 0000000..00e546b
--- /dev/null
+++ b/appengine/monorail/proto/tracker_pb2.py
@@ -0,0 +1,386 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""The Monorail issue tracker uses ProtoRPC for storing business objects."""
+
+from protorpc import messages
+
+
+class FieldValue(messages.Message):
+ """Holds a single custom field value in an issue.
+
+ Multi-valued custom fields will have multiple such FieldValues on a given
+ issue. Note that enumerated type custom fields are represented as key-value
+ labels.
+ """
+ field_id = messages.IntegerField(1, required=True)
+  # Only one of the following fields will have any value.
+ int_value = messages.IntegerField(2)
+ str_value = messages.StringField(3)
+ user_id = messages.IntegerField(4, default=0)
+
+ derived = messages.BooleanField(5, default=False)
+
+
+class DanglingIssueRef(messages.Message):
+ """Holds a reference to an issue still on Google Codesite."""
+ project = messages.StringField(1, required=True)
+ issue_id = messages.IntegerField(2, required=True)
+
+
+class Issue(messages.Message):
+ """Holds all the current metadata about an issue.
+
+ The most frequent searches can work by consulting solely the issue metadata.
+ Display of the issue list is done solely with this issue metadata.
+ Displaying one issue in detail with description and comments requires
+ more info from other objects.
+
+ The issue_id field is the unique primary key for retrieving issues. Local ID
+ is a small integer that counts up in each project.
+
+ Summary, Status, Owner, CC, reporter, and opened_timestamp are hard
+ fields that are always there. All other metadata is stored as
+ labels or custom fields.
+ Next available tag: 54.
+ """
+ # Globally unique issue ID.
+ issue_id = messages.IntegerField(42)
+ # project_name is not stored in the DB, only the project_id is stored.
+ # project_name is used in RAM to simplify formatting logic in lots of places.
+ project_name = messages.StringField(1, required=True)
+ project_id = messages.IntegerField(50)
+ local_id = messages.IntegerField(2, required=True)
+ summary = messages.StringField(3, default='')
+ status = messages.StringField(4, default='')
+ owner_id = messages.IntegerField(5)
+ cc_ids = messages.IntegerField(6, repeated=True)
+ labels = messages.StringField(7, repeated=True)
+ component_ids = messages.IntegerField(39, repeated=True)
+
+ # Denormalized count of stars on this Issue.
+ star_count = messages.IntegerField(8, required=True, default=0)
+ reporter_id = messages.IntegerField(9, required=True, default=0)
+ # Time that the issue was opened, in seconds since the Epoch.
+ opened_timestamp = messages.IntegerField(10, required=True, default=0)
+
+ # This should be set when an issue is closed and cleared when a
+ # closed issue is reopened. Measured in seconds since the Epoch.
+ closed_timestamp = messages.IntegerField(12, default=0)
+
+ # This should be updated every time an issue is modified. Measured
+ # in seconds since the Epoch.
+ modified_timestamp = messages.IntegerField(13, default=0)
+
+ # Issue IDs of issues that this issue is blocked on.
+ blocked_on_iids = messages.IntegerField(16, repeated=True)
+
+ # Issue IDs of issues that this issue is blocking.
+ blocking_iids = messages.IntegerField(17, repeated=True)
+
+ # References to 'dangling' (still in codesite) issue relations.
+ dangling_blocked_on_refs = messages.MessageField(
+ DanglingIssueRef, 52, repeated=True)
+ dangling_blocking_refs = messages.MessageField(
+ DanglingIssueRef, 53, repeated=True)
+
+ # Issue ID of issue that this issue was merged into most recently. When it
+ # is missing or 0, it is considered to be not merged into any other issue.
+ merged_into = messages.IntegerField(18)
+
+ # Default derived via rules, used iff status == ''.
+ derived_status = messages.StringField(30, default='')
+ # Default derived via rules, used iff owner_id == 0.
+ derived_owner_id = messages.IntegerField(31, default=0)
+ # Additional CCs derived via rules.
+ derived_cc_ids = messages.IntegerField(32, repeated=True)
+ # Additional labels derived via rules.
+ derived_labels = messages.StringField(33, repeated=True)
+ # Additional notification email addresses derived via rules.
+ derived_notify_addrs = messages.StringField(34, repeated=True)
+ # Additional components derived via rules.
+ derived_component_ids = messages.IntegerField(40, repeated=True)
+
+ # Soft delete of the entire issue.
+ deleted = messages.BooleanField(35, default=False)
+
+ # Total number of attachments in the issue
+ attachment_count = messages.IntegerField(36, default=0)
+
+ # Total number of comments on the issue (not counting the initial comment
+ # created when the issue is created).
+ comment_count = messages.IntegerField(37, default=0)
+
+ # Custom field values (other than enums)
+ field_values = messages.MessageField(FieldValue, 41, repeated=True)
+
+ is_spam = messages.BooleanField(51, default=False)
+
+
+class FieldID(messages.Enum):
+ """Possible fields that can be updated in an Amendment."""
+ # The spelling of these names must match enum values in tracker.sql.
+ SUMMARY = 1
+ STATUS = 2
+ OWNER = 3
+ CC = 4
+ LABELS = 5
+ BLOCKEDON = 6
+ BLOCKING = 7
+ MERGEDINTO = 8
+ PROJECT = 9
+ COMPONENTS = 10
+ CUSTOM = 11
+
+
+class Amendment(messages.Message):
+ """Holds info about one issue field change."""
+ field = messages.EnumField(FieldID, 11, required=True)
+ # User-visible string describing the change
+ newvalue = messages.StringField(12, required=True)
+ # Newvalue could have + or - characters to indicate that labels and CCs
+ # were added or removed
+ # Users added to owner or cc field
+ added_user_ids = messages.IntegerField(29, repeated=True)
+ # Users removed from owner or cc
+ removed_user_ids = messages.IntegerField(30, repeated=True)
+ custom_field_name = messages.StringField(31)
+ # When having newvalue be a +/- string doesn't make sense (e.g. status),
+ # store the old value here so that it can still be displayed.
+ oldvalue = messages.StringField(32)
+
+
+class Attachment(messages.Message):
+ """Holds info about one attachment."""
+ attachment_id = messages.IntegerField(21, required=True)
+ # Client-side filename
+ filename = messages.StringField(22, required=True)
+ filesize = messages.IntegerField(23, required=True)
+ # File mime-type, or at least our best guess.
+ mimetype = messages.StringField(24, required=True)
+ deleted = messages.BooleanField(27, default=False)
+ gcs_object_id = messages.StringField(29, required=False)
+
+
+class IssueComment(messages.Message):
+ """Holds one issue description or one additional comment on an issue.
+
+ The IssueComment with the lowest timestamp is the issue description.
+ Next available tag: 52
+ """
+ id = messages.IntegerField(32)
+ # Issue ID of the issue that was commented on.
+ issue_id = messages.IntegerField(31, required=True)
+ project_id = messages.IntegerField(50)
+ # User who entered the comment
+ user_id = messages.IntegerField(4, required=True, default=0)
+ # Time when comment was entered (seconds).
+ timestamp = messages.IntegerField(5, required=True)
+ # Text of the comment
+ content = messages.StringField(6, required=True)
+ # Audit trail of changes made w/ this comment
+ amendments = messages.MessageField(Amendment, 10, repeated=True)
+
+ # Soft delete that can be undeleted.
+ # Deleted comments should not be shown to average users.
+ # If deleted, deleted_by contains the user id of user who deleted.
+ deleted_by = messages.IntegerField(13)
+
+ attachments = messages.MessageField(Attachment, 20, repeated=True)
+
+ # TODO(jrobbins): Always store unescaped text and let EZT do the
+ # escaping on output. Then I can eliminate this.
+ was_escaped = messages.BooleanField(25, default=True)
+
+ # Sequence number of the comment
+ # The field is optional for compatibility with code existing before
+ # this field was added.
+ sequence = messages.IntegerField(26)
+
+ # The body text of the inbound email that caused this issue comment
+ # to be automatically entered. If this field is non-empty, it means
+ # that the comment was added via an inbound email. Headers and attachments
+ # are not included.
+ inbound_message = messages.StringField(28)
+
+ is_spam = messages.BooleanField(51, default=False)
+
+class SavedQuery(messages.Message):
+ """Store a saved query, for either a project or a user."""
+ query_id = messages.IntegerField(1)
+ name = messages.StringField(2)
+ base_query_id = messages.IntegerField(3)
+ query = messages.StringField(4, required=True)
+
+ # For personal cross-project queries.
+ executes_in_project_ids = messages.IntegerField(5, repeated=True)
+
+ # For user saved queries.
+ subscription_mode = messages.StringField(6)
+
+
+class NotifyTriggers(messages.Enum):
+ """Issue tracker events that can trigger notification emails."""
+ NEVER = 0
+ ANY_COMMENT = 1
+ # TODO(jrobbins): ANY_CHANGE, OPENED_CLOSED, ETC.
+
+
+class FieldTypes(messages.Enum):
+ """Types of custom fields that Monorail supports."""
+ ENUM_TYPE = 1
+ INT_TYPE = 2
+ STR_TYPE = 3
+ USER_TYPE = 4
+ DATE_TYPE = 5
+ BOOL_TYPE = 6
+ # TODO(jrobbins): more types, see tracker.sql for all TODOs.
+
+
+class FieldDef(messages.Message):
+ """This PB stores info about one custom field definition."""
+ field_id = messages.IntegerField(1, required=True)
+ project_id = messages.IntegerField(2, required=True)
+ field_name = messages.StringField(3, required=True)
+ field_type = messages.EnumField(FieldTypes, 4, required=True)
+ applicable_type = messages.StringField(11)
+ applicable_predicate = messages.StringField(10)
+ is_required = messages.BooleanField(5, default=False)
+ is_multivalued = messages.BooleanField(6, default=False)
+ docstring = messages.StringField(7)
+ is_deleted = messages.BooleanField(8, default=False)
+ admin_ids = messages.IntegerField(9, repeated=True)
+
+ # validation details for int_type
+ min_value = messages.IntegerField(12)
+ max_value = messages.IntegerField(13)
+ # validation details for str_type
+ regex = messages.StringField(14)
+ # validation details for user_type
+ needs_member = messages.BooleanField(15, default=False)
+ needs_perm = messages.StringField(16)
+
+ # semantics for user_type fields
+ grants_perm = messages.StringField(17)
+ notify_on = messages.EnumField(NotifyTriggers, 18)
+
+
+class ComponentDef(messages.Message):
+ """This stores info about a component in a project."""
+ component_id = messages.IntegerField(1, required=True)
+ project_id = messages.IntegerField(2, required=True)
+ path = messages.StringField(3, required=True)
+ docstring = messages.StringField(4)
+ admin_ids = messages.IntegerField(5, repeated=True)
+ cc_ids = messages.IntegerField(6, repeated=True)
+ deprecated = messages.BooleanField(7, default=False)
+ created = messages.IntegerField(8)
+ creator_id = messages.IntegerField(9)
+ modified = messages.IntegerField(10)
+ modifier_id = messages.IntegerField(11)
+
+
+class FilterRule(messages.Message):
+ """Filter rules implement semantics as project-specific if-then rules."""
+ predicate = messages.StringField(10, required=True)
+
+ # If the predicate is satisfied, these actions set some of the derived_*
+ # fields on the issue: labels, status, owner, or CCs.
+ add_labels = messages.StringField(20, repeated=True)
+ default_status = messages.StringField(21)
+ default_owner_id = messages.IntegerField(22)
+ add_cc_ids = messages.IntegerField(23, repeated=True)
+ add_notify_addrs = messages.StringField(24, repeated=True)
+
+
+class StatusDef(messages.Message):
+ """Definition of one well-known issue status."""
+ status = messages.StringField(11, required=True)
+ means_open = messages.BooleanField(12, default=False)
+ status_docstring = messages.StringField(13)
+ deprecated = messages.BooleanField(14, default=False)
+
+
+class LabelDef(messages.Message):
+ """Definition of one well-known issue label."""
+ label = messages.StringField(21, required=True)
+ label_docstring = messages.StringField(22)
+ deprecated = messages.BooleanField(23, default=False)
+
+
+class TemplateDef(messages.Message):
+ """Definition of one issue template."""
+ template_id = messages.IntegerField(57)
+ name = messages.StringField(31, required=True)
+ content = messages.StringField(32, required=True)
+ summary = messages.StringField(33)
+ summary_must_be_edited = messages.BooleanField(34, default=False)
+ owner_id = messages.IntegerField(35)
+ status = messages.StringField(36)
+ # Note: labels field is considered to have been set iff summary was set.
+ labels = messages.StringField(37, repeated=True)
+ # This controls what is listed in the template drop-down menu. Users
+ # could still select any template by editing the URL, and that's OK.
+ members_only = messages.BooleanField(38, default=False)
+ # If no owner_id is specified, and owner_defaults_to_member is
+ # true, then when an issue is entered by a member, fill in the initial
+ # owner field with the signed in user's name.
+ owner_defaults_to_member = messages.BooleanField(39, default=True)
+ admin_ids = messages.IntegerField(41, repeated=True)
+
+ # Custom field values (other than enums)
+ field_values = messages.MessageField(FieldValue, 42, repeated=True)
+ # Components.
+ component_ids = messages.IntegerField(43, repeated=True)
+ component_required = messages.BooleanField(44, default=False)
+
+
+class ProjectIssueConfig(messages.Message):
+ """This holds all configuration info for one project.
+
+ That includes canned queries, well-known issue statuses,
+ and well-known issue labels.
+
+ "Well-known" means that they are always offered to the user in
+ drop-downs, even if there are currently no open issues that have
+ that label or status value. Deleting a well-known value from the
+ configuration does not change any issues that may still reference
+ that old label, and users are still free to use it.
+
+ Exclusive label prefixes mean that a given issue may only have one
+ label that begins with that prefix. E.g., Priority should be
+ exclusive so that no issue can be labeled with both Priority-High
+ and Priority-Low.
+ """
+
+ project_id = messages.IntegerField(60)
+ well_known_statuses = messages.MessageField(StatusDef, 10, repeated=True)
+ # If an issue's status is being set to one of these, show "Merge with:".
+ statuses_offer_merge = messages.StringField(14, repeated=True)
+
+ well_known_labels = messages.MessageField(LabelDef, 20, repeated=True)
+ exclusive_label_prefixes = messages.StringField(2, repeated=True)
+
+ field_defs = messages.MessageField(FieldDef, 5, repeated=True)
+ component_defs = messages.MessageField(ComponentDef, 6, repeated=True)
+
+ templates = messages.MessageField(TemplateDef, 30, repeated=True)
+
+ default_template_for_developers = messages.IntegerField(3, required=True)
+ default_template_for_users = messages.IntegerField(4, required=True)
+
+ # These options control the default appearance of the issue list or grid.
+ default_col_spec = messages.StringField(50, default='')
+ default_sort_spec = messages.StringField(51, default='')
+ default_x_attr = messages.StringField(52, default='')
+ default_y_attr = messages.StringField(53, default='')
+
+ # This bool controls whether users are able to enter odd-ball
+ # labels and status values, or whether they are limited to only the
+ # well-known labels and status values defined on the admin subtab.
+ restrict_to_known = messages.BooleanField(16, default=False)
+
+ # Allow special projects to have a custom URL for the "New issue" link.
+ custom_issue_entry_url = messages.StringField(56)
diff --git a/appengine/monorail/proto/user_pb2.py b/appengine/monorail/proto/user_pb2.py
new file mode 100644
index 0000000..5859cf0
--- /dev/null
+++ b/appengine/monorail/proto/user_pb2.py
@@ -0,0 +1,122 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for Monorail users."""
+
+from protorpc import messages
+
+
class ActionLimit(messages.Message):
  """In-memory business object for action rate limiting.

  We will keep track of the number of actions
  of various types by individual users and limit each user's ability
  to perform a large number of those actions.  E.g., no one user can
  create too many new projects.

  Our application code checks three kinds of action limits:
  1. A soft limit on the number of actions in a period of time.
     If this soft limit is exceeded, the user will need to solve a CAPTCHA.
  2. A hard limit on the number of actions in a period of time.
     If this hard limit is exceeded, the requested actions will fail.
  3. A lifetime limit.  The user cannot perform this type of action more
     than this many times, ever.  We can adjust the lifetime limit
     for individual users who contact us.

  The numeric values for the actual limits are coded as constants in our
  application.  Only the lifetime limit is stored in this PB, and then only
  if it differs from the default.
  """
  # Number of times that the user has performed this type of action recently.
  recent_count = messages.IntegerField(1, required=True, default=0)

  # Time of most recent counter reset in seconds.
  # If (Now - reset_timestamp) > threshold, recent_count may be zeroed.
  reset_timestamp = messages.IntegerField(2, required=True, default=0)

  # Number of times that the user has performed this type of action ever.
  lifetime_count = messages.IntegerField(3, required=True, default=0)

  # This field is only present for users who have contacted us and
  # asked us to increase their lifetime limit.  When present, this value
  # overrides the application's built-in default limit.
  lifetime_limit = messages.IntegerField(4, default=0)

  # These fields are only present for users who have contacted us and
  # asked us to increase their period limits.  When present, these values
  # override the application's built-in default soft and hard limits.
  period_soft_limit = messages.IntegerField(5, default=0)
  period_hard_limit = messages.IntegerField(6, default=0)
+
+
class IssueUpdateNav(messages.Enum):
  """Pref for where a project member goes after an issue update."""
  UP_TO_LIST = 0       # Back to issue list or grid view.
  STAY_SAME_ISSUE = 1  # Show the same issue with the update.
  NEXT_IN_LIST = 2     # Triage mode: go to next issue, if any.
+
+
class User(messages.Message):
  """In-memory business object for representing users."""
  # Is this user a site administrator?
  is_site_admin = messages.BooleanField(4, required=True, default=False)

  # User notification preferences.  These preferences describe when
  # a user is sent an email notification after an issue has changed.
  # The user is notified if either of the following is true:
  #  1. notify_issue_change is True and the user is named in the
  #     issue's Owner or CC field.
  #  2. notify_starred_issue_change is True and the user has starred
  #     the issue.
  notify_issue_change = messages.BooleanField(5, default=True)
  notify_starred_issue_change = messages.BooleanField(6, default=True)

  # This user has been banned, and this string describes why. All access
  # to Monorail pages should be disabled.
  banned = messages.StringField(7, default='')

  # User action counts and limits.
  project_creation_limit = messages.MessageField(ActionLimit, 8)
  issue_comment_limit = messages.MessageField(ActionLimit, 9)
  issue_attachment_limit = messages.MessageField(ActionLimit, 10)
  issue_bulk_edit_limit = messages.MessageField(ActionLimit, 11)
  # When True, the action limits above are not enforced for this user.
  ignore_action_limits = messages.BooleanField(13, default=False)

  # Where to navigate the user after an issue update (see IssueUpdateNav).
  after_issue_update = messages.EnumField(
      IssueUpdateNav, 29, default=IssueUpdateNav.STAY_SAME_ISSUE)

  # Should we obfuscate the user's email address and require solving a captcha
  # to reveal it entirely? The default value corresponds to requiring users to
  # opt into publishing their identities, but our code ensures that the
  # opposite takes place for Gmail accounts.
  obscure_email = messages.BooleanField(26, default=True)

  # The email address chosen by the user to reveal on the site.
  email = messages.StringField(27)

  # The user has seen these cue cards and dismissed them.
  dismissed_cues = messages.StringField(32, repeated=True)

  # Sticky state for show/hide widget on people details page.
  keep_people_perms_open = messages.BooleanField(33, default=False)

  # Whether this account was marked as deleted, and when.
  # NOTE(review): timestamp unit assumed to be seconds — confirm with callers.
  deleted = messages.BooleanField(39, default=False)
  deleted_timestamp = messages.IntegerField(40, default=0)

  # Preference about previews shown on hover; semantics inferred from the
  # field name — confirm against the UI code that reads it.
  preview_on_hover = messages.BooleanField(42, default=True)

  # Rate limits for spam flagging and API requests.
  flag_spam_limit = messages.MessageField(ActionLimit, 43)
  api_request_limit = messages.MessageField(ActionLimit, 44)
+
def MakeUser():
  """Create and return a new user record in RAM.

  Returns:
    A new User PB with every ActionLimit sub-message initialized.
  """
  user = User()
  # Each rate-limited action type gets its own fresh ActionLimit record.
  for limit_field_name in ('project_creation_limit', 'issue_comment_limit',
                           'issue_attachment_limit', 'issue_bulk_edit_limit',
                           'flag_spam_limit', 'api_request_limit'):
    setattr(user, limit_field_name, ActionLimit())
  return user
diff --git a/appengine/monorail/proto/usergroup_pb2.py b/appengine/monorail/proto/usergroup_pb2.py
new file mode 100644
index 0000000..bea5a1a
--- /dev/null
+++ b/appengine/monorail/proto/usergroup_pb2.py
@@ -0,0 +1,46 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Protocol buffers for Monorail usergroups."""
+
+from protorpc import messages
+
+
class MemberVisibility(messages.Enum):
  """Enum controlling who can see the members of a user group."""
  OWNERS = 0   # Only group owners may view the member list.
  MEMBERS = 1  # Any group member may view the member list.
  ANYONE = 2   # The member list is visible to anyone.
+
+
class GroupType(messages.Enum):
  """Type of external group to import."""
  CHROME_INFRA_AUTH = 0
  MDB = 1
  # NOTE(review): enum value 2 is skipped — presumably reserved or retired;
  # confirm before reusing it.
  BAGGINS = 3
+
+
class UserGroupSettings(messages.Message):
  """In-memory business object for representing user group settings."""
  # Who may view the group's membership list (see MemberVisibility).
  who_can_view_members = messages.EnumField(
      MemberVisibility, 1, default=MemberVisibility.MEMBERS)
  # Type of external group to sync from, if any (see GroupType).
  ext_group_type = messages.EnumField(GroupType, 2)
  # Timestamp of the last sync with the external group; defaults to 0.
  last_sync_time = messages.IntegerField(
      3, default=0, variant=messages.Variant.INT32)
  # Project IDs associated with this group.
  # NOTE(review): "friend" semantics inferred from the field name — confirm.
  friend_projects = messages.IntegerField(
      4, repeated=True, variant=messages.Variant.INT32)
# TODO(jrobbins): add settings to control who can join, etc.
+
+
def MakeSettings(who_can_view_members_str, ext_group_type_str=None,
                 last_sync_time=0, friend_projects=None):
  """Create and return a new user group settings record in RAM.

  Args:
    who_can_view_members_str: string name of a MemberVisibility enum value.
    ext_group_type_str: optional string name of a GroupType enum value.
    last_sync_time: int timestamp of the last external-group sync.
    friend_projects: optional list of int project IDs.  Defaults to an
        empty list.  (Using None instead of a [] default avoids the
        shared-mutable-default-argument pitfall.)

  Returns:
    A new UserGroupSettings PB.
  """
  settings = UserGroupSettings(
      who_can_view_members=MemberVisibility(who_can_view_members_str.upper()))
  if ext_group_type_str:
    settings.ext_group_type = GroupType(ext_group_type_str.upper())
  settings.last_sync_time = last_sync_time
  settings.friend_projects = friend_projects or []
  return settings
diff --git a/appengine/monorail/queue.yaml b/appengine/monorail/queue.yaml
new file mode 100644
index 0000000..af214d1
--- /dev/null
+++ b/appengine/monorail/queue.yaml
@@ -0,0 +1,16 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+queue:
+- name: default
+ rate: 4/s
+ max_concurrent_requests: 50
+
+- name: outboundemail
+ rate: 5/s
+
+- name: spamexport
+ rate: 1/d
+ max_concurrent_requests: 1
diff --git a/appengine/monorail/registerpages.py b/appengine/monorail/registerpages.py
new file mode 100644
index 0000000..1fba7d9
--- /dev/null
+++ b/appengine/monorail/registerpages.py
@@ -0,0 +1,336 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""This file sets up all the urls for monorail pages."""
+
+import logging
+
+import webapp2
+
+import settings
+
+from features import autolink
+from features import cues
+from features import filterrules
+from features import inboundemail
+from features import notify
+from features import savedqueries
+from features import spammodel
+from features import stars
+
+from framework import artifactcollision
+from framework import banned
+from framework import csp_report
+from framework import excessiveactivity
+from framework import reap
+from framework import registerpages_helpers
+from framework import tokenrefresh
+from framework import urls
+
+from project import peopledetail
+from project import peoplelist
+from project import projectadmin
+from project import projectadminadvanced
+from project import projectexport
+from project import projectsummary
+from project import projectupdates
+from project import wikiredirect
+
+from search import backendnonviewable
+from search import backendsearch
+
+from services import cachemanager_svc
+
+from sitewide import custom_404
+from sitewide import groupadmin
+from sitewide import groupcreate
+from sitewide import groupdetail
+from sitewide import grouplist
+from sitewide import hostinghome
+from sitewide import moved
+from sitewide import projectcreate
+from sitewide import userprofile
+from sitewide import userprojects
+from sitewide import usersettings
+from sitewide import userupdates
+
+from tracker import componentcreate
+from tracker import componentdetail
+from tracker import fieldcreate
+from tracker import fielddetail
+from tracker import issueadmin
+from tracker import issueadvsearch
+from tracker import issueattachment
+from tracker import issueattachmenttext
+from tracker import issuebulkedit
+from tracker import issuedetail
+from tracker import issueentry
+from tracker import issuelist
+from tracker import issuelistcsv
+from tracker import issueoptions
+from tracker import issueoriginal
+from tracker import issueexport
+from tracker import issueimport
+from tracker import issuereindex
+from tracker import issuetips
+from tracker import spam
+
+
class ServletRegistry(object):
  """Builds the webapp2 route list for all Monorail page and task handlers."""

  _PROJECT_NAME_REGEX = r'[a-z0-9][-a-z0-9]*[a-z0-9]'
  _USERNAME_REGEX = r'[-+\w=.%]+(@([a-z0-9]+\.)*[a-z0-9]+)?'

  def __init__(self):
    self.routes = []

  def _AddRoute(self, path_regex, servlet_class, method, does_write=False):
    """Add a GET or POST handler to our webapp2 route list.

    Args:
      path_regex: string with webapp2 URL template regex.
      servlet_class: a subclass of class Servlet.
      method: string 'GET' or 'POST'.
      does_write: True if the servlet could write to the database, we skip
          registering such servlets when the site is in read_only mode. GET
          handlers never write. Most, but not all, POST handlers do write.
    """
    if settings.read_only and does_write:
      logging.info('Not registering %r because site is read-only', path_regex)
      # TODO(jrobbins): register a helpful error page instead.
    else:
      self.routes.append(
          webapp2.Route(path_regex, handler=servlet_class, methods=[method]))

  def _SetupServlets(self, spec_dict, base='', post_does_write=True):
    """Register each of the given servlets."""
    for get_uri, servlet_class in spec_dict.items():
      self._AddRoute(base + get_uri, servlet_class, 'GET')
      post_uri = get_uri + ('edit.do' if get_uri.endswith('/') else '.do')
      self._AddRoute(base + post_uri, servlet_class, 'POST',
                     does_write=post_does_write)

  def _SetupProjectServlets(self, spec_dict, post_does_write=True):
    """Register each of the given servlets in the project URI space."""
    self._SetupServlets(
        spec_dict, base='/p/<project_name:%s>' % self._PROJECT_NAME_REGEX,
        post_does_write=post_does_write)

  def _SetupUserServlets(self, spec_dict, post_does_write=True):
    """Register each of the given servlets in the user URI space."""
    self._SetupServlets(
        spec_dict, base='/u/<viewed_username:%s>' % self._USERNAME_REGEX,
        post_does_write=post_does_write)

  def _SetupGroupServlets(self, spec_dict, post_does_write=True):
    """Register each of the given servlets in the user group URI space."""
    self._SetupServlets(
        spec_dict, base='/g/<viewed_username:%s>' % self._USERNAME_REGEX,
        post_does_write=post_does_write)

  def Register(self, services):
    """Register all the monorail request handlers."""
    self._RegisterFrameworkHandlers()
    self._RegisterSitewideHandlers()
    self._RegisterProjectHandlers()
    self._RegisterIssueHandlers()
    self._RegisterRedirects()
    self._RegisterInboundMail()
    autolink.RegisterAutolink(services)
    # Error pages should be the last to register.
    self._RegisterErrorPages()
    logging.info('Finished registering monorail handlers.')
    return self.routes

  def _RegisterProjectHandlers(self):
    """Register page and form handlers that operate within a project."""
    self._SetupProjectServlets({
        urls.ADMIN_INTRO: projectsummary.ProjectSummary,
        urls.PEOPLE_LIST: peoplelist.PeopleList,
        urls.PEOPLE_DETAIL: peopledetail.PeopleDetail,
        urls.PEOPLE_DETAIL_PREFS_JSON: peopledetail.PagePrefs,
        urls.UPDATES_LIST: projectupdates.ProjectUpdates,
        urls.ADMIN_META: projectadmin.ProjectAdmin,
        urls.ADMIN_ADVANCED: projectadminadvanced.ProjectAdminAdvanced,
        urls.ADMIN_EXPORT: projectexport.ProjectExport,
        urls.ADMIN_EXPORT_JSON: projectexport.ProjectExportJSON,
        })

  def _RegisterIssueHandlers(self):
    """Register page and form handlers for the issue tracker."""
    self._SetupServlets({
        # Note: there is both a site-wide and per-project issue list.
        urls.ISSUE_LIST: issuelist.IssueList,

        # Note: the following are at URLs that are not externally accessible.
        urls.BACKEND_SEARCH: backendsearch.BackendSearch,
        urls.BACKEND_NONVIEWABLE: backendnonviewable.BackendNonviewable,
        urls.RECOMPUTE_DERIVED_FIELDS_TASK:
            filterrules.RecomputeDerivedFieldsTask,
        urls.REINDEX_QUEUE_CRON: filterrules.ReindexQueueCron,
        urls.NOTIFY_ISSUE_CHANGE_TASK: notify.NotifyIssueChangeTask,
        urls.NOTIFY_BLOCKING_CHANGE_TASK: notify.NotifyBlockingChangeTask,
        urls.NOTIFY_BULK_CHANGE_TASK: notify.NotifyBulkChangeTask,
        urls.OUTBOUND_EMAIL_TASK: notify.OutboundEmailTask,
        urls.SPAM_DATA_EXPORT_TASK: spammodel.TrainingDataExportTask,
        })

    self._SetupProjectServlets({
        urls.ISSUE_LIST: issuelist.IssueList,
        urls.ISSUE_LIST_CSV: issuelistcsv.IssueListCsv,
        urls.ISSUE_REINDEX: issuereindex.IssueReindex,
        urls.ISSUE_DETAIL: issuedetail.IssueDetail,
        urls.ISSUE_COMMENT_DELETION_JSON: issuedetail.IssueCommentDeletion,
        urls.ISSUE_ATTACHMENT_DELETION_JSON:
            issueattachment.IssueAttachmentDeletion,
        urls.ISSUE_FLAGSPAM_JSON: spam.FlagSpamForm,
        urls.ISSUE_SETSTAR_JSON: issuedetail.SetStarForm,
        urls.ISSUE_DELETE_JSON: issuedetail.IssueDeleteForm,
        urls.ISSUE_ENTRY: issueentry.IssueEntry,
        urls.ISSUE_OPTIONS_JSON: issueoptions.IssueOptionsJSON,
        urls.ISSUE_TIPS: issuetips.IssueSearchTips,
        urls.ISSUE_ATTACHMENT: issueattachment.AttachmentPage,
        urls.ISSUE_ATTACHMENT_TEXT: issueattachmenttext.AttachmentText,
        urls.ISSUE_BULK_EDIT: issuebulkedit.IssueBulkEdit,
        urls.COMPONENT_CHECKNAME_JSON: componentcreate.CheckComponentNameJSON,
        urls.COMPONENT_CREATE: componentcreate.ComponentCreate,
        urls.COMPONENT_DETAIL: componentdetail.ComponentDetail,
        urls.FIELD_CHECKNAME_JSON: fieldcreate.CheckFieldNameJSON,
        urls.FIELD_CREATE: fieldcreate.FieldCreate,
        urls.FIELD_DETAIL: fielddetail.FieldDetail,
        urls.WIKI_LIST: wikiredirect.WikiRedirect,
        urls.WIKI_PAGE: wikiredirect.WikiRedirect,
        urls.ADMIN_STATUSES: issueadmin.AdminStatuses,
        urls.ADMIN_LABELS: issueadmin.AdminLabels,
        urls.ADMIN_RULES: issueadmin.AdminRules,
        urls.ADMIN_TEMPLATES: issueadmin.AdminTemplates,
        urls.ADMIN_COMPONENTS: issueadmin.AdminComponents,
        urls.ADMIN_VIEWS: issueadmin.AdminViews,
        urls.ISSUE_ORIGINAL: issueoriginal.IssueOriginal,
        urls.ISSUE_EXPORT: issueexport.IssueExport,
        urls.ISSUE_EXPORT_JSON: issueexport.IssueExportJSON,
        urls.ISSUE_IMPORT: issueimport.IssueImport,
        urls.SPAM_MODERATION_QUEUE: spam.ModerationQueue,
        })

    self._SetupUserServlets({
        urls.SAVED_QUERIES: savedqueries.SavedQueries,
        })

    # These servlets accept POST, but never write to the database, so they can
    # still be used when the site is read-only.
    self._SetupProjectServlets({
        urls.ISSUE_ADVSEARCH: issueadvsearch.IssueAdvancedSearch,
        }, post_does_write=False)

    list_redir = registerpages_helpers.MakeRedirectInScope(
        urls.ISSUE_LIST, 'p')
    self._SetupProjectServlets({
        '': list_redir,
        '/': list_redir,
        '/issues': list_redir,
        '/issues/': list_redir,
        })

    list_redir = registerpages_helpers.MakeRedirect(urls.ISSUE_LIST)
    self._SetupServlets({
        '/issues': list_redir,
        '/issues/': list_redir,
        })

  def _RegisterFrameworkHandlers(self):
    """Register page and form handlers for framework functionality."""
    self._SetupServlets({
        urls.CSP_REPORT: csp_report.CSPReportPage,
        urls.TOKEN_REFRESH: tokenrefresh.TokenRefresh,

        # These are only shown to users iff specific conditions are met.
        urls.NONPROJECT_COLLISION: artifactcollision.ArtifactCollision,
        urls.EXCESSIVE_ACTIVITY: excessiveactivity.ExcessiveActivity,
        urls.BANNED: banned.Banned,
        urls.PROJECT_MOVED: moved.ProjectMoved,

        # These are not externally accessible
        urls.RAMCACHE_CONSOLIDATE_CRON: cachemanager_svc.RamCacheConsolidate,
        urls.REAP_CRON: reap.Reap,
        urls.SPAM_DATA_EXPORT_CRON: spammodel.TrainingDataExport,
        })

    self._SetupProjectServlets({
        # Collisions can happen on artifacts within a project or outside.
        urls.ARTIFACT_COLLISION: artifactcollision.ArtifactCollision,
        })

  def _RegisterSitewideHandlers(self):
    """Register page and form handlers that aren't associated with projects."""
    self._SetupServlets({
        urls.PROJECT_CREATE: projectcreate.ProjectCreate,
        urls.CHECK_PROJECT_NAME_JSON: projectcreate.CheckProjectNameJSON,
        # The user settings page is a site-wide servlet, not under /u/.
        urls.USER_SETTINGS: usersettings.UserSettings,
        urls.USER_PROJECTS_JSON: userprojects.ProjectsJsonFeed,
        urls.HOSTING_HOME: hostinghome.HostingHome,
        urls.STARS_JSON: stars.SetStarsFeed,
        urls.CUES_JSON: cues.SetCuesFeed,
        urls.GROUP_CREATE: groupcreate.GroupCreate,
        urls.GROUP_LIST: grouplist.GroupList,
        urls.GROUP_DELETE: grouplist.GroupList,
        })

    self._SetupUserServlets({
        urls.USER_PROFILE: userprofile.UserProfile,
        urls.USER_UPDATES_PROJECTS: userupdates.UserUpdatesProjects,
        urls.USER_UPDATES_DEVELOPERS: userupdates.UserUpdatesDevelopers,
        urls.USER_UPDATES_MINE: userupdates.UserUpdatesIndividual,
        })

    profile_redir = registerpages_helpers.MakeRedirectInScope(
        urls.USER_PROFILE, 'u')
    self._SetupUserServlets({'': profile_redir})

    self._SetupGroupServlets({
        urls.GROUP_DETAIL: groupdetail.GroupDetail,
        urls.GROUP_ADMIN: groupadmin.GroupAdmin,
        })

  def _RegisterRedirects(self):
    """Register redirects among pages inside monorail."""
    redirect = registerpages_helpers.MakeRedirect('/hosting/')
    self._SetupServlets({
        '/hosting': redirect,
        '/p': redirect,
        '/p/': redirect,
        '/u': redirect,
        '/u/': redirect,
        '/': redirect,
        })

    redirect = registerpages_helpers.MakeRedirectInScope(
        urls.PEOPLE_LIST, 'p')
    self._SetupProjectServlets({
        '/people': redirect,
        '/people/': redirect,
        })

    redirect = registerpages_helpers.MakeRedirect(urls.GROUP_LIST)
    self._SetupServlets({'/g': redirect})

    group_redir = registerpages_helpers.MakeRedirectInScope(
        urls.USER_PROFILE, 'g')
    self._SetupGroupServlets({'': group_redir})

  def _RegisterInboundMail(self):
    """Register a handler for inbound email."""
    self.routes.append(webapp2.Route(
        '/_ah/mail/<project_addr:.+>',
        handler=inboundemail.InboundEmail,
        methods=['POST', 'GET']))

  def _RegisterErrorPages(self):
    """Register handlers for errors."""
    self._AddRoute(
        '/p/<project_name:%s>/<unrecognized:.+>' % self._PROJECT_NAME_REGEX,
        custom_404.ErrorPage, 'GET')
+
diff --git a/appengine/monorail/search/__init__.py b/appengine/monorail/search/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/search/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/search/ast2ast.py b/appengine/monorail/search/ast2ast.py
new file mode 100644
index 0000000..19c93ae
--- /dev/null
+++ b/appengine/monorail/search/ast2ast.py
@@ -0,0 +1,411 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Convert a user's issue search AST into a simplified AST.
+
+This phase of query processing simplifies the user's query by looking up
+the int IDs of any labels, statuses, or components that are mentioned by
+name in the original query. The data needed for lookups is typically cached
+in RAM in each backend job, so this will not put much load on the DB. The
+simplified ASTs are later converted into SQL which is simpler and has
+fewer joins.
+
+The simplified main query is better because:
+ + It is clearly faster, especially in the most common case where config
+ data is in RAM.
+ + Since less RAM is used to process the main query on each shard, query
+ execution time is more consistent with less variability under load. Less
+ variability is good because the user must wait for the slowest shard.
+ + The config tables (LabelDef, StatusDef, etc.) exist only on the master, so
+ they cannot be mentioned in a query that runs on a shard.
+ + The query string itself is shorter when numeric IDs are substituted, which
+ means that we can handle user queries with long lists of labels in a
+ reasonable-sized query.
+ + It bisects the complexity of the operation: it's easier to test and debug
+ the lookup and simplification logic plus the main query logic this way
+ than it would be to deal with an even more complex SQL main query.
+"""
+
+import logging
+import re
+
+from proto import ast_pb2
+from proto import tracker_pb2
+# TODO(jrobbins): if BUILTIN_ISSUE_FIELDS was passed through, I could
+# remove this dep.
+from search import query2ast
+from services import user_svc
+from tracker import tracker_bizobj
+
+
def PreprocessAST(
    cnxn, query_ast, project_ids, services, harmonized_config):
  """Preprocess the query by doing lookups so that the SQL query is simpler.

  Args:
    cnxn: connection to SQL database.
    query_ast: user query abstract syntax tree parsed by query2ast.py.
    project_ids: collection of int project IDs to use to look up status values
        and labels.
    services: Connections to persistence layer for users and configs.
    harmonized_config: harmonized config for all projects being searched.

  Returns:
    A new QueryAST PB with simplified conditions.  String values for labels,
    statuses, and components are replaced with the int IDs of those items,
    and is:open is distilled down to status_id != closed_status_ids.
  """
  simplified_conjs = [
      ast_pb2.Conjunction(conds=[
          _PreprocessCond(cnxn, cond, project_ids, services, harmonized_config)
          for cond in conj.conds])
      for conj in query_ast.conjunctions]
  return ast_pb2.QueryAST(conjunctions=simplified_conjs)
+
+
def _PreprocessIsOpenCond(
    cnxn, cond, project_ids, services, _harmonized_config):
  """Preprocess an is:open cond into status_id != closed_status_ids."""
  # Gather the closed status IDs for the projects being searched, or for
  # the whole site when no specific projects were given.
  if not project_ids:
    closed_status_ids = services.config.LookupClosedStatusIDsAnyProject(cnxn)
  else:
    closed_status_ids = []
    for pid in project_ids:
      closed_status_ids.extend(
          services.config.LookupClosedStatusIDs(cnxn, pid))

  searching_open = bool(cond.int_values[0])
  return ast_pb2.Condition(
      op=ast_pb2.QueryOp.NE if searching_open else ast_pb2.QueryOp.EQ,
      field_defs=[query2ast.BUILTIN_ISSUE_FIELDS['status_id']],
      int_values=closed_status_ids)
+
+
def _PreprocessIsBlockedCond(
    _cnxn, cond, _project_ids, _services, _harmonized_config):
  """Preprocess an is:blocked cond into issues that are blocked."""
  if cond.int_values[0]:
    blocked_op = ast_pb2.QueryOp.IS_DEFINED
  else:
    blocked_op = ast_pb2.QueryOp.IS_NOT_DEFINED
  return ast_pb2.Condition(
      op=blocked_op,
      field_defs=[query2ast.BUILTIN_ISSUE_FIELDS['blockedon_id']])
+
+
def _PreprocessBlockedOnCond(
    cnxn, cond, project_ids, services, _harmonized_config):
  """Preprocess blockedon=xyz and has:blockedon conds.

  Preprocesses blockedon=xyz cond into blockedon_id:issue_ids.
  Preprocesses has:blockedon cond into issues that are blocked on other issues.
  """
  blockedon_field = query2ast.BUILTIN_ISSUE_FIELDS['blockedon_id']
  blocked_on_iids = _GetIssueIDsFromLocalIdsCond(
      cnxn, cond, project_ids, services)
  return ast_pb2.Condition(
      op=_TextOpToIntOp(cond.op),
      field_defs=[blockedon_field],
      int_values=blocked_on_iids)
+
+
def _PreprocessBlockingCond(
    cnxn, cond, project_ids, services, _harmonized_config):
  """Preprocess blocking=xyz and has:blocking conds.

  Preprocesses blocking=xyz cond into blocking_id:issue_ids.
  Preprocesses has:blocking cond into issues that are blocking other issues.
  """
  blocking_field = query2ast.BUILTIN_ISSUE_FIELDS['blocking_id']
  blocking_iids = _GetIssueIDsFromLocalIdsCond(
      cnxn, cond, project_ids, services)
  return ast_pb2.Condition(
      op=_TextOpToIntOp(cond.op),
      field_defs=[blocking_field],
      int_values=blocking_iids)
+
+
def _GetIssueIDsFromLocalIdsCond(cnxn, cond, project_ids, services):
  """Returns global IDs from the local IDs provided in the cond.

  Args:
    cnxn: connection to SQL database.
    cond: parsed Condition PB whose str_values are issue refs like "proj:123".
    project_ids: collection of int project IDs being searched.
    services: connections to the persistence layer.

  Returns:
    List of global issue IDs resolved from the (project, local_id) refs.

  Raises:
    ValueError: if a ref has no project prefix and no single default
        project can be inferred from the search scope.
  """
  # Get {project_name: project} for all projects in project_ids.
  ids_to_projects = services.project.GetProjects(cnxn, project_ids)
  ref_projects = {pb.project_name: pb for pb in ids_to_projects.itervalues()}
  # Populate default_project_name if there is only one project id provided.
  default_project_name = None
  if len(ref_projects) == 1:
    default_project_name = ref_projects.values()[0].project_name

  # Populate refs with (project_name, local_id) pairs.
  refs = []
  for val in cond.str_values:
    project_name, local_id = tracker_bizobj.ParseIssueRef(val)
    if not project_name:
      if not default_project_name:
        # TODO(rmistry): Support the below.
        raise ValueError(
            'Searching for issues across multiple/all projects without '
            'project prefixes is ambiguous and is currently not supported.')
      project_name = default_project_name
    refs.append((project_name, int(local_id)))

  return services.issue.ResolveIssueRefs(
      cnxn, ref_projects, default_project_name, refs)
+
+
def _PreprocessStatusCond(
    cnxn, cond, project_ids, services, _harmonized_config):
  """Preprocess a status=names cond into status_id=IDs."""
  if not project_ids:
    status_ids = services.config.LookupStatusIDsAnyProject(
        cnxn, cond.str_values)
  else:
    status_ids = []
    for pid in project_ids:
      status_ids.extend(
          services.config.LookupStatusIDs(cnxn, pid, cond.str_values))

  return ast_pb2.Condition(
      op=_TextOpToIntOp(cond.op),
      field_defs=[query2ast.BUILTIN_ISSUE_FIELDS['status_id']],
      int_values=status_ids)
+
+
def _IsEqualityOp(op):
  """Return True for EQ and NE."""
  return op == ast_pb2.QueryOp.EQ or op == ast_pb2.QueryOp.NE
+
+
def _IsDefinedOp(op):
  """Return True for IS_DEFINED and IS_NOT_DEFINED."""
  return (op == ast_pb2.QueryOp.IS_DEFINED or
          op == ast_pb2.QueryOp.IS_NOT_DEFINED)
+
+
def _TextOpToIntOp(op):
  """If a query is optimized from string to ID matching, use an equality op."""
  # Text-matching ops become their equality counterparts; all other ops
  # pass through unchanged.
  conversion = {
      ast_pb2.QueryOp.TEXT_HAS: ast_pb2.QueryOp.EQ,
      ast_pb2.QueryOp.KEY_HAS: ast_pb2.QueryOp.EQ,
      ast_pb2.QueryOp.NOT_TEXT_HAS: ast_pb2.QueryOp.NE,
      }
  return conversion.get(op, op)
+
+
+def _MakePrefixRegex(cond):
+ """Return a regex to match strings that start with cond values."""
+ all_prefixes = '|'.join(map(re.escape, cond.str_values))
+ return re.compile(r'(%s)-.+' % all_prefixes, re.I)
+
+
+def _MakeKeyValueRegex(cond):
+ """Return a regex to match the first token and remaining text separately."""
+ keys, values = zip(*map(lambda x: x.split('-', 1), cond.str_values))
+ if len(set(keys)) != 1:
+ raise ValueError(
+ "KeyValue query with multiple different keys: %r" % cond.str_values)
+ all_values = '|'.join(map(re.escape, values))
+ return re.compile(r'%s-.*\b(%s)\b.*' % (keys[0], all_values), re.I)
+
+
+def _MakeWordBoundaryRegex(cond):
+ """Return a regex to match the cond values as whole words."""
+ all_words = '|'.join(map(re.escape, cond.str_values))
+ return re.compile(r'.*\b(%s)\b.*' % all_words, re.I)
+
+
def _PreprocessLabelCond(
    cnxn, cond, project_ids, services, _harmonized_config):
  """Preprocess a label=names cond into label_id=IDs."""
  # Decide up front whether labels can be looked up by exact value
  # (regex is None) or must be matched against a compiled regex.
  if _IsEqualityOp(cond.op):
    regex = None
  elif _IsDefinedOp(cond.op):
    regex = _MakePrefixRegex(cond)
  elif cond.op == ast_pb2.QueryOp.KEY_HAS:
    regex = _MakeKeyValueRegex(cond)
  else:
    regex = _MakeWordBoundaryRegex(cond)

  if project_ids:
    label_ids = []
    for project_id in project_ids:
      if regex is None:
        label_ids.extend(services.config.LookupLabelIDs(
            cnxn, project_id, cond.str_values))
      else:
        label_ids.extend(services.config.LookupIDsOfLabelsMatching(
            cnxn, project_id, regex))
  else:
    if regex is None:
      label_ids = services.config.LookupLabelIDsAnyProject(
          cnxn, cond.str_values)
    else:
      label_ids = services.config.LookupIDsOfLabelsMatchingAnyProject(
          cnxn, regex)

  return ast_pb2.Condition(
      op=_TextOpToIntOp(cond.op),
      field_defs=[query2ast.BUILTIN_ISSUE_FIELDS['label_id']],
      int_values=label_ids)
+
+
def _PreprocessComponentCond(
    cnxn, cond, project_ids, services, harmonized_config):
  """Preprocess a component= or component:name cond into component_id=IDs."""
  exact = _IsEqualityOp(cond.op)
  if not project_ids:
    # Site-wide search: there is no harmonized_config to consult, so ask
    # the config service to match component paths across all projects.
    component_ids = services.config.FindMatchingComponentIDsAnyProject(
        cnxn, cond.str_values, exact=exact)
  else:
    # Searching within specific projects: harmonized_config holds the
    # config data for all of them.
    component_ids = []
    for comp_path in cond.str_values:
      component_ids.extend(tracker_bizobj.FindMatchingComponentIDs(
          comp_path, harmonized_config, exact=exact))

  return ast_pb2.Condition(
      op=_TextOpToIntOp(cond.op),
      field_defs=[query2ast.BUILTIN_ISSUE_FIELDS['component_id']],
      int_values=component_ids)
+
+
def _PreprocessExactUsers(cnxn, cond, user_service, id_fields):
  """Preprocess a foo=emails cond into foo_id=IDs, if exact user match.

  This preprocessing step converts string conditions to int ID conditions.
  E.g., [owner=email] to [owner_id=ID].  It only does so in cases
  where (a) the email was "me", so it was already converted to a string of
  digits in the search pipeline, or (b) it is "user@domain" which resolves to
  a known Monorail user.  It is also possible to search for, e.g.,
  [owner:substring], but such searches remain 'owner' field searches rather
  than 'owner_id', and they cannot be combined with the "me" keyword.

  Args:
    cnxn: connection to the DB.
    cond: original parsed query Condition PB.
    user_service: connection to user persistence layer.
    id_fields: list of the search fields to use if the conversion to IDs
        succeeds.

  Returns:
    A new Condition PB that checks the id_field.  Or, the original cond.
  """
  op = _TextOpToIntOp(cond.op)
  if _IsDefinedOp(op):
    # No need to look up any IDs if we are just testing for any defined value.
    return ast_pb2.Condition(op=op, field_defs=id_fields)

  if not _IsEqualityOp(op):
    # Only whole-value comparisons can be converted; substring ops cannot.
    logging.info('could not convert to IDs because op is %r', op)
    return cond

  user_ids = []
  for val in cond.str_values:
    try:
      user_ids.append(int(val))
      continue
    except ValueError:
      pass  # Not a numeric ID; fall through to an email lookup.
    try:
      user_ids.append(user_service.LookupUserID(cnxn, val))
    except user_svc.NoSuchUserException:
      logging.info('could not convert user %r to int ID', val)
      return cond  # Preprocessing failed; stick with the original cond.

  return ast_pb2.Condition(op=op, field_defs=id_fields, int_values=user_ids)
+
+
def _PreprocessOwnerCond(
    cnxn, cond, _project_ids, services, _harmonized_config):
  """Preprocess a owner=emails cond into owner_id=IDs, if exact user match."""
  id_fields = [query2ast.BUILTIN_ISSUE_FIELDS['owner_id']]
  return _PreprocessExactUsers(cnxn, cond, services.user, id_fields)
+
+
def _PreprocessCcCond(
    cnxn, cond, _project_ids, services, _harmonized_config):
  """Preprocess a cc=emails cond into cc_id=IDs, if exact user match."""
  id_fields = [query2ast.BUILTIN_ISSUE_FIELDS['cc_id']]
  return _PreprocessExactUsers(cnxn, cond, services.user, id_fields)
+
+
def _PreprocessReporterCond(
    cnxn, cond, _project_ids, services, _harmonized_config):
  """Preprocess a reporter=emails cond into reporter_id=IDs, if exact."""
  id_fields = [query2ast.BUILTIN_ISSUE_FIELDS['reporter_id']]
  return _PreprocessExactUsers(cnxn, cond, services.user, id_fields)
+
+
def _PreprocessStarredByCond(
    cnxn, cond, _project_ids, services, _harmonized_config):
  """Preprocess a starredby=emails cond into starredby_id=IDs, if exact."""
  id_fields = [query2ast.BUILTIN_ISSUE_FIELDS['starredby_id']]
  return _PreprocessExactUsers(cnxn, cond, services.user, id_fields)
+
+
def _PreprocessCommentByCond(
    cnxn, cond, _project_ids, services, _harmonized_config):
  """Preprocess a commentby=emails cond into commentby_id=IDs, if exact."""
  id_fields = [query2ast.BUILTIN_ISSUE_FIELDS['commentby_id']]
  return _PreprocessExactUsers(cnxn, cond, services.user, id_fields)
+
+
def _PreprocessCustomCond(cnxn, cond, services):
  """Preprocess a custom_user_field=emails cond into IDs, if exact matches."""
  # TODO(jrobbins): better support for ambiguous fields.
  # For now, if any field is USER_TYPE and the value being searched
  # for is the email address of an existing account, it will convert
  # to a user ID and we go with exact ID matching.  Otherwise, we
  # leave the cond as-is for ast2select to do string matching on.
  user_field_defs = [
      fd for fd in cond.field_defs
      if fd.field_type == tracker_pb2.FieldTypes.USER_TYPE]
  if not user_field_defs:
    return cond
  return _PreprocessExactUsers(cnxn, cond, services.user, user_field_defs)
+
+
# Dispatch table: built-in search field name -> function that rewrites conds
# on that field into ID-based conds before SQL generation.
_PREPROCESSORS = {
    'open': _PreprocessIsOpenCond,
    'blocked': _PreprocessIsBlockedCond,
    'blockedon': _PreprocessBlockedOnCond,
    'blocking': _PreprocessBlockingCond,
    'status': _PreprocessStatusCond,
    'label': _PreprocessLabelCond,
    'component': _PreprocessComponentCond,
    'owner': _PreprocessOwnerCond,
    'cc': _PreprocessCcCond,
    'reporter': _PreprocessReporterCond,
    'starredby': _PreprocessStarredByCond,
    'commentby': _PreprocessCommentByCond,
    }
+
+
def _PreprocessCond(
    cnxn, cond, project_ids, services, harmonized_config):
  """Preprocess query by looking up status, label and component IDs."""
  # All the fields in a cond share the same name because they are parsed
  # from a user query term, and the term syntax allows just one field name.
  field_name = cond.field_defs[0].field_name
  assert all(fd.field_name == field_name for fd in cond.field_defs)

  if any(fd.field_id for fd in cond.field_defs):
    # Case 1: The user is searching custom fields.  There can't be a mix of
    # custom and built-in fields because built-in field names are reserved
    # and take priority over any conflicting ones.
    assert all(fd.field_id for fd in cond.field_defs)
    return _PreprocessCustomCond(cnxn, cond, services)

  # Case 2: The user is searching a built-in field.
  preproc = _PREPROCESSORS.get(field_name)
  if preproc is None:
    # We don't have a preprocessor for it; leave the cond unchanged.
    return cond
  return preproc(cnxn, cond, project_ids, services, harmonized_config)
diff --git a/appengine/monorail/search/ast2select.py b/appengine/monorail/search/ast2select.py
new file mode 100644
index 0000000..d2a3445
--- /dev/null
+++ b/appengine/monorail/search/ast2select.py
@@ -0,0 +1,545 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Convert a user's issue search AST into SQL clauses.
+
+The main query is done on the Issues table.
+ + Some simple conditions are implemented as WHERE conditions on the Issue
+ table rows. These are generated by the _Compare() function.
+ + More complex conditions are implemented via a "LEFT JOIN ... ON ..." clause
+ plus a check in the WHERE clause to select only rows where the join's ON
+ condition was satisfied. These are generated by appending a clause to
+ the left_joins list plus calling _CompareAlreadyJoined(). Each such left
+ join defines a unique alias to keep it separate from other conditions.
+
+The functions that generate SQL snippets need to insert table names, column
+names, alias names, and value placeholders into the generated string. These
+functions use the string format() method and the "{varname}" syntax to avoid
+confusion with the "%s" syntax used for SQL value placeholders.
+"""
+
+import logging
+
+from framework import sql
+from proto import ast_pb2
+from proto import tracker_pb2
+from services import tracker_fulltext
+
+
# Search field name -> Issue table column that holds it natively, so these
# conds need no JOIN at all.
NATIVE_SEARCHABLE_FIELDS = {
    'id': 'local_id',
    'stars': 'star_count',
    'attachments': 'attachment_count',
    'opened': 'opened',
    'closed': 'closed',
    'modified': 'modified',
    'spam': 'is_spam',
    }
+
+
def BuildSQLQuery(query_ast):
  """Translate the user's query into an SQL query.

  Args:
    query_ast: user query abstract syntax tree parsed by query2ast.py.

  Returns:
    A pair of lists (left_joins, where) to use when building the SQL SELECT
    statement.  Each of them is a list of (str, [val, ...]) pairs.
  """
  # TODO(jrobbins): Handle "OR" in queries.  For now, we just process the
  # first conjunction and assume that it is the only one.
  assert len(query_ast.conjunctions) == 1, 'TODO(jrobbins) handle "OR" queries'
  conj = query_ast.conjunctions[0]

  left_joins = []
  where = []
  for cond_num, cond in enumerate(conj.conds):
    cond_left_joins, cond_where = _ProcessCond(cond_num, cond)
    left_joins.extend(cond_left_joins)
    where.extend(cond_where)

  return left_joins, where
+
+
def _ProcessBlockedOnIDCond(cond, alias, _user_alias):
  """Convert a blockedon_id=issue_id cond to SQL."""
  # blocking_id=False selects the blockedon direction of the relation.
  return _GetBlockIDCond(cond, alias, blocking_id=False)
+
+
def _ProcessBlockingIDCond(cond, alias, _user_alias):
  """Convert a blocking_id:1,2 cond to SQL."""
  # blocking_id=True selects the blocking direction of the relation.
  return _GetBlockIDCond(cond, alias, blocking_id=True)
+
+
def _GetBlockIDCond(cond, alias, blocking_id=False):
  """Convert either a blocking_id or blockedon_id cond to SQL.

  If blocking_id is False then it is treated as a blockedon_id request,
  otherwise it is treated as a blocking_id request.
  """
  if blocking_id:
    matching_issue_col, ret_issue_col = 'issue_id', 'dst_issue_id'
  else:
    matching_issue_col, ret_issue_col = 'dst_issue_id', 'issue_id'

  kind_cond_str, kind_cond_args = _Compare(
      alias, ast_pb2.QueryOp.EQ, tracker_pb2.FieldTypes.STR_TYPE, 'kind',
      ['blockedon'])
  join_clause = (
      'IssueRelation AS {alias} ON Issue.id = {alias}.{ret_col} AND '
      '{kind_cond}'.format(
          alias=alias, ret_col=ret_issue_col, kind_cond=kind_cond_str))
  left_joins = [(join_clause, kind_cond_args)]

  field_type, field_values = _GetFieldTypeAndValues(cond)
  if field_values:
    where = [_Compare(
        alias, ast_pb2.QueryOp.EQ, field_type, matching_issue_col,
        field_values)]
  else:
    # No field values were specified: display all issues which have the
    # property at all.
    where = [_CompareAlreadyJoined(alias, cond.op, ret_issue_col)]

  return left_joins, where
+
+
def _GetFieldTypeAndValues(cond):
  """Returns the field type and values to use from the condition.

  This function should be used when we do not know what values are present
  on the condition.  E.g., cond.int_values is set if ast2ast.py
  preprocessing was done first; otherwise str_values may be set instead.
  If both int values and str values exist on the condition then the int
  values are returned.
  """
  if cond.int_values:
    return tracker_pb2.FieldTypes.INT_TYPE, cond.int_values
  return tracker_pb2.FieldTypes.STR_TYPE, cond.str_values
+
+
def _ProcessOwnerCond(cond, alias, _user_alias):
  """Convert an owner:substring cond to SQL."""
  # Match either the explicit or the derived owner's email address.
  join_clause = (
      'User AS {alias} ON (Issue.owner_id = {alias}.user_id '
      'OR Issue.derived_owner_id = {alias}.user_id)'.format(alias=alias))
  email_cond = _Compare(
      alias, cond.op, tracker_pb2.FieldTypes.STR_TYPE, 'email',
      cond.str_values)
  return [(join_clause, [])], [email_cond]
+
+
def _ProcessOwnerIDCond(cond, _alias, _user_alias):
  """Convert an owner_id=user_id cond to SQL."""
  field_type, field_values = _GetFieldTypeAndValues(cond)
  explicit_str, explicit_args = _Compare(
      'Issue', cond.op, field_type, 'owner_id', field_values)
  derived_str, derived_args = _Compare(
      'Issue', cond.op, field_type, 'derived_owner_id', field_values)
  if cond.op in (ast_pb2.QueryOp.NE, ast_pb2.QueryOp.NOT_TEXT_HAS):
    # Negated: both the explicit and derived owner must fail to match.
    where = [(explicit_str, explicit_args), (derived_str, derived_args)]
  else:
    combiner = (
        ' AND ' if cond.op == ast_pb2.QueryOp.IS_NOT_DEFINED else ' OR ')
    where = [
        ('(' + explicit_str + combiner + derived_str + ')',
         explicit_args + derived_args)]

  return [], where
+
+
def _ProcessReporterCond(cond, alias, _user_alias):
  """Convert a reporter:substring cond to SQL."""
  join_clause = (
      'User AS {alias} ON Issue.reporter_id = {alias}.user_id'.format(
          alias=alias))
  email_cond = _Compare(
      alias, cond.op, tracker_pb2.FieldTypes.STR_TYPE, 'email',
      cond.str_values)
  return [(join_clause, [])], [email_cond]
+
+
def _ProcessReporterIDCond(cond, _alias, _user_alias):
  """Convert a reporter_id=user_id cond to SQL."""
  # The reporter ID is stored directly on the Issue row, so no JOIN needed.
  field_type, field_values = _GetFieldTypeAndValues(cond)
  return [], [_Compare(
      'Issue', cond.op, field_type, 'reporter_id', field_values)]
+
+
def _ProcessCcCond(cond, alias, user_alias):
  """Convert a cc:substring cond to SQL."""
  email_cond_str, email_cond_args = _Compare(
      user_alias, cond.op, tracker_pb2.FieldTypes.STR_TYPE, 'email',
      cond.str_values)
  # Note: email_cond_str will have parens, if needed.
  join_clause = (
      '(Issue2Cc AS {alias} JOIN User AS {user_alias} '
      'ON {alias}.cc_id = {user_alias}.user_id AND {email_cond}) '
      'ON Issue.id = {alias}.issue_id AND '
      'Issue.shard = {alias}.issue_shard'.format(
          alias=alias, user_alias=user_alias, email_cond=email_cond_str))
  return ([(join_clause, email_cond_args)],
          [_CompareAlreadyJoined(user_alias, cond.op, 'email')])
+
+
def _ProcessCcIDCond(cond, alias, _user_alias):
  """Convert a cc_id=user_id cond to SQL."""
  join_str = (
      'Issue2Cc AS {alias} ON Issue.id = {alias}.issue_id AND '
      'Issue.shard = {alias}.issue_shard'.format(
          alias=alias))
  if cond.op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
    # Presence checks only need the bare join; the WHERE clause decides.
    left_joins = [(join_str, [])]
  else:
    field_type, field_values = _GetFieldTypeAndValues(cond)
    cc_cond_str, cc_cond_args = _Compare(
        alias, ast_pb2.QueryOp.EQ, field_type, 'cc_id', field_values)
    left_joins = [('%s AND %s' % (join_str, cc_cond_str), cc_cond_args)]

  return left_joins, [_CompareAlreadyJoined(alias, cond.op, 'cc_id')]
+
+
def _ProcessStarredByCond(cond, alias, user_alias):
  """Convert a starredby:substring cond to SQL."""
  email_cond_str, email_cond_args = _Compare(
      user_alias, cond.op, tracker_pb2.FieldTypes.STR_TYPE, 'email',
      cond.str_values)
  # Note: email_cond_str will have parens, if needed.
  join_clause = (
      '(IssueStar AS {alias} JOIN User AS {user_alias} '
      'ON {alias}.user_id = {user_alias}.user_id AND {email_cond}) '
      'ON Issue.id = {alias}.issue_id'.format(
          alias=alias, user_alias=user_alias, email_cond=email_cond_str))
  return ([(join_clause, email_cond_args)],
          [_CompareAlreadyJoined(user_alias, cond.op, 'email')])
+
+
def _ProcessStarredByIDCond(cond, alias, _user_alias):
  """Convert a starredby_id=user_id cond to SQL."""
  join_str = 'IssueStar AS {alias} ON Issue.id = {alias}.issue_id'.format(
      alias=alias)
  if cond.op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
    left_joins = [(join_str, [])]
  else:
    field_type, field_values = _GetFieldTypeAndValues(cond)
    star_cond_str, star_cond_args = _Compare(
        alias, ast_pb2.QueryOp.EQ, field_type, 'user_id', field_values)
    left_joins = [('%s AND %s' % (join_str, star_cond_str), star_cond_args)]

  return left_joins, [_CompareAlreadyJoined(alias, cond.op, 'user_id')]
+
+
def _ProcessCommentByCond(cond, alias, user_alias):
  """Convert a commentby:substring cond to SQL."""
  email_cond_str, email_cond_args = _Compare(
      user_alias, cond.op, tracker_pb2.FieldTypes.STR_TYPE, 'email',
      cond.str_values)
  # Note: email_cond_str will have parens, if needed.
  join_clause = (
      '(Comment AS {alias} JOIN User AS {user_alias} '
      'ON {alias}.commenter_id = {user_alias}.user_id AND {email_cond}) '
      'ON Issue.id = {alias}.issue_id'.format(
          alias=alias, user_alias=user_alias, email_cond=email_cond_str))
  return ([(join_clause, email_cond_args)],
          [_CompareAlreadyJoined(user_alias, cond.op, 'email')])
+
+
def _ProcessCommentByIDCond(cond, alias, _user_alias):
  """Convert a commentby_id=user_id cond to SQL."""
  left_joins = [(
      'Comment AS {alias} ON Issue.id = {alias}.issue_id'.format(
          alias=alias), [])]
  if cond.op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
    where = [_CompareAlreadyJoined(alias, cond.op, 'commenter_id')]
  else:
    # Unlike similar conds, the commenter filter lives in the WHERE clause.
    field_type, field_values = _GetFieldTypeAndValues(cond)
    where = [_Compare(
        alias, cond.op, field_type, 'commenter_id', field_values)]

  return left_joins, where
+
+
def _ProcessStatusIDCond(cond, _alias, _user_alias):
  """Convert a status_id=ID cond to SQL."""
  field_type, field_values = _GetFieldTypeAndValues(cond)
  explicit_str, explicit_args = _Compare(
      'Issue', cond.op, field_type, 'status_id', field_values)
  derived_str, derived_args = _Compare(
      'Issue', cond.op, field_type, 'derived_status_id', field_values)
  if cond.op in (ast_pb2.QueryOp.IS_NOT_DEFINED, ast_pb2.QueryOp.NE):
    # Negated: the explicit and the derived status must each satisfy it.
    where = [(explicit_str, explicit_args), (derived_str, derived_args)]
  else:
    where = [
        ('(' + explicit_str + ' OR ' + derived_str + ')',
         explicit_args + derived_args)]

  return [], where
+
+
def _ProcessLabelIDCond(cond, alias, _user_alias):
  """Convert a label_id=ID cond to SQL."""
  field_type, field_values = _GetFieldTypeAndValues(cond)
  label_cond_str, label_cond_args = _Compare(
      alias, ast_pb2.QueryOp.EQ, field_type, 'label_id', field_values)
  join_str = (
      'Issue2Label AS {alias} ON Issue.id = {alias}.issue_id AND '
      'Issue.shard = {alias}.issue_shard'.format(alias=alias))
  left_joins = [('%s AND %s' % (join_str, label_cond_str), label_cond_args)]
  return left_joins, [_CompareAlreadyJoined(alias, cond.op, 'label_id')]
+
+
def _ProcessComponentIDCond(cond, alias, _user_alias):
  """Convert a component_id=ID cond to SQL."""
  # This is a built-in field, so it shadows any other fields w/ the same name.
  join_str = (
      'Issue2Component AS {alias} ON Issue.id = {alias}.issue_id AND '
      'Issue.shard = {alias}.issue_shard'.format(alias=alias))
  if cond.op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
    left_joins = [(join_str, [])]
  else:
    field_type, field_values = _GetFieldTypeAndValues(cond)
    comp_cond_str, comp_cond_args = _Compare(
        alias, ast_pb2.QueryOp.EQ, field_type, 'component_id', field_values)
    left_joins = [('%s AND %s' % (join_str, comp_cond_str), comp_cond_args)]

  return left_joins, [_CompareAlreadyJoined(alias, cond.op, 'component_id')]
+
+
def _ProcessCustomFieldCond(cond, alias, user_alias):
  """Convert a custom field cond to SQL.

  Args:
    cond: user query cond parsed by query2ast.py.
    alias: SQL alias for the Issue2FieldValue join.
    user_alias: SQL alias for the User join (USER_TYPE fields only).

  Returns:
    A pair (left_joins, where) of lists of (str, [val, ...]) pairs.
  """
  # TODO(jrobbins): handle ambiguous field names that map to multiple
  # field definitions, especially for cross-project search.
  field_def = cond.field_defs[0]
  val_type = field_def.field_type

  join_str = (
      'Issue2FieldValue AS {alias} ON Issue.id = {alias}.issue_id AND '
      'Issue.shard = {alias}.issue_shard AND '
      '{alias}.field_id = %s'.format(alias=alias))
  left_joins = [(join_str, [field_def.field_id])]
  if val_type == tracker_pb2.FieldTypes.INT_TYPE:
    where = [_Compare(alias, cond.op, val_type, 'int_value', cond.int_values)]
  elif val_type == tracker_pb2.FieldTypes.STR_TYPE:
    where = [_Compare(alias, cond.op, val_type, 'str_value', cond.str_values)]
  elif val_type == tracker_pb2.FieldTypes.USER_TYPE:
    if cond.int_values or cond.op in (
        ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
      where = [_Compare(alias, cond.op, val_type, 'user_id', cond.int_values)]
    else:
      email_cond_str, email_cond_args = _Compare(
          user_alias, cond.op, val_type, 'email', cond.str_values)
      left_joins.append((
          'User AS {user_alias} ON {alias}.user_id = {user_alias}.user_id '
          'AND {email_cond}'.format(
              alias=alias, user_alias=user_alias, email_cond=email_cond_str),
          email_cond_args))
      where = [_CompareAlreadyJoined(user_alias, cond.op, 'email')]
  else:
    # Bug fix: previously an unhandled field type (e.g. DATE_TYPE) left
    # `where` unbound, raising UnboundLocalError.  Degrade gracefully by
    # treating the cond as a no-op instead of crashing the whole search.
    logging.error('unhandled custom field type %r in cond %r', val_type, cond)
    where = []

  return left_joins, where
+
+
def _ProcessAttachmentCond(cond, alias, _user_alias):
  """Convert has:attachment and -has:attachment cond to SQL."""
  if cond.op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
    # Presence checks use the denormalized count on the Issue row itself.
    return [], [_Compare(
        'Issue', cond.op, tracker_pb2.FieldTypes.INT_TYPE,
        'attachment_count', cond.int_values)]

  field_def = cond.field_defs[0]
  left_joins = [
      ('Attachment AS {alias} ON Issue.id = {alias}.issue_id AND '
       '{alias}.deleted = %s'.format(alias=alias),
       [False])]
  where = [_Compare(
      alias, cond.op, field_def.field_type, 'filename', cond.str_values)]
  return left_joins, where
+
+
# Dispatch table: search field name -> function that converts conds on that
# field into (left_joins, where) SQL fragments.
_PROCESSORS = {
    'owner': _ProcessOwnerCond,
    'owner_id': _ProcessOwnerIDCond,
    'reporter': _ProcessReporterCond,
    'reporter_id': _ProcessReporterIDCond,
    'cc': _ProcessCcCond,
    'cc_id': _ProcessCcIDCond,
    'starredby': _ProcessStarredByCond,
    'starredby_id': _ProcessStarredByIDCond,
    'commentby': _ProcessCommentByCond,
    'commentby_id': _ProcessCommentByIDCond,
    'status_id': _ProcessStatusIDCond,
    'label_id': _ProcessLabelIDCond,
    'component_id': _ProcessComponentIDCond,
    'blockedon_id': _ProcessBlockedOnIDCond,
    'blocking_id': _ProcessBlockingIDCond,
    'attachment': _ProcessAttachmentCond,
    }
+
+
def _ProcessCond(cond_num, cond):
  """Translate one term of the user's search into an SQL query.

  Args:
    cond_num: integer cond number used to make distinct local variable names.
    cond: user query cond parsed by query2ast.py.

  Returns:
    A pair of lists (left_joins, where) to use when building the SQL SELECT
    statement.  Each of them is a list of (str, [val, ...]) pairs.
  """
  alias = 'Cond%d' % cond_num
  user_alias = 'User%d' % cond_num
  # Note: a condition like [x=y] has field_name "x", there may be multiple
  # field definitions that match "x", but they will all have field_name "x".
  field_def = cond.field_defs[0]
  assert all(field_def.field_name == fd.field_name for fd in cond.field_defs)
  field_name = field_def.field_name

  if field_name in NATIVE_SEARCHABLE_FIELDS:
    # Compare directly against a column of the Issue table: no JOIN needed.
    col = NATIVE_SEARCHABLE_FIELDS[field_name]
    where = [_Compare(
        'Issue', cond.op, field_def.field_type, col,
        cond.str_values or cond.int_values)]
    return [], where

  if field_name in _PROCESSORS:
    # We have a processor for that built-in field.
    return _PROCESSORS[field_name](cond, alias, user_alias)

  if field_def.field_id:  # it is a search on a custom field
    return _ProcessCustomFieldCond(cond, alias, user_alias)

  if (field_name in tracker_fulltext.ISSUE_FULLTEXT_FIELDS or
      field_name == 'any_field'):
    pass  # handled by full-text search.
  else:
    logging.error('untranslated search cond %r', cond)

  return [], []
+
+
def _Compare(alias, op, val_type, col, vals):
  """Return an SQL comparison for the given values. For use in WHERE or ON.

  Args:
    alias: String name of the table or alias defined in a JOIN clause.
    op: One of the operators defined in ast_pb2.py.
    val_type: One of the value types defined in ast_pb2.py.
    col: string column name to compare to vals.
    vals: list of values that the user is searching for.

  Returns:
    (cond_str, cond_args) where cond_str is a SQL condition that may contain
    some %s placeholders, and cond_args is the list of values that fill those
    placeholders.  If the condition string contains any AND or OR operators,
    the whole expression is put inside parens.

  Raises:
    NoPossibleResults: The user's query is impossible to ever satisfy, e.g.,
      it requires matching an empty set of labels.
  """
  vals_ph = sql.PlaceHolders(vals)
  # Label, status, and email comparisons are done case-insensitively.
  if col in ['label', 'status', 'email']:
    alias_col = 'LOWER(%s.%s)' % (alias, col)
  else:
    alias_col = '%s.%s' % (alias, col)

  def Fmt(cond_str):
    return cond_str.format(alias_col=alias_col, vals_ph=vals_ph)

  is_numeric = val_type in [tracker_pb2.FieldTypes.DATE_TYPE,
                            tracker_pb2.FieldTypes.INT_TYPE]
  no_value = 0 if is_numeric else ''
  if op == ast_pb2.QueryOp.IS_DEFINED:
    return Fmt('({alias_col} IS NOT NULL AND {alias_col} != %s)'), [no_value]
  if op == ast_pb2.QueryOp.IS_NOT_DEFINED:
    return Fmt('({alias_col} IS NULL OR {alias_col} = %s)'), [no_value]

  if is_numeric:
    # Free-text ops degenerate to equality ops on numeric columns.
    if op == ast_pb2.QueryOp.TEXT_HAS:
      op = ast_pb2.QueryOp.EQ
    if op == ast_pb2.QueryOp.NOT_TEXT_HAS:
      op = ast_pb2.QueryOp.NE

  if op == ast_pb2.QueryOp.EQ:
    if not vals:
      raise NoPossibleResults('Column %s has no possible value' % alias_col)
    if len(vals) == 1:
      return Fmt('{alias_col} = %s'), vals
    return Fmt('{alias_col} IN ({vals_ph})'), vals

  if op == ast_pb2.QueryOp.NE:
    if not vals:
      return 'TRUE', []  # a no-op that matches every row.
    if len(vals) == 1:
      comp = Fmt('{alias_col} != %s')
    else:
      comp = Fmt('{alias_col} NOT IN ({vals_ph})')
    # NULL never compares unequal in SQL, so accept NULL explicitly.
    return '(%s IS NULL OR %s)' % (alias_col, comp), vals

  # Note: These operators do not support quick-OR.
  val = vals[0]

  if op == ast_pb2.QueryOp.GT:
    return Fmt('{alias_col} > %s'), [val]
  if op == ast_pb2.QueryOp.LT:
    return Fmt('{alias_col} < %s'), [val]
  if op == ast_pb2.QueryOp.GE:
    return Fmt('{alias_col} >= %s'), [val]
  if op == ast_pb2.QueryOp.LE:
    return Fmt('{alias_col} <= %s'), [val]

  if op == ast_pb2.QueryOp.TEXT_MATCHES:
    return Fmt('{alias_col} LIKE %s'), [val]
  if op == ast_pb2.QueryOp.NOT_TEXT_MATCHES:
    return Fmt('({alias_col} IS NULL OR {alias_col} NOT LIKE %s)'), [val]

  if op == ast_pb2.QueryOp.TEXT_HAS:
    return Fmt('{alias_col} LIKE %s'), ['%' + val + '%']
  if op == ast_pb2.QueryOp.NOT_TEXT_HAS:
    return (Fmt('({alias_col} IS NULL OR {alias_col} NOT LIKE %s)'),
            ['%' + val + '%'])

  # NOTE(review): an unknown op falls through to an implicit None return;
  # callers that unpack the result would then raise TypeError — confirm
  # whether raising here instead would be safe for all callers.
  logging.error('unknown op: %r', op)
+
+
def _CompareAlreadyJoined(alias, op, col):
  """Return a WHERE clause comparison that checks that a join succeeded."""
  alias_col = '%s.%s' % (alias, col)
  positive_ops = (
      ast_pb2.QueryOp.EQ, ast_pb2.QueryOp.TEXT_HAS,
      ast_pb2.QueryOp.TEXT_MATCHES, ast_pb2.QueryOp.IS_DEFINED)
  negative_ops = (
      ast_pb2.QueryOp.NE, ast_pb2.QueryOp.NOT_TEXT_HAS,
      ast_pb2.QueryOp.NOT_TEXT_MATCHES, ast_pb2.QueryOp.IS_NOT_DEFINED)

  if op in positive_ops:
    # A successful LEFT JOIN leaves the column non-NULL.
    return '%s IS NOT NULL' % alias_col, []
  if op in negative_ops:
    return '%s IS NULL' % alias_col, []

  logging.error('unknown op: %r', op)
+
+
class Error(Exception):
  """Base exception class for errors raised by this module."""
+
+
class NoPossibleResults(Error):
  """The query could never match any rows from the database, so don't try."""
diff --git a/appengine/monorail/search/ast2sort.py b/appengine/monorail/search/ast2sort.py
new file mode 100644
index 0000000..b40e5c8
--- /dev/null
+++ b/appengine/monorail/search/ast2sort.py
@@ -0,0 +1,334 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Convert a user's issue sorting directives into SQL clauses.
+
+Some sort directives translate into simple ORDER BY column specifications.
+Other sort directives require that a LEFT JOIN be done to bring in
+relevant information that is then used in the ORDER BY.
+
+Sorting based on strings can slow down the DB because long sort-keys
+must be loaded into RAM, which means that fewer sort-keys fit into the
+DB's sorting buffers at a time. Also, Monorail defines the sorting
+order of well-known labels and statuses based on the order in which
+they are defined in the project's config. So, we determine the sort order of
+labels and status values before executing the query and then use the MySQL
+FIELD() function to sort their IDs in the desired order, without sorting
+strings.
+
+For more info, see the "Sorting in Monorail" and "What makes Monorail Fast?"
+design docs.
+"""
+
+import logging
+
+from framework import sql
+from proto import tracker_pb2
+
+
# Sort directives that map directly to columns on the Issue table.
NATIVE_SORTABLE_FIELDS = [
    'id', 'stars', 'attachments', 'opened', 'closed', 'modified']
+
# Sort directive name -> Issue table column name, where the two differ.
FIELDS_TO_COLUMNS = {
    'id': 'local_id',
    'stars': 'star_count',
    'attachments': 'attachment_count',
    }
+
+
def BuildSortClauses(
    sort_directives, harmonized_labels, harmonized_statuses,
    harmonized_fields):
  """Return LEFT JOIN and ORDER BY clauses needed to sort the results."""
  if not sort_directives:
    return [], []

  all_left_joins = []
  all_order_by = []
  for i, directive in enumerate(sort_directives):
    joins, orders = _OneSortDirective(
        i, directive, harmonized_labels, harmonized_statuses,
        harmonized_fields)
    all_left_joins.extend(joins)
    all_order_by.extend(orders)

  return all_left_joins, all_order_by
+
+
+def _ProcessProjectSD(fmt):
+ """Convert a 'project' sort directive into SQL."""
+ left_joins = []
+ order_by = [(fmt('Issue.project_id {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessReporterSD(fmt):
+ """Convert a 'reporter' sort directive into SQL."""
+ left_joins = [
+ (fmt('User AS {alias} ON Issue.reporter_id = {alias}.user_id'), [])]
+ order_by = [
+ (fmt('ISNULL({alias}.email) {sort_dir}'), []),
+ (fmt('{alias}.email {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessOwnerSD(fmt):
+ """Convert a 'owner' sort directive into SQL."""
+ left_joins = [
+ (fmt('User AS {alias} ON (Issue.owner_id = {alias}.user_id OR '
+ 'Issue.derived_owner_id = {alias}.user_id)'), [])]
+ order_by = [
+ (fmt('ISNULL({alias}.email) {sort_dir}'), []),
+ (fmt('{alias}.email {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessCcSD(fmt):
+ """Convert a 'cc' sort directive into SQL."""
+ # Note: derived cc's are included automatically.
+ # Note: This sorts on the best Cc, not all Cc addresses.
+ # Being more exact might require GROUP BY and GROUP_CONCAT().
+ left_joins = [
+ (fmt('Issue2Cc AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN User AS {alias}_user '
+ 'ON {alias}.cc_id = {alias}_user.user_id'), [])]
+ order_by = [
+ (fmt('ISNULL({alias}_user.email) {sort_dir}'), []),
+ (fmt('{alias}_user.email {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessComponentSD(fmt):
+ """Convert a 'component' sort directive into SQL."""
+ # Note: derived components are included automatically.
+ # Note: This sorts on the best component, not all of them.
+ # Being more exact might require GROUP BY and GROUP_CONCAT().
+ left_joins = [
+ (fmt('Issue2Component AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN ComponentDef AS {alias}_component '
+ 'ON {alias}.component_id = {alias}_component.id'), [])]
+ order_by = [
+ (fmt('ISNULL({alias}_component.path) {sort_dir}'), []),
+ (fmt('{alias}_component.path {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessSummarySD(fmt):
+ """Convert a 'summary' sort directive into SQL."""
+ left_joins = [
+ (fmt('IssueSummary AS {alias} ON Issue.id = {alias}.issue_id'), [])]
+ order_by = [(fmt('{alias}.summary {sort_dir}'), [])]
+ return left_joins, order_by
+
+
def _ProcessStatusSD(fmt, harmonized_statuses):
  """Convert a 'status' sort directive into SQL."""
  left_joins = []
  # Note: status_def_rows are already ordered by REVERSED rank.
  wk_status_ids = [
      stat_id for stat_id, rank, _ in harmonized_statuses
      if rank is not None]
  odd_status_ids = [
      stat_id for stat_id, rank, _ in harmonized_statuses
      if rank is None]
  wk_status_ph = sql.PlaceHolders(wk_status_ids)
  # Even though oddball statuses sort lexicographically, use FIELD to
  # determine the order so that the database sorts ints rather than strings
  # for speed.
  odd_status_ph = sql.PlaceHolders(odd_status_ids)

  order_by = []  # appended to below: both well-known and oddball can apply
  # Prefer the explicit status; fall back to the derived one.
  sort_col = ('IF(ISNULL(Issue.status_id), Issue.derived_status_id, '
              'Issue.status_id)')
  # Reverse sort by using rev_sort_dir because we want NULLs at the end.
  if wk_status_ids:
    order_by.append(
        (fmt('FIELD({sort_col}, {wk_status_ph}) {rev_sort_dir}',
             sort_col=sort_col, wk_status_ph=wk_status_ph),
         wk_status_ids))
  if odd_status_ids:
    order_by.append(
        (fmt('FIELD({sort_col}, {odd_status_ph}) {rev_sort_dir}',
             sort_col=sort_col, odd_status_ph=odd_status_ph),
         odd_status_ids))

  return left_joins, order_by
+
+
+def _ProcessBlockedSD(fmt):
+ """Convert a 'blocked' sort directive into SQL."""
+ left_joins = [
+ (fmt('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s'),
+ ['blockedon'])]
+ order_by = [(fmt('ISNULL({alias}.dst_issue_id) {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessBlockedOnSD(fmt):
+ """Convert a 'blockedon' sort directive into SQL."""
+ left_joins = [
+ (fmt('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s'),
+ ['blockedon'])]
+ order_by = [(fmt('ISNULL({alias}.dst_issue_id) {sort_dir}'), []),
+ (fmt('{alias}.dst_issue_id {sort_dir}'), [])]
+ return left_joins, order_by
+
+
+def _ProcessBlockingSD(fmt):
+ """Convert a 'blocking' sort directive into SQL."""
+ left_joins = [
+ (fmt('IssueRelation AS {alias} ON Issue.id = {alias}.dst_issue_id '
+ 'AND {alias}.kind = %s'),
+ ['blockedon'])]
+ order_by = [(fmt('ISNULL({alias}.issue_id) {sort_dir}'), []),
+ (fmt('{alias}.issue_id {sort_dir}'), [])]
+ return left_joins, order_by
+
+
def _ProcessCustomAndLabelSD(
    sd, harmonized_labels, harmonized_fields, alias, sort_dir, fmt):
  """Convert a label or custom field sort directive into SQL."""
  left_joins = []
  order_by = []

  # First, sort by any custom field whose name matches the directive, trying
  # each of the value columns that the field could be stored in.
  fd_list = [fd for fd in harmonized_fields
             if fd.field_name.lower() == sd]
  if fd_list:
    for value_type, value_column in [
        (tracker_pb2.FieldTypes.INT_TYPE, 'int_value'),
        (tracker_pb2.FieldTypes.STR_TYPE, 'str_value'),
        (tracker_pb2.FieldTypes.USER_TYPE, 'user_id')]:
      joins, orders = _CustomFieldSortClauses(
          fd_list, value_type, value_column, alias, sort_dir)
      left_joins.extend(joins)
      order_by.extend(orders)

  # Then, sort by any matching label prefix.
  label_left_joins, label_order_by = _LabelSortClauses(
      sd, harmonized_labels, fmt)
  left_joins.extend(label_left_joins)
  order_by.extend(label_order_by)

  return left_joins, order_by
+
+
def _LabelSortClauses(sd, harmonized_labels, fmt):
  """Give LEFT JOIN and ORDER BY terms for label sort directives."""
  # Note: derived labels should work automatically.

  # label_def_rows are already ordered by REVERSED rank.
  prefix = '%s-' % sd
  wk_label_ids = [
      label_id for label_id, rank, label in harmonized_labels
      if rank is not None and label.lower().startswith(prefix)]
  odd_label_ids = [
      label_id for label_id, rank, label in harmonized_labels
      if rank is None and label.lower().startswith(prefix)]
  all_label_ids = wk_label_ids + odd_label_ids

  if all_label_ids:
    left_joins = [
        (fmt('Issue2Label AS {alias} ON Issue.id = {alias}.issue_id '
             'AND {alias}.label_id IN ({all_label_ph})',
             all_label_ph=sql.PlaceHolders(all_label_ids)),
         all_label_ids)]
  else:
    left_joins = []

  order_by = []
  # Reverse sort by using rev_sort_dir because we want NULLs at the end.
  if wk_label_ids:
    order_by.append(
        (fmt('FIELD({alias}.label_id, {wk_label_ph}) {rev_sort_dir}',
             wk_label_ph=sql.PlaceHolders(wk_label_ids)),
         wk_label_ids))
  if odd_label_ids:
    # Even though oddball labels sort lexicographically, use FIELD to
    # determine the order so that the database sorts ints rather than
    # strings for speed.
    order_by.append(
        (fmt('FIELD({alias}.label_id, {odd_label_ph}) {rev_sort_dir}',
             odd_label_ph=sql.PlaceHolders(odd_label_ids)),
         odd_label_ids))

  return left_joins, order_by
+
+
def _CustomFieldSortClauses(
    fd_list, value_type, value_column, alias, sort_dir):
  """Give LEFT JOIN and ORDER BY terms for custom fields of the given type."""
  relevant_fd_list = [fd for fd in fd_list if fd.field_type == value_type]
  if not relevant_fd_list:
    return [], []

  field_ids_ph = sql.PlaceHolders(relevant_fd_list)

  def Fmt(sql_str):
    return sql_str.format(
        value_column=value_column, sort_dir=sort_dir,
        field_ids_ph=field_ids_ph, alias=alias + '_' + value_column)

  left_joins = [
      (Fmt('Issue2FieldValue AS {alias} ON Issue.id = {alias}.issue_id '
           'AND {alias}.field_id IN ({field_ids_ph})'),
       [fd.field_id for fd in relevant_fd_list])]

  if value_type == tracker_pb2.FieldTypes.USER_TYPE:
    # Sort on the user's email, not the numeric user ID; NULLs sort last.
    left_joins.append(
        (Fmt('User AS {alias}_user ON {alias}.user_id = {alias}_user.user_id'),
         []))
    order_by = [
        (Fmt('ISNULL({alias}_user.email) {sort_dir}'), []),
        (Fmt('{alias}_user.email {sort_dir}'), [])]
  else:
    # Unfortunately, this sorts on the best field value, not all of them.
    order_by = [
        (Fmt('ISNULL({alias}.{value_column}) {sort_dir}'), []),
        (Fmt('{alias}.{value_column} {sort_dir}'), [])]

  return left_joins, order_by
+
+
# Dispatch table: sort directive name -> function that emits its LEFT JOIN
# and ORDER BY clauses, given a Fmt helper.
_PROCESSORS = {
    'component': _ProcessComponentSD,
    'project': _ProcessProjectSD,
    'reporter': _ProcessReporterSD,
    'owner': _ProcessOwnerSD,
    'cc': _ProcessCcSD,
    'summary': _ProcessSummarySD,
    'blocked': _ProcessBlockedSD,
    'blockedon': _ProcessBlockedOnSD,
    'blocking': _ProcessBlockingSD,
    }
+
+
def _OneSortDirective(
    i, sd, harmonized_labels, harmonized_statuses, harmonized_fields):
  """Return SQL clauses to do the sorting for one sort directive."""
  alias = 'Sort%d' % i
  if sd.startswith('-'):
    sd = sd[1:]
    sort_dir, rev_sort_dir = 'DESC', 'ASC'
  else:
    sort_dir, rev_sort_dir = 'ASC', 'DESC'

  def Fmt(sql_str, **kwargs):
    return sql_str.format(
        sort_dir=sort_dir, rev_sort_dir=rev_sort_dir, alias=alias,
        sd=sd, col=FIELDS_TO_COLUMNS.get(sd, sd), **kwargs)

  if sd in NATIVE_SORTABLE_FIELDS:
    # Sort directly on a column of the Issue table.
    return [], [(Fmt('Issue.{col} {sort_dir}'), [])]

  if sd in _PROCESSORS:
    # We have a processor for that built-in sort directive.
    return _PROCESSORS[sd](Fmt)

  if sd == 'status':
    return _ProcessStatusSD(Fmt, harmonized_statuses)

  # Otherwise, it must be a field or label, or both.
  return _ProcessCustomAndLabelSD(
      sd, harmonized_labels, harmonized_fields, alias, sort_dir, Fmt)
diff --git a/appengine/monorail/search/backendnonviewable.py b/appengine/monorail/search/backendnonviewable.py
new file mode 100644
index 0000000..e7ec0a1
--- /dev/null
+++ b/appengine/monorail/search/backendnonviewable.py
@@ -0,0 +1,143 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet that searches for issues that the specified user cannot view.
+
+The GET request to a backend has query string parameters for the
+shard_id, a user_id, and list of project IDs. It returns a
+JSON-formatted dict with issue_ids that that user is not allowed to
+view. As a side-effect, this servlet updates multiple entries
+in memcache, including each "nonviewable:USER_ID;PROJECT_ID;SHARD_ID".
+"""
+
+import logging
+
+from google.appengine.api import memcache
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import jsonfeed
+from framework import permissions
+from framework import sql
+
+
+RESTRICT_VIEW_PATTERN = 'restrict-view-%'
+
+# We cache the set of IIDs that a given user cannot view, and we invalidate
+# that set when the issues are changed via Monorail. Also, we limit the
+# lifetime of those cache entries so that changes in a user's (direct or
+# indirect) roles
+# in a project will take effect.
+NONVIEWABLE_MEMCACHE_EXPIRATION = 15 * framework_constants.SECS_PER_MINUTE
+
+
+class BackendNonviewable(jsonfeed.InternalTask):
+  """JSON servlet for getting issue IDs that the specified user cannot view."""
+
+  # Only accept requests from other jobs of this same app (our frontends).
+  CHECK_SAME_APP = True
+
+  def HandleRequest(self, mr):
+    """Get all the issue IDs that the specified user cannot view.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary {project_id: [issue_id]} in JSON format.
+    """
+    if mr.shard_id is None:
+      return {'message': 'Cannot proceed without a valid shard_id.'}
+    user_id = mr.specified_logged_in_user_id
+    user = self.services.user.GetUser(mr.cnxn, user_id)
+    # Permissions depend on both direct and indirect (group) memberships.
+    effective_ids = self.services.usergroup.LookupMemberships(mr.cnxn, user_id)
+    if user_id:
+      effective_ids.add(user_id)
+    project_id = mr.specified_project_id
+    project = self.services.project.GetProject(mr.cnxn, project_id)
+
+    perms = permissions.GetPermissions(user, effective_ids, project)
+
+    nonviewable_iids = self.GetNonviewableIIDs(
+        mr.cnxn, user, effective_ids, project, perms, mr.shard_id)
+
+    # Cache the result together with the invalidation timestep so that the
+    # frontend can tell whether this entry is still fresh.
+    cached_ts = mr.invalidation_timestep
+    if mr.specified_project_id:
+      memcache.set(
+          'nonviewable:%d;%d;%d' % (project_id, user_id, mr.shard_id),
+          (nonviewable_iids, cached_ts),
+          time=NONVIEWABLE_MEMCACHE_EXPIRATION)
+    else:
+      # Site-wide search: the key is not scoped to any one project.
+      memcache.set(
+          'nonviewable:all;%d;%d' % (user_id, mr.shard_id),
+          (nonviewable_iids, cached_ts),
+          time=NONVIEWABLE_MEMCACHE_EXPIRATION)
+
+    logging.info('set nonviewable:%s;%d;%d to %r', project_id, user_id,
+                 mr.shard_id, nonviewable_iids)
+
+    return {
+        'nonviewable': nonviewable_iids,
+
+        # These are not used in the frontend, but useful for debugging.
+        'project_id': project_id,
+        'user_id': user_id,
+        'shard_id': mr.shard_id,
+        }
+
+  def GetNonviewableIIDs(
+      self, cnxn, user, effective_ids, project, perms, shard_id):
+    """Return a list of IIDs that the user cannot view in the project shard."""
+    # Project owners and site admins can see all issues.
+    if not perms.consider_restrictions:
+      return []
+
+    # There are two main parts to the computation that we do in parallel:
+    # getting at-risk IIDs and getting OK-iids.
+    # A second connection is used because the Promise does its work
+    # concurrently with this thread's use of cnxn.
+    cnxn_2 = sql.MonorailConnection()
+    at_risk_iids_promise = framework_helpers.Promise(
+        self.GetAtRiskIIDs, cnxn_2, user, effective_ids, project, perms, shard_id)
+    ok_iids = self.GetViewableIIDs(
+        cnxn, effective_ids, project.project_id, shard_id)
+    at_risk_iids = at_risk_iids_promise.WaitAndGetValue()
+
+    # The set of non-viewable issues is the at-risk ones minus the ones where
+    # the user is the reporter, owner, CC'd, or granted "View" permission.
+    nonviewable_iids = set(at_risk_iids).difference(ok_iids)
+
+    return list(nonviewable_iids)
+
+  def GetAtRiskIIDs(
+      self, cnxn, user, effective_ids, project, perms, shard_id):
+    """Return IIDs of restricted issues that user might not be able to view."""
+    at_risk_label_ids = self.GetPersonalAtRiskLabelIDs(
+        cnxn, user, effective_ids, project, perms)
+    at_risk_iids = self.services.issue.GetIIDsByLabelIDs(
+        cnxn, at_risk_label_ids, project.project_id, shard_id)
+
+    return at_risk_iids
+
+  def GetPersonalAtRiskLabelIDs(
+      self, cnxn, _user, effective_ids, project, perms):
+    """Return list of label_ids for restriction labels that user can't view."""
+    at_risk_label_ids = []
+    label_def_rows = self.services.config.GetLabelDefRowsAnyProject(
+        cnxn, where=[('LOWER(label) LIKE %s', [RESTRICT_VIEW_PATTERN])])
+    for label_id, _pid, _rank, label, _docstring, _hidden in label_def_rows:
+      label_lower = label.lower()
+      # E.g., label 'restrict-view-xyz' requires the 'xyz' permission;
+      # maxsplit=2 keeps any dashes within the permission name itself.
+      needed_perm = label_lower.split('-', 2)[-1]
+      if not perms.CanUsePerm(needed_perm, effective_ids, project, []):
+        at_risk_label_ids.append(label_id)
+
+    return at_risk_label_ids
+
+  def GetViewableIIDs(self, cnxn, effective_ids, project_id, shard_id):
+    """Return IIDs of issues that user can view because they participate."""
+    # Anon user is never reporter, owner, CC'd or granted perms.
+    if not effective_ids:
+      return []
+
+    ok_iids = self.services.issue.GetIIDsByParticipant(
+        cnxn, effective_ids, [project_id], shard_id)
+
+    return ok_iids
diff --git a/appengine/monorail/search/backendsearch.py b/appengine/monorail/search/backendsearch.py
new file mode 100644
index 0000000..19baba1
--- /dev/null
+++ b/appengine/monorail/search/backendsearch.py
@@ -0,0 +1,67 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A servlet that implements the backend of issues search.
+
+The GET request to a backend search has the same query string
+parameters as the issue list servlet. But, instead of rendering a
+HTML page, the backend search handler returns a JSON response with a
+list of matching, sorted issue IID numbers from this shard that are
+viewable by the requesting user.
+
+Each backend search request works within a single shard. Each
+besearch backend job can access any single shard while processing a request.
+
+The current user ID must be passed in from the frontend for permission
+checking. The user ID for the special "me" term can also be passed in
+(so that you can view another user's dashboard and "me" will refer to
+them).
+"""
+
+import logging
+import time
+
+from framework import jsonfeed
+from search import backendsearchpipeline
+from tracker import tracker_constants
+
+
+class BackendSearch(jsonfeed.InternalTask):
+  """JSON servlet for issue search in a GAE backend."""
+
+  # Only accept requests from other jobs of this same app (our frontends).
+  CHECK_SAME_APP = True
+  _DEFAULT_RESULTS_PER_PAGE = tracker_constants.DEFAULT_RESULTS_PER_PAGE
+
+  def HandleRequest(self, mr):
+    """Search for issues and respond with the IIDs of matching issues.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format with the unfiltered (not yet
+      permission-checked) issue IDs found in this shard, plus a flag
+      that tells whether the per-shard search limit was hit.
+    """
+    # Users are never logged into backends, so the frontends tell us.
+    logging.info('query_project_names is %r', mr.query_project_names)
+    pipeline = backendsearchpipeline.BackendSearchPipeline(
+        mr, self.services, self.profiler, self._DEFAULT_RESULTS_PER_PAGE,
+        mr.query_project_names, mr.specified_logged_in_user_id,
+        mr.specified_me_user_id)
+    pipeline.SearchForIIDs()
+
+    start = time.time()
+    # Backends work in parallel to precache issues that the
+    # frontend is very likely to need.
+    _prefetched_issues = self.services.issue.GetIssues(
+        mr.cnxn, pipeline.result_iids[:mr.start + mr.num],
+        shard_id=mr.shard_id)
+    logging.info('prefetched and memcached %d issues in %d ms',
+                 len(pipeline.result_iids[:mr.start + mr.num]),
+                 int(1000 * (time.time() - start)))
+
+    return {
+        'unfiltered_iids': pipeline.result_iids,
+        'search_limit_reached': pipeline.search_limit_reached,
+        }
diff --git a/appengine/monorail/search/backendsearchpipeline.py b/appengine/monorail/search/backendsearchpipeline.py
new file mode 100644
index 0000000..c3bb3bf
--- /dev/null
+++ b/appengine/monorail/search/backendsearchpipeline.py
@@ -0,0 +1,309 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Backend issue issue search and sorting.
+
+Each of several "besearch" backend jobs manages one shard of the overall set
+of issues in the system. The backend search pipeline retrieves the issues
+that match the user query, puts them into memcache, and returns them to
+the frontend search pipeline.
+"""
+
+import logging
+import re
+import time
+
+from google.appengine.api import memcache
+
+import settings
+from features import savedqueries_helpers
+from framework import framework_constants
+from framework import framework_helpers
+from framework import sorting
+from framework import sql
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import ast2ast
+from search import ast2select
+from search import ast2sort
+from search import query2ast
+from search import searchpipeline
+from services import tracker_fulltext
+from services import fulltext_helpers
+from tracker import tracker_bizobj
+
+
+# Used in constructing the at-risk query.
+AT_RISK_LABEL_RE = re.compile(r'^(restrict-view-.+)$', re.IGNORECASE)
+
+# Limit on the number of list items to show in debug log statements
+MAX_LOG = 200
+
+
+class BackendSearchPipeline(object):
+  """Manage the process of issue search, including Promises and caching.
+
+  Even though the code is divided into several methods, the public
+  methods should be called in sequence, so the execution of the code
+  is pretty much in the order of the source code lines here.
+  """
+
+  def __init__(
+      self, mr, services, prof, default_results_per_page,
+      query_project_names, logged_in_user_id, me_user_id):
+    """Set up the pipeline and kick off the search query Promise.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      services: connections to backend storage services.
+      prof: object whose Phase() context manager times pipeline stages.
+      default_results_per_page: int number of results per pagination page.
+      query_project_names: names of the projects to search.
+      logged_in_user_id: user ID passed along by the frontend, because
+          users are never actually logged into backends.
+      me_user_id: user ID to substitute for "me" keywords in the query.
+    """
+
+    self.mr = mr
+    self.profiler = prof
+    self.services = services
+    self.default_results_per_page = default_results_per_page
+
+    self.query_project_list = services.project.GetProjectsByName(
+        mr.cnxn, query_project_names).values()
+    self.query_project_ids = [
+        p.project_id for p in self.query_project_list]
+
+    self.me_user_id = me_user_id
+    # Store the frontend-supplied user identity in this request's auth info
+    # so that permission checks behave as if that user were logged in.
+    self.mr.auth.user_id = logged_in_user_id
+    if self.mr.auth.user_id:
+      self.mr.auth.effective_ids = services.usergroup.LookupMemberships(
+          mr.cnxn, self.mr.auth.user_id)
+      self.mr.auth.effective_ids.add(self.mr.auth.user_id)
+
+    # The following fields are filled in as the pipeline progresses.
+    # The value None means that we still need to compute that value.
+    self.result_iids = None  # Sorted issue IDs that match the query
+    self.search_limit_reached = False  # True if search results limit is hit.
+
+    # Projects that contain the result issues.
+    self.issue_projects = {p.project_id: p for p in self.query_project_list}
+
+    self._MakePromises()
+
+  def _MakePromises(self):
+    """Start the search query running in a Promise and save it for later."""
+    config_dict = self.services.config.GetProjectConfigs(
+        self.mr.cnxn, self.query_project_ids)
+    self.harmonized_config = tracker_bizobj.HarmonizeConfigs(
+        config_dict.values())
+
+    # Expand the canned query ID (from the drop-down menu) into query terms.
+    self.canned_query = savedqueries_helpers.SavedQueryIDToCond(
+        self.mr.cnxn, self.services.features, self.mr.can)
+
+    # Substitute the actual user ID for any "me" keywords in the queries.
+    self.canned_query = searchpipeline.ReplaceKeywordsWithUserID(
+        self.me_user_id, self.canned_query)
+    self.user_query = searchpipeline.ReplaceKeywordsWithUserID(
+        self.me_user_id, self.mr.query)
+    logging.debug('Searching query: %s %s', self.canned_query, self.user_query)
+
+    # Restrict results to the logical shard that this request targets.
+    slice_term = ('Issue.shard = %s', [self.mr.shard_id])
+
+    sd = sorting.ComputeSortDirectives(self.mr, self.harmonized_config)
+
+    self.result_iids_promise = framework_helpers.Promise(
+        _GetQueryResultIIDs, self.mr.cnxn,
+        self.services, self.canned_query, self.user_query,
+        self.query_project_ids, self.harmonized_config, sd,
+        slice_term, self.mr.shard_id, self.mr.invalidation_timestep)
+
+  def SearchForIIDs(self):
+    """Wait for the search Promises and store their results."""
+    with self.profiler.Phase('WaitOnPromises'):
+      self.result_iids, self.search_limit_reached = (
+          self.result_iids_promise.WaitAndGetValue())
+
+
+def SearchProjectCan(
+    cnxn, services, project_ids, query_ast, shard_id, harmonized_config,
+    left_joins=None, where=None, sort_directives=None, query_desc=''):
+  """Return a list of issue global IDs in the projects that satisfy the query.
+
+  Args:
+    cnxn: Regular database connection to the master DB.
+    services: interface to issue storage backends.
+    project_ids: list of int IDs of the project to search
+    query_ast: A QueryAST PB with conjunctions and conditions.
+    shard_id: limit search to the specified shard ID int.
+    harmonized_config: harmonized config for all projects being searched.
+    left_joins: SQL LEFT JOIN clauses that are needed in addition to
+        anything generated from the query_ast.
+    where: SQL WHERE clauses that are needed in addition to
+        anything generated from the query_ast.
+    sort_directives: list of strings specifying the columns to sort on.
+    query_desc: descriptive string for debugging.
+
+  Returns:
+    (issue_ids, capped) where issue_ids is a list of issue issue_ids that
+    satisfy the query, and capped is True if the number of results were
+    capped due to an implementation limit.
+  """
+  logging.info('searching projects %r for AST %r', project_ids, query_ast)
+  start_time = time.time()
+  left_joins = left_joins or []
+  where = where or []
+  if project_ids:
+    cond_str = 'Issue.project_id IN (%s)' % sql.PlaceHolders(project_ids)
+    where.append((cond_str, project_ids))
+
+  # Simplify the AST into conditions that can be translated to SQL.
+  query_ast = ast2ast.PreprocessAST(
+      cnxn, query_ast, project_ids, services, harmonized_config)
+  logging.info('simplified AST is %r', query_ast)
+  try:
+    query_left_joins, query_where = ast2select.BuildSQLQuery(query_ast)
+    left_joins.extend(query_left_joins)
+    where.extend(query_where)
+  except ast2select.NoPossibleResults as e:
+    # TODO(jrobbins): inform the user that their query was impossible.
+    logging.info('Impossible query %s.\n %r\n\n', e.message, query_ast)
+    return [], False
+  logging.info('translated to left_joins %r', left_joins)
+  logging.info('translated to where %r', where)
+
+  fts_capped = False
+  if query_ast.conjunctions:
+    # TODO(jrobbins): Handle "OR" in queries. For now, we just process the
+    # first conjunction.
+    assert len(query_ast.conjunctions) == 1
+    conj = query_ast.conjunctions[0]
+    full_text_iids, fts_capped = tracker_fulltext.SearchIssueFullText(
+        project_ids, conj, shard_id)
+    if full_text_iids is not None:
+      if not full_text_iids:
+        return [], False  # No match on free-text terms, so don't bother DB.
+      cond_str = 'Issue.id IN (%s)' % sql.PlaceHolders(full_text_iids)
+      where.append((cond_str, full_text_iids))
+
+  # Label and status definition rows are needed to build the sort clauses.
+  label_def_rows = []
+  status_def_rows = []
+  if sort_directives:
+    if project_ids:
+      for pid in project_ids:
+        label_def_rows.extend(services.config.GetLabelDefRows(cnxn, pid))
+        status_def_rows.extend(services.config.GetStatusDefRows(cnxn, pid))
+    else:
+      label_def_rows = services.config.GetLabelDefRowsAnyProject(cnxn)
+      status_def_rows = services.config.GetStatusDefRowsAnyProject(cnxn)
+
+  harmonized_labels = tracker_bizobj.HarmonizeLabelOrStatusRows(
+      label_def_rows)
+  harmonized_statuses = tracker_bizobj.HarmonizeLabelOrStatusRows(
+      status_def_rows)
+  harmonized_fields = harmonized_config.field_defs
+  sort_left_joins, order_by = ast2sort.BuildSortClauses(
+      sort_directives, harmonized_labels, harmonized_statuses,
+      harmonized_fields)
+  logging.info('translated to sort left_joins %r', sort_left_joins)
+  logging.info('translated to order_by %r', order_by)
+
+  issue_ids, db_capped = services.issue.RunIssueQuery(
+      cnxn, left_joins + sort_left_joins, where, order_by, shard_id=shard_id)
+  logging.warn('executed "%s" query %r for %d issues in %dms',
+               query_desc, query_ast, len(issue_ids),
+               int((time.time() - start_time) * 1000))
+  capped = fts_capped or db_capped
+  return issue_ids, capped
+
+def _FilterSpam(query_ast):
+  """Ensure that the query excludes spam issues unless it mentions spam.
+
+  Args:
+    query_ast: QueryAST PB for the parsed user query.
+
+  Returns:
+    The same QueryAST PB.  If the user's query did not reference the
+    'spam' field anywhere, an extra spam-equals condition is appended to
+    the first conjunction so that spam is hidden by default.
+  """
+  uses_spam = False
+  # TODO(jrobbins): Handle "OR" in queries. For now, we just modify the
+  # first conjunction.
+  conjunction = query_ast.conjunctions[0]
+  for condition in conjunction.conds:
+    for field in condition.field_defs:
+      if field.field_name == 'spam':
+        uses_spam = True
+
+  if not uses_spam:
+    # NOTE(review): the trailing [], [0] are presumably str_values and
+    # int_values, with 0 encoding spam == False -- confirm against
+    # ast_pb2.MakeCond.
+    query_ast.conjunctions[0].conds.append(
+        ast_pb2.MakeCond(
+            ast_pb2.QueryOp.EQ,
+            [tracker_pb2.FieldDef(
+                field_name='spam',
+                field_type=tracker_pb2.FieldTypes.BOOL_TYPE)
+             ],
+            [], [0]))
+
+  return query_ast
+
+def _GetQueryResultIIDs(
+ cnxn, services, canned_query, user_query,
+ query_project_ids, harmonized_config, sd, slice_term,
+ shard_id, invalidation_timestep):
+ """Do a search and return a list of matching issue IDs.
+
+ Args:
+ cnxn: connection to the database.
+ services: interface to issue storage backends.
+ canned_query: string part of the query from the drop-down menu.
+ user_query: string part of the query that the user typed in.
+ query_project_ids: list of project IDs to search.
+ harmonized_config: combined configs for all the queried projects.
+ sd: list of sort directives.
+ slice_term: additional query term to narrow results to a logical shard
+ within a physical shard.
+ shard_id: int number of the database shard to search.
+ invalidation_timestep: int timestep to use keep memcached items fresh.
+
+ Returns:
+ Tuple consisting of:
+ A list of issue issue_ids that match the user's query. An empty list, [],
+ is returned if no issues match the query.
+ Boolean that is set to True if the search results limit of this shard is
+ hit.
+ """
+ query_ast = _FilterSpam(query2ast.ParseUserQuery(
+ user_query, canned_query, query2ast.BUILTIN_ISSUE_FIELDS,
+ harmonized_config))
+
+ logging.info('query_project_ids is %r', query_project_ids)
+
+ is_fulltext_query = bool(
+ query_ast.conjunctions and
+ fulltext_helpers.BuildFTSQuery(
+ query_ast.conjunctions[0], tracker_fulltext.ISSUE_FULLTEXT_FIELDS))
+ expiration = framework_constants.MEMCACHE_EXPIRATION
+ if is_fulltext_query:
+ expiration = framework_constants.FULLTEXT_MEMCACHE_EXPIRATION
+
+ result_iids, search_limit_reached = SearchProjectCan(
+ cnxn, services, query_project_ids, query_ast, shard_id,
+ harmonized_config, sort_directives=sd, where=[slice_term],
+ query_desc='getting query issue IDs')
+ logging.info('Found %d result_iids', len(result_iids))
+
+ projects_str = ','.join(str(pid) for pid in sorted(query_project_ids))
+ projects_str = projects_str or 'all'
+ memcache_key = ';'.join([
+ projects_str, canned_query, user_query, ' '.join(sd), str(shard_id)])
+ memcache.set(memcache_key, (result_iids, invalidation_timestep),
+ time=expiration)
+ logging.info('set memcache key %r', memcache_key)
+
+ search_limit_memcache_key = ';'.join([
+ projects_str, canned_query, user_query, ' '.join(sd),
+ 'search_limit_reached', str(shard_id)])
+ memcache.set(search_limit_memcache_key,
+ (search_limit_reached, invalidation_timestep),
+ time=expiration)
+ logging.info('set search limit memcache key %r',
+ search_limit_memcache_key)
+
+ timestamps_for_projects = memcache.get_multi(
+ keys=(['%d;%d' % (pid, shard_id) for pid in query_project_ids] +
+ ['all:%d' % shard_id]))
+
+ if query_project_ids:
+ for pid in query_project_ids:
+ key = '%d;%d' % (pid, shard_id)
+ if key not in timestamps_for_projects:
+ memcache.set(
+ key, invalidation_timestep,
+ time=framework_constants.MEMCACHE_EXPIRATION)
+ else:
+ key = 'all;%d' % shard_id
+ if key not in timestamps_for_projects:
+ memcache.set(
+ key, invalidation_timestep,
+ time=framework_constants.MEMCACHE_EXPIRATION)
+
+ return result_iids, search_limit_reached
diff --git a/appengine/monorail/search/frontendsearchpipeline.py b/appengine/monorail/search/frontendsearchpipeline.py
new file mode 100644
index 0000000..9bc74b1
--- /dev/null
+++ b/appengine/monorail/search/frontendsearchpipeline.py
@@ -0,0 +1,935 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""The FrontendSearchPipeline class manages issue search and sorting.
+
+The frontend pipeline checks memcache for cached results in each shard. It
+then calls backend jobs to do any shards that had a cache miss. On cache hit,
+the cached results must be filtered by permissions, so the at-risk cache and
+backends are consulted. Next, the sharded results are combined into an overall
+list of IIDs. Then, that list is paginated and the issues on the current
+pagination page can be shown. Alternatively, this class can determine just the
+position the currently shown issue would occupy in the overall sorted list.
+"""
+
+import json
+
+import collections
+import logging
+import math
+import random
+import time
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import memcache
+from google.appengine.api import modules
+from google.appengine.api import urlfetch
+
+import settings
+from features import savedqueries_helpers
+from framework import framework_constants
+from framework import framework_helpers
+from framework import paginate
+from framework import sorting
+from framework import urls
+from search import query2ast
+from search import searchpipeline
+from services import fulltext_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
+# Fail-fast responses usually finish in less than 50ms. If we see a failure
+# in under that amount of time, we don't bother logging it.
+FAIL_FAST_LIMIT_SEC = 0.050
+
+# The choices help balance the cost of choosing samples vs. the cost of
+# selecting issues that are in a range bounded by neighboring samples.
+# Preferred chunk size parameters were determined by experimentation.
+MIN_SAMPLE_CHUNK_SIZE = int(
+ math.sqrt(tracker_constants.DEFAULT_RESULTS_PER_PAGE))
+MAX_SAMPLE_CHUNK_SIZE = int(math.sqrt(settings.search_limit_per_shard))
+PREFERRED_NUM_CHUNKS = 50
+
+
+class FrontendSearchPipeline(object):
+ """Manage the process of issue search, including backends and caching.
+
+ Even though the code is divided into several methods, the public
+ methods should be called in sequence, so the execution of the code
+ is pretty much in the order of the source code lines here.
+ """
+
+  def __init__(self, mr, services, prof, default_results_per_page):
+    """Set up the pipeline and check the user's query for syntax errors.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      services: connections to backend storage services.
+      prof: object whose Phase() context manager times pipeline stages.
+      default_results_per_page: int number of results per pagination page.
+    """
+    self.mr = mr
+    self.services = services
+    self.profiler = prof
+    self.default_results_per_page = default_results_per_page
+    self.grid_mode = (mr.mode == 'grid')
+    self.grid_limited = False
+    self.pagination = None
+    self.num_skipped_at_start = 0
+    self.total_count = 0
+
+    # Determine the set of projects that this query applies to.
+    self.query_project_names = set()
+    if mr.query_project_names:
+      self.query_project_names.update(mr.query_project_names)
+
+    projects = services.project.GetProjectsByName(
+        mr.cnxn, self.query_project_names).values()
+    self.query_project_ids = [p.project_id for p in projects]
+    if mr.project_name:
+      self.query_project_ids.append(mr.project_id)
+      self.query_project_names.add(mr.project_name)
+
+    config_dict = self.services.config.GetProjectConfigs(
+        mr.cnxn, self.query_project_ids)
+    self.harmonized_config = tracker_bizobj.HarmonizeConfigs(
+        config_dict.values())
+
+    # The following fields are filled in as the pipeline progresses.
+    # The value None means that we still need to compute that value.
+    self.users_by_id = {}
+    self.nonviewable_iids = {}  # {shard_id: set(iid)}
+    self.unfiltered_iids = {}  # {shard_id: [iid, ...]} needing perm checks.
+    self.filtered_iids = {}  # {shard_id: [iid, ...]} already perm checked.
+    self.search_limit_reached = {}  # {shard_id: [bool, ...]}.
+    self.counts = {}  # {shard_id: int count of filtered results}.
+    self.allowed_iids = []  # Matching iids that user is permitted to view.
+    self.allowed_results = None  # results that the user is permitted to view.
+    self.visible_results = None  # allowed_results on current pagination page.
+    self.error_responses = set()  # Shard IDs whose backend results failed.
+
+    # Projects that contain the result issues. This starts off as a dict of
+    # all the query projects, but it can grow based on the found issues in the
+    # case where the user is searching across the entire site.
+    self.issue_projects = {p.project_id: p for p in projects}
+
+    error_msg = query2ast.CheckSyntax(
+        self.mr.query, self.harmonized_config, warnings=self.mr.warnings)
+    if error_msg:
+      self.mr.errors.query = error_msg
+
+  def SearchForIIDs(self):
+    """Use backends to search each shard and store their results."""
+    with self.profiler.Phase('Checking cache and calling Backends'):
+      rpc_tuples = _StartBackendSearch(
+          self.mr, self.query_project_names, self.query_project_ids,
+          self.harmonized_config, self.unfiltered_iids,
+          self.search_limit_reached, self.nonviewable_iids,
+          self.error_responses, self.services)
+
+    with self.profiler.Phase('Waiting for Backends'):
+      try:
+        _FinishBackendSearch(rpc_tuples)
+      except Exception as e:
+        logging.exception(e)
+        raise
+
+    if self.error_responses:
+      logging.error('%r error responses. Incomplete search results.',
+                    self.error_responses)
+
+    with self.profiler.Phase('Filtering cached results'):
+      for shard_id in self.unfiltered_iids:
+        if shard_id not in self.nonviewable_iids:
+          # Without the set of nonviewable IIDs for this shard, we cannot
+          # safely show any of its results, so treat it as an error shard.
+          logging.error(
+              'Not displaying shard %r because of no nonviewable_iids', shard_id)
+          self.error_responses.add(shard_id)
+          filtered_shard_iids = []
+        else:
+          unfiltered_shard_iids = self.unfiltered_iids[shard_id]
+          nonviewable_shard_iids = self.nonviewable_iids[shard_id]
+          # TODO(jrobbins): avoid creating large temporary lists.
+          filtered_shard_iids = [iid for iid in unfiltered_shard_iids
+                                 if iid not in nonviewable_shard_iids]
+        if self.grid_mode:
+          # Grid view shows all results, so keep the whole shard list.
+          self.filtered_iids[shard_id] = filtered_shard_iids
+        else:
+          # List view only needs issues up to the end of the current page.
+          self.filtered_iids[shard_id] = filtered_shard_iids[
+              :self.mr.start + self.mr.num]
+        self.counts[shard_id] = len(filtered_shard_iids)
+
+    with self.profiler.Phase('Counting all filtered results'):
+      self.total_count = sum(self.counts.itervalues())
+
+  def MergeAndSortIssues(self):
+    """Merge and sort results from all shards into one combined list."""
+    with self.profiler.Phase('selecting issues to merge and sort'):
+      if not self.grid_mode:
+        # Trim each shard's list down to just the issues that could
+        # possibly appear on the current pagination page.
+        self._NarrowFilteredIIDs()
+      self.allowed_iids = []
+      for filtered_shard_iids in self.filtered_iids.itervalues():
+        self.allowed_iids.extend(filtered_shard_iids)
+
+      # The grid view is not paginated, so limit the results shown to avoid
+      # generating a HTML page that would be too large.
+      limit = settings.max_issues_in_grid
+      if self.grid_mode and len(self.allowed_iids) > limit:
+        self.grid_limited = True
+        self.allowed_iids = self.allowed_iids[:limit]
+
+    with self.profiler.Phase('getting allowed results'):
+      self.allowed_results = self.services.issue.GetIssues(
+          self.mr.cnxn, self.allowed_iids)
+
+    # Note: At this point, we have results that are only sorted within
+    # each backend's shard. We still need to sort the merged result.
+    self._LookupNeededUsers(self.allowed_results)
+    with self.profiler.Phase('merging and sorting issues'):
+      self.allowed_results = _SortIssues(
+          self.mr, self.allowed_results, self.harmonized_config,
+          self.users_by_id)
+
+  def _NarrowFilteredIIDs(self):
+    """Combine filtered shards into a range of IIDs for issues to sort.
+
+    The naive way is to concatenate shard_iids[:start + num] for all
+    shards then select [start:start + num]. We do better by sampling
+    issues and then determining which of those samples are known to
+    come before start or after start+num. We then trim off all those IIDs
+    and sort a smaller range of IIDs that might actually be displayed.
+    See the design doc at go/monorail-sorting.
+
+    This method modifies self.filtered_iids and self.num_skipped_at_start.
+    """
+    # Sample issues and skip those that are known to come before start.
+    # See the "Sorting in Monorail" design doc.
+
+    # If the result set is small, don't bother optimizing it.
+    orig_length = _TotalLength(self.filtered_iids)
+    if orig_length < self.mr.num * 4:
+      return
+
+    # 1. Get sample issues in each shard and sort them all together.
+    last = self.mr.start + self.mr.num
+    on_hand_samples = {}
+    needed_iids = []
+    for shard_id in self.filtered_iids:
+      self._AccumulateSampleIssues(
+          self.filtered_iids[shard_id], on_hand_samples, needed_iids)
+    retrieved_samples = self.services.issue.GetIssuesDict(
+        self.mr.cnxn, needed_iids)
+    sample_issues = on_hand_samples.values() + retrieved_samples.values()
+    self._LookupNeededUsers(sample_issues)
+    sample_issues = _SortIssues(
+        self.mr, sample_issues, self.harmonized_config, self.users_by_id)
+    sample_iids = [issue.issue_id for issue in sample_issues]
+
+    # 2. Trim off some IIDs that are sure to be positioned after last.
+    num_trimmed_end = _TrimEndShardedIIDs(self.filtered_iids, sample_iids, last)
+    logging.info('Trimmed %r issues from the end of shards', num_trimmed_end)
+
+    # 3. Trim off some IIDs that are sure to be positioned before start.
+    keep = _TotalLength(self.filtered_iids) - self.mr.start
+    # Reverse the sharded lists.
+    _ReverseShards(self.filtered_iids)
+    sample_iids.reverse()
+    self.num_skipped_at_start = _TrimEndShardedIIDs(
+        self.filtered_iids, sample_iids, keep)
+    logging.info('Trimmed %r issues from the start of shards',
+                 self.num_skipped_at_start)
+    # Reverse sharded lists again to get back into forward order.
+    _ReverseShards(self.filtered_iids)
+
+  def DetermineIssuePosition(self, issue):
+    """Calculate info needed to show the issue flipper.
+
+    Args:
+      issue: The issue currently being viewed.
+
+    Returns:
+      A 3-tuple (prev_iid, index, next_iid) where prev_iid is the
+      IID of the previous issue in the total ordering (or None),
+      index is the index that the current issue has in the total
+      ordering, and next_iid is the next issue (or None). If the current
+      issue is not in the list of results at all, returns None, None, None.
+    """
+    # 1. If the current issue is not in the results at all, then exit.
+    if not any(issue.issue_id in filtered_shard_iids
+               for filtered_shard_iids in self.filtered_iids.itervalues()):
+      return None, None, None
+
+    # 2. Choose and retrieve sample issues in each shard.
+    samples_by_shard = {}  # {shard_id: {iid: issue}}
+    needed_iids = []
+    for shard_id in self.filtered_iids:
+      samples_by_shard[shard_id] = {}
+      self._AccumulateSampleIssues(
+          self.filtered_iids[shard_id], samples_by_shard[shard_id], needed_iids)
+    retrieved_samples = self.services.issue.GetIssuesDict(
+        self.mr.cnxn, needed_iids)
+    for retrieved_iid, retrieved_issue in retrieved_samples.iteritems():
+      # An issue's logical shard is its IID modulo the number of shards.
+      shard_id = retrieved_iid % settings.num_logical_shards
+      samples_by_shard[shard_id][retrieved_iid] = retrieved_issue
+
+    # 3. Build up partial results for each shard.
+    preceeding_counts = {}  # dict {shard_id: num_issues_preceeding_current}
+    prev_candidates, next_candidates = [], []
+    for shard_id in self.filtered_iids:
+      prev_candidate, index_in_shard, next_candidate = (
+          self._DetermineIssuePositionInShard(
+              shard_id, issue, samples_by_shard[shard_id]))
+      preceeding_counts[shard_id] = index_in_shard
+      if prev_candidate:
+        prev_candidates.append(prev_candidate)
+      if next_candidate:
+        next_candidates.append(next_candidate)
+
+    # 4. Combine the results.
+    index = sum(preceeding_counts.itervalues())
+    prev_candidates = _SortIssues(
+        self.mr, prev_candidates, self.harmonized_config, self.users_by_id)
+    prev_iid = prev_candidates[-1].issue_id if prev_candidates else None
+    next_candidates = _SortIssues(
+        self.mr, next_candidates, self.harmonized_config, self.users_by_id)
+    next_iid = next_candidates[0].issue_id if next_candidates else None
+
+    return prev_iid, index, next_iid
+
+  def _DetermineIssuePositionInShard(self, shard_id, issue, sample_dict):
+    """Determine where the given issue would fit into results from a shard.
+
+    Args:
+      shard_id: int logical shard ID to examine.
+      issue: Issue PB to position within this shard's results.
+      sample_dict: dict {iid: issue} of sample issues from this shard that
+        are already in RAM.
+
+    Returns:
+      A 3-tuple (prev_candidate, index_in_shard, next_candidate), where the
+      candidates are Issue PBs (or None) that would sort immediately before
+      and after the given issue, and index_in_shard is the position that the
+      given issue would occupy in this shard's sorted results.
+    """
+    # See the design doc for details. Basically, it first surveys the results
+    # to bound a range where the given issue would belong, then it fetches the
+    # issues in that range and sorts them.
+
+    filtered_shard_iids = self.filtered_iids[shard_id]
+
+    # 1. Select a sample of issues, leveraging ones we have in RAM already.
+    issues_on_hand = sample_dict.values()
+    if issue.issue_id not in sample_dict:
+      issues_on_hand.append(issue)
+
+    self._LookupNeededUsers(issues_on_hand)
+    sorted_on_hand = _SortIssues(
+        self.mr, issues_on_hand, self.harmonized_config, self.users_by_id)
+    sorted_on_hand_iids = [soh.issue_id for soh in sorted_on_hand]
+    index_in_on_hand = sorted_on_hand_iids.index(issue.issue_id)
+
+    # 2. Bound the gap around where issue belongs.
+    if index_in_on_hand == 0:
+      fetch_start = 0
+    else:
+      prev_on_hand_iid = sorted_on_hand_iids[index_in_on_hand - 1]
+      fetch_start = filtered_shard_iids.index(prev_on_hand_iid) + 1
+
+    if index_in_on_hand == len(sorted_on_hand) - 1:
+      fetch_end = len(filtered_shard_iids)
+    else:
+      next_on_hand_iid = sorted_on_hand_iids[index_in_on_hand + 1]
+      fetch_end = filtered_shard_iids.index(next_on_hand_iid)
+
+    # 3. Retrieve all the issues in that gap to get an exact answer.
+    fetched_issues = self.services.issue.GetIssues(
+        self.mr.cnxn, filtered_shard_iids[fetch_start:fetch_end])
+    # The given issue itself may not appear in this shard's result list, so
+    # add it before sorting to find where it would land.
+    if issue.issue_id not in filtered_shard_iids[fetch_start:fetch_end]:
+      fetched_issues.append(issue)
+    self._LookupNeededUsers(fetched_issues)
+    sorted_fetched = _SortIssues(
+        self.mr, fetched_issues, self.harmonized_config, self.users_by_id)
+    sorted_fetched_iids = [sf.issue_id for sf in sorted_fetched]
+    index_in_fetched = sorted_fetched_iids.index(issue.issue_id)
+
+    # 4. Find the issues that come immediately before and after the place where
+    # the given issue would belong in this shard.
+    if index_in_fetched > 0:
+      prev_candidate = sorted_fetched[index_in_fetched - 1]
+    elif index_in_on_hand > 0:
+      prev_candidate = sorted_on_hand[index_in_on_hand - 1]
+    else:
+      prev_candidate = None
+
+    if index_in_fetched < len(sorted_fetched) - 1:
+      next_candidate = sorted_fetched[index_in_fetched + 1]
+    elif index_in_on_hand < len(sorted_on_hand) - 1:
+      next_candidate = sorted_on_hand[index_in_on_hand + 1]
+    else:
+      next_candidate = None
+
+    return prev_candidate, fetch_start + index_in_fetched, next_candidate
+
+  def _AccumulateSampleIssues(self, issue_ids, sample_dict, needed_iids):
+    """Select a scattering of issues from the list, leveraging RAM cache.
+
+    Args:
+      issue_ids: list of issue IDs from one shard's result list.
+      sample_dict: dict {iid: issue} filled in with the samples that were
+        already on hand in the RAM cache.
+      needed_iids: list to accumulate sample IIDs that the caller still
+        needs to retrieve from the DB.
+    """
+    # Aim for roughly PREFERRED_NUM_CHUNKS evenly spaced samples, with the
+    # chunk size clamped to [MIN_SAMPLE_CHUNK_SIZE, MAX_SAMPLE_CHUNK_SIZE].
+    chunk_size = max(MIN_SAMPLE_CHUNK_SIZE, min(MAX_SAMPLE_CHUNK_SIZE,
+        int(len(issue_ids) / PREFERRED_NUM_CHUNKS)))
+    # Note: range() starts at chunk_size, so no sample is taken from the
+    # first chunk of each shard.
+    for i in range(chunk_size, len(issue_ids), chunk_size):
+      issue = self.services.issue.GetAnyOnHandIssue(
+          issue_ids, start=i, end=min(i + chunk_size, len(issue_ids)))
+      if issue:
+        sample_dict[issue.issue_id] = issue
+      else:
+        needed_iids.append(issue_ids[i])
+
+  def _LookupNeededUsers(self, issues):
+    """Look up user info needed to sort issues, if any.
+
+    Args:
+      issues: list of Issue PBs whose participating users (owner, reporter,
+        cc) should be present in self.users_by_id after this call.
+    """
+    with self.profiler.Phase('lookup of owner, reporter, and cc'):
+      # Only fetch views for users we have not already looked up.
+      additional_user_views_by_id = (
+          tracker_helpers.MakeViewsForUsersInIssues(
+              self.mr.cnxn, issues, self.services.user,
+              omit_ids=self.users_by_id.keys()))
+      self.users_by_id.update(additional_user_views_by_id)
+
+ def Paginate(self):
+ """Fetch matching issues and paginate the search results.
+
+ These two actions are intertwined because we try to only
+ retrieve the Issues on the current pagination page.
+ """
+ if self.grid_mode:
+ # We don't paginate the grid view. But, pagination object shows counts.
+ self.pagination = paginate.ArtifactPagination(
+ self.mr, self.allowed_results, self.default_results_per_page,
+ total_count=self.total_count, list_page_url=urls.ISSUE_LIST)
+ # We limited the results, but still show the original total count.
+ self.visible_results = self.allowed_results
+
+ else:
+ # We already got the issues, just display a slice of the visible ones.
+ limit_reached = False
+ for shard_limit_reached in self.search_limit_reached.values():
+ limit_reached |= shard_limit_reached
+ self.pagination = paginate.ArtifactPagination(
+ self.mr, self.allowed_results, self.default_results_per_page,
+ total_count=self.total_count, list_page_url=urls.ISSUE_LIST,
+ limit_reached=limit_reached, skipped=self.num_skipped_at_start)
+ self.visible_results = self.pagination.visible_results
+
+ # If we were not forced to look up visible users already, do it now.
+ if self.grid_mode:
+ self._LookupNeededUsers(self.allowed_results)
+ else:
+ self._LookupNeededUsers(self.visible_results)
+
+ def __repr__(self):
+ """Return a string that shows the internal state of this pipeline."""
+ if self.allowed_iids:
+ shown_allowed_iids = self.allowed_iids[:200]
+ else:
+ shown_allowed_iids = self.allowed_iids
+
+ if self.allowed_results:
+ shown_allowed_results = self.allowed_results[:200]
+ else:
+ shown_allowed_results = self.allowed_results
+
+ parts = [
+ 'allowed_iids: %r' % shown_allowed_iids,
+ 'allowed_results: %r' % shown_allowed_results,
+ 'len(visible_results): %r' % (
+ self.visible_results and len(self.visible_results))]
+ return '%s(%s)' % (self.__class__.__name__, '\n'.join(parts))
+
+
+def _MakeBackendCallback(func, *args):
+ return lambda: func(*args)
+
+
+def _StartBackendSearch(
+    mr, query_project_names, query_project_ids, harmonized_config,
+    unfiltered_iids_dict, search_limit_reached_dict,
+    nonviewable_iids, error_responses, services):
+  """Request that our backends search and return a list of matching issue IDs.
+
+  Args:
+    mr: commonly used info parsed from the request, including query and
+        sort spec.
+    query_project_names: set of project names to search.
+    query_project_ids: list of project IDs to search.
+    harmonized_config: combined ProjectIssueConfig for all projects being
+        searched.
+    unfiltered_iids_dict: dict {shard_id: [iid, ...]} of unfiltered search
+        results to accumulate into. They need to be later filtered by
+        permissions and merged into filtered_iids_dict.
+    search_limit_reached_dict: dict{shard_id: [bool, ...]} to determine if
+        the search limit of any shard was reached.
+    nonviewable_iids: dict {shard_id: set(iid)} of restricted issues in the
+        projects being searched that the signed in user cannot view.
+    error_responses: set of shard IDs whose backend calls ultimately failed;
+        filled in by the response callbacks.
+    services: connections to backends.
+
+  Returns:
+    A list of rpc_tuples that can be passed to _FinishBackendSearch to wait
+    on any remaining backend calls.
+
+  SIDE-EFFECTS:
+    Any data found in memcache is immediately put into unfiltered_iids_dict.
+    As the backends finish their work, _HandleBackendSearchResponse will update
+    unfiltered_iids_dict for those shards.
+  """
+  rpc_tuples = []
+  needed_shard_ids = set(range(settings.num_logical_shards))
+
+  # 1. Get whatever we can from memcache. Cache hits are only kept if they are
+  # not already expired. Each kept cache hit will have unfiltered IIDs, so we
+  # need to get the at-risk IIDs to efficiently filter them based on perms.
+  project_shard_timestamps = _GetProjectTimestamps(
+      query_project_ids, needed_shard_ids)
+
+  if mr.use_cached_searches:
+    cached_unfiltered_iids_dict, cached_search_limit_reached_dict = (
+        _GetCachedSearchResults(
+            mr, query_project_ids, needed_shard_ids, harmonized_config,
+            project_shard_timestamps, services))
+    unfiltered_iids_dict.update(cached_unfiltered_iids_dict)
+    search_limit_reached_dict.update(cached_search_limit_reached_dict)
+    for cache_hit_shard_id in unfiltered_iids_dict:
+      needed_shard_ids.remove(cache_hit_shard_id)
+
+  # Nonviewable IIDs are needed for every shard, even cache hits, because
+  # cached search results are unfiltered and still need permission filtering.
+  _GetNonviewableIIDs(
+      query_project_ids, mr.auth.user_id, set(range(settings.num_logical_shards)),
+      rpc_tuples, nonviewable_iids, project_shard_timestamps,
+      services.cache_manager.processed_invalidations_up_to,
+      mr.use_cached_searches)
+
+  # 2. Hit backends for any shards that are still needed. When these results
+  # come back, they are also put into unfiltered_iids_dict.
+  for shard_id in needed_shard_ids:
+    rpc = _StartBackendSearchCall(
+        mr, query_project_names, shard_id,
+        services.cache_manager.processed_invalidations_up_to)
+    rpc_tuple = (time.time(), shard_id, rpc)
+    rpc.callback = _MakeBackendCallback(
+        _HandleBackendSearchResponse, mr, query_project_names, rpc_tuple,
+        rpc_tuples, settings.backend_retries, unfiltered_iids_dict,
+        search_limit_reached_dict,
+        services.cache_manager.processed_invalidations_up_to,
+        error_responses)
+    rpc_tuples.append(rpc_tuple)
+
+  return rpc_tuples
+
+
+def _FinishBackendSearch(rpc_tuples):
+  """Wait for all backend calls to complete, including any retries.
+
+  Args:
+    rpc_tuples: list of (start_time, shard_id, rpc) tuples. The retry
+      callbacks may append new tuples to this list while we wait, so we
+      loop until the list is completely drained.
+  """
+  while rpc_tuples:
+    active_rpcs = [rpc for (_time, _shard_id, rpc) in rpc_tuples]
+    # Wait for any active RPC to complete. Its callback function will
+    # automatically be called.
+    finished_rpc = apiproxy_stub_map.UserRPC.wait_any(active_rpcs)
+    # Figure out which rpc_tuple finished and remove it from our list.
+    for rpc_tuple in rpc_tuples:
+      _time, _shard_id, rpc = rpc_tuple
+      if rpc == finished_rpc:
+        rpc_tuples.remove(rpc_tuple)
+        break
+    else:
+      raise ValueError('We somehow finished an RPC that is not in rpc_tuples')
+
+
+def _GetProjectTimestamps(query_project_ids, needed_shard_ids):
+ """Get a dict of modified_ts values for all specified project-shards."""
+ project_shard_timestamps = {}
+ if query_project_ids:
+ keys = []
+ for pid in query_project_ids:
+ for sid in needed_shard_ids:
+ keys.append('%d;%d' % (pid, sid))
+ else:
+ keys = [('all;%d' % sid) for sid in needed_shard_ids]
+
+ timestamps_for_project = memcache.get_multi(keys=keys)
+ for key, timestamp in timestamps_for_project.iteritems():
+ pid_str, sid_str = key.split(';')
+ if pid_str == 'all':
+ project_shard_timestamps['all', int(sid_str)] = timestamp
+ else:
+ project_shard_timestamps[int(pid_str), int(sid_str)] = timestamp
+
+ return project_shard_timestamps
+
+
+def _GetNonviewableIIDs(
+ query_project_ids, logged_in_user_id, needed_shard_ids, rpc_tuples,
+ nonviewable_iids, project_shard_timestamps, invalidation_timestep,
+ use_cached_searches):
+ """Build a set of at-risk IIDs, and accumulate RPCs to get uncached ones."""
+ if query_project_ids:
+ keys = []
+ for pid in query_project_ids:
+ for sid in needed_shard_ids:
+ keys.append('%d;%d;%d' % (pid, logged_in_user_id, sid))
+ else:
+ keys = [('all;%d;%d' % sid)
+ for (logged_in_user_id, sid) in needed_shard_ids]
+
+ if use_cached_searches:
+ cached_dict = memcache.get_multi(keys, key_prefix='nonviewable:')
+ else:
+ cached_dict = {}
+
+ for sid in needed_shard_ids:
+ if query_project_ids:
+ for pid in query_project_ids:
+ _AccumulateNonviewableIIDs(
+ pid, logged_in_user_id, sid, cached_dict, nonviewable_iids,
+ project_shard_timestamps, rpc_tuples, invalidation_timestep)
+ else:
+ _AccumulateNonviewableIIDs(
+ None, logged_in_user_id, sid, cached_dict, nonviewable_iids,
+ project_shard_timestamps, rpc_tuples, invalidation_timestep)
+
+
+def _AccumulateNonviewableIIDs(
+    pid, logged_in_user_id, sid, cached_dict, nonviewable_iids,
+    project_shard_timestamps, rpc_tuples, invalidation_timestep):
+  """Use one of the retrieved cache entries or call a backend if needed.
+
+  Args:
+    pid: int project ID, or None for a site-wide (all projects) search.
+    logged_in_user_id: int user ID of the signed in user, or 0.
+    sid: int logical shard ID.
+    cached_dict: dict {key: (issue_ids, timestamp)} of memcache entries.
+    nonviewable_iids: dict {shard_id: set(iid)} being filled in.
+    project_shard_timestamps: dict {(project_id, shard_id): timestamp} of
+      invalidation times.
+    rpc_tuples: list to accumulate (time, shard_id, rpc) for backend calls.
+    invalidation_timestep: int timestamp used for cache coherence.
+  """
+  if pid is None:
+    key = 'all;%d;%d' % (logged_in_user_id, sid)
+  else:
+    key = '%d;%d;%d' % (pid, logged_in_user_id, sid)
+
+  if key in cached_dict:
+    issue_ids, cached_ts = cached_dict.get(key)
+    modified_ts = project_shard_timestamps.get((pid, sid))
+    # Only trust the cache entry if the project-shard has not been
+    # invalidated since the entry was written.
+    if modified_ts is None or modified_ts > cached_ts:
+      logging.info('nonviewable too stale on (project %r, shard %r)',
+                   pid, sid)
+    else:
+      logging.info('adding %d nonviewable issue_ids', len(issue_ids))
+      nonviewable_iids[sid] = set(issue_ids)
+
+  if sid not in nonviewable_iids:
+    # No fresh cache entry covered this shard, so ask a backend.
+    logging.info('nonviewable for %r not found', key)
+    logging.info('starting backend call for nonviewable iids %r', key)
+    rpc = _StartBackendNonviewableCall(
+        pid, logged_in_user_id, sid, invalidation_timestep)
+    rpc_tuple = (time.time(), sid, rpc)
+    rpc.callback = _MakeBackendCallback(
+        _HandleBackendNonviewableResponse, pid, logged_in_user_id, sid,
+        rpc_tuple, rpc_tuples, settings.backend_retries, nonviewable_iids,
+        invalidation_timestep)
+    rpc_tuples.append(rpc_tuple)
+
+
+def _GetCachedSearchResults(
+    mr, query_project_ids, needed_shard_ids, harmonized_config,
+    project_shard_timestamps, services):
+  """Return a dict of cached search results that are not already stale.
+
+  If it were not for cross-project search, we would simply cache when we do a
+  search and then invalidate when an issue is modified. But, with
+  cross-project search we don't know all the memcache entries that would
+  need to be invalidated. So, instead, we write the search result cache
+  entries and then an initial modified_ts value for each project if it was
+  not already there. And, when we update an issue we write a new
+  modified_ts entry, which implicitly invalidates all search result
+  cache entries that were written earlier because they are now stale. When
+  reading from the cache, we ignore any query project with modified_ts
+  after its search result cache timestamp, because it is stale.
+
+  Args:
+    mr: common information parsed from the request.
+    query_project_ids: list of project ID numbers for all projects being
+      searched.
+    needed_shard_ids: set of shard IDs that need to be checked.
+    harmonized_config: ProjectIssueConfig with combined information for all
+      projects involved in this search.
+    project_shard_timestamps: a dict {(project_id, shard_id): timestamp, ...}
+      that tells when each shard was last invalidated.
+    services: connections to backends.
+
+  Returns:
+    Tuple consisting of:
+      A dictionary {shard_id: [issue_id, ...], ...} of unfiltered search result
+      issue IDs. Only shard_ids found in memcache will be in that dictionary.
+      The result issue IDs must be permission checked before they can be
+      considered to be part of the user's result set.
+      A dictionary {shard_id: bool, ...}. The boolean is set to True if
+      the search results limit of the shard is hit.
+  """
+  projects_str = ','.join(str(pid) for pid in sorted(query_project_ids))
+  projects_str = projects_str or 'all'
+  canned_query = savedqueries_helpers.SavedQueryIDToCond(
+      mr.cnxn, services.features, mr.can)
+  logging.info('canned query is %r', canned_query)
+  canned_query = searchpipeline.ReplaceKeywordsWithUserID(
+      mr.me_user_id, canned_query)
+  user_query = searchpipeline.ReplaceKeywordsWithUserID(
+      mr.me_user_id, mr.query)
+
+  # The cache key prefix includes projects, canned query, user query, and
+  # sort spec, so any change to those results in a cache miss.
+  sd = sorting.ComputeSortDirectives(mr, harmonized_config)
+  memcache_prefix = ';'.join([projects_str, canned_query, user_query,
+                              ' '.join(sd), ''])
+  cached_dict = memcache.get_multi(
+      [str(sid) for sid in needed_shard_ids], key_prefix=memcache_prefix)
+  search_limit_memcache_prefix = ';'.join(
+      [projects_str, canned_query, user_query,
+       ' '.join(sd), 'search_limit_reached', ''])
+  cached_search_limit_reached_dict = memcache.get_multi(
+      [str(sid) for sid in needed_shard_ids],
+      key_prefix=search_limit_memcache_prefix)
+
+  unfiltered_dict = {}
+  search_limit_reached_dict = {}
+  for shard_id in needed_shard_ids:
+    if str(shard_id) not in cached_dict:
+      logging.info('memcache miss on shard %r', shard_id)
+      continue
+
+    cached_iids, cached_ts = cached_dict[str(shard_id)]
+    if cached_search_limit_reached_dict.get(str(shard_id)):
+      search_limit_reached, _ = cached_search_limit_reached_dict[str(shard_id)]
+    else:
+      search_limit_reached = False
+
+    # An entry is stale if any queried project (or the site-wide 'all'
+    # entry) was modified after the entry was written.
+    stale = False
+    if query_project_ids:
+      for project_id in query_project_ids:
+        modified_ts = project_shard_timestamps.get((project_id, shard_id))
+        if modified_ts is None or modified_ts > cached_ts:
+          stale = True
+          logging.info('memcache too stale on shard %r because of %r',
+                       shard_id, project_id)
+          break
+    else:
+      modified_ts = project_shard_timestamps.get(('all', shard_id))
+      if modified_ts is None or modified_ts > cached_ts:
+        stale = True
+        logging.info('memcache too stale on shard %r because of all',
+                     shard_id)
+
+    if not stale:
+      logging.info('memcache hit on %r', shard_id)
+      unfiltered_dict[shard_id] = cached_iids
+      search_limit_reached_dict[shard_id] = search_limit_reached
+
+  return unfiltered_dict, search_limit_reached_dict
+
+
+def _MakeBackendRequestHeaders(failfast):
+ headers = {
+ # This is needed to allow frontends to talk to backends without going
+ # through a login screen on googleplex.com.
+ # http://wiki/Main/PrometheusInternal#Internal_Applications_and_APIs
+ 'X-URLFetch-Service-Id': 'GOOGLEPLEX',
+ }
+ if failfast:
+ headers['X-AppEngine-FailFast'] = 'Yes'
+ return headers
+
+
+def _StartBackendSearchCall(
+    mr, query_project_names, shard_id, invalidation_timestep,
+    deadline=None, failfast=True):
+  """Ask a backend to query one shard of the database.
+
+  Args:
+    mr: commonly used info parsed from the request, including query and
+      sort spec.
+    query_project_names: set of project names to search.
+    shard_id: int logical shard to search.
+    invalidation_timestep: int timestamp used for cache coherence.
+    deadline: optional RPC deadline in seconds; defaults to
+      settings.backend_deadline.
+    failfast: True to send the X-AppEngine-FailFast header; retries pass
+      False after a couple of attempts.
+
+  Returns:
+    A urlfetch RPC object; the caller attaches a callback and waits on it.
+  """
+  backend_host = modules.get_hostname(module='besearch')
+  url = 'http://%s%s' % (backend_host, framework_helpers.FormatURL(
+      mr, urls.BACKEND_SEARCH,
+      skip_filtering=True,  # TODO(jrobbins): remove after next release.
+      projects=','.join(query_project_names),
+      start=0, num=mr.start + mr.num,
+      logged_in_user_id=mr.auth.user_id or 0,
+      me_user_id=mr.me_user_id, shard_id=shard_id,
+      invalidation_timestep=invalidation_timestep))
+  logging.info('\n\nCalling backend: %s', url)
+  rpc = urlfetch.create_rpc(
+      deadline=deadline or settings.backend_deadline)
+  headers = _MakeBackendRequestHeaders(failfast)
+  # follow_redirects=False is needed to avoid a login screen on googleplex.
+  urlfetch.make_fetch_call(rpc, url, follow_redirects=False, headers=headers)
+  return rpc
+
+
+def _StartBackendNonviewableCall(
+    project_id, logged_in_user_id, shard_id, invalidation_timestep,
+    deadline=None, failfast=True):
+  """Ask a backend for the nonviewable issue IDs in one shard.
+
+  Args:
+    project_id: int project ID, or None for a site-wide search.
+    logged_in_user_id: int user ID of the signed in user, or 0.
+    shard_id: int logical shard to check.
+    invalidation_timestep: int timestamp used for cache coherence.
+    deadline: optional RPC deadline in seconds; defaults to
+      settings.backend_deadline.
+    failfast: True to send the X-AppEngine-FailFast header; retries pass
+      False after a couple of attempts.
+
+  Returns:
+    A urlfetch RPC object; the caller attaches a callback and waits on it.
+  """
+  backend_host = modules.get_hostname(module='besearch')
+  url = 'http://%s%s' % (backend_host, framework_helpers.FormatURL(
+      None, urls.BACKEND_NONVIEWABLE,
+      project_id=project_id or '',
+      logged_in_user_id=logged_in_user_id or '',
+      shard_id=shard_id,
+      invalidation_timestep=invalidation_timestep))
+  logging.info('Calling backend nonviewable: %s', url)
+  rpc = urlfetch.create_rpc(deadline=deadline or settings.backend_deadline)
+  headers = _MakeBackendRequestHeaders(failfast)
+  # follow_redirects=False is needed to avoid a login screen on googleplex.
+  urlfetch.make_fetch_call(rpc, url, follow_redirects=False, headers=headers)
+  return rpc
+
+
+def _HandleBackendSearchResponse(
+    mr, query_project_names, rpc_tuple, rpc_tuples, remaining_retries,
+    unfiltered_iids, search_limit_reached, invalidation_timestep,
+    error_responses):
+  """Process one backend response and retry if there was an error.
+
+  Args:
+    mr: commonly used info parsed from the request.
+    query_project_names: set of project names to search.
+    rpc_tuple: (start_time, shard_id, rpc) for the call that completed.
+    rpc_tuples: list of pending calls; any retry is appended here so that
+      _FinishBackendSearch waits for it too.
+    remaining_retries: int number of retries still allowed for this shard.
+    unfiltered_iids: dict {shard_id: [iid, ...]} to fill in.
+    search_limit_reached: dict {shard_id: bool} to fill in.
+    invalidation_timestep: int timestamp used for cache coherence.
+    error_responses: set of shard IDs that permanently failed.
+  """
+  start_time, shard_id, rpc = rpc_tuple
+  duration_sec = time.time() - start_time
+
+  try:
+    response = rpc.get_result()
+    logging.info('call to backend took %d sec', duration_sec)
+    # Note that response.content has "})]'\n" prepended to it.
+    json_content = response.content[5:]
+    logging.info('got json text: %r length %r',
+                 json_content[:framework_constants.LOGGING_MAX_LENGTH],
+                 len(json_content))
+    json_data = json.loads(json_content)
+    unfiltered_iids[shard_id] = json_data['unfiltered_iids']
+    search_limit_reached[shard_id] = json_data['search_limit_reached']
+
+  except Exception as e:
+    # Broad catch is deliberate: any failure (RPC error, bad JSON) falls
+    # through to the retry logic below.
+    if duration_sec > FAIL_FAST_LIMIT_SEC:  # Don't log fail-fast exceptions.
+      logging.exception(e)
+    if not remaining_retries:
+      logging.error('backend search retries exceeded')
+      error_responses.add(shard_id)
+      return  # Used all retries, so give up.
+
+    if duration_sec >= settings.backend_deadline:
+      logging.error('backend search on %r took too long', shard_id)
+      error_responses.add(shard_id)
+      return  # That backend shard is overloaded, so give up.
+
+    logging.error('backend call for shard %r failed, retrying', shard_id)
+    retry_rpc = _StartBackendSearchCall(
+        mr, query_project_names, shard_id, invalidation_timestep,
+        failfast=remaining_retries > 2)
+    retry_rpc_tuple = (time.time(), shard_id, retry_rpc)
+    retry_rpc.callback = _MakeBackendCallback(
+        _HandleBackendSearchResponse, mr, query_project_names,
+        retry_rpc_tuple, rpc_tuples, remaining_retries - 1, unfiltered_iids,
+        search_limit_reached, invalidation_timestep, error_responses)
+    rpc_tuples.append(retry_rpc_tuple)
+
+
+def _HandleBackendNonviewableResponse(
+    project_id, logged_in_user_id, shard_id, rpc_tuple, rpc_tuples,
+    remaining_retries, nonviewable_iids, invalidation_timestep):
+  """Process one backend response and retry if there was an error.
+
+  Args:
+    project_id: int project ID, or None for a site-wide search.
+    logged_in_user_id: int user ID of the signed in user, or 0.
+    shard_id: int logical shard that was queried.
+    rpc_tuple: (start_time, shard_id, rpc) for the call that completed.
+    rpc_tuples: list of pending calls; any retry is appended here so that
+      _FinishBackendSearch waits for it too.
+    remaining_retries: int number of retries still allowed for this shard.
+    nonviewable_iids: dict {shard_id: set(iid)} to fill in.
+    invalidation_timestep: int timestamp used for cache coherence.
+
+  NOTE(review): unlike _HandleBackendSearchResponse, a permanent failure
+  here is not recorded anywhere — the shard's nonviewable_iids entry is
+  simply left missing; confirm that is intended.
+  """
+  start_time, shard_id, rpc = rpc_tuple
+  duration_sec = time.time() - start_time
+
+  try:
+    response = rpc.get_result()
+    logging.info('call to backend nonviewable took %d sec', duration_sec)
+    # Note that response.content has "})]'\n" prepended to it.
+    json_content = response.content[5:]
+    logging.info('got json text: %r length %r',
+                 json_content[:framework_constants.LOGGING_MAX_LENGTH],
+                 len(json_content))
+    json_data = json.loads(json_content)
+    nonviewable_iids[shard_id] = set(json_data['nonviewable'])
+
+  except Exception as e:
+    # Broad catch is deliberate: any failure falls through to retry logic.
+    if duration_sec > FAIL_FAST_LIMIT_SEC:  # Don't log fail-fast exceptions.
+      logging.exception(e)
+
+    if not remaining_retries:
+      logging.warn('Used all retries, so give up on shard %r', shard_id)
+      return
+
+    if duration_sec >= settings.backend_deadline:
+      logging.error('nonviewable call on %r took too long', shard_id)
+      return  # That backend shard is overloaded, so give up.
+
+    logging.error(
+        'backend nonviewable call for shard %r;%r;%r failed, retrying',
+        project_id, logged_in_user_id, shard_id)
+    retry_rpc = _StartBackendNonviewableCall(
+        project_id, logged_in_user_id, shard_id, invalidation_timestep,
+        failfast=remaining_retries > 2)
+    retry_rpc_tuple = (time.time(), shard_id, retry_rpc)
+    retry_rpc.callback = _MakeBackendCallback(
+        _HandleBackendNonviewableResponse, project_id, logged_in_user_id,
+        shard_id, retry_rpc_tuple, rpc_tuples, remaining_retries - 1,
+        nonviewable_iids, invalidation_timestep)
+    rpc_tuples.append(retry_rpc_tuple)
+
+
+def _TotalLength(sharded_iids):
+ """Return the total length of all issue_iids lists."""
+ return sum(len(issue_iids) for issue_iids in sharded_iids.itervalues())
+
+
+def _ReverseShards(sharded_iids):
+ """Reverse each issue_iids list in place."""
+ for shard_id in sharded_iids:
+ sharded_iids[shard_id].reverse()
+
+
+def _TrimEndShardedIIDs(sharded_iids, sample_iids, num_needed):
+  """Trim the IIDs to keep at least num_needed items.
+
+  Args:
+    sharded_iids: dict {shard_id: issue_id_list} for search results.  This is
+      modified in place to remove some trailing issue IDs.
+    sample_iids: list of IIDs from a sorted list of sample issues.
+    num_needed: int minimum total number of items to keep.  Some IIDs that are
+      known to belong in positions > num_needed will be trimmed off.
+
+  Returns:
+    The total number of IIDs removed from the IID lists.
+  """
+  # 1. Get (sample_iid, position_in_shard) for each sample.
+  sample_positions = _CalcSamplePositions(sharded_iids, sample_iids)
+
+  # 2. Walk through the samples, computing a combined lower bound at each
+  # step until we know that we have passed at least num_needed IIDs.
+  # Because sample_iids come from a globally sorted list, all IIDs before a
+  # sample within its shard sort before that sample overall.
+  lower_bound_per_shard = {}
+  excess_samples = []
+  for i in range(len(sample_positions)):
+    sample_iid, pos = sample_positions[i]
+    shard_id = sample_iid % settings.num_logical_shards
+    lower_bound_per_shard[shard_id] = pos
+    overall_lower_bound = sum(lower_bound_per_shard.itervalues())
+    if overall_lower_bound >= num_needed:
+      excess_samples = sample_positions[i + 1:]
+      break
+  else:
+    return 0  # We went through all samples and never reached num_needed.
+
+  # 3. Truncate each shard at the first excess sample in that shard.
+  already_trimmed = set()
+  num_trimmed = 0
+  for sample_iid, pos in excess_samples:
+    shard_id = sample_iid % settings.num_logical_shards
+    if shard_id not in already_trimmed:
+      num_trimmed += len(sharded_iids[shard_id]) - pos
+      sharded_iids[shard_id] = sharded_iids[shard_id][:pos]
+      already_trimmed.add(shard_id)
+
+  return num_trimmed
+
+
+# TODO(jrobbins): Convert this to a python generator.
+def _CalcSamplePositions(sharded_iids, sample_iids):
+  """Return [(sample_iid, position_in_shard), ...] for each sample.
+
+  Args:
+    sharded_iids: dict {shard_id: issue_id_list} of search results.
+    sample_iids: list of sample issue IDs.
+
+  Returns:
+    A list of (sample_iid, position_in_shard) pairs.  Samples that are not
+    found in their shard's list (e.g., already trimmed off) are skipped.
+  """
+  # We keep track of how far index() has scanned in each shard to avoid
+  # starting over at position 0 when looking for the next sample in
+  # the same shard.
+  scan_positions = collections.defaultdict(lambda: 0)
+  sample_positions = []
+  for sample_iid in sample_iids:
+    shard_id = sample_iid % settings.num_logical_shards
+    try:
+      # index() raises ValueError when the sample is not present at or
+      # after the scan position.
+      pos = sharded_iids.get(shard_id, []).index(
+          sample_iid, scan_positions[shard_id])
+      scan_positions[shard_id] = pos
+      sample_positions.append((sample_iid, pos))
+    except ValueError:
+      pass
+
+  return sample_positions
+
+
+def _SortIssues(mr, issues, config, users_by_id):
+ """Sort the found issues based on the request and config values.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+ issues: A list of issues to be sorted.
+ config: A ProjectIssueConfig that could impact sort order.
+ users_by_id: dictionary {user_id: user_view,...} for all users who
+ participate in any issue in the entire list.
+
+ Returns:
+ A sorted list of issues, based on parameters from mr and config.
+ """
+ issues = sorting.SortArtifacts(
+ mr, issues, config, tracker_helpers.SORTABLE_FIELDS,
+ username_cols=tracker_constants.USERNAME_COLS, users_by_id=users_by_id)
+ return issues
diff --git a/appengine/monorail/search/query2ast.py b/appengine/monorail/search/query2ast.py
new file mode 100644
index 0000000..6c7b617
--- /dev/null
+++ b/appengine/monorail/search/query2ast.py
@@ -0,0 +1,425 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that parse user search queries into AST conditions."""
+
+import collections
+import datetime
+import logging
+import re
+from services import fulltext_helpers
+import time
+
+from proto import ast_pb2
+from proto import tracker_pb2
+
+
+# TODO(jrobbins): Consider re-implementing this whole file by using a
+# BNF syntax specification and a parser generator or library.
+
+# encodings
+UTF8 = 'utf-8'
+
+# Field types and operators
+BOOL = tracker_pb2.FieldTypes.BOOL_TYPE
+DATE = tracker_pb2.FieldTypes.DATE_TYPE
+NUM = tracker_pb2.FieldTypes.INT_TYPE
+TXT = tracker_pb2.FieldTypes.STR_TYPE
+
+EQ = ast_pb2.QueryOp.EQ
+NE = ast_pb2.QueryOp.NE
+LT = ast_pb2.QueryOp.LT
+GT = ast_pb2.QueryOp.GT
+LE = ast_pb2.QueryOp.LE
+GE = ast_pb2.QueryOp.GE
+TEXT_HAS = ast_pb2.QueryOp.TEXT_HAS
+NOT_TEXT_HAS = ast_pb2.QueryOp.NOT_TEXT_HAS
+TEXT_MATCHES = ast_pb2.QueryOp.TEXT_MATCHES
+NOT_TEXT_MATCHES = ast_pb2.QueryOp.NOT_TEXT_MATCHES
+IS_DEFINED = ast_pb2.QueryOp.IS_DEFINED
+IS_NOT_DEFINED = ast_pb2.QueryOp.IS_NOT_DEFINED
+KEY_HAS = ast_pb2.QueryOp.KEY_HAS
+
+# Mapping from user query comparison operators to our internal representation.
+OPS = {
+    ':': TEXT_HAS,
+    '=': EQ,
+    '!=': NE,
+    '<': LT,
+    '>': GT,
+    '<=': LE,
+    '>=': GE,
+}
+
+# This is a partial regular expression that matches all of our comparison
+# operators, such as =, !=, >, and <. Longer ones listed first so that the
+# shorter ones don't cause premature matches.
+OPS_PATTERN = '|'.join(
+    map(re.escape, sorted(OPS.keys(), key=lambda op: -len(op))))
+
+# This RE extracts search terms from a subquery string.
+TERM_RE = re.compile(
+    r'(-?"[^"]+")|'  # E.g., ["division by zero"]
+    r'(\S+(%s)[^ "]+)|'  # E.g., [stars>10]
+    r'(\w+(%s)"[^"]+")|'  # E.g., [summary:"memory leak"]
+    r'(-?[._\*\w][-._\*\w]+)'  # E.g., [-workaround]
+    % (OPS_PATTERN, OPS_PATTERN), flags=re.UNICODE)
+
+# This RE is used to further decompose a comparison term into prefix, op, and
+# value. E.g., [stars>10] or [is:open] or [summary:"memory leak"]. The prefix
+# can include a leading "-" to negate the comparison.
+OP_RE = re.compile(
+    r'^(?P<prefix>[-_\w]*?)'
+    r'(?P<op>%s)'
+    r'(?P<value>([-,.@>/_\*\w]+|"[^"]+"))$' %
+    OPS_PATTERN,
+    flags=re.UNICODE)
+
+
+# Predefined issue fields passed to the query parser.
+# All names are lowercase, matching the lowercased user query terms.
+_ISSUE_FIELDS_LIST = [
+    (ast_pb2.ANY_FIELD, TXT),
+    ('attachment', TXT),  # attachment file names
+    ('attachments', NUM),  # number of attachment files
+    ('blocked', BOOL),
+    ('blockedon', TXT),
+    ('blockedon_id', NUM),
+    ('blocking', TXT),
+    ('blocking_id', NUM),
+    ('cc', TXT),
+    ('cc_id', NUM),
+    ('comment', TXT),
+    ('commentby', TXT),
+    ('commentby_id', NUM),
+    ('component', TXT),
+    ('component_id', NUM),
+    ('description', TXT),
+    ('id', NUM),
+    ('label', TXT),
+    ('label_id', NUM),
+    ('mergedinto', NUM),
+    ('open', BOOL),
+    ('owner', TXT),
+    ('owner_id', NUM),
+    ('project', TXT),
+    ('reporter', TXT),
+    ('reporter_id', NUM),
+    ('spam', BOOL),
+    ('stars', NUM),
+    ('starredby', TXT),
+    ('starredby_id', NUM),
+    ('status', TXT),
+    ('status_id', NUM),
+    ('summary', TXT),
+    ]
+
+_DATE_FIELDS = (
+    'closed',
+    'modified',
+    'opened',
+)
+
+# Add all _DATE_FIELDS to _ISSUE_FIELDS_LIST.
+_ISSUE_FIELDS_LIST.extend((date_field, DATE) for date_field in _DATE_FIELDS)
+
+# Maps old codesite-style date-field suffixes to comparison operators.
+_DATE_FIELD_SUFFIX_TO_OP = {
+    '-after': '>',
+    '-before': '<',
+}
+
+# Dict {field_name: FieldDef} for every built-in field listed above.
+BUILTIN_ISSUE_FIELDS = {
+    f_name: tracker_pb2.FieldDef(field_name=f_name, field_type=f_type)
+    for f_name, f_type in _ISSUE_FIELDS_LIST}
+
+
+def ParseUserQuery(
+    query, scope, builtin_fields, harmonized_config, warnings=None):
+  """Parse a user query and return a set of structure terms.
+
+  Args:
+    query: string with user's query. E.g., 'Priority=High'.
+    scope: string search terms that define the scope in which the
+        query should be executed. They are expressed in the same
+        user query language. E.g., adding the canned query.
+    builtin_fields: dict {field_name: FieldDef(field_name, type)}
+        mapping field names to FieldDef objects for built-in fields.
+    harmonized_config: config for all the projects being searched.
+        @@@ custom field name is not unique in cross project search.
+         - custom_fields = {field_name: [fd, ...]}
+         - query build needs to OR each possible interpretation
+         - could be label in one project and field in another project.
+        @@@ what about searching across all projects?
+    warnings: optional list to accumulate warning messages; it is mutated
+        in place so the caller can display them to the user.
+
+  Returns:
+    A QueryAST with conjunctions (usually just one), where each has a list of
+    Condition PBs with op, fields, str_values and int_values. E.g., the query
+    [priority=high leak OR stars>100] over open issues would return
+    QueryAST(
+      Conjunction(Condition(EQ, [open_fd], [], [1]),
+                  Condition(EQ, [label_fd], ['priority-high'], []),
+                  Condition(TEXT_HAS, any_field_fd, ['leak'], [])),
+      Conjunction(Condition(EQ, [open_fd], [], [1]),
+                  Condition(GT, [stars_fd], [], [100])))
+
+  Raises:
+    InvalidQueryError: If a problem was detected in the user's query.
+  """
+  if warnings is None:
+    warnings = []
+  if _HasParens(query):
+    warnings.append('Parentheses are ignored in user queries.')
+
+  if _HasParens(scope):
+    warnings.append('Parentheses are ignored in saved queries.')
+
+  # Convert the overall query into one or more OR'd subqueries.
+  subqueries = query.split(' OR ')
+
+  if len(subqueries) > 1:  # TODO(jrobbins): temporary limitation just for now.
+    raise InvalidQueryError('Logical operator OR is not supported yet.')
+
+  # Make a dictionary of all fields: built-in + custom in each project.
+  # Custom field keys are lowercased to match the lowercased query terms.
+  combined_fields = collections.defaultdict(
+      list, {field_name: [field_def]
+             for field_name, field_def in builtin_fields.iteritems()})
+  for fd in harmonized_config.field_defs:
+    if fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
+      # Only do non-enum fields because enums are stored as labels
+      combined_fields[fd.field_name.lower()].append(fd)
+
+  conjunctions = [
+      _ParseConjunction(sq, scope, combined_fields, warnings)
+      for sq in subqueries]
+  return ast_pb2.QueryAST(conjunctions=conjunctions)
+
+
+def _HasParens(s):
+ """Return True if there are parentheses in the given string."""
+ # Monorail cannot handle parenthesized expressions, so we tell the
+ # user that immediately. Even inside a quoted string, the GAE search
+ # engine will not handle parens in TEXT-type fields.
+ return '(' in s or ')' in s
+
+
def _ParseConjunction(subquery, scope, fields, warnings):
  """Parse part of a user query into a Conjunction PB."""
  logging.info('Parsing sub query: %r in scope %r', subquery, scope)
  # The saved-query scope is prepended so its conditions apply too, and the
  # whole thing is lowercased before being split into condition strings.
  combined = ('%s %s' % (scope, subquery)).lower()
  parsed_conds = [
      _ParseCond(cond_str, fields, warnings)
      for cond_str in _ExtractConds(combined)]
  return ast_pb2.Conjunction(conds=parsed_conds)
+
+
def _ParseCond(cond_str, fields, warnings):
  """Parse one user query condition string into a Condition PB.

  Args:
    cond_str: one condition string, e.g., 'priority=high' or '-has:owner'.
    fields: dict {name_lower: [FieldDef, ...]} of known fields.
    warnings: list to which warning strings may be appended.

  Returns:
    A Condition PB.
  """
  op_match = OP_RE.match(cond_str)
  # Do not treat as key:value search terms if any of the special prefixes match.
  special_prefixes_match = any(
      cond_str.startswith(p) for p in fulltext_helpers.NON_OP_PREFIXES)
  if op_match and not special_prefixes_match:
    prefix = op_match.group('prefix')
    op = op_match.group('op')
    val = op_match.group('value')
    # Special case handling to continue to support old date query terms from
    # codesite. See monorail:151 for more details.
    if prefix.startswith(_DATE_FIELDS):
      for date_suffix in _DATE_FIELD_SUFFIX_TO_OP:
        if prefix.endswith(date_suffix):
          # BUGFIX: str.rstrip() treats its argument as a *set of characters*
          # to strip, not an exact suffix, so field names whose trailing
          # characters happen to appear in the suffix could be over-stripped.
          # Slice off the suffix exactly instead.
          prefix = prefix[:-len(date_suffix)]
          op = _DATE_FIELD_SUFFIX_TO_OP[date_suffix]
    return _ParseStructuredTerm(prefix, op, val, fields)

  # Treat the cond as a full-text search term, which might be negated.
  if cond_str.startswith('-'):
    op = NOT_TEXT_HAS
    cond_str = cond_str[1:]
  else:
    op = TEXT_HAS

  # Flag a potential user misunderstanding.
  if cond_str.lower() in ('and', 'or', 'not'):
    warnings.append(
        'The only supported boolean operator is OR (all capitals).')

  return ast_pb2.MakeCond(
      op, [BUILTIN_ISSUE_FIELDS[ast_pb2.ANY_FIELD]], [cond_str], [])
+
+
def _ParseStructuredTerm(prefix, op_str, value, fields):
  """Parse one user structured query term into an internal representation.

  Args:
    prefix: The query operator, usually a field name. E.g., summary. It can
      also be special operators like "is" to test boolean fields.
    op_str: the comparison operator. Usually ":" or "=", but can be any OPS.
    value: the value to compare against, e.g., term to find in that field.
    fields: dict {name_lower: [FieldDef, ...]} for built-in and custom fields.

  Returns:
    A Condition PB.
  """
  unquoted_value = value.strip('"')
  # Quick-OR is a convenient way to write one condition that matches any one of
  # multiple values, like set membership. E.g., [Priority=High,Critical].
  quick_or_vals = [v.strip() for v in unquoted_value.split(',')]

  # Boolean terms like [is:open] / [-is:blocked]: the single int value is
  # 1 for the positive form and 0 for the negated form.
  if ((prefix == 'is' or prefix == '-is') and
      unquoted_value in ['open', 'blocked', 'spam']):
    return ast_pb2.MakeCond(
        EQ, fields[unquoted_value], [], [int(prefix == 'is')])

  op = OPS[op_str]
  negate = False
  if prefix.startswith('-'):
    negate = True
    # Only EQ and TEXT_HAS have direct negated counterparts here; other
    # operators (e.g., relational ones) pass through unchanged.
    if op == EQ:
      op = NE
    elif op == TEXT_HAS:
      op = NOT_TEXT_HAS
    prefix = prefix[1:]

  # Search entries with or without any value in the specified field.
  if prefix == 'has':
    op = IS_NOT_DEFINED if negate else IS_DEFINED
    if unquoted_value in fields:  # Look for that field with any value.
      return ast_pb2.MakeCond(op, fields[unquoted_value], [], [])
    else:  # Look for any label with that prefix.
      return ast_pb2.MakeCond(op, fields['label'], [unquoted_value], [])

  if prefix in fields:  # search built-in and custom fields. E.g., summary.
    # Note: if first matching field is date-type, we assume they all are.
    # TODO(jrobbins): better handling for rare case where multiple projects
    # define the same custom field name, and one is a date and another is not.
    first_field = fields[prefix][0]
    if first_field.field_type == DATE:
      date_value = _ParseDateValue(unquoted_value)
      return ast_pb2.MakeCond(op, fields[prefix], [], [date_value])
    else:
      # Also collect any values that parse as integers, so that numeric
      # fields can be matched; non-numeric values are silently skipped here.
      quick_or_ints = []
      for qov in quick_or_vals:
        try:
          quick_or_ints.append(int(qov))
        except ValueError:
          pass
      return ast_pb2.MakeCond(op, fields[prefix], quick_or_vals, quick_or_ints)

  # Since it is not a field, treat it as labels, E.g., Priority.
  quick_or_labels = ['%s-%s' % (prefix, v) for v in quick_or_vals]
  # Convert substring match to key-value match if user typed 'foo:bar'.
  if op == TEXT_HAS:
    op = KEY_HAS
  return ast_pb2.MakeCond(op, fields['label'], quick_or_labels, [])
+
+
def _ExtractConds(query):
  """Parse a query string into a list of individual condition strings.

  Args:
    query: UTF-8 encoded search query string.

  Returns:
    A list of query condition strings.
  """
  # Find all distinct terms matched by the term regex.  Each match tuple has
  # one group populated depending on which alternative matched.
  term_matches = TERM_RE.findall(query)

  terms = []
  for (phrase, word_label, _op1, phrase_label, _op2,
       word) in term_matches:
    # Case 1: Quoted phrases, e.g., ["hot dog"].
    if phrase_label or phrase:
      terms.append(phrase_label or phrase)

    # Case 2: Comparisons
    elif word_label:
      special_prefixes_match = any(
          word_label.startswith(p) for p in fulltext_helpers.NON_OP_PREFIXES)
      match = OP_RE.match(word_label)
      if match:
        label = match.group('prefix')
        op = match.group('op')
        word = match.group('value')
        if special_prefixes_match:
          # Do not include quotes if any of the special prefixes match because
          # we do not want to treat the label as key:value search terms.
          terms.append('%s%s%s' % (label, op, word))
        else:
          terms.append('%s%s"%s"' % (label, op, word))
      else:
        # It looked like a key:value cond, but not exactly, so treat it
        # as fulltext search. It is probably a tiny bit of source code.
        terms.append('"%s"' % word_label)

    # Case 3: Simple words.
    elif word:
      terms.append(word)

    else:
      # Use logging.warning: logging.warn is a deprecated alias.
      logging.warning('Unexpected search term in %r', query)

  return terms
+
+
+def _ParseDateValue(val):
+ """Convert the user-entered date into timestamp."""
+ # Support timestamp value such as opened>1437671476
+ try:
+ return int(val)
+ except ValueError:
+ pass
+
+ # TODO(jrobbins): future: take timezones into account.
+ # TODO(jrobbins): for now, explain to users that "today" is
+ # actually now: the current time, not 12:01am in their timezone.
+ # In fact, it is not very useful because everything in the system
+ # happened before the current time.
+ if val == 'today':
+ return _CalculatePastDate(0)
+ elif val.startswith('today-'):
+ try:
+ days_ago = int(val.split('-')[1])
+ except ValueError:
+ days_ago = 0
+ return _CalculatePastDate(days_ago)
+
+ if '/' in val:
+ year, month, day = [int(x) for x in val.split('/')]
+ elif '-' in val:
+ year, month, day = [int(x) for x in val.split('-')]
+
+ try:
+ return int(time.mktime(datetime.datetime(year, month, day).timetuple()))
+ except ValueError:
+ raise InvalidQueryError('Could not parse date')
+
+
+def _CalculatePastDate(days_ago, now=None):
+ """Calculates the timestamp N days ago from now."""
+ if now is None:
+ now = int(time.time())
+ ts = now - days_ago * 24 * 60 * 60
+ return ts
+
+
def CheckSyntax(query, harmonized_config, warnings=None):
  """Parse the given query and report the first error or None."""
  error_message = None
  try:
    # An empty scope is used: only the query itself is being validated.
    ParseUserQuery(
        query, '', BUILTIN_ISSUE_FIELDS, harmonized_config, warnings=warnings)
  except InvalidQueryError as e:
    error_message = e.message
  return error_message
+
+
class Error(Exception):
  """Root of the exception hierarchy for this package."""
+
+
class InvalidQueryError(Error):
  """Raised when a problem is detected in a user's query."""
diff --git a/appengine/monorail/search/searchpipeline.py b/appengine/monorail/search/searchpipeline.py
new file mode 100644
index 0000000..6968224
--- /dev/null
+++ b/appengine/monorail/search/searchpipeline.py
@@ -0,0 +1,104 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used in issue search and sorting."""
+
+import logging
+import re
+
+from features import savedqueries_helpers
+from search import query2ast
+from services import tracker_fulltext
+from services import fulltext_helpers
+from tracker import tracker_helpers
+
+
# Users can use "is:starred" in queries to limit
# search results to issues starred by that user.
# The lookarounds prevent matching when the term is embedded in a larger
# token (e.g., preceded/followed by -, @, ., or :).
IS_STARRED_RE = re.compile(r'\b(?![-@.:])is:starred\b(?![-@.:])', re.I)

# Users can use "me" in other fields to refer to the logged in user name,
# e.g., owner:me or cc=me.  The lookbehind requires a preceding = or :.
ME_RE = re.compile(r'(?<=[=:])me\b(?![-@.:=])', re.I)
+
+
def _AccumulateIssueProjectsAndConfigs(
    cnxn, project_dict, config_dict, services, issues):
  """Fetch any projects and configs that we need but haven't already loaded.

  Args:
    cnxn: connection to SQL database.
    project_dict: dict {project_id: project} of projects that we have
        already retrieved.
    config_dict: dict {project_id: config} of configs that we have
        already retrieved.
    services: connections to backends.
    issues: list of issues, which may be parts of different projects.

  Returns:
    Nothing, but project_dict will be updated to include all the projects that
    contain the given issues, and config_dict will be updated to include all
    the corresponding configs.
  """
  # Only fetch projects (and their configs) not already in project_dict.
  new_ids = {issue.project_id for issue in issues}
  new_ids.difference_update(project_dict.iterkeys())
  new_projects_dict = services.project.GetProjects(cnxn, new_ids)
  project_dict.update(new_projects_dict)
  new_configs_dict = services.config.GetProjectConfigs(cnxn, new_ids)
  config_dict.update(new_configs_dict)
+
+
def ReplaceKeywordsWithUserID(me_user_id, query):
  """Substitutes User ID in terms such as is:starred and me.

  This substitution happens before parsing because the query string is used
  as a key for cached search results in memcache: a search by one user for
  owner:me must not retrieve results stored for some other user.

  Args:
    me_user_id: Null when no user is logged in, or user ID of the logged in
        user when doing an interactive search, or the viewed user ID when
        viewing someone else's dashboard, or the subscribing user's ID when
        evaluating subscriptions.
    query: The query string.

  Returns:
    A string with "me" and "is:starred" removed or replaced by new terms that
    use the numeric user ID provided.
  """
  if me_user_id:
    query = IS_STARRED_RE.sub('starredby:%d' % me_user_id, query)
    query = ME_RE.sub(str(me_user_id), query)
  else:
    # Anonymous request: these terms cannot match any user, so drop them.
    for pattern in (IS_STARRED_RE, ME_RE):
      query = pattern.sub('', query)

  return query
+
+
def ParseQuery(mr, config, services):
  """Parse the user's query.

  Args:
    mr: commonly used info parsed from the request.
    config: The ProjectConfig PB for the project.
    services: connections to backends.

  Returns:
    A pair (ast, is_fulltext) with the parsed query abstract syntax tree
    and a boolean that is True if the query included any fulltext terms.
  """
  # The user's selected canned query provides an implicit scope.
  canned_query = savedqueries_helpers.SavedQueryIDToCond(
      mr.cnxn, services.features, mr.can)
  query_ast = query2ast.ParseUserQuery(
      mr.query, canned_query, query2ast.BUILTIN_ISSUE_FIELDS, config)

  is_fulltext_query = False
  if query_ast.conjunctions:
    fts_query = fulltext_helpers.BuildFTSQuery(
        query_ast.conjunctions[0], tracker_fulltext.ISSUE_FULLTEXT_FIELDS)
    is_fulltext_query = bool(fts_query)

  return query_ast, is_fulltext_query
+
diff --git a/appengine/monorail/search/test/__init__.py b/appengine/monorail/search/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/search/test/__init__.py
diff --git a/appengine/monorail/search/test/ast2ast_test.py b/appengine/monorail/search/test/ast2ast_test.py
new file mode 100644
index 0000000..0a9f9d4
--- /dev/null
+++ b/appengine/monorail/search/test/ast2ast_test.py
@@ -0,0 +1,554 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2ast module."""
+
+import unittest
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import ast2ast
+from search import query2ast
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
# Convenient aliases for field definitions used repeatedly in these tests.
BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
OWNER_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['owner']
OWNER_ID_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['owner_id']
+
+
+class AST2ASTTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 101, 789, 'UI', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 102, 789, 'UI>Search', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 201, 789, 'DB', 'doc', False, [], [], 0, 0))
+ self.config.component_defs.append(
+ tracker_bizobj.MakeComponentDef(
+ 301, 789, 'Search', 'doc', False, [], [], 0, 0))
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService())
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testPreprocessAST_EmptyAST(self):
+ ast = ast_pb2.QueryAST() # No conjunctions in it.
+ new_ast = ast2ast.PreprocessAST(
+ self.cnxn, ast, [789], self.services, self.config)
+ self.assertEqual(ast, new_ast)
+
+ def testPreprocessAST_Normal(self):
+ open_field = BUILTIN_ISSUE_FIELDS['open']
+ label_field = BUILTIN_ISSUE_FIELDS['label']
+ label_id_field = BUILTIN_ISSUE_FIELDS['label_id']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+ conds = [
+ ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [open_field], [], [1]),
+ ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [label_field], ['Hot'], [])]
+
+ ast = ast_pb2.QueryAST()
+ ast.conjunctions.append(ast_pb2.Conjunction(conds=conds))
+ new_ast = ast2ast.PreprocessAST(
+ self.cnxn, ast, [789], self.services, self.config)
+ self.assertEqual(2, len(new_ast.conjunctions[0].conds))
+ new_cond_1, new_cond_2 = new_ast.conjunctions[0].conds
+ self.assertEqual(ast_pb2.QueryOp.NE, new_cond_1.op)
+ self.assertEqual([status_id_field], new_cond_1.field_defs)
+ self.assertEqual([7, 8, 9], new_cond_1.int_values)
+ self.assertEqual([], new_cond_1.str_values)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond_2.op)
+ self.assertEqual([label_id_field], new_cond_2.field_defs)
+ self.assertEqual([0], new_cond_2.int_values)
+ self.assertEqual([], new_cond_2.str_values)
+
+ def testPreprocessIsOpenCond(self):
+ open_field = BUILTIN_ISSUE_FIELDS['open']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+
+ # is:open -> status_id!=closed_status_ids
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [open_field], [], [1])
+ new_cond = ast2ast._PreprocessIsOpenCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.NE, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([7, 8, 9], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ # -is:open -> status_id=closed_status_ids
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [open_field], [], [0])
+ new_cond = ast2ast._PreprocessIsOpenCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([7, 8, 9], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithSingleProjectID(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ blockedon_id_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['1'], [101]), # One existing issue.
+ (['Project1:1'], [101]), # One existing issue with project prefix.
+ (['1', '2'], [101, 102]), # Two existing issues.
+ (['3'], [])): # Non-existant issue.
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1], self.services, None)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blockedon_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithMultipleProjectIDs(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ blockedon_id_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['Project1:1'], [101]),
+ (['Project1:1', 'Project2:2'], [101, 102])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1, 2], self.services, None)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blockedon_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockedOnCond_WithMultipleProjectIDs_NoPrefix(self):
+ blockedon_field = BUILTIN_ISSUE_FIELDS['blockedon']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids in (['1'], ['1', '2'], ['3']):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blockedon_field], local_ids, [])
+ try:
+ ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [1, 2], self.services, None)
+ self.fail('Expected an Exception.')
+ except ValueError, e:
+ self.assertEquals(
+ 'Searching for issues accross multiple/all projects without '
+ 'project prefixes is ambiguous and is currently not supported.',
+ e.message)
+
+ def testPreprocessIsBlockedCond(self):
+ blocked_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ for int_val, expected_op in ((1, ast_pb2.QueryOp.IS_DEFINED),
+ (0, ast_pb2.QueryOp.IS_NOT_DEFINED)):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [blocked_field], [], [int_val])
+ new_cond = ast2ast._PreprocessIsBlockedCond(
+ self.cnxn, cond, [100], self.services, None)
+ self.assertEqual(expected_op, new_cond.op)
+ self.assertEqual([blocked_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessHasBlockedOnCond(self):
+ blocked_field = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ for op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
+ cond = ast_pb2.MakeCond(op, [blocked_field], [], [])
+ new_cond = ast2ast._PreprocessBlockedOnCond(
+ self.cnxn, cond, [100], self.services, None)
+ self.assertEqual(op, op)
+ self.assertEqual([blocked_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessHasBlockingCond(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ for op in (ast_pb2.QueryOp.IS_DEFINED, ast_pb2.QueryOp.IS_NOT_DEFINED):
+ cond = ast_pb2.MakeCond(op, [blocking_field], [], [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [100], self.services, None)
+ self.assertEqual(op, op)
+ self.assertEqual([blocking_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithSingleProjectID(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=1, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['1'], [101]), # One existing issue.
+ (['Project1:1'], [101]), # One existing issue with project prefix.
+ (['1', '2'], [101, 102]), # Two existing issues.
+ (['3'], [])): # Non-existant issue.
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1], self.services, None)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithMultipleProjectIDs(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ blocking_id_field = BUILTIN_ISSUE_FIELDS['blocking_id']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids, expected in (
+ (['Project1:1'], [101]),
+ (['Project1:1', 'Project2:2'], [101, 102])):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ new_cond = ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1, 2], self.services, None)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([blocking_id_field], new_cond.field_defs)
+ self.assertEqual(expected, new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessBlockingCond_WithMultipleProjectIDs_NoPrefix(self):
+ blocking_field = BUILTIN_ISSUE_FIELDS['blocking']
+ self.services.project.TestAddProject('Project1', project_id=1)
+ self.services.project.TestAddProject('Project2', project_id=2)
+ issue1 = fake.MakeTestIssue(
+ project_id=1, local_id=1, summary='sum', status='new', owner_id=2,
+ issue_id=101)
+ issue2 = fake.MakeTestIssue(
+ project_id=2, local_id=2, summary='sum', status='new', owner_id=2,
+ issue_id=102)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+
+ for local_ids in (['1'], ['1', '2'], ['3']):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [blocking_field], local_ids, [])
+ try:
+ ast2ast._PreprocessBlockingCond(
+ self.cnxn, cond, [1, 2], self.services, None)
+ self.fail('Expected an Exception.')
+ except ValueError, e:
+ self.assertEquals(
+ 'Searching for issues accross multiple/all projects without '
+ 'project prefixes is ambiguous and is currently not supported.',
+ e.message)
+
+ def testPreprocessStatusCond(self):
+ status_field = BUILTIN_ISSUE_FIELDS['status']
+ status_id_field = BUILTIN_ISSUE_FIELDS['status_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [status_field], [], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [status_field], ['New', 'Assigned'], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([status_id_field], new_cond.field_defs)
+ self.assertEqual([0, 1], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [status_field], [], [])
+ new_cond = ast2ast._PreprocessStatusCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual([], new_cond.int_values)
+
+ def testPrefixRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Priority', 'Severity'], [])
+ regex = ast2ast._MakePrefixRegex(cond)
+ self.assertRegexpMatches('Priority-1', regex)
+ self.assertRegexpMatches('Severity-3', regex)
+ self.assertNotRegexpMatches('My-Priority', regex)
+
+ def testKeyValueRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Feature', 'Type-Security'], [])
+ regex = ast2ast._MakeKeyValueRegex(cond)
+ self.assertRegexpMatches('Type-Feature', regex)
+ self.assertRegexpMatches('Type-Bug-Security', regex)
+ self.assertNotRegexpMatches('Type-Bug', regex)
+ self.assertNotRegexpMatches('Security-Feature', regex)
+
+ def testKeyValueRegex_multipleKeys(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Bug', 'Security-Bug'], [])
+ with self.assertRaises(ValueError):
+ ast2ast._MakeKeyValueRegex(cond)
+
+ def testWordBoundryRegex(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['Type-Bug'], [])
+ regex = ast2ast._MakeKeyValueRegex(cond)
+ self.assertRegexpMatches('Type-Bug-Security', regex)
+ self.assertNotRegexpMatches('Type-BugSecurity', regex)
+
+ def testPreprocessLabelCond(self):
+ label_field = BUILTIN_ISSUE_FIELDS['label']
+ label_id_field = BUILTIN_ISSUE_FIELDS['label_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [label_field], ['Priority'], [])
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([1, 2, 3], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [label_field],
+ ['Priority-Low', 'Priority-High'], [])
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([0, 1], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.KEY_HAS, [label_field],
+ ['Priority-Low', 'Priority-High'], [])
+ new_cond = ast2ast._PreprocessLabelCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([label_id_field], new_cond.field_defs)
+ self.assertEqual([1, 2, 3], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessComponentCond_QuickOR(self):
+ component_field = BUILTIN_ISSUE_FIELDS['component']
+ component_id_field = BUILTIN_ISSUE_FIELDS['component_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [component_field], ['UI', 'DB'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102, 201], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['UI', 'DB'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102, 201], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], [], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual([], new_cond.int_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['unknown@example.com'],
+ [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual([], new_cond.int_values)
+
+ def testPreprocessComponentCond_RootedAndNonRooted(self):
+ component_field = BUILTIN_ISSUE_FIELDS['component']
+ component_id_field = BUILTIN_ISSUE_FIELDS['component_id']
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [component_field], ['UI'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101, 102], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [component_field], ['UI'], [])
+ new_cond = ast2ast._PreprocessComponentCond(
+ self.cnxn, cond, [789], self.services, self.config)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([component_id_field], new_cond.field_defs)
+ self.assertEqual([101], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessExactUsers_IsDefined(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.IS_DEFINED, [OWNER_FIELD], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD])
+ self.assertEqual(ast_pb2.QueryOp.IS_DEFINED, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessExactUsers_UserFound(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD])
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([111L], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessExactUsers_UserSpecifiedByID(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['123'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD])
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual([OWNER_ID_FIELD], new_cond.field_defs)
+ self.assertEqual([123L], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ def testPreprocessExactUsers_NonEquality(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.GE, [OWNER_ID_FIELD], ['111'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD])
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessExactUsers_UserNotFound(self):
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [OWNER_FIELD], ['unknown@example.com'], [])
+ new_cond = ast2ast._PreprocessExactUsers(
+ self.cnxn, cond, self.services.user, [OWNER_ID_FIELD])
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessCustomCond_User(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='TPM',
+ field_type=tracker_pb2.FieldTypes.USER_TYPE)
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['a@example.com'], [])
+ new_cond = ast2ast._PreprocessCustomCond(self.cnxn, cond, self.services)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual(cond.field_defs, new_cond.field_defs)
+ self.assertEqual([111L], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['111'], [])
+ new_cond = ast2ast._PreprocessCustomCond(self.cnxn, cond, self.services)
+ self.assertEqual(ast_pb2.QueryOp.EQ, new_cond.op)
+ self.assertEqual(cond.field_defs, new_cond.field_defs)
+ self.assertEqual([111L], new_cond.int_values)
+ self.assertEqual([], new_cond.str_values)
+
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['unknown@example.com'], [])
+ new_cond = ast2ast._PreprocessCustomCond(self.cnxn, cond, self.services)
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessCustomCond_NonUser(self):
+ fd = tracker_pb2.FieldDef(
+ field_id=1, field_name='TPM',
+ field_type=tracker_pb2.FieldTypes.INT_TYPE)
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['foo'], [123])
+ new_cond = ast2ast._PreprocessCustomCond(self.cnxn, cond, self.services)
+ self.assertEqual(cond, new_cond)
+
+ fd.field_type = tracker_pb2.FieldTypes.STR_TYPE
+ new_cond = ast2ast._PreprocessCustomCond(self.cnxn, cond, self.services)
+ self.assertEqual(cond, new_cond)
+
+ def testPreprocessCond_NoChange(self):
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.TEXT_HAS, [ANY_FIELD], ['foo'], [])
+ self.assertEqual(
+ cond, ast2ast._PreprocessCond(self.cnxn, cond, [], None, None))
+
+ def testTextOpToIntOp(self):
+ self.assertEqual(ast_pb2.QueryOp.EQ,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.TEXT_HAS))
+ self.assertEqual(ast_pb2.QueryOp.EQ,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.KEY_HAS))
+ self.assertEqual(ast_pb2.QueryOp.NE,
+ ast2ast._TextOpToIntOp(ast_pb2.QueryOp.NOT_TEXT_HAS))
+
+ for enum_name, _enum_id in ast_pb2.QueryOp.to_dict().iteritems():
+ no_change_op = ast_pb2.QueryOp(enum_name)
+ if no_change_op not in (
+ ast_pb2.QueryOp.TEXT_HAS,
+ ast_pb2.QueryOp.NOT_TEXT_HAS,
+ ast_pb2.QueryOp.KEY_HAS):
+ self.assertEqual(no_change_op,
+ ast2ast._TextOpToIntOp(no_change_op))
+
+
if __name__ == '__main__':
  # Allow running this test module directly, e.g., `python ast2ast_test.py`.
  unittest.main()
diff --git a/appengine/monorail/search/test/ast2select_test.py b/appengine/monorail/search/test/ast2select_test.py
new file mode 100644
index 0000000..efef48d
--- /dev/null
+++ b/appengine/monorail/search/test/ast2select_test.py
@@ -0,0 +1,446 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2select module."""
+
+import unittest
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import ast2select
+from search import query2ast
+from tracker import tracker_bizobj
+
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+
+
+class AST2SelectTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ def testBuildSQLQuery_EmptyAST(self):
+ ast = ast_pb2.QueryAST(conjunctions=[ast_pb2.Conjunction()]) # No conds
+ left_joins, where = ast2select.BuildSQLQuery(ast)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], where)
+
+ def testBuildSQLQuery_Normal(self):
+ owner_field = BUILTIN_ISSUE_FIELDS['owner']
+ reporter_id_field = BUILTIN_ISSUE_FIELDS['reporter_id']
+ conds = [
+ ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [owner_field], ['example.com'], []),
+ ast_pb2.MakeCond(
+ ast_pb2.QueryOp.EQ, [reporter_id_field], [], [111L])]
+ ast = ast_pb2.QueryAST(conjunctions=[ast_pb2.Conjunction(conds=conds)])
+ left_joins, where = ast2select.BuildSQLQuery(ast)
+ self.assertEqual(
+ [('User AS Cond0 ON (Issue.owner_id = Cond0.user_id '
+ 'OR Issue.derived_owner_id = Cond0.user_id)', [])],
+ left_joins)
+ self.assertEqual(
+ [('LOWER(Cond0.email) LIKE %s', ['%example.com%']),
+ ('Issue.reporter_id = %s', [111L])],
+ where)
+
+ def testBlockingIDCond_SingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['1'], [])
+ num_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1L])
+
+ for cond, expected in ((txt_cond, '1'), (num_cond, 1L)):
+ left_joins, where = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1')
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s',
+ ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.issue_id = %s', [expected])],
+ where)
+
+ def testBlockingIDCond_MultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['1', '2', '3'], [])
+ num_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1L, 2L, 3L])
+
+ for cond, expected in ((txt_cond, ['1', '2', '3']),
+ (num_cond, [1L, 2L, 3L])):
+ left_joins, where = ast2select._ProcessBlockingIDCond(
+ cond, 'Cond1', 'Issue1')
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s',
+ ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.issue_id IN (%s,%s,%s)', expected)],
+ where)
+
+ def testBlockedOnIDCond_SingleValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['1'], [])
+ num_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1L])
+
+ for cond, expected in ((txt_cond, '1'), (num_cond, 1L)):
+ left_joins, where = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1')
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s',
+ ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.dst_issue_id = %s', [expected])],
+ where)
+
+ def testBlockedIDCond_MultiValue(self):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ txt_cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['1', '2', '3'], [])
+ num_cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1L, 2L, 3L])
+
+ for cond, expected in ((txt_cond, ['1', '2', '3']),
+ (num_cond, [1L, 2L, 3L])):
+ left_joins, where = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', 'Issue1')
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s',
+ ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.dst_issue_id IN (%s,%s,%s)', expected)],
+ where)
+
+ def testHasBlockedCond(self):
+ for op, expected in ((ast_pb2.QueryOp.IS_DEFINED, 'IS NOT NULL'),
+ (ast_pb2.QueryOp.IS_NOT_DEFINED, 'IS NULL')):
+ fd = BUILTIN_ISSUE_FIELDS['blockedon_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where = ast2select._ProcessBlockedOnIDCond(
+ cond, 'Cond1', None)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual([('Cond1.issue_id %s' % expected, [])], where)
+
+ def testHasBlockingCond(self):
+ for op, expected in ((ast_pb2.QueryOp.IS_DEFINED, 'IS NOT NULL'),
+ (ast_pb2.QueryOp.IS_NOT_DEFINED, 'IS NULL')):
+ fd = BUILTIN_ISSUE_FIELDS['blocking_id']
+ cond = ast_pb2.MakeCond(op, [fd], [], [])
+
+ left_joins, where = ast2select._ProcessBlockingIDCond(cond, 'Cond1', None)
+ self.assertEqual(
+ [('IssueRelation AS Cond1 ON Issue.id = Cond1.dst_issue_id AND '
+ 'Cond1.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual([('Cond1.dst_issue_id %s' % expected, [])], where)
+
+ def testProcessOwnerCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where = ast2select._ProcessOwnerCond(cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('User AS Cond1 ON (Issue.owner_id = Cond1.user_id '
+ 'OR Issue.derived_owner_id = Cond1.user_id)', [])],
+ left_joins)
+ self.assertEqual(
+ [('LOWER(Cond1.email) LIKE %s', ['%example.com%'])],
+ where)
+
+ def testProcessOwnerIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['owner_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111L])
+ left_joins, where = ast2select._ProcessOwnerIDCond(cond, 'Cond1', 'User1')
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.owner_id = %s OR Issue.derived_owner_id = %s)',
+ [111L, 111L])],
+ where)
+
+ def testProcessReporterCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where = ast2select._ProcessReporterCond(cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('User AS Cond1 ON Issue.reporter_id = Cond1.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('LOWER(Cond1.email) LIKE %s', ['%example.com%'])],
+ where)
+
+ def testProcessReporterIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['reporter_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111L])
+ left_joins, where = ast2select._ProcessReporterIDCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('Issue.reporter_id = %s', [111L])],
+ where)
+
+ def testProcessCcCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where = ast2select._ProcessCcCond(cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('(Issue2Cc AS Cond1 JOIN User AS User1 '
+ 'ON Cond1.cc_id = User1.user_id AND LOWER(User1.email) LIKE %s) '
+ 'ON Issue.id = Cond1.issue_id AND Issue.shard = Cond1.issue_shard',
+ ['%example.com%'])],
+ left_joins)
+ self.assertEqual(
+ [('User1.email IS NOT NULL', [])],
+ where)
+
+ def testProcessCcIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['cc_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111L])
+ left_joins, where = ast2select._ProcessCcIDCond(cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('Issue2Cc AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.cc_id = %s',
+ [111L])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.cc_id IS NOT NULL', [])],
+ where)
+
+ def testProcessStarredByCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['starredby']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where = ast2select._ProcessStarredByCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('(IssueStar AS Cond1 JOIN User AS User1 '
+ 'ON Cond1.user_id = User1.user_id AND LOWER(User1.email) LIKE %s) '
+ 'ON Issue.id = Cond1.issue_id', ['%example.com%'])],
+ left_joins)
+ self.assertEqual(
+ [('User1.email IS NOT NULL', [])],
+ where)
+
+ def testProcessStarredByIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['starredby_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111L])
+ left_joins, where = ast2select._ProcessStarredByIDCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('IssueStar AS Cond1 ON Issue.id = Cond1.issue_id '
+ 'AND Cond1.user_id = %s', [111L])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.user_id IS NOT NULL', [])],
+ where)
+
+ def testProcessCommentByCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['commentby']
+ cond = ast_pb2.MakeCond(
+ ast_pb2.QueryOp.TEXT_HAS, [fd], ['example.com'], [])
+ left_joins, where = ast2select._ProcessCommentByCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('(Comment AS Cond1 JOIN User AS User1 '
+ 'ON Cond1.commenter_id = User1.user_id '
+ 'AND LOWER(User1.email) LIKE %s) '
+ 'ON Issue.id = Cond1.issue_id', ['%example.com%'])],
+ left_joins)
+ self.assertEqual(
+ [('User1.email IS NOT NULL', [])],
+ where)
+
+ def testProcessCommentByIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['commentby_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [111L])
+ left_joins, where = ast2select._ProcessCommentByIDCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('Comment AS Cond1 ON Issue.id = Cond1.issue_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.commenter_id = %s', [111L])],
+ where)
+
+ def testProcessStatusIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['status_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [2])
+ left_joins, where = ast2select._ProcessStatusIDCond(cond, 'Cond1', 'User1')
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.status_id = %s OR Issue.derived_status_id = %s)', [2, 2])],
+ where)
+
+ def testProcessLabelIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['label_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [1])
+ left_joins, where = ast2select._ProcessLabelIDCond(cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('Issue2Label AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.label_id = %s', [1])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.label_id IS NOT NULL', [])],
+ where)
+
+ def testProcessComponentIDCond(self):
+ fd = BUILTIN_ISSUE_FIELDS['component_id']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.EQ, [fd], [], [101])
+ left_joins, where = ast2select._ProcessComponentIDCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('Issue2Component AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Issue.shard = Cond1.issue_shard AND '
+ 'Cond1.component_id = %s', [101])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.component_id IS NOT NULL', [])],
+ where)
+
+ def testProcessCustomFieldCond(self):
+ pass # TODO(jrobbins): fill in this test case.
+
+ def testProcessAttachmentCond_HasAttachment(self):
+ fd = BUILTIN_ISSUE_FIELDS['attachment']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.IS_DEFINED, [fd], [], [])
+ left_joins, where = ast2select._ProcessAttachmentCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.attachment_count IS NOT NULL AND '
+ 'Issue.attachment_count != %s)',
+ [0])],
+ where)
+
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.IS_NOT_DEFINED, [fd], [], [])
+ left_joins, where = ast2select._ProcessAttachmentCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('(Issue.attachment_count IS NULL OR '
+ 'Issue.attachment_count = %s)',
+ [0])],
+ where)
+
+ def testProcessAttachmentCond_TextHas(self):
+ fd = BUILTIN_ISSUE_FIELDS['attachment']
+ cond = ast_pb2.MakeCond(ast_pb2.QueryOp.TEXT_HAS, [fd], ['jpg'], [])
+ left_joins, where = ast2select._ProcessAttachmentCond(
+ cond, 'Cond1', 'User1')
+ self.assertEqual(
+ [('Attachment AS Cond1 ON Issue.id = Cond1.issue_id AND '
+ 'Cond1.deleted = %s',
+ [False])],
+ left_joins)
+ self.assertEqual(
+ [('Cond1.filename LIKE %s', ['%jpg%'])],
+ where)
+
+ def testCompare_IntTypes(self):
+ val_type = tracker_pb2.FieldTypes.INT_TYPE
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.IS_DEFINED, val_type, 'col', [1, 2])
+ self.assertEqual('(Alias.col IS NOT NULL AND Alias.col != %s)', cond_str)
+ self.assertEqual([0], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', [1])
+ self.assertEqual('Alias.col = %s', cond_str)
+ self.assertEqual([1], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', [1, 2])
+ self.assertEqual('Alias.col IN (%s,%s)', cond_str)
+ self.assertEqual([1, 2], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [])
+ self.assertEqual('TRUE', cond_str)
+ self.assertEqual([], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [1])
+ self.assertEqual('(Alias.col IS NULL OR Alias.col != %s)', cond_str)
+ self.assertEqual([1], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [1, 2])
+ self.assertEqual('(Alias.col IS NULL OR Alias.col NOT IN (%s,%s))',
+ cond_str)
+ self.assertEqual([1, 2], cond_args)
+
+ def testCompare_STRTypes(self):
+ val_type = tracker_pb2.FieldTypes.STR_TYPE
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.IS_DEFINED, val_type, 'col', ['a', 'b'])
+ self.assertEqual('(Alias.col IS NOT NULL AND Alias.col != %s)', cond_str)
+ self.assertEqual([''], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', ['a'])
+ self.assertEqual('Alias.col = %s', cond_str)
+ self.assertEqual(['a'], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.EQ, val_type, 'col', ['a', 'b'])
+ self.assertEqual('Alias.col IN (%s,%s)', cond_str)
+ self.assertEqual(['a', 'b'], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', [])
+ self.assertEqual('TRUE', cond_str)
+ self.assertEqual([], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', ['a'])
+ self.assertEqual('(Alias.col IS NULL OR Alias.col != %s)', cond_str)
+ self.assertEqual(['a'], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NE, val_type, 'col', ['a', 'b'])
+ self.assertEqual('(Alias.col IS NULL OR Alias.col NOT IN (%s,%s))',
+ cond_str)
+ self.assertEqual(['a', 'b'], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.TEXT_HAS, val_type, 'col', ['a'])
+ self.assertEqual('Alias.col LIKE %s', cond_str)
+ self.assertEqual(['%a%'], cond_args)
+
+ cond_str, cond_args = ast2select._Compare(
+ 'Alias', ast_pb2.QueryOp.NOT_TEXT_HAS, val_type, 'col', ['a'])
+ self.assertEqual('(Alias.col IS NULL OR Alias.col NOT LIKE %s)', cond_str)
+ self.assertEqual(['%a%'], cond_args)
+
+ def testCompareAlreadyJoined(self):
+ cond_str, cond_args = ast2select._CompareAlreadyJoined(
+ 'Alias', ast_pb2.QueryOp.EQ, 'col')
+ self.assertEqual('Alias.col IS NOT NULL', cond_str)
+ self.assertEqual([], cond_args)
+
+ cond_str, cond_args = ast2select._CompareAlreadyJoined(
+ 'Alias', ast_pb2.QueryOp.NE, 'col')
+ self.assertEqual('Alias.col IS NULL', cond_str)
+ self.assertEqual([], cond_args)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/ast2sort_test.py b/appengine/monorail/search/test/ast2sort_test.py
new file mode 100644
index 0000000..47b650b
--- /dev/null
+++ b/appengine/monorail/search/test/ast2sort_test.py
@@ -0,0 +1,214 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the ast2sort module."""
+
+import unittest
+
+from search import ast2sort
+from search import query2ast
+
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+
+
+class AST2SortTest(unittest.TestCase):
+
+ def setUp(self):
+ self.harmonized_labels = [
+ (101, 0, 'Hot'), (102, 1, 'Cold'), (103, None, 'Odd')]
+ self.harmonized_statuses = [
+ (201, 0, 'New'), (202, 1, 'Assigned'), (203, None, 'OnHold')]
+ self.harmonized_fields = []
+ self.fmt = lambda string, **kwords: string
+
+ def testBuildSortClauses_EmptySortDirectives(self):
+ left_joins, order_by = ast2sort.BuildSortClauses(
+ [], self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], order_by)
+
+ def testBuildSortClauses_Normal(self):
+ left_joins, order_by = ast2sort.BuildSortClauses(
+ ['stars', 'status', 'pri', 'reporter', 'id'], self.harmonized_labels,
+ self.harmonized_statuses, self.harmonized_fields)
+ expected_left_joins = [
+ ('User AS Sort3 ON Issue.reporter_id = Sort3.user_id', [])]
+ expected_order_by = [
+ ('Issue.star_count ASC', []),
+ ('FIELD(IF(ISNULL(Issue.status_id), Issue.derived_status_id, '
+ 'Issue.status_id), %s,%s) DESC', [201, 202]),
+ ('FIELD(IF(ISNULL(Issue.status_id), Issue.derived_status_id, '
+ 'Issue.status_id), %s) DESC', [203]),
+ ('ISNULL(Sort3.email) ASC', []),
+ ('Sort3.email ASC', []),
+ ('Issue.local_id ASC', [])]
+ self.assertEqual(expected_left_joins, left_joins)
+ self.assertEqual(expected_order_by, order_by)
+
+ def testProcessProjectSD(self):
+ left_joins, order_by = ast2sort._ProcessProjectSD(self.fmt)
+ self.assertEqual([], left_joins)
+ self.assertEqual(
+ [('Issue.project_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessReporterSD(self):
+ left_joins, order_by = ast2sort._ProcessReporterSD(self.fmt)
+ self.assertEqual(
+ [('User AS {alias} ON Issue.reporter_id = {alias}.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.email) {sort_dir}', []),
+ ('{alias}.email {sort_dir}', [])],
+ order_by)
+
+ def testProcessOwnerSD(self):
+ left_joins, order_by = ast2sort._ProcessOwnerSD(self.fmt)
+ self.assertEqual(
+ [('User AS {alias} ON (Issue.owner_id = {alias}.user_id OR '
+ 'Issue.derived_owner_id = {alias}.user_id)', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.email) {sort_dir}', []),
+ ('{alias}.email {sort_dir}', [])],
+ order_by)
+
+ def testProcessCcSD(self):
+ left_joins, order_by = ast2sort._ProcessCcSD(self.fmt)
+ self.assertEqual(
+ [('Issue2Cc AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN User AS {alias}_user '
+ 'ON {alias}.cc_id = {alias}_user.user_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}_user.email) {sort_dir}', []),
+ ('{alias}_user.email {sort_dir}', [])],
+ order_by)
+
+ def testProcessComponentSD(self):
+ left_joins, order_by = ast2sort._ProcessComponentSD(self.fmt)
+ self.assertEqual(
+ [('Issue2Component AS {alias} ON Issue.id = {alias}.issue_id '
+ 'LEFT JOIN ComponentDef AS {alias}_component '
+ 'ON {alias}.component_id = {alias}_component.id', [])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}_component.path) {sort_dir}', []),
+ ('{alias}_component.path {sort_dir}', [])],
+ order_by)
+
+ def testProcessSummarySD(self):
+ left_joins, order_by = ast2sort._ProcessSummarySD(self.fmt)
+ self.assertEqual(
+ [('IssueSummary AS {alias} ON Issue.id = {alias}.issue_id', [])],
+ left_joins)
+ self.assertEqual(
+ [('{alias}.summary {sort_dir}', [])],
+ order_by)
+
+ def testProcessStatusSD(self):
+ pass # TODO(jrobbins): fill in this test case
+
+ def testProcessBlockedSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockedSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.dst_issue_id) {sort_dir}', [])],
+ order_by)
+
+ def testProcessBlockedOnSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockedOnSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.dst_issue_id) {sort_dir}', []),
+ ('{alias}.dst_issue_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessBlockingSD(self):
+ left_joins, order_by = ast2sort._ProcessBlockingSD(self.fmt)
+ self.assertEqual(
+ [('IssueRelation AS {alias} ON Issue.id = {alias}.dst_issue_id '
+ 'AND {alias}.kind = %s', ['blockedon'])],
+ left_joins)
+ self.assertEqual(
+ [('ISNULL({alias}.issue_id) {sort_dir}', []),
+ ('{alias}.issue_id {sort_dir}', [])],
+ order_by)
+
+ def testProcessCustomAndLabelSD(self):
+ pass # TODO(jrobbins): fill in this test case
+
+ def testLabelSortClauses_NoSuchLabels(self):
+ sd = 'somethingelse'
+ harmonized_labels = [
+ (101, 0, 'Type-Defect'),
+ (102, 1, 'Type-Enhancement'),
+ (103, 2, 'Type-Task'),
+ (104, 0, 'Priority-High'),
+ (199, None, 'Type-Laundry'),
+ ]
+ left_joins, order_by = ast2sort._LabelSortClauses(
+ sd, harmonized_labels, self.fmt)
+ self.assertEqual([], left_joins)
+ self.assertEqual([], order_by)
+
+ def testLabelSortClauses_Normal(self):
+ sd = 'type'
+ harmonized_labels = [
+ (101, 0, 'Type-Defect'),
+ (102, 1, 'Type-Enhancement'),
+ (103, 2, 'Type-Task'),
+ (104, 0, 'Priority-High'),
+ (199, None, 'Type-Laundry'),
+ ]
+ left_joins, order_by = ast2sort._LabelSortClauses(
+ sd, harmonized_labels, self.fmt)
+ self.assertEqual(1, len(left_joins))
+ self.assertEqual(
+ ('Issue2Label AS {alias} ON Issue.id = {alias}.issue_id AND '
+ '{alias}.label_id IN ({all_label_ph})',
+ [101, 102, 103, 199]),
+ left_joins[0])
+ self.assertEqual(2, len(order_by))
+ self.assertEqual(
+ ('FIELD({alias}.label_id, {wk_label_ph}) {rev_sort_dir}',
+ [101, 102, 103]),
+ order_by[0])
+ self.assertEqual(
+ ('FIELD({alias}.label_id, {odd_label_ph}) {rev_sort_dir}',
+ [199]),
+ order_by[1])
+
+ def testOneSortDirective_NativeSortable(self):
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, 'opened', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.opened ASC', [])], order_by)
+
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, 'stars', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.star_count ASC', [])], order_by)
+
+ left_joins, order_by = ast2sort._OneSortDirective(
+ 1, '-stars', self.harmonized_labels, self.harmonized_statuses,
+ self.harmonized_fields)
+ self.assertEqual([], left_joins)
+ self.assertEqual([('Issue.star_count DESC', [])], order_by)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/backendnonviewable_test.py b/appengine/monorail/search/test/backendnonviewable_test.py
new file mode 100644
index 0000000..37068a4
--- /dev/null
+++ b/appengine/monorail/search/test/backendnonviewable_test.py
@@ -0,0 +1,169 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.search.backendnonviewable."""
+
+import unittest
+import mox
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+from framework import permissions
+from search import backendnonviewable
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class BackendNonviewableTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ )
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=789)
+ self.mr = testing_helpers.MakeMonorailRequest()
+ self.mr.specified_project_id = 789
+ self.mr.shard_id = 2
+ self.mr.invalidation_timestep = 12345
+
+ self.servlet = backendnonviewable.BackendNonviewable(
+ 'req', 'res', services=self.services)
+
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testHandleRequest(self):
+ pass # TODO(jrobbins): fill in this test.
+
+ def testGetNonviewableIIDs_OwnerOrAdmin(self):
+ """Check the special case for users who are never restricted."""
+ perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ nonviewable_iids = self.servlet.GetNonviewableIIDs(
+ self.mr.cnxn, self.mr.auth.user_pb, {111L}, self.project, perms, 2)
+ self.assertEqual([], nonviewable_iids)
+
+ def testGetNonviewableIIDs_RegularUser(self):
+ pass # TODO(jrobbins)
+
+ def testGetNonviewableIIDs_Anon(self):
+ pass # TODO(jrobbins)
+
+ def testGetAtRiskIIDs_NothingEverAtRisk(self):
+ """Handle the case where the site has no restriction labels."""
+ fake_restriction_label_rows = []
+ fake_restriction_label_ids = []
+ fake_at_risk_iids = []
+ self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
+ self.services.config.GetLabelDefRowsAnyProject(
+ self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn(fake_restriction_label_rows)
+ self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
+ self.services.issue.GetIIDsByLabelIDs(
+ self.mr.cnxn, fake_restriction_label_ids, 789, 2
+ ).AndReturn(fake_at_risk_iids)
+ self.mox.ReplayAll()
+
+ at_risk_iids = self.servlet.GetAtRiskIIDs(
+ self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
+ self.project, self.mr.perms, self.mr.shard_id)
+ self.mox.VerifyAll()
+ self.assertEqual([], at_risk_iids)
+
+ def testGetAtRiskIIDs_NoIssuesAtRiskRightNow(self):
+ """Handle the case where the project has no restricted issues."""
+ fake_restriction_label_rows = [
+ (123, 789, 1, 'Restrict-View-A', 'doc', False),
+ (234, 789, 2, 'Restrict-View-B', 'doc', False),
+ ]
+ fake_restriction_label_ids = [123, 234]
+ fake_at_risk_iids = []
+ self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
+ self.services.config.GetLabelDefRowsAnyProject(
+ self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn(fake_restriction_label_rows)
+ self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
+ self.services.issue.GetIIDsByLabelIDs(
+ self.mr.cnxn, fake_restriction_label_ids, 789, 2
+ ).AndReturn(fake_at_risk_iids)
+ self.mox.ReplayAll()
+
+ at_risk_iids = self.servlet.GetAtRiskIIDs(
+ self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
+ self.project, self.mr.perms, self.mr.shard_id)
+ self.mox.VerifyAll()
+ self.assertEqual([], at_risk_iids)
+
+ def testGetAtRiskIIDs_SomeAtRisk(self):
+ """Handle the case where the project has some restricted issues."""
+ fake_restriction_label_rows = [
+ (123, 789, 1, 'Restrict-View-A', 'doc', False),
+ (234, 789, 2, 'Restrict-View-B', 'doc', False),
+ ]
+ fake_restriction_label_ids = [123, 234]
+ fake_at_risk_iids = [432, 543]
+ self.mox.StubOutWithMock(self.services.config, 'GetLabelDefRowsAnyProject')
+ self.services.config.GetLabelDefRowsAnyProject(
+ self.mr.cnxn, where=[('LOWER(label) LIKE %s', ['restrict-view-%'])]
+ ).AndReturn(fake_restriction_label_rows)
+ self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByLabelIDs')
+ self.services.issue.GetIIDsByLabelIDs(
+ self.mr.cnxn, fake_restriction_label_ids, 789, 2
+ ).AndReturn(fake_at_risk_iids)
+ self.mox.ReplayAll()
+
+ at_risk_iids = self.servlet.GetAtRiskIIDs(
+ self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids,
+ self.project, self.mr.perms, self.mr.shard_id)
+ self.mox.VerifyAll()
+ self.assertEqual([432, 543], at_risk_iids)
+
+ def testGetPersonalAtRiskLabelIDs(self):
+ pass # TODO(jrobbins): For now, this is covered by GetAtRiskIIDs cases.
+
+ def testGetViewableIIDs_Anon(self):
+ """Anon users are never participants in any issues."""
+ ok_iids = self.servlet.GetViewableIIDs(
+ self.mr.cnxn, set(), 789, 2)
+ self.assertEqual([], ok_iids)
+
+ def testGetViewableIIDs_NoIssues(self):
+ """This visitor does not participate in any issues."""
+ self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByParticipant')
+ self.services.issue.GetIIDsByParticipant(
+ self.mr.cnxn, {111L}, [789], 2).AndReturn([])
+ self.mox.ReplayAll()
+
+ ok_iids = self.servlet.GetViewableIIDs(
+ self.mr.cnxn, {111L}, 789, 2)
+ self.mox.VerifyAll()
+ self.assertEqual([], ok_iids)
+
+ def testGetViewableIIDs_SomeIssues(self):
+ """This visitor participates in some issues."""
+ self.mox.StubOutWithMock(self.services.issue, 'GetIIDsByParticipant')
+ self.services.issue.GetIIDsByParticipant(
+ self.mr.cnxn, {111L}, [789], 2).AndReturn([543, 654])
+ self.mox.ReplayAll()
+
+ ok_iids = self.servlet.GetViewableIIDs(
+ self.mr.cnxn, {111L}, 789, 2)
+ self.mox.VerifyAll()
+ self.assertEqual([543, 654], ok_iids)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/backendsearch_test.py b/appengine/monorail/search/test/backendsearch_test.py
new file mode 100644
index 0000000..bfc64ef
--- /dev/null
+++ b/appengine/monorail/search/test/backendsearch_test.py
@@ -0,0 +1,101 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.search.backendsearch."""
+
+import unittest
+import mox
+
+import settings
+from search import backendsearch
+from search import backendsearchpipeline
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class BackendSearchTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ issue=fake.IssueService(),
+ )
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/_backend/besearch?q=Priority:High&shard=2')
+ self.mr.query_project_names = ['proj']
+ self.mr.specified_logged_in_user_id = 111L
+ self.mr.specified_me_user_id = 222L
+ self.mr.shard_id = 2
+ self.servlet = backendsearch.BackendSearch(
+ 'req', 'res', services=self.services)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testHandleRequest_NoResults(self):
+ """Handle the case where the search has no results."""
+ pipeline = testing_helpers.Blank(
+ SearchForIIDs=lambda: None,
+ result_iids=[],
+ search_limit_reached=False)
+ self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.servlet.profiler, 100, ['proj'], 111L, 222L
+ ).AndReturn(pipeline)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(self.mr)
+ self.mox.VerifyAll()
+ self.assertEqual([], json_data['unfiltered_iids'])
+ self.assertFalse(json_data['search_limit_reached'])
+
+ def testHandleRequest_ResultsInOnePagainationPage(self):
+ """Prefetch all result issues and return them."""
+ allowed_iids = [1, 2, 3, 4, 5, 6, 7, 8]
+ pipeline = testing_helpers.Blank(
+ SearchForIIDs=lambda: None,
+ result_iids=allowed_iids,
+ search_limit_reached=False)
+ self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.servlet.profiler, 100, ['proj'], 111L, 222L
+ ).AndReturn(pipeline)
+ self.mox.StubOutWithMock(self.services.issue, 'GetIssues')
+ # All issues are prefetched because they fit on the first pagination page.
+ self.services.issue.GetIssues(self.mr.cnxn, allowed_iids, shard_id=2)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(self.mr)
+ self.mox.VerifyAll()
+ self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8], json_data['unfiltered_iids'])
+ self.assertFalse(json_data['search_limit_reached'])
+
+ def testHandleRequest_ResultsExceedPagainationPage(self):
+ """Return all result issue IDs, but only prefetch the first page."""
+ self.mr.num = 5
+ pipeline = testing_helpers.Blank(
+ SearchForIIDs=lambda: None,
+ result_iids=[1, 2, 3, 4, 5, 6, 7, 8],
+ search_limit_reached=False)
+ self.mox.StubOutWithMock(backendsearchpipeline, 'BackendSearchPipeline')
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.servlet.profiler, 100, ['proj'], 111L, 222L
+ ).AndReturn(pipeline)
+ self.mox.StubOutWithMock(self.services.issue, 'GetIssues')
+ # First 5 issues are prefetched because num=5
+ self.services.issue.GetIssues(self.mr.cnxn, [1, 2, 3, 4, 5], shard_id=2)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(self.mr)
+ self.mox.VerifyAll()
+ # All are IDs are returned to the frontend.
+ self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8], json_data['unfiltered_iids'])
+ self.assertFalse(json_data['search_limit_reached'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/backendsearchpipeline_test.py b/appengine/monorail/search/test/backendsearchpipeline_test.py
new file mode 100644
index 0000000..2751405
--- /dev/null
+++ b/appengine/monorail/search/test/backendsearchpipeline_test.py
@@ -0,0 +1,249 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the backendsearchpipeline module."""
+
+import mox
+import unittest
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+import settings
+from framework import framework_helpers
+from framework import profiler
+from framework import sorting
+from framework import sql
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import backendsearchpipeline
+from search import ast2ast
+from search import query2ast
+from services import service_manager
+from services import tracker_fulltext
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+class BackendSearchPipelineTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ cache_manager=fake.CacheManager())
+ self.profiler = profiler.Profiler()
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/list?q=Priority:High',
+ project=self.project)
+ self.mr.me_user_id = 999L # This value is not used by backend search
+ self.mr.shard_id = 2
+ self.mr.invalidation_timestep = 12345
+
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ sorting.InitializeArtValues(self.services)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def SetUpPromises(self, exp_query):
+ self.mox.StubOutWithMock(framework_helpers, 'Promise')
+ framework_helpers.Promise(
+ backendsearchpipeline._GetQueryResultIIDs, self.mr.cnxn,
+ self.services, 'is:open', exp_query, [789],
+ mox.IsA(tracker_pb2.ProjectIssueConfig), ['project', 'id'],
+ ('Issue.shard = %s', [2]), 2, self.mr.invalidation_timestep
+ ).AndReturn('fake promise 1')
+
+ def testMakePromises_Anon(self):
+ """A backend pipeline does not personalize the query of anon users."""
+ self.SetUpPromises('Priority:High')
+ self.mox.ReplayAll()
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.profiler, 100, ['proj'], None, None)
+ self.mox.VerifyAll()
+
+ def testMakePromises_SignedIn(self):
+ """A backend pipeline immediately personalizes and runs the query."""
+ self.mr.query = 'owner:me'
+ self.SetUpPromises('owner:111')
+ self.mox.ReplayAll()
+ backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.profiler, 100, ['proj'], 111L, 111L)
+ self.mox.VerifyAll()
+
+ def testSearchForIIDs(self):
+ self.SetUpPromises('Priority:High')
+ self.mox.ReplayAll()
+ be_pipeline = backendsearchpipeline.BackendSearchPipeline(
+ self.mr, self.services, self.profiler, 100, ['proj'], 111L, 111L)
+ be_pipeline.result_iids_promise = testing_helpers.Blank(
+ WaitAndGetValue=lambda: ([10002, 10052], False))
+ be_pipeline.SearchForIIDs()
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], be_pipeline.result_iids)
+ self.assertEqual(False, be_pipeline.search_limit_reached)
+
+
+class BackendSearchPipelineMethodsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ cache_manager=fake.CacheManager())
+ self.profiler = profiler.Profiler()
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/list?q=Priority:High',
+ project=self.project)
+
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testSearchProjectCan_Normal(self):
+ query_ast = query2ast.ParseUserQuery(
+ 'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
+ self.config)
+ simplified_query_ast = ast2ast.PreprocessAST(
+ self.cnxn, query_ast, [789], self.services, self.config)
+ conj = simplified_query_ast.conjunctions[0]
+ self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
+ tracker_fulltext.SearchIssueFullText(
+ [789], conj, 2).AndReturn((None, False))
+ self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
+ self.services.issue.RunIssueQuery(
+ self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
+ shard_id=2).AndReturn(([10002, 10052], False))
+ self.mox.ReplayAll()
+ result, capped = backendsearchpipeline.SearchProjectCan(
+ self.cnxn, self.services, [789], query_ast, 2, self.config)
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], result)
+ self.assertFalse(capped)
+
+ def testSearchProjectCan_DBCapped(self):
+ query_ast = query2ast.ParseUserQuery(
+ 'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
+ self.config)
+ simplified_query_ast = ast2ast.PreprocessAST(
+ self.cnxn, query_ast, [789], self.services, self.config)
+ conj = simplified_query_ast.conjunctions[0]
+ self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
+ tracker_fulltext.SearchIssueFullText(
+ [789], conj, 2).AndReturn((None, False))
+ self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
+ self.services.issue.RunIssueQuery(
+ self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
+ shard_id=2).AndReturn(([10002, 10052], True))
+ self.mox.ReplayAll()
+ result, capped = backendsearchpipeline.SearchProjectCan(
+ self.cnxn, self.services, [789], query_ast, 2, self.config)
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], result)
+ self.assertTrue(capped)
+
+ def testSearchProjectCan_FTSCapped(self):
+ query_ast = query2ast.ParseUserQuery(
+ 'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
+ self.config)
+ simplified_query_ast = ast2ast.PreprocessAST(
+ self.cnxn, query_ast, [789], self.services, self.config)
+ conj = simplified_query_ast.conjunctions[0]
+ self.mox.StubOutWithMock(tracker_fulltext, 'SearchIssueFullText')
+ tracker_fulltext.SearchIssueFullText(
+ [789], conj, 2).AndReturn(([10002, 10052], True))
+ self.mox.StubOutWithMock(self.services.issue, 'RunIssueQuery')
+ self.services.issue.RunIssueQuery(
+ self.cnxn, mox.IsA(list), mox.IsA(list), mox.IsA(list),
+ shard_id=2).AndReturn(([10002, 10052], False))
+ self.mox.ReplayAll()
+ result, capped = backendsearchpipeline.SearchProjectCan(
+ self.cnxn, self.services, [789], query_ast, 2, self.config)
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], result)
+ self.assertTrue(capped)
+
+ def testGetQueryResultIIDs(self):
+ sd = ['project', 'id']
+ slice_term = ('Issue.shard = %s', [2])
+ query_ast = query2ast.ParseUserQuery(
+ 'Priority:High', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
+ self.config)
+ query_ast = backendsearchpipeline._FilterSpam(query_ast)
+
+ self.mox.StubOutWithMock(backendsearchpipeline, 'SearchProjectCan')
+ backendsearchpipeline.SearchProjectCan(
+ self.cnxn, self.services, [789], query_ast, 2, self.config,
+ sort_directives=sd, where=[slice_term],
+ query_desc='getting query issue IDs'
+ ).AndReturn(([10002, 10052], False))
+ self.mox.ReplayAll()
+ result_iids, limit_reached = backendsearchpipeline._GetQueryResultIIDs(
+ self.cnxn, self.services, 'is:open', 'Priority:High',
+ [789], self.config, sd, slice_term, 2, 12345)
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], result_iids)
+ self.assertFalse(limit_reached)
+ self.assertEqual(
+ ([10002, 10052], 12345),
+ memcache.get('789;is:open;Priority:High;project id;2'))
+
+ def testGetSpamQueryResultIIDs(self):
+ sd = ['project', 'id']
+ slice_term = ('Issue.shard = %s', [2])
+ query_ast = query2ast.ParseUserQuery(
+ 'Priority:High is:spam', 'is:open', query2ast.BUILTIN_ISSUE_FIELDS,
+ self.config)
+
+ query_ast = backendsearchpipeline._FilterSpam(query_ast)
+
+ self.mox.StubOutWithMock(backendsearchpipeline, 'SearchProjectCan')
+ backendsearchpipeline.SearchProjectCan(
+ self.cnxn, self.services, [789], query_ast, 2, self.config,
+ sort_directives=sd, where=[slice_term],
+ query_desc='getting query issue IDs'
+ ).AndReturn(([10002, 10052], False))
+ self.mox.ReplayAll()
+ result_iids, limit_reached = backendsearchpipeline._GetQueryResultIIDs(
+ self.cnxn, self.services, 'is:open', 'Priority:High is:spam',
+ [789], self.config, sd, slice_term, 2, 12345)
+ self.mox.VerifyAll()
+ self.assertEqual([10002, 10052], result_iids)
+ self.assertFalse(limit_reached)
+ self.assertEqual(
+ ([10002, 10052], 12345),
+ memcache.get('789;is:open;Priority:High is:spam;project id;2'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/frontendsearchpipeline_test.py b/appengine/monorail/search/test/frontendsearchpipeline_test.py
new file mode 100644
index 0000000..7e5bd8b
--- /dev/null
+++ b/appengine/monorail/search/test/frontendsearchpipeline_test.py
@@ -0,0 +1,859 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the frontendsearchpipeline module."""
+
+import mox
+import unittest
+
+from google.appengine.api import memcache
+from google.appengine.api import modules
+from google.appengine.ext import testbed
+from google.appengine.api import urlfetch
+
+import settings
+from framework import profiler
+from framework import sorting
+from framework import urls
+from proto import ast_pb2
+from proto import project_pb2
+from proto import tracker_pb2
+from search import frontendsearchpipeline
+from search import searchpipeline
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+
+
+# Just an example timestamp. The value does not matter.
+NOW = 2444950132
+
+
+class FrontendSearchPipelineTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ cache_manager=fake.CacheManager())
+ self.profiler = profiler.Profiler()
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/list', project=self.project)
+ self.mr.me_user_id = 111L
+
+ self.issue_1 = fake.MakeTestIssue(
+ 789, 1, 'one', 'New', 111L, labels=['Priority-High'])
+ self.services.issue.TestAddIssue(self.issue_1)
+ self.issue_2 = fake.MakeTestIssue(
+ 789, 2, 'two', 'New', 111L, labels=['Priority-Low'])
+ self.services.issue.TestAddIssue(self.issue_2)
+ self.issue_3 = fake.MakeTestIssue(
+ 789, 3, 'three', 'New', 111L, labels=['Priority-Medium'])
+ self.services.issue.TestAddIssue(self.issue_3)
+ self.mr.sort_spec = 'Priority'
+
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ sorting.InitializeArtValues(self.services)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
+ unfiltered_iids = {1: [1001, 1011]}
+ nonviewable_iids = {1: set()}
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
+ frontendsearchpipeline._StartBackendSearch(
+ self.mr, set(['proj']), [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
+ unfiltered_iids, {}, nonviewable_iids, set(), self.services).AndReturn([])
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
+ frontendsearchpipeline._FinishBackendSearch([])
+ self.mox.ReplayAll()
+
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ pipeline.unfiltered_iids = unfiltered_iids
+ pipeline.nonviewable_iids = nonviewable_iids
+ pipeline.SearchForIIDs()
+ self.mox.VerifyAll()
+ self.assertEqual(2, pipeline.total_count)
+ self.assertEqual(2, pipeline.counts[1])
+ self.assertEqual([1001, 1011], pipeline.filtered_iids[1])
+
+ def testMergeAndSortIssues_EmptyResult(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ pipeline.filtered_iids = {0: [], 1: [], 2: []}
+
+ pipeline.MergeAndSortIssues()
+ self.assertEqual([], pipeline.allowed_iids)
+ self.assertEqual([], pipeline.allowed_results)
+ self.assertEqual({}, pipeline.users_by_id)
+
+ def testMergeAndSortIssues_Normal(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: [self.issue_3.issue_id]
+ }
+
+ pipeline.MergeAndSortIssues()
+ self.assertEqual(
+ [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
+ pipeline.allowed_iids)
+ self.assertEqual(
+ [self.issue_1, self.issue_3, self.issue_2], # high, medium, low.
+ pipeline.allowed_results)
+ self.assertEqual([111L], pipeline.users_by_id.keys())
+
+ def testDetermineIssuePosition_Normal(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: [self.issue_3.issue_id]
+ }
+
+ prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ self.assertEqual(self.issue_1.issue_id, prev_iid)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2.issue_id, next_iid)
+
+ def testDetermineIssuePosition_NotInResults(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ # In this unit test case we are not calling SearchForIIDs(), instead just
+ # set pipeline.filtered_iids directly.
+ pipeline.filtered_iids = {
+ 0: [],
+ 1: [self.issue_1.issue_id],
+ 2: [self.issue_2.issue_id],
+ 3: []
+ }
+
+ prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ self.assertEqual(None, prev_iid)
+ self.assertEqual(None, index)
+ self.assertEqual(None, next_iid)
+
+ def testDetermineIssuePositionInShard_IssueIsInShard(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ # Let's assume issues 1, 2, and 3 are all in the same shard.
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
+ }
+
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_1, {})
+ self.assertEqual(None, prev_cand)
+ self.assertEqual(0, index)
+ self.assertEqual(self.issue_3, next_cand)
+
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_3, {})
+ self.assertEqual(self.issue_1, prev_cand)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2, next_cand)
+
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_2, {})
+ self.assertEqual(self.issue_3, prev_cand)
+ self.assertEqual(2, index)
+ self.assertEqual(None, next_cand)
+
+ def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+
+ # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
+ pipeline.filtered_iids = {
+ 0: [self.issue_2.issue_id, self.issue_3.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_1, {})
+ self.assertEqual(None, prev_cand)
+ self.assertEqual(0, index)
+ self.assertEqual(self.issue_3, next_cand)
+
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_2.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_3, {})
+ self.assertEqual(self.issue_1, prev_cand)
+ self.assertEqual(1, index)
+ self.assertEqual(self.issue_2, next_cand)
+
+ pipeline.filtered_iids = {
+ 0: [self.issue_1.issue_id, self.issue_3.issue_id],
+ }
+ prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
+ 0, self.issue_2, {})
+ self.assertEqual(self.issue_3, prev_cand)
+ self.assertEqual(2, index)
+ self.assertEqual(None, next_cand)
+
+ def testAccumulateSampleIssues_Empty(self):
+ """When the search gave no results, there cannot be any samples."""
+ sample_dict = {}
+ needed_iids = []
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ issue_ids = []
+ pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
+ self.assertEqual({}, sample_dict)
+ self.assertEqual([], needed_iids)
+
+ def testAccumulateSampleIssues_Small(self):
+ """When the search gave few results, don't bother with samples."""
+ sample_dict = {}
+ needed_iids = []
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ issue_ids = [78901, 78902]
+ pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
+ self.assertEqual({}, sample_dict)
+ self.assertEqual([], needed_iids)
+
+ def testAccumulateSampleIssues_Normal(self):
+ """We will choose at least one sample for every 10 results in a shard."""
+ sample_dict = {}
+ needed_iids = []
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ issues = []
+ for i in range(23):
+ issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111L)
+ issues.append(issue)
+ self.services.issue.TestAddIssue(issue)
+
+ issue_ids = [issue.issue_id for issue in issues]
+ pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
+ self.assertEqual(2, len(needed_iids))
+ for sample_iid in needed_iids:
+ self.assertIn(sample_iid, issue_ids)
+
+ def testLookupNeededUsers(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+
+ pipeline._LookupNeededUsers([])
+ self.assertEqual([], pipeline.users_by_id.keys())
+
+ pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
+ self.assertEqual([111L], pipeline.users_by_id.keys())
+
+ def testPaginate_Grid(self):
+ self.mr.mode = 'grid'
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ pipeline.allowed_iids = [
+ self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
+ pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
+ pipeline.total_count = len(pipeline.allowed_results)
+ pipeline.Paginate()
+ self.assertEqual(
+ [self.issue_1, self.issue_2, self.issue_3],
+ pipeline.visible_results)
+
+ def testPaginate_List(self):
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ self.mr, self.services, self.profiler, 100)
+ pipeline.allowed_iids = [
+ self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
+ pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
+ pipeline.total_count = len(pipeline.allowed_results)
+ pipeline.Paginate()
+ self.assertEqual(
+ [self.issue_1, self.issue_2, self.issue_3],
+ pipeline.visible_results)
+ self.assertFalse(pipeline.pagination.limit_reached)
+
+
+class FrontendSearchPipelineMethodsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testMakeBackendCallback(self):
+ called_with = []
+
+ def func(a, b):
+ called_with.append((a, b))
+
+ callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
+ callback()
+ self.assertEqual([(10, 20)], called_with)
+
+ def testStartBackendSearch(self):
+ # TODO(jrobbins): write this test.
+ pass
+
+ def testFinishBackendSearch(self):
+ # TODO(jrobbins): write this test.
+ pass
+
+ def testGetProjectTimestamps_NoneSet(self):
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [])
+ self.assertEqual({}, project_shard_timestamps)
+
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [0, 1, 2, 3, 4])
+ self.assertEqual({}, project_shard_timestamps)
+
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789], [0, 1, 2, 3, 4])
+ self.assertEqual({}, project_shard_timestamps)
+
+ def testGetProjectTimestamps_SpecificProjects(self):
+ memcache.set('789;0', NOW)
+ memcache.set('789;1', NOW - 1000)
+ memcache.set('789;2', NOW - 3000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789], [0, 1, 2])
+ self.assertEqual(
+ { (789, 0): NOW,
+ (789, 1): NOW - 1000,
+ (789, 2): NOW - 3000,
+ },
+ project_shard_timestamps)
+
+ memcache.set('790;0', NOW)
+ memcache.set('790;1', NOW - 10000)
+ memcache.set('790;2', NOW - 30000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [789, 790], [0, 1, 2])
+ self.assertEqual(
+ { (789, 0): NOW,
+ (789, 1): NOW - 1000,
+ (789, 2): NOW - 3000,
+ (790, 0): NOW,
+ (790, 1): NOW - 10000,
+ (790, 2): NOW - 30000,
+ },
+ project_shard_timestamps)
+
+ def testGetProjectTimestamps_SiteWide(self):
+ memcache.set('all;0', NOW)
+ memcache.set('all;1', NOW - 10000)
+ memcache.set('all;2', NOW - 30000)
+ project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
+ [], [0, 1, 2])
+ self.assertEqual(
+ { ('all', 0): NOW,
+ ('all', 1): NOW - 10000,
+ ('all', 2): NOW - 30000,
+ },
+ project_shard_timestamps)
+
+ def testGetNonviewableIIDs_SearchMissSoNoOp(self):
+ """If search cache missed, don't bother looking up nonviewable IIDs."""
+ unfiltered_iids_dict = {} # No cached search results found.
+ rpc_tuples = [] # Nothing should accumulate here in this case.
+ nonviewable_iids = {} # Nothing should accumulate here in this case.
+ processed_invalidations_up_to = 12345
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
+ {}, processed_invalidations_up_to, True)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({}, nonviewable_iids)
+
+ def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
+ """If search cache hit, get nonviewable info from cache."""
+ unfiltered_iids_dict = {
+ 1: [10001, 10021],
+ 2: ['the search result issue_ids do not matter'],
+ }
+ rpc_tuples = [] # Nothing should accumulate here in this case.
+ nonviewable_iids = {} # Our mock results should end up here.
+ processed_invalidations_up_to = 12345
+ memcache.set('nonviewable:789;111;1',
+ ([10001, 10031], processed_invalidations_up_to - 10))
+ memcache.set('nonviewable:789;111;2',
+ ([10002, 10042], processed_invalidations_up_to - 30))
+
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): 0, # not stale
+ }
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
+ project_shard_timestamps, processed_invalidations_up_to, True)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
+ """If search hit and n-v miss, create RPCs to get nonviewable info."""
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ unfiltered_iids_dict = {
+ 2: ['the search result issue_ids do not matter'],
+ }
+ rpc_tuples = [] # One RPC object should accumulate here.
+ nonviewable_iids = {} # This will stay empty until RPCs complete.
+ processed_invalidations_up_to = 12345
+ # Nothing is set in memcache for this case.
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._GetNonviewableIIDs(
+ [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
+ {}, processed_invalidations_up_to, True)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual({}, nonviewable_iids)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+
+ def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): 0, # not stale
+ }
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # Nothing added here until RPC completes
+ project_shard_timestamps = {
+ (789, 1): 0, # not stale
+ (789, 2): processed_invalidations_up_to, # stale!
+ }
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+ self.assertEqual({1: {10001}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
+ processed_invalidations_up_to = 12345
+ cached_dict = {
+ 'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
+ 'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
+ }
+ rpc_tuples = [] # Nothing should accumulate here.
+ nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
+ project_shard_timestamps = {
+ (None, 1): 0, # not stale
+ (None, 2): 0, # not stale
+ }
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ None, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
+ rpc_tuples, processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
+
+ def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ cached_dict = {} # Nothing here, so it is an at-risk cache miss.
+ rpc_tuples = [] # One RPC should accumulate here.
+ nonviewable_iids = {1: {10001}} # Nothing added here until RPC completes.
+ processed_invalidations_up_to = 12345
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._AccumulateNonviewableIIDs(
+ 789, 111L, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
+ processed_invalidations_up_to)
+ self.mox.VerifyAll()
+ _, sid_0, rpc_0 = rpc_tuples[0]
+ self.assertEqual(2, sid_0)
+ self.assertEqual(a_fake_rpc, rpc_0)
+ self.assertIsNotNone(a_fake_rpc.callback)
+ self.assertEqual({1: {10001}}, nonviewable_iids)
+
+ def testGetCachedSearchResults(self):
+ # TODO(jrobbins): Write this test.
+ pass
+
+ def testMakeBackendRequestHeaders(self):
+ headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
+ self.assertNotIn('X-AppEngine-FailFast', headers)
+ headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
+ self.assertEqual('Yes', headers['X-AppEngine-FailFast'])
+
+ def testStartBackendSearchCall(self):
+ self.mox.StubOutWithMock(urlfetch, 'create_rpc')
+ self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
+ self.mox.StubOutWithMock(modules, 'get_hostname')
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
+ a_fake_rpc)
+ modules.get_hostname(module='besearch')
+ urlfetch.make_fetch_call(
+ a_fake_rpc, mox.StrContains(urls.BACKEND_SEARCH), follow_redirects=False,
+ headers=mox.IsA(dict))
+ self.mox.ReplayAll()
+
+ processed_invalidations_up_to = 12345
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
+ mr.me_user_id = 111L
+ frontendsearchpipeline._StartBackendSearchCall(
+ mr, ['proj'], 2, processed_invalidations_up_to)
+ self.mox.VerifyAll()
+
+ def testStartBackendNonviewableCall(self):
+ self.mox.StubOutWithMock(urlfetch, 'create_rpc')
+ self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
+ self.mox.StubOutWithMock(modules, 'get_hostname')
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
+ a_fake_rpc)
+ modules.get_hostname(module='besearch')
+ urlfetch.make_fetch_call(
+ a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
+ follow_redirects=False, headers=mox.IsA(dict))
+ self.mox.ReplayAll()
+
+ processed_invalidations_up_to = 12345
+ frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111L, 2, processed_invalidations_up_to)
+ self.mox.VerifyAll()
+
+ def testHandleBackendSearchResponse_Error(self):
+ response_str = 'There was a problem processing the query.'
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(
+ content=response_str, status_code=500))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # Nothing should be added for this case.
+    filtered_iids = {}  # Search results should accumulate here, per-shard.
+ search_limit_reached = {} # Booleans accumulate here, per-shard.
+ processed_invalidations_up_to = 12345
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
+ mr.me_user_id = 111L
+ error_responses = set()
+
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
+ frontendsearchpipeline._HandleBackendSearchResponse(
+ mr, ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids,
+ search_limit_reached, processed_invalidations_up_to, error_responses)
+ self.assertEqual([], rpc_tuples)
+ self.assertIn(2, error_responses)
+
+ def testHandleBackendSearchResponse_Normal(self):
+ response_str = (
+ '})]\'\n'
+ '{'
+ ' "unfiltered_iids": [10002, 10042],'
+ ' "search_limit_reached": false'
+ '}'
+ )
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(
+ content=response_str, status_code=200))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # Nothing should be added for this case.
+ filtered_iids = {} # Search results should accumulate here, per-shard.
+ search_limit_reached = {} # Booleans accumulate here, per-shard.
+ processed_invalidations_up_to = 12345
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
+ mr.me_user_id = 111L
+ error_responses = set()
+ frontendsearchpipeline._HandleBackendSearchResponse(
+ mr, ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids,
+ search_limit_reached, processed_invalidations_up_to, error_responses)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({2: [10002, 10042]}, filtered_iids)
+ self.assertEqual({2: False}, search_limit_reached)
+
+
+ def testHandleBackendSearchResponse_TriggersRetry(self):
+ response_str = None
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(content=response_str))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # New RPC should be appended here
+ filtered_iids = {} # No change here until retry completes.
+ search_limit_reached = {} # No change here until retry completes.
+ processed_invalidations_up_to = 12345
+ error_responses = set()
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
+ mr.me_user_id = 111L
+
+ self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ rpc = frontendsearchpipeline._StartBackendSearchCall(
+ mr, ['proj'], 2, processed_invalidations_up_to, failfast=False
+ ).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._HandleBackendSearchResponse(
+ mr, ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids,
+ search_limit_reached, processed_invalidations_up_to, error_responses)
+ self.mox.VerifyAll()
+ _, retry_shard_id, retry_rpc = rpc_tuples[0]
+ self.assertEqual(2, retry_shard_id)
+ self.assertEqual(a_fake_rpc, retry_rpc)
+ self.assertIsNotNone(retry_rpc.callback)
+ self.assertEqual({}, filtered_iids)
+ self.assertEqual({}, search_limit_reached)
+
+ def testHandleBackendNonviewableResponse_Error(self):
+ response_str = 'There was an error.'
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(
+ content=response_str,
+ status_code=500
+ ))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # Nothing should be added for this case.
+ nonviewable_iids = {} # At-risk issue IDs should accumulate here, per-shard.
+ processed_invalidations_up_to = 12345
+
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ frontendsearchpipeline._HandleBackendNonviewableResponse(
+ 789, 111L, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
+ processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)
+
+ def testHandleBackendNonviewableResponse_Normal(self):
+ response_str = (
+ '})]\'\n'
+ '{'
+ ' "nonviewable": [10002, 10042]'
+ '}'
+ )
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(
+ content=response_str,
+ status_code=200
+ ))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # Nothing should be added for this case.
+ nonviewable_iids = {} # At-risk issue IDs should accumulate here, per-shard.
+ processed_invalidations_up_to = 12345
+
+ frontendsearchpipeline._HandleBackendNonviewableResponse(
+ 789, 111L, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
+ processed_invalidations_up_to)
+ self.assertEqual([], rpc_tuples)
+ self.assertEqual({2: {10002, 10042}}, nonviewable_iids)
+
+ def testHandleBackendAtRiskResponse_TriggersRetry(self):
+ response_str = None
+ rpc = testing_helpers.Blank(
+ get_result=lambda: testing_helpers.Blank(content=response_str))
+ rpc_tuple = (NOW, 2, rpc)
+ rpc_tuples = [] # New RPC should be appended here
+ nonviewable_iids = {} # No change here until retry completes.
+ processed_invalidations_up_to = 12345
+
+ self.mox.StubOutWithMock(
+ frontendsearchpipeline, '_StartBackendNonviewableCall')
+ a_fake_rpc = testing_helpers.Blank(callback=None)
+ rpc = frontendsearchpipeline._StartBackendNonviewableCall(
+ 789, 111L, 2, processed_invalidations_up_to, failfast=False
+ ).AndReturn(a_fake_rpc)
+ self.mox.ReplayAll()
+
+ frontendsearchpipeline._HandleBackendNonviewableResponse(
+ 789, 111L, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
+ processed_invalidations_up_to)
+ self.mox.VerifyAll()
+ _, retry_shard_id, retry_rpc = rpc_tuples[0]
+ self.assertEqual(2, retry_shard_id)
+ self.assertIsNotNone(retry_rpc.callback)
+ self.assertEqual(a_fake_rpc, retry_rpc)
+ self.assertEqual({}, nonviewable_iids)
+
+ def testSortIssues(self):
+ services = service_manager.Services(
+ cache_manager=fake.CacheManager())
+ sorting.InitializeArtValues(services)
+
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
+ mr.sort_spec = 'priority'
+ issue_1 = fake.MakeTestIssue(
+ 789, 1, 'one', 'New', 111L, labels=['Priority-High'])
+ issue_2 = fake.MakeTestIssue(
+ 789, 2, 'two', 'New', 111L, labels=['Priority-Low'])
+ issue_3 = fake.MakeTestIssue(
+ 789, 3, 'three', 'New', 111L, labels=['Priority-Medium'])
+ issues = [issue_1, issue_2, issue_3]
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ sorted_issues = frontendsearchpipeline._SortIssues(mr, issues, config, {})
+
+ self.assertEqual(
+ [issue_1, issue_3, issue_2], # Order is high, medium, low.
+ sorted_issues)
+
+
+class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.sharded_iids = {
+ 0: [10, 20, 30, 40, 50],
+ 1: [21, 41, 61, 81],
+ 2: [42, 52, 62, 72, 102],
+ 3: [],
+ }
+
+ def testTotalLength_Empty(self):
+ """If there were no results, the length of the sharded list is zero."""
+ self.assertEqual(0, frontendsearchpipeline._TotalLength({}))
+
+ def testTotalLength_Normal(self):
+ """The length of the sharded list is the sum of the shard lengths."""
+ self.assertEqual(
+ 14, frontendsearchpipeline._TotalLength(self.sharded_iids))
+
+ def testReverseShards_Empty(self):
+ """Reversing an empty sharded list is still empty."""
+ empty_sharded_iids = {}
+ frontendsearchpipeline._ReverseShards(empty_sharded_iids)
+ self.assertEqual({}, empty_sharded_iids)
+
+ def testReverseShards_Normal(self):
+ """Reversing a sharded list reverses each shard."""
+ frontendsearchpipeline._ReverseShards(self.sharded_iids)
+ self.assertEqual(
+ {0: [50, 40, 30, 20, 10],
+ 1: [81, 61, 41, 21],
+ 2: [102, 72, 62, 52, 42],
+ 3: [],
+ },
+ self.sharded_iids)
+
+ def testTrimShardedIIDs_Empty(self):
+ """If the sharded list is empty, trimming it makes no change."""
+ empty_sharded_iids = {}
+ frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
+ self.assertEqual({}, empty_sharded_iids)
+
+ frontendsearchpipeline._TrimEndShardedIIDs(
+ empty_sharded_iids, [100, 88, 99], 12)
+ self.assertEqual({}, empty_sharded_iids)
+
+ def testTrimShardedIIDs_NoSamples(self):
+ """If there are no samples, we don't trim off any IIDs."""
+ orig_sharded_iids = {
+ shard_id: iids[:] for shard_id, iids in self.sharded_iids.iteritems()}
+ num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
+ self.sharded_iids, [], 12)
+ self.assertEqual(0, num_trimmed)
+ self.assertEqual(orig_sharded_iids, self.sharded_iids)
+
+ num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
+ self.sharded_iids, [], 1)
+ self.assertEqual(0, num_trimmed)
+ self.assertEqual(orig_sharded_iids, self.sharded_iids)
+
+ def testTrimShardedIIDs_Normal(self):
+ """The first 3 samples contribute all needed IIDs, so trim off the rest."""
+ samples = [30, 41, 62, 40, 81]
+ num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
+ self.sharded_iids, samples, 5)
+ self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
+ self.assertEqual(
+ { # shard_id: iids before lower-bound + iids before 1st excess sample.
+ 0: [10, 20] + [30],
+ 1: [21] + [41, 61],
+ 2: [42, 52] + [62, 72, 102],
+ 3: [] + []},
+ self.sharded_iids)
+
+ def testCalcSamplePositions_Empty(self):
+ sharded_iids = {0: []}
+ samples = []
+ self.assertEqual(
+ [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
+
+ sharded_iids = {0: [10, 20, 30, 40]}
+ samples = []
+ self.assertEqual(
+ [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
+
+ sharded_iids = {0: []}
+ # E.g., the IIDs 2 and 4 might have been trimmed out in the forward phase.
+ # But we still have them in the list for the backwards phase, and they
+ # should just not contribute anything to the result.
+ samples = [2, 4]
+ self.assertEqual(
+ [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
+
+ def testCalcSamplePositions_Normal(self):
+ samples = [30, 41, 62, 40, 81]
+ self.assertEqual(
+ [(30, 2), (41, 1), (62, 2), (40, 3), (81, 3)],
+ frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/query2ast_test.py b/appengine/monorail/search/test/query2ast_test.py
new file mode 100644
index 0000000..e25768e
--- /dev/null
+++ b/appengine/monorail/search/test/query2ast_test.py
@@ -0,0 +1,546 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the query2ast module."""
+
+import datetime
+import time
+import unittest
+
+from proto import ast_pb2
+from search import query2ast
+from services import fulltext_helpers
+from tracker import tracker_bizobj
+
+BOOL = query2ast.BOOL
+DATE = query2ast.DATE
+NUM = query2ast.NUM
+TXT = query2ast.TXT
+
+BUILTIN_ISSUE_FIELDS = query2ast.BUILTIN_ISSUE_FIELDS
+ANY_FIELD = query2ast.BUILTIN_ISSUE_FIELDS['any_field']
+
+EQ = query2ast.EQ
+NE = query2ast.NE
+LT = query2ast.LT
+GT = query2ast.GT
+LE = query2ast.LE
+GE = query2ast.GE
+TEXT_HAS = query2ast.TEXT_HAS
+NOT_TEXT_HAS = query2ast.NOT_TEXT_HAS
+TEXT_MATCHES = query2ast.TEXT_MATCHES
+NOT_TEXT_MATCHES = query2ast.NOT_TEXT_MATCHES
+IS_DEFINED = query2ast.IS_DEFINED
+IS_NOT_DEFINED = query2ast.IS_NOT_DEFINED
+KEY_HAS = query2ast.KEY_HAS
+
+MakeCond = ast_pb2.MakeCond
+
+
+class QueryParsingUnitTest(unittest.TestCase):
+
+ default_config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ @unittest.skip('TODO(jrobbins): fully support OR')
+ def skip_testParseUserQuery_OrClause(self):
+ # ParseUserQuery extends _ParseORQuery with specialized
+ # handling of "OR" operators in a user query
+
+ # an "OR" query, which should look like two separate simple queries
+ # joined together by a pipe.
+ ast = query2ast.ParseUserQuery(
+ 'ham OR fancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ conj1 = ast.conjunctions[0]
+ conj2 = ast.conjunctions[1]
+ self.assertEqual([MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], [])],
+ conj1.conds)
+ self.assertEqual([MakeCond(TEXT_HAS, [ANY_FIELD], ['fancy'], [])],
+ conj2.conds)
+
+ def testParseUserQuery_Words(self):
+ # an "ORTerm" is actually anything appearing on either side of an
+ # "OR" operator. So this could be thought of as "simple" query parsing.
+
+ # a simple query with no spaces
+ ast = query2ast.ParseUserQuery(
+ 'hamfancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ fulltext_cond = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['hamfancy'], []), fulltext_cond)
+
+ # negative word
+ ast = query2ast.ParseUserQuery(
+ '-hamfancy', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ fulltext_cond = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ # note: not NOT_TEXT_HAS.
+ MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['hamfancy'], []),
+ fulltext_cond)
+
+ # an explicit "AND" query in the "featured" context
+ warnings = []
+ query2ast.ParseUserQuery(
+ 'ham AND fancy', 'label:featured', BUILTIN_ISSUE_FIELDS,
+ self.default_config, warnings=warnings)
+ self.assertEqual(
+ ['The only supported boolean operator is OR (all capitals).'],
+ warnings)
+
+ # an implicit "AND" query
+ ast = query2ast.ParseUserQuery(
+ 'ham fancy', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], []), ft_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['fancy'], []), ft_cond2)
+
+ # Use word with special prefix.
+ word_with_special_prefix = '%stest' % fulltext_helpers.NON_OP_PREFIXES[0]
+ ast = query2ast.ParseUserQuery(
+ word_with_special_prefix, '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ fulltext_cond = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], [word_with_special_prefix], []),
+ fulltext_cond)
+
+ # mix positive and negative words
+ ast = query2ast.ParseUserQuery(
+ 'ham -fancy', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['ham'], []), ft_cond1)
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['fancy'], []), ft_cond2)
+
+ # converts terms to lower case
+ ast = query2ast.ParseUserQuery(
+ 'AmDude', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, fulltext_cond = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['amdude'], []), fulltext_cond)
+
+ def testParseUserQuery_Phrases(self):
+ # positive phrases
+ ast = query2ast.ParseUserQuery(
+ '"one two"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, fulltext_cond = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['"one two"'], []), fulltext_cond)
+
+ # negative phrases
+ ast = query2ast.ParseUserQuery(
+ '-"one two"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, fulltext_cond = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['"one two"'], []), fulltext_cond)
+
+ # multiple phrases
+ ast = query2ast.ParseUserQuery(
+ '-"a b" "x y"', '-label:deprecated', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ scope_cond1, ft_cond1, ft_cond2 = ast.conjunctions[0].conds
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['deprecated'], []),
+ scope_cond1)
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [ANY_FIELD], ['"a b"'], []), ft_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['"x y"'], []), ft_cond2)
+
+ def testParseUserQuery_CodeSyntaxThatWeNeedToCopeWith(self):
+ # positive phrases
+ ast = query2ast.ParseUserQuery(
+ 'Base::Tuple', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD],
+ ['"base::tuple"'], []),
+ cond)
+
+ def testParseUserQuery_HasOperator(self):
+ # Search for issues with at least one attachment
+ ast = query2ast.ParseUserQuery(
+ 'has:attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-has:attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'has=attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-has=attachment', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachment']], [], []),
+ cond1)
+
+ # Search for numeric fields for searches with 'has' prefix
+ ast = query2ast.ParseUserQuery(
+ 'has:attachments', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_DEFINED, [BUILTIN_ISSUE_FIELDS['attachments']], [], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-has:attachments', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(IS_NOT_DEFINED, [BUILTIN_ISSUE_FIELDS['attachments']],
+ [], []),
+ cond1)
+
+ def testParseUserQuery_Components(self):
+ """Parse user queries for components."""
+ ast = query2ast.ParseUserQuery(
+ 'component:UI', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['component']],
+ ['ui'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'Component:UI>AboutBox', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['component']],
+ ['ui>aboutbox'], []),
+ cond1)
+
+ def testParseUserQuery_OwnersReportersAndCc(self):
+ """Parse user queries for owner:, reporter: and cc:."""
+ ast = query2ast.ParseUserQuery(
+ 'owner:user', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['owner']],
+ ['user'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'owner:user@example.com', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['owner']],
+ ['user@example.com'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'owner=user@example.com', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['owner']],
+ ['user@example.com'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-reporter=user@example.com', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(NE, [BUILTIN_ISSUE_FIELDS['reporter']],
+ ['user@example.com'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'cc=user@example.com,user2@example.com', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['cc']],
+ ['user@example.com', 'user2@example.com'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'cc:user,user2', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['cc']],
+ ['user', 'user2'], []),
+ cond1)
+
+ def testParseUserQuery_SearchWithinFields(self):
+ # Search for issues with certain filenames
+ ast = query2ast.ParseUserQuery(
+ 'attachment:filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachment']],
+ ['filename'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-attachment:filename', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(NOT_TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachment']],
+ ['filename'], []),
+ cond1)
+
+ # Search for issues with a certain number of attachments
+ ast = query2ast.ParseUserQuery(
+ 'attachments:2', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['attachments']],
+ ['2'], [2]),
+ cond1)
+
+ # Searches with '=' syntax
+ ast = query2ast.ParseUserQuery(
+ 'attachment=filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['attachment']],
+ ['filename'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-attachment=filename', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(NE, [BUILTIN_ISSUE_FIELDS['attachment']],
+ ['filename'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'milestone=2009', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']], ['milestone-2009'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-milestone=2009', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(NE, [BUILTIN_ISSUE_FIELDS['label']], ['milestone-2009'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'milestone=2009-Q1', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
+ ['milestone-2009-q1'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ '-milestone=2009-Q1', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(NE, [BUILTIN_ISSUE_FIELDS['label']],
+ ['milestone-2009-q1'], []),
+ cond1)
+
+ # Searches with ':' syntax
+ ast = query2ast.ParseUserQuery(
+ 'summary:foo', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS,
+ [BUILTIN_ISSUE_FIELDS['summary']], ['foo'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'summary:"greetings programs"', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS,
+ [BUILTIN_ISSUE_FIELDS['summary']], ['greetings programs'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'summary:"Ӓ"', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS,
+ [BUILTIN_ISSUE_FIELDS['summary']], ['Ӓ'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'priority:high', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(KEY_HAS,
+ [BUILTIN_ISSUE_FIELDS['label']], ['priority-high'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'type:security', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(KEY_HAS,
+ [BUILTIN_ISSUE_FIELDS['label']], ['type-security'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'label:priority-high', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS,
+ [BUILTIN_ISSUE_FIELDS['label']], ['priority-high'], []),
+ cond1)
+
+ def testParseUserQuery_QuickOr(self):
+ # quick-or searches
+ ast = query2ast.ParseUserQuery(
+ 'milestone:2008,2009,2010', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(KEY_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'label:milestone-2008,milestone-2009,milestone-2010', '',
+ BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [BUILTIN_ISSUE_FIELDS['label']],
+ ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'milestone=2008,2009,2010', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ self.assertEqual(
+ MakeCond(EQ, [BUILTIN_ISSUE_FIELDS['label']],
+ ['milestone-2008', 'milestone-2009', 'milestone-2010'], []),
+ cond1)
+
+ def testParseUserQuery_Dates(self):
+ # query with a daterange
+ ast = query2ast.ParseUserQuery(
+ 'modified>=2009-5-12', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+ self.assertEqual(
+ MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+
+ # query with multiple dateranges
+ ast = query2ast.ParseUserQuery(
+ 'modified>=2009-5-12 opened<2008/1/1', '',
+ BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1, cond2 = ast.conjunctions[0].conds
+ ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+ self.assertEqual(
+ MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+ ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+ self.assertEqual(
+ MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+
+ # query with multiple dateranges plus a search term
+ ast = query2ast.ParseUserQuery(
+ 'one two modified>=2009-5-12 opened<2008/1/1', '',
+ BUILTIN_ISSUE_FIELDS, self.default_config)
+ ft_cond1, ft_cond2, cond1, cond2 = ast.conjunctions[0].conds
+ ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['one'], []), ft_cond1)
+ self.assertEqual(
+ MakeCond(TEXT_HAS, [ANY_FIELD], ['two'], []), ft_cond2)
+ self.assertEqual(
+ MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+ ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+ self.assertEqual(
+ MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+
+ # query with a date field compared to "today"
+ ast = query2ast.ParseUserQuery(
+ 'modified<today', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ ts1 = query2ast._CalculatePastDate(0)
+ self.assertEqual(MakeCond(LT, [BUILTIN_ISSUE_FIELDS['modified']],
+ [], [ts1]),
+ cond1)
+
+ # query with a daterange using today-N alias
+ ast = query2ast.ParseUserQuery(
+ 'modified>=today-13', '', BUILTIN_ISSUE_FIELDS,
+ self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ ts1 = query2ast._CalculatePastDate(13)
+ self.assertEqual(MakeCond(GE, [BUILTIN_ISSUE_FIELDS['modified']],
+ [], [ts1]),
+ cond1)
+
+ ast = query2ast.ParseUserQuery(
+ 'modified>today-13', '', BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1 = ast.conjunctions[0].conds[0]
+ ts1 = query2ast._CalculatePastDate(13)
+ self.assertEqual(MakeCond(GT, [BUILTIN_ISSUE_FIELDS['modified']],
+ [], [ts1]),
+ cond1)
+
+ # query with multiple old date query terms.
+ ast = query2ast.ParseUserQuery(
+ 'modified-after:2009-5-12 opened-before:2008/1/1 '
+ 'closed-after:2007-2-1', '',
+ BUILTIN_ISSUE_FIELDS, self.default_config)
+ cond1, cond2, cond3 = ast.conjunctions[0].conds
+ ts1 = int(time.mktime(datetime.datetime(2009, 5, 12).timetuple()))
+ self.assertEqual(
+ MakeCond(GT, [BUILTIN_ISSUE_FIELDS['modified']], [], [ts1]), cond1)
+ ts2 = int(time.mktime(datetime.datetime(2008, 1, 1).timetuple()))
+ self.assertEqual(
+ MakeCond(LT, [BUILTIN_ISSUE_FIELDS['opened']], [], [ts2]), cond2)
+ ts3 = int(time.mktime(datetime.datetime(2007, 2, 1).timetuple()))
+ self.assertEqual(
+ MakeCond(GT, [BUILTIN_ISSUE_FIELDS['closed']], [], [ts3]), cond3)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/search/test/searchpipeline_test.py b/appengine/monorail/search/test/searchpipeline_test.py
new file mode 100644
index 0000000..bf0b6be
--- /dev/null
+++ b/appengine/monorail/search/test/searchpipeline_test.py
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the searchpipeline module."""
+
+import unittest
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from search import searchpipeline
+from services import service_manager
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class SearchPipelineTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService())
+ self.services.user.TestAddUser('a@example.com', 111L)
+
+ def testIsStarredRE(self):
+ """IS_STARRED_RE matches only the is:starred term."""
+ input_output = {
+ 'something:else': 'something:else',
+ 'genesis:starred': 'genesis:starred',
+ 'is:starred-in-bookmarks': 'is:starred-in-bookmarks',
+ 'is:starred': 'foo',
+ 'Is:starred': 'foo',
+ 'is:STARRED': 'foo',
+ 'is:starred is:open': 'foo is:open',
+ 'is:open is:starred': 'is:open foo',
+ }
+ for i, o in input_output.items():
+ self.assertEqual(o, searchpipeline.IS_STARRED_RE.sub('foo', i))
+
+ def testMeRE(self):
+ """ME_RE matches only the 'me' value keyword."""
+ input_output = {
+ 'something:else': 'something:else',
+ 'else:some': 'else:some',
+ 'me': 'me', # It needs to have a ":" in front.
+ 'cc:me-team': 'cc:me-team',
+ 'cc:me=domain@otherdomain': 'cc:me=domain@otherdomain',
+ 'cc:me@example.com': 'cc:me@example.com',
+ 'me:the-boss': 'me:the-boss',
+ 'cc:me': 'cc:foo',
+ 'cc=me': 'cc=foo',
+ 'owner:Me': 'owner:foo',
+ 'reporter:ME': 'reporter:foo',
+ 'cc:me is:open': 'cc:foo is:open',
+ 'is:open cc:me': 'is:open cc:foo',
+ }
+ for i, o in input_output.items():
+ self.assertEqual(o, searchpipeline.ME_RE.sub('foo', i))
+
+ def testAccumulateIssueProjectsAndConfigs(self):
+ pass # TODO(jrobbins): write tests
+
+ def testReplaceKeywordsWithUserID(self):
+ pass # TODO(jrobbins): write tests
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/__init__.py b/appengine/monorail/services/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/services/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/services/api_pb2_v1_helpers.py b/appengine/monorail/services/api_pb2_v1_helpers.py
new file mode 100644
index 0000000..787bc34
--- /dev/null
+++ b/appengine/monorail/services/api_pb2_v1_helpers.py
@@ -0,0 +1,451 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Convert Monorail PB objects to API PB objects"""
+
+import datetime
+import logging
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import permissions
+from proto import api_pb2_v1
+from proto import project_pb2
+from proto import tracker_pb2
+from services import issue_svc
+from services import project_svc
+from services import user_svc
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+
+
+def convert_project(project, config, role):
+ """Convert Monorail Project PB to API ProjectWrapper PB."""
+
+ return api_pb2_v1.ProjectWrapper(
+ kind='monorail#project',
+ name=project.project_name,
+ externalId=project.project_name,
+ htmlLink='/p/%s/' % project.project_name,
+ summary=project.summary,
+ description=project.description,
+ role=role,
+ issuesConfig=convert_project_config(config))
+
+
+def convert_project_config(config):
+ """Convert Monorail ProjectIssueConfig PB to API ProjectIssueConfig PB."""
+
+ return api_pb2_v1.ProjectIssueConfig(
+ kind='monorail#projectIssueConfig',
+ restrictToKnown=config.restrict_to_known,
+ defaultColumns=config.default_col_spec.split(),
+ defaultSorting=config.default_sort_spec.split(),
+ statuses=[convert_status(s) for s in config.well_known_statuses],
+ labels=[convert_label(l) for l in config.well_known_labels],
+ prompts=[convert_template(t) for t in config.templates],
+ defaultPromptForMembers=config.default_template_for_developers,
+ defaultPromptForNonMembers=config.default_template_for_users)
+
+
+def convert_status(status):
+ """Convert Monorail StatusDef PB to API Status PB."""
+
+ return api_pb2_v1.Status(
+ status=status.status,
+ meansOpen=status.means_open,
+ description=status.status_docstring)
+
+
+def convert_label(label):
+ """Convert Monorail LabelDef PB to API Label PB."""
+
+ return api_pb2_v1.Label(
+ label=label.label,
+ description=label.label_docstring)
+
+
+def convert_template(template):
+ """Convert Monorail TemplateDef PB to API Prompt PB."""
+
+ return api_pb2_v1.Prompt(
+ name=template.name,
+ title=template.summary,
+ description=template.content,
+ titleMustBeEdited=template.summary_must_be_edited,
+ status=template.status,
+ labels=template.labels,
+ membersOnly=template.members_only,
+ defaultToMember=template.owner_defaults_to_member,
+ componentRequired=template.component_required)
+
+
+def convert_person(user_id, cnxn, services, trap_exception=False):
+ """Convert user id to API AtomPerson PB."""
+
+ if not user_id:
+ return None
+ user_email = None
+ try:
+ user_email = services.user.LookupUserEmail(cnxn, user_id)
+ except user_svc.NoSuchUserException as ex:
+ if trap_exception:
+ logging.warning(str(ex))
+ return None
+ else:
+ raise ex
+ return api_pb2_v1.AtomPerson(
+ kind='monorail#issuePerson',
+ name=user_email,
+ htmlLink='https://%s/u/%d' % (framework_helpers.GetHostPort(), user_id))
+
+
+def convert_issue_ids(issue_ids, mar, services):
+ """Convert global issue ids to API IssueRef PB."""
+
+ # missed issue ids are filtered out.
+ issues = services.issue.GetIssues(mar.cnxn, issue_ids)
+ result = []
+ for issue in issues:
+ issue_ref = api_pb2_v1.IssueRef(
+ issueId=issue.local_id,
+ projectId=issue.project_name,
+ kind='monorail#issueRef')
+ result.append(issue_ref)
+ return result
+
+
+def convert_issueref_pbs(issueref_pbs, mar, services):
+ """Convert API IssueRef PBs to global issue ids."""
+
+ if issueref_pbs:
+ result = []
+ for ir in issueref_pbs:
+ project_id = mar.project_id
+ if ir.projectId:
+ project = services.project.GetProjectByName(
+ mar.cnxn, ir.projectId)
+ if project:
+ project_id = project.project_id
+ try:
+ issue = services.issue.GetIssueByLocalID(
+ mar.cnxn, project_id, ir.issueId)
+ result.append(issue.issue_id)
+ except issue_svc.NoSuchIssueException:
+ logging.warning(
+ 'Issue (%s:%d) does not exist.' % (ir.projectId, ir.issueId))
+ return result
+ else:
+ return None
+
+
+def convert_issue(cls, issue, mar, services):
+  """Convert Monorail Issue PB to API IssuesGetInsertResponse.
+
+  Args:
+    cls: response message class to instantiate (called with the keyword
+        fields built below), e.g. IssuesGetInsertResponse.
+    issue: Monorail Issue PB to convert.
+    mar: MonorailApiRequest with cnxn, auth, and perms of the requester.
+    services: Services object used for config/project/user lookups.
+
+  Returns:
+    An instance of cls populated from the issue.
+  """
+  config = services.config.GetProjectConfig(mar.cnxn, issue.project_id)
+  granted_perms = tracker_bizobj.GetGrantedPerms(
+      issue, mar.auth.effective_ids, config)
+  issue_project = services.project.GetProject(mar.cnxn, issue.project_id)
+  # Report only components that are still defined in the project config.
+  component_list = []
+  for cd in config.component_defs:
+    cid = cd.component_id
+    if cid in issue.component_ids:
+      component_list.append(cd.path)
+  # convert_person returns None for unknown users; drop those from cc.
+  cc_list = [convert_person(p, mar.cnxn, services) for p in issue.cc_ids]
+  cc_list = [p for p in cc_list if p is not None]
+  field_values_list = []
+  field_id_dict = {
+      fd.field_id: fd.field_name for fd in config.field_defs}
+  for fv in issue.field_values:
+    field_name = field_id_dict.get(fv.field_id)
+    if not field_name:
+      # Skip values whose field was deleted from the project config.
+      logging.warning('Custom field %d of project %s does not exist',
+                      fv.field_id, issue_project.project_name)
+      continue
+    val = None
+    if fv.user_id:
+      try:
+        val = services.user.LookupUserEmail(mar.cnxn, fv.user_id)
+      except user_svc.NoSuchUserException:
+        val = ''
+    elif fv.str_value:
+      val = fv.str_value
+    elif fv.int_value:
+      val = str(fv.int_value)
+    new_fv = api_pb2_v1.FieldValue(
+        fieldName=field_name,
+        fieldValue=val,
+        derived=fv.derived)
+    field_values_list.append(new_fv)
+  resp = cls(
+      kind='monorail#issue',
+      id=issue.local_id,
+      title=issue.summary,
+      summary=issue.summary,
+      projectId=issue_project.project_name,
+      stars=issue.star_count,
+      starred=services.issue_star.IsItemStarredBy(
+          mar.cnxn, issue.issue_id, mar.auth.user_id),
+      status=issue.status,
+      state=(api_pb2_v1.IssueState.open if
+             tracker_helpers.MeansOpenInProject(
+                 tracker_bizobj.GetStatus(issue), config)
+             else api_pb2_v1.IssueState.closed),
+      labels=issue.labels,
+      components=component_list,
+      author=convert_person(issue.reporter_id, mar.cnxn, services),
+      owner=convert_person(issue.owner_id, mar.cnxn, services),
+      cc=cc_list,
+      updated=datetime.datetime.fromtimestamp(issue.modified_timestamp),
+      published=datetime.datetime.fromtimestamp(issue.opened_timestamp),
+      blockedOn=convert_issue_ids(issue.blocked_on_iids, mar, services),
+      blocking=convert_issue_ids(issue.blocking_iids, mar, services),
+      canComment=permissions.CanCommentIssue(
+          mar.auth.effective_ids, mar.perms, issue_project, issue,
+          granted_perms=granted_perms),
+      canEdit=permissions.CanEditIssue(
+          mar.auth.effective_ids, mar.perms, issue_project, issue,
+          granted_perms=granted_perms),
+      fieldValues=field_values_list)
+  # closed and mergedInto are optional; set them only when meaningful.
+  if issue.closed_timestamp > 0:
+    resp.closed = datetime.datetime.fromtimestamp(issue.closed_timestamp)
+  if issue.merged_into:
+    resp.mergedInto=convert_issue_ids([issue.merged_into], mar, services)[0]
+  return resp
+
+
+def convert_comment(issue, comment, mar, services, granted_perms):
+  """Convert Monorail IssueComment PB to API IssueCommentWrapper.
+
+  Args:
+    issue: Issue PB the comment belongs to (used for restriction labels).
+    comment: IssueComment PB to convert.
+    mar: MonorailApiRequest for the requester.
+    services: Services object for user lookups.
+    granted_perms: permissions granted to the requester on this issue.
+
+  Returns:
+    An api_pb2_v1.IssueCommentWrapper.
+  """
+  # Whether the requester is allowed to (un)delete this comment.
+  can_delete = permissions.CanDelete(
+      mar.auth.user_id, mar.auth.effective_ids, mar.perms,
+      comment.deleted_by, comment.user_id, mar.project,
+      permissions.GetRestrictions(issue), granted_perms=granted_perms)
+
+  return api_pb2_v1.IssueCommentWrapper(
+      attachments=[convert_attachment(a) for a in comment.attachments],
+      author=convert_person(comment.user_id, mar.cnxn, services,
+                            trap_exception=True),
+      canDelete=can_delete,
+      content=comment.content,
+      deletedBy=convert_person(comment.deleted_by, mar.cnxn, services,
+                               trap_exception=True),
+      id=comment.sequence,
+      published=datetime.datetime.fromtimestamp(comment.timestamp),
+      updates=convert_amendments(issue, comment.amendments, mar, services),
+      kind='monorail#issueComment')
+
+
+def convert_attachment(attachment):
+ """Convert Monorail Attachment PB to API Attachment."""
+
+ return api_pb2_v1.Attachment(
+ attachmentId=attachment.attachment_id,
+ fileName=attachment.filename,
+ fileSize=attachment.filesize,
+ mimetype=attachment.mimetype,
+ isDeleted=attachment.deleted)
+
+
+def convert_amendments(issue, amendments, mar, services):
+  """Convert a list of Monorail Amendment PBs into a single API Update.
+
+  Args:
+    issue: Issue PB being amended; its project qualifies blocked-on refs.
+    amendments: Amendment PBs from one comment.
+    mar: MonorailApiRequest for the requester.
+    services: Services object for user email lookups.
+
+  Returns:
+    An api_pb2_v1.Update aggregating all of the amendments.
+  """
+  result = api_pb2_v1.Update(kind='monorail#issueCommentUpdate')
+  for amendment in amendments:
+    if amendment.field == tracker_pb2.FieldID.SUMMARY:
+      result.summary = amendment.newvalue
+    elif amendment.field == tracker_pb2.FieldID.STATUS:
+      result.status = amendment.newvalue
+    elif amendment.field == tracker_pb2.FieldID.OWNER:
+      if len(amendment.added_user_ids) == 0:
+        # No added user means the owner field was cleared.
+        result.owner = framework_constants.NO_USER_NAME
+      else:
+        user_email = services.user.LookupUserEmail(
+            mar.cnxn, amendment.added_user_ids[0])
+        result.owner = user_email
+    elif amendment.field == tracker_pb2.FieldID.LABELS:
+      result.labels = amendment.newvalue.split()
+    elif amendment.field == tracker_pb2.FieldID.CC:
+      # Removed cc users are reported with a '-' prefix on the email.
+      for user_id in amendment.added_user_ids:
+        user_email = services.user.LookupUserEmail(mar.cnxn, user_id)
+        result.cc.append(user_email)
+      for user_id in amendment.removed_user_ids:
+        user_email = services.user.LookupUserEmail(mar.cnxn, user_id)
+        result.cc.append('-%s' % user_email)
+    elif amendment.field == tracker_pb2.FieldID.BLOCKEDON:
+      result.blockedOn = _append_project(
+          amendment.newvalue, issue.project_name)
+    elif amendment.field == tracker_pb2.FieldID.BLOCKING:
+      result.blocking = _append_project(
+          amendment.newvalue, issue.project_name)
+    elif amendment.field == tracker_pb2.FieldID.MERGEDINTO:
+      result.mergedInto = amendment.newvalue
+    elif amendment.field == tracker_pb2.FieldID.COMPONENTS:
+      result.components = amendment.newvalue.split()
+    elif amendment.field == tracker_pb2.FieldID.CUSTOM:
+      # Custom-field amendments carry their own field name.
+      fv = api_pb2_v1.FieldValue()
+      fv.fieldName = amendment.custom_field_name
+      fv.fieldValue = amendment.newvalue
+      result.fieldValues.append(fv)
+
+  return result
+
+
+def _append_project(issue_ids, project_name):
+ """Append project name to convert <id> to <project>:<id> format."""
+
+ result = []
+ id_list = issue_ids.split()
+ for id_str in id_list:
+ if ':' in id_str:
+ result.append(id_str)
+ # '-' means this issue is being removed
+ elif id_str.startswith('-'):
+ result.append('-%s:%s' % (project_name, id_str[1:]))
+ else:
+ result.append('%s:%s' % (project_name, id_str))
+ return result
+
+
+def split_remove_add(item_list):
+ """Split one list of items into two: items to add and items to remove."""
+
+ list_to_add = []
+ list_to_remove = []
+
+ for item in item_list:
+ if item.startswith('-'):
+ list_to_remove.append(item[1:])
+ else:
+ list_to_add.append(item)
+
+ return list_to_add, list_to_remove
+
+
+# TODO(sheyang): batch the SQL queries to fetch projects/issues.
+def issue_global_ids(project_local_id_pairs, project_id, mar, services):
+ """Find global issues ids given <project_name>:<issue_local_id> pairs."""
+
+ result = []
+ for pair in project_local_id_pairs:
+ issue_project_id = None
+ local_id = None
+ if ':' in pair:
+ pair_ary = pair.split(':')
+ project_name = pair_ary[0]
+ local_id = int(pair_ary[1])
+ project = services.project.GetProjectByName(mar.cnxn, project_name)
+ if not project:
+ raise project_svc.NoSuchProjectException(
+ 'Project %s does not exist' % project_name)
+ issue_project_id = project.project_id
+ else:
+ issue_project_id = project_id
+ local_id = int(pair)
+ result.append(
+ services.issue.LookupIssueID(mar.cnxn, issue_project_id, local_id))
+
+ return result
+
+
+def convert_group_settings(group_name, setting):
+ """Convert UserGroupSettings to UserGroupSettingsWrapper."""
+ return api_pb2_v1.UserGroupSettingsWrapper(
+ groupName=group_name,
+ who_can_view_members=setting.who_can_view_members,
+ ext_group_type=setting.ext_group_type,
+ last_sync_time=setting.last_sync_time)
+
+
+def convert_component_def(cd, mar, services):
+  """Convert ComponentDef PB to API Component PB.
+
+  Args:
+    cd: ComponentDef PB to convert.
+    mar: MonorailApiRequest with cnxn.
+    services: Services object for project and user email lookups.
+
+  Returns:
+    An api_pb2_v1.Component with user ids resolved to emails.
+  """
+  project_name = services.project.LookupProjectNames(
+      mar.cnxn, [cd.project_id])[cd.project_id]
+  # Resolve every referenced user id to an email in one batch lookup.
+  user_ids = set()
+  user_ids.update(
+      cd.admin_ids + cd.cc_ids + [cd.creator_id] + [cd.modifier_id])
+  user_names_dict = services.user.LookupUserEmails(mar.cnxn, list(user_ids))
+  component = api_pb2_v1.Component(
+      componentId=cd.component_id,
+      projectName=project_name,
+      componentPath=cd.path,
+      description=cd.docstring,
+      admin=sorted([user_names_dict[uid] for uid in cd.admin_ids]),
+      cc=sorted([user_names_dict[uid] for uid in cd.cc_ids]),
+      deprecated=cd.deprecated)
+  # created/modified timestamps are optional; omit them when unset.
+  if cd.created:
+    component.created = datetime.datetime.fromtimestamp(cd.created)
+    component.creator = user_names_dict[cd.creator_id]
+  if cd.modified:
+    component.modified = datetime.datetime.fromtimestamp(cd.modified)
+    component.modifier = user_names_dict[cd.modifier_id]
+  return component
+
+
+def convert_component_ids(config, component_names):
+ """Convert a list of component names to ids."""
+ component_names_lower = [name.lower() for name in component_names]
+ result = []
+ for cd in config.component_defs:
+ cpath = cd.path
+ if cpath.lower() in component_names_lower:
+ result.append(cd.component_id)
+ return result
+
+
+def convert_field_values(field_values, mar, services):
+ """Convert user passed in field value list to FieldValue PB, or labels."""
+ fv_list_add = []
+ fv_list_remove = []
+ fv_list_clear = []
+ label_list_add = []
+ label_list_remove = []
+ field_name_dict = {
+ fd.field_name: fd for fd in mar.config.field_defs}
+
+ for fv in field_values:
+ field_def = field_name_dict.get(fv.fieldName)
+ if not field_def:
+ logging.warning('Custom field %s of does not exist', fv.fieldName)
+ continue
+
+ if fv.operator == api_pb2_v1.FieldValueOperator.clear:
+ fv_list_clear.append(field_def.field_id)
+ continue
+
+ # Enum fields are stored as labels
+ if field_def.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
+ raw_val = '%s-%s' % (fv.fieldName, fv.fieldValue)
+ if fv.operator == api_pb2_v1.FieldValueOperator.remove:
+ label_list_remove.append(raw_val)
+ elif fv.operator == api_pb2_v1.FieldValueOperator.add:
+ label_list_add.append(raw_val)
+ else:
+ logging.warning('Unsupported field value operater %s', fv.operator)
+ else:
+ new_fv = tracker_pb2.FieldValue(
+ field_id=field_def.field_id)
+ if field_def.field_type == tracker_pb2.FieldTypes.USER_TYPE:
+ try:
+ new_fv.user_id = services.user.LookupUserID(mar.cnxn, fv.fieldValue)
+ except user_svc.NoSuchUserException:
+ new_fv.user_id = 0
+ elif field_def.field_type == tracker_pb2.FieldTypes.STR_TYPE:
+ new_fv.str_value = fv.fieldValue
+ elif field_def.field_type == tracker_pb2.FieldTypes.INT_TYPE:
+ new_fv.int_value = int(fv.fieldValue)
+ else:
+ logging.warning(
+ 'Unsupported field value type %s', field_def.field_type)
+
+ if fv.operator == api_pb2_v1.FieldValueOperator.remove:
+ fv_list_remove.append(new_fv)
+ elif fv.operator == api_pb2_v1.FieldValueOperator.add:
+ fv_list_add.append(new_fv)
+ else:
+ logging.warning('Unsupported field value operater %s', fv.operator)
+
+ return (fv_list_add, fv_list_remove, fv_list_clear,
+ label_list_add, label_list_remove)
diff --git a/appengine/monorail/services/api_svc_v1.py b/appengine/monorail/services/api_svc_v1.py
new file mode 100644
index 0000000..2f228fa
--- /dev/null
+++ b/appengine/monorail/services/api_svc_v1.py
@@ -0,0 +1,1164 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""API service"""
+
+import datetime
+import endpoints
+import functools
+import logging
+import re
+import time
+from google.appengine.api import oauth
+from protorpc import message_types
+from protorpc import protojson
+from protorpc import remote
+
+import settings
+from features import filterrules_helpers
+from features import notify
+from framework import actionlimit
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import monorailrequest
+from framework import permissions
+from framework import profiler
+from framework import sql
+from project import project_helpers
+from proto import api_pb2_v1
+from proto import project_pb2
+from search import frontendsearchpipeline
+from services import api_pb2_v1_helpers
+from services import client_config_svc
+from services import config_svc
+from services import issue_svc
+from services import project_svc
+from services import service_manager
+from services import tracker_fulltext
+from services import user_svc
+from services import usergroup_svc
+from sitewide import sitewide_helpers
+from tracker import field_helpers
+from tracker import issuedetail
+from tracker import tracker_constants
+from tracker import tracker_bizobj
+
+from infra_libs.ts_mon.common import http_metrics
+
+
+ENDPOINTS_API_NAME = 'monorail'
+DOC_URL = ('https://chromium.googlesource.com/infra/infra/+/master/'
+ 'appengine/monorail/doc/api.md')
+
+
+def monorail_api_method(
+    request_message, response_message, **kwargs):
+  """Extends endpoints.method by performing base checks.
+
+  Wraps a handler so every call is authenticated (api_base_checks),
+  rate limited (increment_request_limit), and instrumented with ts_mon
+  request/response metrics.  Service-layer exceptions are translated to
+  the matching endpoints exceptions (404/403/401/400/500).
+  """
+  # time_fn is injectable so tests can control elapsed-time measurement.
+  time_fn = kwargs.pop('time_fn', time.time)
+  method_name = kwargs.get('name', '')
+  method_path = kwargs.get('path', '')
+  def new_decorator(func):
+    @endpoints.method(request_message, response_message, **kwargs)
+    @functools.wraps(func)
+    def wrapper(self, *args, **kwargs):
+      # e.g. 'monorail.issues.comments.insert/projects/.../comments'.
+      method_identifier = (ENDPOINTS_API_NAME + '.' +
+                           (method_name or func.__name__)
+                           + '/' + (method_path or func.__name__))
+      start_time = time_fn()
+      # Approximate because Endpoints does not expose real HTTP statuses.
+      approximate_http_status = 200
+      request = args[0]
+      ret = None
+      try:
+        requester = endpoints.get_current_user()
+        auth_client_ids, auth_emails = (
+            client_config_svc.GetClientConfigSvc().GetClientIDEmails())
+        auth_client_ids.append(endpoints.API_EXPLORER_CLIENT_ID)
+        logging.info('Whitelist ID %r email %r', auth_client_ids, auth_emails)
+        # Services are set up lazily on the first API call.
+        if self._services is None:
+          self._set_services(service_manager.set_up_services())
+        api_base_checks(
+            request, requester,
+            self._services, sql.MonorailConnection(),
+            auth_client_ids, auth_emails)
+        self.increment_request_limit(request)
+        ret = func(self, *args, **kwargs)
+      except user_svc.NoSuchUserException as e:
+        approximate_http_status = 404
+        raise endpoints.NotFoundException(
+            'The user does not exist: %s' % str(e))
+      except (project_svc.NoSuchProjectException,
+              issue_svc.NoSuchIssueException,
+              config_svc.NoSuchComponentException) as e:
+        approximate_http_status = 404
+        raise endpoints.NotFoundException(str(e))
+      except (permissions.BannedUserException,
+              permissions.PermissionException) as e:
+        approximate_http_status = 403
+        raise endpoints.ForbiddenException(str(e))
+      except endpoints.BadRequestException:
+        approximate_http_status = 400
+        raise
+      except endpoints.UnauthorizedException:
+        approximate_http_status = 401
+        raise
+      except actionlimit.ExcessiveActivityException as e:
+        approximate_http_status = 403
+        raise endpoints.ForbiddenException(
+            'The requester has exceeded API quotas limit')
+      except (usergroup_svc.GroupExistsException,
+              config_svc.InvalidComponentNameException) as e:
+        approximate_http_status = 400
+        raise endpoints.BadRequestException(str(e))
+      except Exception as e:
+        approximate_http_status = 500
+        logging.exception('Unexpected error in monorail API')
+        raise
+      finally:
+        # Metrics are recorded whether the call succeeded or raised.
+        elapsed_ms = int((time_fn() - start_time) * 1000)
+
+        fields = {
+            # Endpoints APIs don't return the full set of http status values.
+            'status': approximate_http_status,
+            # Use the api name, not the request path, to prevent an
+            # explosion in possible field values.
+            'name': method_identifier,
+            'is_robot': False,
+        }
+
+        http_metrics.server_durations.add(elapsed_ms, fields=fields)
+        http_metrics.server_response_status.increment(fields=fields)
+        http_metrics.server_request_bytes.add(len(protojson.encode_message(
+            request)), fields=fields)
+        response_size = 0
+        if ret:
+          response_size = len(protojson.encode_message(ret))
+        http_metrics.server_response_bytes.add(response_size, fields=fields)
+
+      return ret
+
+    return wrapper
+  return new_decorator
+
+
+def api_base_checks(request, requester, services, cnxn,
+                    auth_client_ids, auth_emails):
+  """Base checks for API users.
+
+  Args:
+    request: The HTTP request from Cloud Endpoints.
+    requester: The user who sends the request.
+    services: Services object.
+    cnxn: connection to the SQL database.
+    auth_client_ids: authorized client ids.
+    auth_emails: authorized emails when client is anonymous.
+
+  Returns:
+    Nothing
+
+  Raises:
+    endpoints.UnauthorizedException: If the requester is anonymous.
+    user_svc.NoSuchUserException: If the requester does not exist in Monorail.
+    project_svc.NoSuchProjectException: If the project does not exist in
+        Monorail.
+    permissions.BannedUserException: If the requester is banned.
+    permissions.PermissionException: If the requester does not have
+        permission to view.
+  """
+  valid_user = False
+  auth_err = ''
+  client_id = None
+
+  try:
+    client_id = oauth.get_client_id(framework_constants.OAUTH_SCOPE)
+    logging.info('Oauth client ID %s', client_id)
+  except oauth.Error as ex:
+    auth_err = 'oauth.Error: %s' % ex
+
+  if not requester:
+    # Fall back to the OAuth user when Endpoints supplied no requester.
+    try:
+      requester = oauth.get_current_user(framework_constants.OAUTH_SCOPE)
+      logging.info('Oauth requester %s', requester.email())
+    except oauth.Error as ex:
+      auth_err = 'oauth.Error: %s' % ex
+
+  if client_id and requester:
+    if client_id != 'anonymous':
+      # Normal case: the client id itself must be whitelisted.
+      if client_id in auth_client_ids:
+        valid_user = True
+      else:
+        auth_err = 'Client ID %s is not whitelisted' % client_id
+    # Some service accounts may have anonymous client ID
+    else:
+      if requester.email() in auth_emails:
+        valid_user = True
+      else:
+        auth_err = 'Client email %s is not whitelisted' % requester.email()
+
+  if not valid_user:
+    raise endpoints.UnauthorizedException('Auth error: %s' % auth_err)
+
+  project_name = None
+  if hasattr(request, 'projectId'):
+    project_name = request.projectId
+  issue_local_id = None
+  if hasattr(request, 'issueId'):
+    issue_local_id = request.issueId
+  # This could raise user_svc.NoSuchUserException
+  requester_id = services.user.LookupUserID(cnxn, requester.email())
+  requester_pb = services.user.GetUser(cnxn, requester_id)
+  requester_view = framework_views.UserView(
+      requester_id, requester.email(), requester_pb.obscure_email)
+  if permissions.IsBanned(requester_pb, requester_view):
+    raise permissions.BannedUserException(
+        'The user %s has been banned from using Monorail' %
+        requester.email())
+  if project_name:
+    project = services.project.GetProjectByName(
+        cnxn, project_name)
+    if not project:
+      raise project_svc.NoSuchProjectException(
+          'Project %s does not exist' % project_name)
+    if project.state != project_pb2.ProjectState.LIVE:
+      # Only live projects are accessible through the API.
+      raise permissions.PermissionException(
+          'API may not access project %s because it is not live'
+          % project_name)
+    requester_effective_ids = services.usergroup.LookupMemberships(
+        cnxn, requester_id)
+    requester_effective_ids.add(requester_id)
+    if not permissions.UserCanViewProject(
+        requester_pb, requester_effective_ids, project):
+      raise permissions.PermissionException(
+          'The user %s has no permission for project %s' %
+          (requester.email(), project_name))
+    if issue_local_id:
+      # This may raise a NoSuchIssueException.
+      issue = services.issue.GetIssueByLocalID(
+          cnxn, project.project_id, issue_local_id)
+      perms = permissions.GetPermissions(
+          requester_pb, requester_effective_ids, project)
+      config = services.config.GetProjectConfig(cnxn, project.project_id)
+      granted_perms = tracker_bizobj.GetGrantedPerms(
+          issue, requester_effective_ids, config)
+      if not permissions.CanViewIssue(
+          requester_effective_ids, perms, project, issue,
+          granted_perms=granted_perms):
+        raise permissions.PermissionException(
+            'User is not allowed to view this issue %s:%d' %
+            (project_name, issue_local_id))
+
+
+@endpoints.api(name=ENDPOINTS_API_NAME, version='v1',
+               description='Monorail API to manage issues.',
+               auth_level=endpoints.AUTH_LEVEL.NONE,
+               allowed_client_ids=endpoints.SKIP_CLIENT_ID_CHECK,
+               documentation=DOC_URL)
+class MonorailApi(remote.Service):
+  """Cloud Endpoints service exposing Monorail issue/comment operations."""
+
+  # Class variables. Handy to mock.
+  _services = None  # Services object, lazily set on the first API call.
+  _mar = None  # Cached MonorailApiRequest for the current request.
+
+  @classmethod
+  def _set_services(cls, services):
+    """Install the Services object used by all handlers (test seam)."""
+    cls._services = services
+
+  def mar_factory(self, request):
+    """Return a cached MonorailApiRequest built from this request."""
+    if not self._mar:
+      self._mar = monorailrequest.MonorailApiRequest(request, self._services)
+    return self._mar
+
+  def aux_delete_comment(self, request, delete=True):
+    """Shared implementation for comment delete and undelete.
+
+    Args:
+      request: API request carrying projectId, issueId and commentId.
+      delete: True to soft-delete the comment, False to restore it.
+
+    Returns:
+      An empty IssuesCommentsDeleteResponse.
+
+    Raises:
+      issue_svc.NoSuchIssueException: commentId is not a valid sequence
+          number for this issue.
+      permissions.PermissionException: requester may not (un)delete it.
+    """
+    mar = self.mar_factory(request)
+    action_name = 'delete' if delete else 'undelete'
+
+    issue = self._services.issue.GetIssueByLocalID(
+        mar.cnxn, mar.project_id, request.issueId)
+    all_comments = self._services.issue.GetCommentsForIssue(
+        mar.cnxn, issue.issue_id)
+    try:
+      # commentId is the comment's sequence number within the issue.
+      issue_comment = all_comments[request.commentId]
+    except IndexError:
+      raise issue_svc.NoSuchIssueException(
+          'The issue %s:%d does not have comment %d.' %
+          (mar.project_name, request.issueId, request.commentId))
+
+    if not permissions.CanDelete(
+        mar.auth.user_id, mar.auth.effective_ids, mar.perms,
+        issue_comment.deleted_by, issue_comment.user_id, mar.project,
+        permissions.GetRestrictions(issue), mar.granted_perms):
+      raise permissions.PermissionException(
+          'User is not allowed to %s the comment %d of issue %s:%d' %
+          (action_name, request.commentId, mar.project_name,
+           request.issueId))
+
+    self._services.issue.SoftDeleteComment(
+        mar.cnxn, mar.project_id, request.issueId, request.commentId,
+        mar.auth.user_id, self._services.user, delete=delete)
+    return api_pb2_v1.IssuesCommentsDeleteResponse()
+
+  def increment_request_limit(self, request):
+    """Check whether the requester has exceeded API quotas limit,
+    and increment request count.
+    """
+    mar = self.mar_factory(request)
+    # soft_limit == hard_limit for api_request, so this function either
+    # returns False if under limit, or raise ExcessiveActivityException
+    if not actionlimit.NeedCaptcha(
+        mar.auth.user_pb, actionlimit.API_REQUEST, skip_lifetime_check=True):
+      # Record the action and persist the updated counters on the user.
+      actionlimit.CountAction(
+          mar.auth.user_pb, actionlimit.API_REQUEST, delta=1)
+      self._services.user.UpdateUser(
+          mar.cnxn, mar.auth.user_id, mar.auth.user_pb)
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_COMMENTS_DELETE_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesCommentsDeleteResponse,
+ path='projects/{projectId}/issues/{issueId}/comments/{commentId}',
+ http_method='DELETE',
+ name='issues.comments.delete')
+ def issues_comments_delete(self, request):
+ """Delete a comment."""
+ return self.aux_delete_comment(request, True)
+
+  @monorail_api_method(
+      api_pb2_v1.ISSUES_COMMENTS_INSERT_REQUEST_RESOURCE_CONTAINER,
+      api_pb2_v1.IssuesCommentsInsertResponse,
+      path='projects/{projectId}/issues/{issueId}/comments',
+      http_method='POST',
+      name='issues.comments.insert')
+  def issues_comments_insert(self, request):
+    """Add a comment, optionally applying the issue updates it carries."""
+    mar = self.mar_factory(request)
+    issue = self._services.issue.GetIssueByLocalID(
+        mar.cnxn, mar.project_id, request.issueId)
+    # Remember the old owner so the notification can mention the change.
+    old_owner_id = tracker_bizobj.GetOwnerId(issue)
+    if not permissions.CanCommentIssue(
+        mar.auth.effective_ids, mar.perms, mar.project, issue,
+        mar.granted_perms):
+      raise permissions.PermissionException(
+          'User is not allowed to comment this issue (%s, %d)' %
+          (request.projectId, request.issueId))
+
+    # Translate the optional 'updates' sub-message into the keyword
+    # arguments expected by DeltaUpdateIssue.
+    updates_dict = {}
+    if request.updates:
+      if request.updates.moveToProject:
+        move_to = request.updates.moveToProject.lower()
+        move_to_project = issuedetail.CheckMoveIssueRequest(
+            self._services, mar, issue, True, move_to, mar.errors)
+        if mar.errors.AnyErrors():
+          raise endpoints.BadRequestException(mar.errors.move_to)
+        updates_dict['move_to_project'] = move_to_project
+
+      updates_dict['summary'] = request.updates.summary
+      updates_dict['status'] = request.updates.status
+      if request.updates.owner:
+        # '----' (NO_USER_NAME) means the owner is being cleared.
+        if request.updates.owner == framework_constants.NO_USER_NAME:
+          updates_dict['owner'] = framework_constants.NO_USER_SPECIFIED
+        else:
+          updates_dict['owner'] = self._services.user.LookupUserID(
+              mar.cnxn, request.updates.owner)
+      updates_dict['cc_add'], updates_dict['cc_remove'] = (
+          api_pb2_v1_helpers.split_remove_add(request.updates.cc))
+      updates_dict['cc_add'] = self._services.user.LookupUserIDs(
+          mar.cnxn, updates_dict['cc_add']).values()
+      updates_dict['cc_remove'] = self._services.user.LookupUserIDs(
+          mar.cnxn, updates_dict['cc_remove']).values()
+      updates_dict['labels_add'], updates_dict['labels_remove'] = (
+          api_pb2_v1_helpers.split_remove_add(request.updates.labels))
+      # blockedOn/blocking refs arrive as strings and must be resolved to
+      # global issue ids.
+      blocked_on_add_strs, blocked_on_remove_strs = (
+          api_pb2_v1_helpers.split_remove_add(request.updates.blockedOn))
+      updates_dict['blocked_on_add'] = api_pb2_v1_helpers.issue_global_ids(
+          blocked_on_add_strs, issue.project_id, mar,
+          self._services)
+      updates_dict['blocked_on_remove'] = api_pb2_v1_helpers.issue_global_ids(
+          blocked_on_remove_strs, issue.project_id, mar,
+          self._services)
+      blocking_add_strs, blocking_remove_strs = (
+          api_pb2_v1_helpers.split_remove_add(request.updates.blocking))
+      updates_dict['blocking_add'] = api_pb2_v1_helpers.issue_global_ids(
+          blocking_add_strs, issue.project_id, mar,
+          self._services)
+      updates_dict['blocking_remove'] = api_pb2_v1_helpers.issue_global_ids(
+          blocking_remove_strs, issue.project_id, mar,
+          self._services)
+      components_add_strs, components_remove_strs = (
+          api_pb2_v1_helpers.split_remove_add(request.updates.components))
+      updates_dict['components_add'] = (
+          api_pb2_v1_helpers.convert_component_ids(
+              mar.config, components_add_strs))
+      updates_dict['components_remove'] = (
+          api_pb2_v1_helpers.convert_component_ids(
+              mar.config, components_remove_strs))
+      if request.updates.mergedInto:
+        updates_dict['merged_into'] = self._services.issue.LookupIssueID(
+            mar.cnxn, issue.project_id, int(request.updates.mergedInto))
+      (updates_dict['field_vals_add'], updates_dict['field_vals_remove'],
+       updates_dict['fields_clear'], updates_dict['fields_labels_add'],
+       updates_dict['fields_labels_remove']) = (
+          api_pb2_v1_helpers.convert_field_values(
+              request.updates.fieldValues, mar, self._services))
+
+    # Validate custom field values before applying any changes.
+    field_helpers.ValidateCustomFields(
+        mar, self._services,
+        (updates_dict.get('field_vals_add', []) +
+         updates_dict.get('field_vals_remove', [])),
+        mar.config, mar.errors)
+    if mar.errors.AnyErrors():
+      raise endpoints.BadRequestException(
+          'Invalid field values: %s' % mar.errors.custom_fields)
+
+    _, comment = self._services.issue.DeltaUpdateIssue(
+        cnxn=mar.cnxn, services=self._services,
+        reporter_id=mar.auth.user_id,
+        project_id=mar.project_id, config=mar.config, issue=issue,
+        status=updates_dict.get('status'), owner_id=updates_dict.get('owner'),
+        cc_add=updates_dict.get('cc_add', []),
+        cc_remove=updates_dict.get('cc_remove', []),
+        comp_ids_add=updates_dict.get('components_add', []),
+        comp_ids_remove=updates_dict.get('components_remove', []),
+        labels_add=(updates_dict.get('labels_add', []) +
+                    updates_dict.get('fields_labels_add', [])),
+        labels_remove=(updates_dict.get('labels_remove', []) +
+                       updates_dict.get('fields_labels_remove', [])),
+        field_vals_add=updates_dict.get('field_vals_add', []),
+        field_vals_remove=updates_dict.get('field_vals_remove', []),
+        fields_clear=updates_dict.get('fields_clear', []),
+        blocked_on_add=updates_dict.get('blocked_on_add', []),
+        blocked_on_remove=updates_dict.get('blocked_on_remove', []),
+        blocking_add=updates_dict.get('blocking_add', []),
+        blocking_remove=updates_dict.get('blocking_remove', []),
+        merged_into=updates_dict.get('merged_into'),
+        index_now=False,
+        comment=request.content,
+        summary=updates_dict.get('summary'),
+        )
+
+    move_comment = None
+    if 'move_to_project' in updates_dict:
+      move_to_project = updates_dict['move_to_project']
+      old_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
+      # The issue must be unindexed before the move and reindexed after.
+      tracker_fulltext.UnindexIssues([issue.issue_id])
+      moved_back_iids = self._services.issue.MoveIssues(
+          mar.cnxn, move_to_project, [issue], self._services.user)
+      new_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
+      if issue.issue_id in moved_back_iids:
+        content = 'Moved %s back to %s again.' % (old_text_ref, new_text_ref)
+      else:
+        content = 'Moved %s to now be %s.' % (old_text_ref, new_text_ref)
+      move_comment = self._services.issue.CreateIssueComment(
+          mar.cnxn, move_to_project.project_id, issue.local_id,
+          mar.auth.user_id, content, amendments=[
+              tracker_bizobj.MakeProjectAmendment(
+                  move_to_project.project_name)])
+
+    tracker_fulltext.IndexIssues(
+        mar.cnxn, [issue], self._services.user, self._services.issue,
+        self._services.config)
+
+    comment = comment or move_comment
+    if comment is None:
+      return api_pb2_v1.IssuesCommentsInsertResponse()
+
+    # The new comment is always the last one; its sequence is its index.
+    cmnts = self._services.issue.GetCommentsForIssue(mar.cnxn, issue.issue_id)
+    seq = len(cmnts) - 1
+
+    if request.sendEmail:
+      notify.PrepareAndSendIssueChangeNotification(
+          issue.project_id, issue.local_id, framework_helpers.GetHostPort(),
+          comment.user_id, seq, send_email=True, old_owner_id=old_owner_id)
+
+    can_delete = permissions.CanDelete(
+        mar.auth.user_id, mar.auth.effective_ids, mar.perms,
+        comment.deleted_by, comment.user_id, mar.project,
+        permissions.GetRestrictions(issue), granted_perms=mar.granted_perms)
+    return api_pb2_v1.IssuesCommentsInsertResponse(
+        id=seq,
+        kind='monorail#issueComment',
+        author=api_pb2_v1_helpers.convert_person(
+            comment.user_id, mar.cnxn, self._services),
+        content=comment.content,
+        published=datetime.datetime.fromtimestamp(comment.timestamp),
+        updates=api_pb2_v1_helpers.convert_amendments(
+            issue, comment.amendments, mar, self._services),
+        canDelete=can_delete)
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_COMMENTS_LIST_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesCommentsListResponse,
+ path='projects/{projectId}/issues/{issueId}/comments',
+ http_method='GET',
+ name='issues.comments.list')
+ def issues_comments_list(self, request):
+ """List all comments for an issue."""
+ mar = self.mar_factory(request)
+ issue = self._services.issue.GetIssueByLocalID(
+ mar.cnxn, mar.project_id, request.issueId)
+ comments = self._services.issue.GetCommentsForIssue(
+ mar.cnxn, issue.issue_id)
+ visible_comments = []
+ for comment in comments[
+ request.startIndex:(request.startIndex + request.maxResults)]:
+ visible_comments.append(
+ api_pb2_v1_helpers.convert_comment(
+ issue, comment, mar, self._services, mar.granted_perms))
+
+ return api_pb2_v1.IssuesCommentsListResponse(
+ kind='monorail#issueCommentList',
+ totalResults=len(comments),
+ items=visible_comments)
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_COMMENTS_DELETE_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesCommentsDeleteResponse,
+ path='projects/{projectId}/issues/{issueId}/comments/{commentId}',
+ http_method='POST',
+ name='issues.comments.undelete')
+ def issues_comments_undelete(self, request):
+ """Restore a deleted comment."""
+ return self.aux_delete_comment(request, False)
+
+ @monorail_api_method(
+ api_pb2_v1.USERS_GET_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.UsersGetResponse,
+ path='users/{userId}',
+ http_method='GET',
+ name='users.get')
+ def users_get(self, request):
+ """Get a user."""
+ owner_project_only = request.ownerProjectsOnly
+ mar = self.mar_factory(request)
+ (visible_ownership, visible_deleted, visible_membership,
+ visible_contrib) = sitewide_helpers.GetUserProjects(
+ mar.cnxn, self._services, mar.auth.user_pb, mar.auth.effective_ids,
+ mar.viewed_user_auth.effective_ids)
+
+ project_list = []
+ for proj in (visible_ownership + visible_deleted):
+ config = self._services.config.GetProjectConfig(
+ mar.cnxn, proj.project_id)
+ proj_result = api_pb2_v1_helpers.convert_project(
+ proj, config, api_pb2_v1.Role.owner)
+ project_list.append(proj_result)
+ if not owner_project_only:
+ for proj in visible_membership:
+ config = self._services.config.GetProjectConfig(
+ mar.cnxn, proj.project_id)
+ proj_result = api_pb2_v1_helpers.convert_project(
+ proj, config, api_pb2_v1.Role.member)
+ project_list.append(proj_result)
+ for proj in visible_contrib:
+ config = self._services.config.GetProjectConfig(
+ mar.cnxn, proj.project_id)
+ proj_result = api_pb2_v1_helpers.convert_project(
+ proj, config, api_pb2_v1.Role.contributor)
+ project_list.append(proj_result)
+
+ return api_pb2_v1.UsersGetResponse(
+ id=str(mar.viewed_user_auth.user_id),
+ kind='monorail#user',
+ projects=project_list,
+ )
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_GET_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesGetInsertResponse,
+ path='projects/{projectId}/issues/{issueId}',
+ http_method='GET',
+ name='issues.get')
+ def issues_get(self, request):
+ """Get an issue."""
+ mar = self.mar_factory(request)
+ issue = self._services.issue.GetIssueByLocalID(
+ mar.cnxn, mar.project_id, request.issueId)
+
+ return api_pb2_v1_helpers.convert_issue(
+ api_pb2_v1.IssuesGetInsertResponse, issue, mar, self._services)
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_INSERT_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesGetInsertResponse,
+ path='projects/{projectId}/issues',
+ http_method='POST',
+ name='issues.insert')
+ def issues_insert(self, request):
+ """Add a new issue."""
+ mar = self.mar_factory(request)
+ if not mar.perms.CanUsePerm(
+ permissions.CREATE_ISSUE, mar.auth.effective_ids, mar.project, []):
+ raise permissions.PermissionException(
+ 'The requester %s is not allowed to create issues for project %s.' %
+ (mar.auth.email, mar.project_name))
+
+ owner_id = None
+ if request.owner:
+ try:
+ owner_id = self._services.user.LookupUserID(
+ mar.cnxn, request.owner.name)
+ except user_svc.NoSuchUserException:
+ raise endpoints.BadRequestException(
+ 'The specified owner %s does not exist.' % request.owner.name)
+
+ cc_ids = []
+ if request.cc:
+ cc_ids = self._services.user.LookupUserIDs(
+ mar.cnxn, [ap.name for ap in request.cc]).values()
+ comp_ids = api_pb2_v1_helpers.convert_component_ids(
+ mar.config, request.components)
+ fields_add, _, _, fields_labels, _ = (
+ api_pb2_v1_helpers.convert_field_values(
+ request.fieldValues, mar, self._services))
+ field_helpers.ValidateCustomFields(
+ mar, self._services, fields_add, mar.config, mar.errors)
+ if mar.errors.AnyErrors():
+ raise endpoints.BadRequestException(
+ 'Invalid field values: %s' % mar.errors.custom_fields)
+
+ local_id = self._services.issue.CreateIssue(
+ mar.cnxn, self._services, mar.project_id,
+ request.summary, request.status, owner_id,
+ cc_ids, request.labels + fields_labels, fields_add,
+ comp_ids, mar.auth.user_id, request.description,
+ blocked_on=api_pb2_v1_helpers.convert_issueref_pbs(
+ request.blockedOn, mar, self._services),
+ blocking=api_pb2_v1_helpers.convert_issueref_pbs(
+ request.blocking, mar, self._services))
+ new_issue = self._services.issue.GetIssueByLocalID(
+ mar.cnxn, mar.project_id, local_id)
+
+ if request.sendEmail:
+ notify.PrepareAndSendIssueChangeNotification(
+ mar.project_id, local_id, framework_helpers.GetHostPort(),
+ new_issue.reporter_id, 0)
+
+ return api_pb2_v1_helpers.convert_issue(
+ api_pb2_v1.IssuesGetInsertResponse, new_issue, mar, self._services)
+
+ @monorail_api_method(
+ api_pb2_v1.ISSUES_LIST_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.IssuesListResponse,
+ path='projects/{projectId}/issues',
+ http_method='GET',
+ name='issues.list')
+ def issues_list(self, request):
+ """List issues for projects."""
+ mar = self.mar_factory(request)
+
+ if request.additionalProject:
+ for project_name in request.additionalProject:
+ project = self._services.project.GetProjectByName(
+ mar.cnxn, project_name)
+ if project and not permissions.UserCanViewProject(
+ mar.auth.user_pb, mar.auth.effective_ids, project):
+ raise permissions.PermissionException(
+ 'The user %s has no permission for project %s' %
+ (mar.auth.email, project_name))
+ prof = profiler.Profiler()
+ pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+ mar, self._services, prof, mar.num)
+ if not mar.errors.AnyErrors():
+ pipeline.SearchForIIDs()
+ pipeline.MergeAndSortIssues()
+ pipeline.Paginate()
+ else:
+ raise endpoints.BadRequestException(mar.errors.query)
+
+ issue_list = [
+ api_pb2_v1_helpers.convert_issue(
+ api_pb2_v1.IssueWrapper, r, mar, self._services)
+ for r in pipeline.visible_results]
+ return api_pb2_v1.IssuesListResponse(
+ kind='monorail#issueList',
+ totalResults=pipeline.total_count,
+ items=issue_list)
+
+ @monorail_api_method(
+ api_pb2_v1.GROUPS_SETTINGS_LIST_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.GroupsSettingsListResponse,
+ path='groups/settings',
+ http_method='GET',
+ name='groups.settings.list')
+ def groups_settings_list(self, request):
+ """List all group settings."""
+ mar = self.mar_factory(request)
+ all_groups = self._services.usergroup.GetAllUserGroupsInfo(mar.cnxn)
+ group_settings = []
+ for g in all_groups:
+ setting = g[2]
+ wrapper = api_pb2_v1_helpers.convert_group_settings(g[0], setting)
+ if not request.importedGroupsOnly or wrapper.ext_group_type:
+ group_settings.append(wrapper)
+ return api_pb2_v1.GroupsSettingsListResponse(
+ groupSettings=group_settings)
+
+ @monorail_api_method(
+ api_pb2_v1.GROUPS_CREATE_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.GroupsCreateResponse,
+ path='groups',
+ http_method='POST',
+ name='groups.create')
+ def groups_create(self, request):
+ """Create a new user group."""
+ mar = self.mar_factory(request)
+ if not permissions.CanCreateGroup(mar.perms):
+ raise permissions.PermissionException(
+ 'The user is not allowed to create groups.')
+
+ user_dict = self._services.user.LookupExistingUserIDs(
+ mar.cnxn, [request.groupName])
+ if request.groupName.lower() in user_dict:
+ raise usergroup_svc.GroupExistsException(
+ 'group %s already exists' % request.groupName)
+
+ if request.ext_group_type:
+ ext_group_type = str(request.ext_group_type).lower()
+ else:
+ ext_group_type = None
+ group_id = self._services.usergroup.CreateGroup(
+ mar.cnxn, self._services, request.groupName,
+ str(request.who_can_view_members).lower(),
+ ext_group_type)
+
+ return api_pb2_v1.GroupsCreateResponse(
+ groupID=group_id)
+
+ @monorail_api_method(
+ api_pb2_v1.GROUPS_GET_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.GroupsGetResponse,
+ path='groups/{groupName}',
+ http_method='GET',
+ name='groups.get')
+ def groups_get(self, request):
+ """Get a group's settings and users."""
+ mar = self.mar_factory(request)
+ if not mar.viewed_user_auth:
+ raise user_svc.NoSuchUserException(request.groupName)
+ group_id = mar.viewed_user_auth.user_id
+ group_settings = self._services.usergroup.GetGroupSettings(
+ mar.cnxn, group_id)
+ member_ids, owner_ids = self._services.usergroup.LookupAllMembers(
+ mar.cnxn, [group_id])
+ (owned_project_ids, membered_project_ids,
+ contrib_project_ids) = self._services.project.GetUserRolesInAllProjects(
+ mar.cnxn, mar.auth.effective_ids)
+ project_ids = owned_project_ids.union(
+ membered_project_ids).union(contrib_project_ids)
+ if not permissions.CanViewGroup(
+ mar.perms, mar.auth.effective_ids, group_settings, member_ids[group_id],
+ owner_ids[group_id], project_ids):
+ raise permissions.PermissionException(
+ 'The user is not allowed to view this group.')
+
+ member_ids, owner_ids = self._services.usergroup.LookupMembers(
+ mar.cnxn, [group_id])
+
+ member_emails = self._services.user.LookupUserEmails(
+ mar.cnxn, member_ids[group_id]).values()
+ owner_emails = self._services.user.LookupUserEmails(
+ mar.cnxn, owner_ids[group_id]).values()
+
+ return api_pb2_v1.GroupsGetResponse(
+ groupID=group_id,
+ groupSettings=api_pb2_v1_helpers.convert_group_settings(
+ request.groupName, group_settings),
+ groupOwners=owner_emails,
+ groupMembers=member_emails)
+
+ @monorail_api_method(
+ api_pb2_v1.GROUPS_UPDATE_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.GroupsUpdateResponse,
+ path='groups/{groupName}',
+ http_method='POST',
+ name='groups.update')
+ def groups_update(self, request):
+ """Update a group's settings and users."""
+ mar = self.mar_factory(request)
+ group_id = mar.viewed_user_auth.user_id
+ member_ids_dict, owner_ids_dict = self._services.usergroup.LookupMembers(
+ mar.cnxn, [group_id])
+ owner_ids = owner_ids_dict.get(group_id, [])
+ member_ids = member_ids_dict.get(group_id, [])
+ if not permissions.CanEditGroup(
+ mar.perms, mar.auth.effective_ids, owner_ids):
+ raise permissions.PermissionException(
+ 'The user is not allowed to edit this group.')
+
+ group_settings = self._services.usergroup.GetGroupSettings(
+ mar.cnxn, group_id)
+ if (request.who_can_view_members or request.ext_group_type
+ or request.last_sync_time or request.friend_projects):
+ group_settings.who_can_view_members = (
+ request.who_can_view_members or group_settings.who_can_view_members)
+ group_settings.ext_group_type = (
+ request.ext_group_type or group_settings.ext_group_type)
+ group_settings.last_sync_time = (
+ request.last_sync_time or group_settings.last_sync_time)
+ if framework_constants.NO_VALUES in request.friend_projects:
+ group_settings.friend_projects = []
+ else:
+ id_dict = self._services.project.LookupProjectIDs(
+ mar.cnxn, request.friend_projects)
+ group_settings.friend_projects = (
+ id_dict.values() or group_settings.friend_projects)
+ self._services.usergroup.UpdateSettings(
+ mar.cnxn, group_id, group_settings)
+
+ if request.groupOwners or request.groupMembers:
+ self._services.usergroup.RemoveMembers(
+ mar.cnxn, group_id, owner_ids + member_ids)
+ owners_dict = self._services.user.LookupUserIDs(
+ mar.cnxn, request.groupOwners, True)
+ self._services.usergroup.UpdateMembers(
+ mar.cnxn, group_id, owners_dict.values(), 'owner')
+ members_dict = self._services.user.LookupUserIDs(
+ mar.cnxn, request.groupMembers, True)
+ self._services.usergroup.UpdateMembers(
+ mar.cnxn, group_id, members_dict.values(), 'member')
+
+ return api_pb2_v1.GroupsUpdateResponse()
+
+ @monorail_api_method(
+ api_pb2_v1.COMPONENTS_LIST_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.ComponentsListResponse,
+ path='projects/{projectId}/components',
+ http_method='GET',
+ name='components.list')
+ def components_list(self, request):
+ """List all components of a given project."""
+ mar = self.mar_factory(request)
+ config = self._services.config.GetProjectConfig(mar.cnxn, mar.project_id)
+ components = [api_pb2_v1_helpers.convert_component_def(
+ cd, mar, self._services) for cd in config.component_defs]
+ return api_pb2_v1.ComponentsListResponse(
+ components=components)
+
+ @monorail_api_method(
+ api_pb2_v1.COMPONENTS_CREATE_REQUEST_RESOURCE_CONTAINER,
+ api_pb2_v1.Component,
+ path='projects/{projectId}/components',
+ http_method='POST',
+ name='components.create')
+ def components_create(self, request):
+ """Create a component."""
+ mar = self.mar_factory(request)
+ if not mar.perms.CanUsePerm(
+ permissions.EDIT_PROJECT, mar.auth.effective_ids, mar.project, []):
+ raise permissions.PermissionException(
+ 'User is not allowed to create components for this project')
+
+ config = self._services.config.GetProjectConfig(mar.cnxn, mar.project_id)
+ leaf_name = request.componentName
+ if not tracker_constants.COMPONENT_NAME_RE.match(leaf_name):
+ raise config_svc.InvalidComponentNameException(
+ 'The component name %s is invalid.' % leaf_name)
+
+ parent_path = request.parentPath
+ if parent_path:
+ parent_def = tracker_bizobj.FindComponentDef(parent_path, config)
+ if not parent_def:
+ raise config_svc.NoSuchComponentException(
+ 'Parent component %s does not exist.' % parent_path)
+ if not permissions.CanEditComponentDef(
+ mar.auth.effective_ids, mar.perms, mar.project, parent_def, config):
+ raise permissions.PermissionException(
+ 'User is not allowed to add a subcomponent to component %s' %
+ parent_path)
+
+ path = '%s>%s' % (parent_path, leaf_name)
+ else:
+ path = leaf_name
+
+ if tracker_bizobj.FindComponentDef(path, config):
+ raise config_svc.InvalidComponentNameException(
+ 'The name %s is already in use.' % path)
+
+ created = int(time.time())
+ user_emails = set()
+ user_emails.update([mar.auth.email] + request.admin + request.cc)
+ user_ids_dict = self._services.user.LookupUserIDs(
+ mar.cnxn, list(user_emails), autocreate=False)
+ admin_ids = [user_ids_dict[uname] for uname in request.admin]
+ cc_ids = [user_ids_dict[uname] for uname in request.cc]
+
+ component_id = self._services.config.CreateComponentDef(
+ mar.cnxn, mar.project_id, path, request.description, request.deprecated,
+ admin_ids, cc_ids, created, user_ids_dict[mar.auth.email])
+
+ return api_pb2_v1.Component(
+ componentId=component_id,
+ projectName=request.projectId,
+ componentPath=path,
+ description=request.description,
+ admin=request.admin,
+ cc=request.cc,
+ deprecated=request.deprecated,
+ created=datetime.datetime.fromtimestamp(created),
+ creator=mar.auth.email)
+
+ @monorail_api_method(
+ api_pb2_v1.COMPONENTS_DELETE_REQUEST_RESOURCE_CONTAINER,
+ message_types.VoidMessage,
+ path='projects/{projectId}/components/{componentPath}',
+ http_method='DELETE',
+ name='components.delete')
+ def components_delete(self, request):
+ """Delete a component."""
+ mar = self.mar_factory(request)
+ config = self._services.config.GetProjectConfig(mar.cnxn, mar.project_id)
+ component_path = request.componentPath
+ component_def = tracker_bizobj.FindComponentDef(
+ component_path, config)
+ if not component_def:
+ raise config_svc.NoSuchComponentException(
+ 'The component %s does not exist.' % component_path)
+ if not permissions.CanViewComponentDef(
+ mar.auth.effective_ids, mar.perms, mar.project, component_def):
+ raise permissions.PermissionException(
+ 'User is not allowed to view this component %s' % component_path)
+ if not permissions.CanEditComponentDef(
+ mar.auth.effective_ids, mar.perms, mar.project, component_def, config):
+ raise permissions.PermissionException(
+ 'User is not allowed to delete this component %s' % component_path)
+
+ allow_delete = not tracker_bizobj.FindDescendantComponents(
+ config, component_def)
+ if not allow_delete:
+ raise permissions.PermissionException(
+ 'User tried to delete component that had subcomponents')
+
+ self._services.issue.DeleteComponentReferences(
+ mar.cnxn, component_def.component_id)
+ self._services.config.DeleteComponentDef(
+ mar.cnxn, mar.project_id, component_def.component_id)
+ return message_types.VoidMessage()
+
+ @monorail_api_method(
+ api_pb2_v1.COMPONENTS_UPDATE_REQUEST_RESOURCE_CONTAINER,
+ message_types.VoidMessage,
+ path='projects/{projectId}/components/{componentPath}',
+ http_method='POST',
+ name='components.update')
+ def components_update(self, request):
+ """Update a component."""
+ mar = self.mar_factory(request)
+ config = self._services.config.GetProjectConfig(mar.cnxn, mar.project_id)
+ component_path = request.componentPath
+ component_def = tracker_bizobj.FindComponentDef(
+ component_path, config)
+ if not component_def:
+ raise config_svc.NoSuchComponentException(
+ 'The component %s does not exist.' % component_path)
+ if not permissions.CanViewComponentDef(
+ mar.auth.effective_ids, mar.perms, mar.project, component_def):
+ raise permissions.PermissionException(
+ 'User is not allowed to view this component %s' % component_path)
+ if not permissions.CanEditComponentDef(
+ mar.auth.effective_ids, mar.perms, mar.project, component_def, config):
+ raise permissions.PermissionException(
+ 'User is not allowed to edit this component %s' % component_path)
+
+ original_path = component_def.path
+ new_path = component_def.path
+ new_docstring = component_def.docstring
+ new_deprecated = component_def.deprecated
+ new_admin_ids = component_def.admin_ids
+ new_cc_ids = component_def.cc_ids
+ update_filterrule = False
+ for update in request.updates:
+ if update.field == api_pb2_v1.ComponentUpdateFieldID.LEAF_NAME:
+ leaf_name = update.leafName
+ if not tracker_constants.COMPONENT_NAME_RE.match(leaf_name):
+ raise config_svc.InvalidComponentNameException(
+ 'The component name %s is invalid.' % leaf_name)
+
+ if '>' in original_path:
+ parent_path = original_path[:original_path.rindex('>')]
+ new_path = '%s>%s' % (parent_path, leaf_name)
+ else:
+ new_path = leaf_name
+
+ conflict = tracker_bizobj.FindComponentDef(new_path, config)
+ if conflict and conflict.component_id != component_def.component_id:
+ raise config_svc.InvalidComponentNameException(
+ 'The name %s is already in use.' % new_path)
+ update_filterrule = True
+ elif update.field == api_pb2_v1.ComponentUpdateFieldID.DESCRIPTION:
+ new_docstring = update.description
+ elif update.field == api_pb2_v1.ComponentUpdateFieldID.ADMIN:
+ user_ids_dict = self._services.user.LookupUserIDs(
+ mar.cnxn, list(update.admin), autocreate=False)
+ new_admin_ids = [user_ids_dict[email] for email in update.admin]
+ elif update.field == api_pb2_v1.ComponentUpdateFieldID.CC:
+ user_ids_dict = self._services.user.LookupUserIDs(
+ mar.cnxn, list(update.cc), autocreate=False)
+ new_cc_ids = [user_ids_dict[email] for email in update.cc]
+ update_filterrule = True
+ elif update.field == api_pb2_v1.ComponentUpdateFieldID.DEPRECATED:
+ new_deprecated = update.deprecated
+ else:
+ logging.error('Unknown component field %r', update.field)
+
+ new_modified = int(time.time())
+ new_modifier_id = self._services.user.LookupUserID(
+ mar.cnxn, mar.auth.email, autocreate=False)
+ logging.info(
+ 'Updating component id %d: path-%s, docstring-%s, deprecated-%s,'
+ ' admin_ids-%s, cc_ids-%s modified by %s', component_def.component_id,
+ new_path, new_docstring, new_deprecated, new_admin_ids, new_cc_ids,
+ new_modifier_id)
+ self._services.config.UpdateComponentDef(
+ mar.cnxn, mar.project_id, component_def.component_id,
+ path=new_path, docstring=new_docstring, deprecated=new_deprecated,
+ admin_ids=new_admin_ids, cc_ids=new_cc_ids, modified=new_modified,
+ modifier_id=new_modifier_id)
+
+ # TODO(sheyang): reuse the code in componentdetails
+ if original_path != new_path:
+ # If the name changed then update all of its subcomponents as well.
+ subcomponent_ids = tracker_bizobj.FindMatchingComponentIDs(
+ original_path, config, exact=False)
+ for subcomponent_id in subcomponent_ids:
+ if subcomponent_id == component_def.component_id:
+ continue
+ subcomponent_def = tracker_bizobj.FindComponentDefByID(
+ subcomponent_id, config)
+ subcomponent_new_path = subcomponent_def.path.replace(
+ original_path, new_path, 1)
+ self._services.config.UpdateComponentDef(
+ mar.cnxn, mar.project_id, subcomponent_def.component_id,
+ path=subcomponent_new_path)
+
+ if update_filterrule:
+ filterrules_helpers.RecomputeAllDerivedFields(
+ mar.cnxn, self._services, mar.project, config)
+
+ return message_types.VoidMessage()
+
+
+@endpoints.api(name='monorail_client_configs', version='v1',
+ description='Monorail API client configs.')
+class ClientConfigApi(remote.Service):
+
+ # Class variables. Handy to mock.
+ _services = None
+ _mar = None
+
+ @classmethod
+ def _set_services(cls, services):
+ cls._services = services
+
+ def mar_factory(self, request):
+ if not self._mar:
+ self._mar = monorailrequest.MonorailApiRequest(request, self._services)
+ return self._mar
+
+ @endpoints.method(
+ message_types.VoidMessage,
+ message_types.VoidMessage,
+ path='client_configs',
+ http_method='POST',
+ name='client_configs.update')
+ def client_configs_update(self, request):
+ mar = self.mar_factory(request)
+ if not mar.perms.HasPerm(permissions.ADMINISTER_SITE, None, None):
+ raise permissions.PermissionException(
+ 'The requester %s is not allowed to update client configs.' %
+ mar.auth.email)
+
+ ROLE_DICT = {
+ 1: permissions.COMMITTER_ROLE,
+ 2: permissions.CONTRIBUTOR_ROLE,
+ }
+
+ client_config = client_config_svc.GetClientConfigSvc()
+
+ cfg = client_config.GetConfigs()
+ if not cfg:
+ msg = 'Failed to fetch client configs.'
+ logging.error(msg)
+ raise endpoints.InternalServerErrorException(msg)
+
+ for client in cfg.clients:
+ if not client.client_email:
+ continue
+ # 1: create the user if non-existent
+ user_id = self._services.user.LookupUserID(
+ mar.cnxn, client.client_email, autocreate=True)
+ user_pb = self._services.user.GetUser(mar.cnxn, user_id)
+
+ logging.info('User ID %d for email %s', user_id, client.client_email)
+
+ # 2: set period and lifetime limit
+ # new_soft_limit, new_hard_limit, new_lifetime_limit
+ new_limit_tuple = (
+ client.period_limit, client.period_limit, client.lifetime_limit)
+ action_limit_updates = {'api_request': new_limit_tuple}
+ self._services.user.UpdateUserSettings(
+ mar.cnxn, user_id, user_pb, action_limit_updates=action_limit_updates)
+
+ logging.info('Updated api request limit %r', new_limit_tuple)
+
+ # 3: Update project role and extra perms
+ projects_dict = self._services.project.GetAllProjects(mar.cnxn)
+ project_name_to_ids = {
+ p.project_name: p.project_id for p in projects_dict.itervalues()}
+
+ # Set project role and extra perms
+ for perm in client.project_permissions:
+ project_ids = self._GetProjectIDs(perm.project, project_name_to_ids)
+ logging.info('Matching projects %r for name %s',
+ project_ids, perm.project)
+
+ role = ROLE_DICT[perm.role]
+ for p_id in project_ids:
+ project = projects_dict[p_id]
+ people_list = []
+ if role == 'owner':
+ people_list = project.owner_ids
+ elif role == 'committer':
+ people_list = project.committer_ids
+ elif role == 'contributor':
+ people_list = project.contributor_ids
+          # Only update role/extra perms iff changed
+ if not user_id in people_list:
+ logging.info('Update project %s role %s for user %s',
+ project.project_name, role, client.client_email)
+ owner_ids, committer_ids, contributor_ids = (
+ project_helpers.MembersWith(project, {user_id}, role))
+ self._services.project.UpdateProjectRoles(
+ mar.cnxn, p_id, owner_ids, committer_ids,
+ contributor_ids)
+ if perm.extra_permissions:
+ member_extra_perms = permissions.FindExtraPerms(project, user_id)
+ if (member_extra_perms and
+ set(member_extra_perms.perms) == set(perm.extra_permissions)):
+ continue
+ logging.info('Update project %s extra perm %s for user %s',
+ project.project_name, perm.extra_permissions,
+ client.client_email)
+ self._services.project.UpdateExtraPerms(
+ mar.cnxn, p_id, user_id, list(perm.extra_permissions))
+
+ return message_types.VoidMessage()
+
+ def _GetProjectIDs(self, project_str, project_name_to_ids):
+ result = []
+ if any(ch in project_str for ch in ['*', '+', '?', '.']):
+ pattern = re.compile(project_str)
+ for p_name in project_name_to_ids.iterkeys():
+ if pattern.match(p_name):
+ project_id = project_name_to_ids.get(p_name)
+ if project_id:
+ result.append(project_id)
+ else:
+ project_id = project_name_to_ids.get(project_str)
+ if project_id:
+ result.append(project_id)
+
+ if not result:
+ logging.warning('Cannot find projects for specified name %s',
+ project_str)
+ return result
+
+
diff --git a/appengine/monorail/services/cachemanager_svc.py b/appengine/monorail/services/cachemanager_svc.py
new file mode 100644
index 0000000..9c6ddb8
--- /dev/null
+++ b/appengine/monorail/services/cachemanager_svc.py
@@ -0,0 +1,154 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A simple in-RAM cache with distributed invalidation.
+
+Here's how it works:
+ + Each frontend or backend job has one CacheManager which
+ owns a set of RamCache objects, which are basically dictionaries.
+ + Each job can put objects in its own local cache, and retrieve them.
+ + When an item is modified, the item at the corresponding cache key
+ is invalidated, which means two things: (a) it is dropped from the
+ local RAM cache, and (b) the key is written to the Invalidate table.
+ + On each incoming request, the job checks the Invalidate table for
+ any entries added since the last time that it checked. If it finds
+ any, it drops all RamCache entries for the corresponding key.
+ + There is also a cron task that truncates old Invalidate entries
+ when the table is too large. If a frontend job sees more than the
+ max Invalidate rows, it will drop everything from all caches,
+ because it does not know what it missed due to truncation.
+ + The special key 0 means to drop all cache entries.
+
+This approach makes jobs use cached values that are not stale at the
+time that processing of each request begins.  However, an item could
+still be modified by some other job, so the cached entry could become
+stale during the lifetime of that same request.
+
+TODO(jrobbins): Listener hook so that client code can register its own
+handler for invalidation events. E.g., the sorting code has a cache that
+is correctly invalidated on each issue change, but needs to be completely
+dropped when a config is modified.
+
+TODO(jrobbins): If this part of the system becomes a bottleneck, consider
+some optimizations: (a) splitting the table into multiple tables by
+kind, or (b) sharding the table by cache_key. Or, maybe leverage memcache
+to avoid even hitting the DB in the frequent case where nothing has changed.
+"""
+
+import collections
+import logging
+
+from framework import jsonfeed
+from framework import sql
+from services import caches
+
+
+INVALIDATE_TABLE_NAME = 'Invalidate'
+INVALIDATE_COLS = ['timestep', 'kind', 'cache_key']
+INVALIDATE_ALL_KEYS = 0
+MAX_INVALIDATE_ROWS_TO_CONSIDER = 1000
+
+
+class CacheManager(object):
+ """Service class to manage RAM caches and shared Invalidate table."""
+
+ def __init__(self):
+ self.cache_registry = collections.defaultdict(list)
+ self.processed_invalidations_up_to = 0
+ self.invalidate_tbl = sql.SQLTableManager(INVALIDATE_TABLE_NAME)
+
+ def MakeCache(self, kind, max_size=None, use_value_centric_cache=False):
+ """Make a new cache and register it for future invalidations."""
+ if use_value_centric_cache:
+ cache = caches.ValueCentricRamCache(self, kind, max_size=max_size)
+ else:
+ cache = caches.RamCache(self, kind, max_size=max_size)
+ self.cache_registry[kind].append(cache)
+ return cache
+
+ def _InvalidateAllCaches(self):
+ """Invalidate all cache entries."""
+ for cache_list in self.cache_registry.values():
+ for cache in cache_list:
+ cache.LocalInvalidateAll()
+
+ def _ProcessInvalidationRows(self, rows):
+ """Invalidate cache entries indicated by database rows."""
+ for timestep, kind, key in rows:
+ self.processed_invalidations_up_to = max(
+ self.processed_invalidations_up_to, timestep)
+ for cache in self.cache_registry[kind]:
+ if key == INVALIDATE_ALL_KEYS:
+ cache.LocalInvalidateAll()
+ else:
+ cache.LocalInvalidate(key)
+
+ def DoDistributedInvalidation(self, cnxn):
+ """Drop any cache entries that were invalidated by other jobs."""
+ # Only consider a reasonable number of rows so that we can never
+ # get bogged down on this step. If there are too many rows to
+ # process, just invalidate all caches, and process the last group
+ # of rows to update processed_invalidations_up_to.
+ rows = self.invalidate_tbl.Select(
+ cnxn, cols=INVALIDATE_COLS,
+ where=[('timestep > %s', [self.processed_invalidations_up_to])],
+ order_by=[('timestep DESC', [])],
+ limit=MAX_INVALIDATE_ROWS_TO_CONSIDER)
+
+ if len(rows) == MAX_INVALIDATE_ROWS_TO_CONSIDER:
+      logging.info('Invalidating all caches: there are too many invalidations')
+ self._InvalidateAllCaches()
+
+ logging.info('Saw %d invalidation rows', len(rows))
+ self._ProcessInvalidationRows(rows)
+
+ def StoreInvalidateRows(self, cnxn, kind, keys):
+ """Store rows to let all jobs know to invalidate the given keys."""
+ assert kind in caches.INVALIDATE_KIND_VALUES
+ self.invalidate_tbl.InsertRows(
+ cnxn, ['kind', 'cache_key'], [(kind, key) for key in keys])
+
+ def StoreInvalidateAll(self, cnxn, kind):
+ """Store a value to tell all jobs to invalidate all items of this kind."""
+ last_timestep = self.invalidate_tbl.InsertRow(
+ cnxn, kind=kind, cache_key=INVALIDATE_ALL_KEYS)
+ self.invalidate_tbl.Delete(
+ cnxn, kind=kind, where=[('timestep < %s', [last_timestep])])
+
+
+class RamCacheConsolidate(jsonfeed.InternalTask):
+ """Drop old Invalidate rows when there are too many of them."""
+
+ def HandleRequest(self, mr):
+ """Drop excessive rows in the Invalidate table and return some stats.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format. The stats are just for debugging,
+ they are not used by any other part of the system.
+ """
+ tbl = self.services.cache_manager.invalidate_tbl
+ old_count = tbl.SelectValue(mr.cnxn, 'COUNT(*)')
+
+    # Delete anything other than the last 1000 rows because we won't
+    # look at them anyway.  If a job gets a request and sees 1000 new
+    # rows, it will drop all caches of all types, so the effect is the
+    # same as if the deleted rows were still present.
+ if old_count > MAX_INVALIDATE_ROWS_TO_CONSIDER:
+ kept_timesteps = tbl.Select(
+ mr.cnxn, ['timestep'],
+ order_by=[('timestep DESC', [])],
+ limit=MAX_INVALIDATE_ROWS_TO_CONSIDER)
+ earliest_kept = kept_timesteps[-1][0]
+ tbl.Delete(mr.cnxn, where=[('timestep < %s', [earliest_kept])])
+
+ new_count = tbl.SelectValue(mr.cnxn, 'COUNT(*)')
+
+ return {
+ 'old_count': old_count,
+ 'new_count': new_count,
+ }
diff --git a/appengine/monorail/services/caches.py b/appengine/monorail/services/caches.py
new file mode 100644
index 0000000..ccb82c8
--- /dev/null
+++ b/appengine/monorail/services/caches.py
@@ -0,0 +1,303 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to manage cached values.
+
+Monorail makes full use of the RAM of GAE frontends to reduce latency
+and load on the database.
+"""
+
+import logging
+
+from protorpc import protobuf
+
+from google.appengine.api import memcache
+
+from framework import framework_constants
+
+
+# Values permitted in the Invalidate table's 'kind' column; each RamCache
+# instance is dedicated to exactly one of these kinds.
+INVALIDATE_KIND_VALUES = ['user', 'project', 'issue', 'issue_id']
+# Maximum number of entries a RamCache holds when no max_size is given.
+DEFAULT_MAX_SIZE = 10000
+
+
+class RamCache(object):
+ """An in-RAM cache with distributed invalidation."""
+
+ def __init__(self, cache_manager, kind, max_size=None):
+ assert kind in INVALIDATE_KIND_VALUES
+ self.cache_manager = cache_manager
+ self.kind = kind
+ self.cache = {}
+ self.max_size = max_size or DEFAULT_MAX_SIZE
+
+ def CacheItem(self, key, item):
+ """Store item at key in this cache, discarding a random item if needed."""
+ if len(self.cache) >= self.max_size:
+ self.cache.popitem()
+
+ self.cache[key] = item
+
+ def CacheAll(self, new_item_dict):
+ """Cache all items in the given dict, dropping old items if needed."""
+ if len(new_item_dict) >= self.max_size:
+ logging.warn('Dumping the entire cache! %s', self.kind)
+ self.cache = {}
+ else:
+ while len(self.cache) + len(new_item_dict) > self.max_size:
+ self.cache.popitem()
+
+ self.cache.update(new_item_dict)
+
+ def GetItem(self, key):
+ """Return the cached item if present, otherwise None."""
+ return self.cache.get(key)
+
+ def HasItem(self, key):
+ """Return True if there is a value cached at the given key."""
+ return key in self.cache
+
+ def GetAll(self, keys):
+ """Look up the given keys.
+
+ Args:
+ keys: a list of cache keys to look up.
+
+ Returns:
+ A pair: (hits_dict, misses_list) where hits_dict is a dictionary of
+ all the given keys and the values that were found in the cache, and
+ misses_list is a list of given keys that were not in the cache.
+ """
+ hits, misses = {}, []
+ for key in keys:
+ try:
+ hits[key] = self.cache[key]
+ except KeyError:
+ misses.append(key)
+
+ return hits, misses
+
+ def LocalInvalidate(self, key):
+ """Drop the given key from this cache, without distributed notification."""
+ # logging.info('Locally invalidating %r in kind=%r', key, self.kind)
+ self.cache.pop(key, None)
+
+ def Invalidate(self, cnxn, key):
+ """Drop key locally, and append it to the Invalidate DB table."""
+ self.InvalidateKeys(cnxn, [key])
+
+ def InvalidateKeys(self, cnxn, keys):
+ """Drop keys locally, and append them to the Invalidate DB table."""
+ for key in keys:
+ self.LocalInvalidate(key)
+ if self.cache_manager:
+ self.cache_manager.StoreInvalidateRows(cnxn, self.kind, keys)
+
+ def LocalInvalidateAll(self):
+ """Invalidate all keys locally: just start over with an empty dict."""
+ logging.info('Locally invalidating all in kind=%r', self.kind)
+ self.cache = {}
+
+ def InvalidateAll(self, cnxn):
+ """Invalidate all keys in this cache."""
+ self.LocalInvalidateAll()
+ if self.cache_manager:
+ self.cache_manager.StoreInvalidateAll(cnxn, self.kind)
+
+
+class ValueCentricRamCache(RamCache):
+ """Specialized version of RamCache that stores values in InvalidateTable.
+
+ This is useful for caches that have non integer keys.
+ """
+
+ def LocalInvalidate(self, value):
+ """Use the specified value to drop entries from the local cache."""
+ keys_to_drop = []
+ # Loop through and collect all keys with the specified value.
+ for k, v in self.cache.iteritems():
+ if v == value:
+ keys_to_drop.append(k)
+ for k in keys_to_drop:
+ self.cache.pop(k, None)
+
+ def InvalidateKeys(self, cnxn, keys):
+ """Drop keys locally, and append their values to the Invalidate DB table."""
+ # Find values to invalidate.
+ values = [self.cache[key] for key in keys if self.cache.has_key(key)]
+ if len(values) == len(keys):
+ for value in values:
+ self.LocalInvalidate(value)
+ if self.cache_manager:
+ self.cache_manager.StoreInvalidateRows(cnxn, self.kind, values)
+ else:
+ # If a value is not found in the cache then invalidate the whole cache.
+ # This is done to ensure that we are not in an inconsistent state or in a
+ # race condition.
+ self.InvalidateAll(cnxn)
+
+
+class AbstractTwoLevelCache(object):
+  """A class to manage both RAM and memcache to retrieve objects.
+
+  Subclasses must implement the FetchItems() method to get objects from
+  the database when both caches miss.
+  """
+
+  # When loading a huge number of issues from the database, do it in chunks
+  # so as to avoid timeouts.
+  _FETCH_BATCH_SIZE = 10000
+
+  def __init__(
+      self, cache_manager, kind, memcache_prefix, pb_class, max_size=None,
+      use_value_centric_cache=False):
+    """Set up the RAM-cache layer; memcache needs no per-instance setup.
+
+    Args:
+      cache_manager: manages local caches with distributed invalidation.
+      kind: cache kind string, one of caches.INVALIDATE_KIND_VALUES.
+      memcache_prefix: string prepended to keys to namespace this cache.
+      pb_class: protocol buffer class used for serialization, or int, or
+          None for values that need no conversion (see _ValueToStr).
+      max_size: optional maximum number of items to keep in RAM.
+      use_value_centric_cache: True to use a ValueCentricRamCache in RAM.
+    """
+    self.cache = cache_manager.MakeCache(
+        kind, max_size=max_size,
+        use_value_centric_cache=use_value_centric_cache)
+    self.memcache_prefix = memcache_prefix
+    self.pb_class = pb_class
+
+  def CacheItem(self, key, value):
+    """Add the given key-value pair to RAM and memcache."""
+    self.cache.CacheItem(key, value)
+    self._WriteToMemcache({key: value})
+
+  def HasItem(self, key):
+    """Return True if the given key is in the RAM cache."""
+    return self.cache.HasItem(key)
+
+  def GetAnyOnHandItem(self, keys, start=None, end=None):
+    """Try to find one of the specified items in RAM."""
+    if start is None:
+      start = 0
+    if end is None:
+      end = len(keys)
+    for i in xrange(start, end):
+      key = keys[i]
+      if self.cache.HasItem(key):
+        return self.cache.GetItem(key)
+
+    # Note: We could check memcache here too, but the round-trips to memcache
+    # are kind of slow. And, getting too many hits from memcache actually
+    # fills our RAM cache too quickly and could lead to thrashing.
+
+    return None
+
+  def GetAll(self, cnxn, keys, use_cache=True, **kwargs):
+    """Get values for the given keys from RAM, memcache, or the DB.
+
+    Args:
+      cnxn: connection to the database.
+      keys: list of integer keys to look up.
+      use_cache: set to False to always hit the database.
+      **kwargs: any additional keywords are passed to FetchItems().
+
+    Returns:
+      A pair: hits, misses. Where hits is {key: value} and misses is
+      a list of any keys that were not found anywhere.
+    """
+    if use_cache:
+      result_dict, missed_keys = self.cache.GetAll(keys)
+    else:
+      result_dict, missed_keys = {}, list(keys)
+
+    # Memcache is only consulted when caching is enabled.
+    if missed_keys and use_cache:
+      memcache_hits, missed_keys = self._ReadFromMemcache(missed_keys)
+      result_dict.update(memcache_hits)
+
+    # Anything still missing is fetched from the DB in batches to avoid
+    # request timeouts on huge key lists.
+    while missed_keys:
+      missed_batch = missed_keys[:self._FETCH_BATCH_SIZE]
+      missed_keys = missed_keys[self._FETCH_BATCH_SIZE:]
+      retrieved_dict = self.FetchItems(cnxn, missed_batch, **kwargs)
+      result_dict.update(retrieved_dict)
+      if use_cache:
+        self.cache.CacheAll(retrieved_dict)
+        self._WriteToMemcache(retrieved_dict)
+
+    still_missing_keys = [key for key in keys if key not in result_dict]
+    return result_dict, still_missing_keys
+
+  def _ReadFromMemcache(self, keys):
+    """Read the given keys from memcache, return {key: value}, missing_keys."""
+    memcache_hits = {}
+    cached_dict = memcache.get_multi(
+        [self._KeyToStr(key) for key in keys], key_prefix=self.memcache_prefix)
+
+    for key_str, serialized_value in cached_dict.iteritems():
+      value = self._StrToValue(serialized_value)
+      key = self._StrToKey(key_str)
+      memcache_hits[key] = value
+      # Also promote the hit into the RAM cache for future lookups.
+      self.cache.CacheItem(key, value)
+
+    still_missing_keys = [key for key in keys if key not in memcache_hits]
+    logging.info(
+        'decoded %d values from memcache %s, missing %d',
+        len(memcache_hits), self.memcache_prefix, len(still_missing_keys))
+    return memcache_hits, still_missing_keys
+
+  def _WriteToMemcache(self, retrieved_dict):
+    """Write entries for each key-value pair to memcache.  Encode PBs."""
+    strs_to_cache = {
+        self._KeyToStr(key): self._ValueToStr(value)
+        for key, value in retrieved_dict.iteritems()}
+    memcache.set_multi(
+        strs_to_cache, key_prefix=self.memcache_prefix,
+        time=framework_constants.MEMCACHE_EXPIRATION)
+    logging.info('cached batch of %d values in memcache %s',
+                 len(retrieved_dict), self.memcache_prefix)
+
+  def _KeyToStr(self, key):
+    """Convert our int IDs to strings for use as memcache keys."""
+    return str(key)
+
+  def _StrToKey(self, key_str):
+    """Convert memcache keys back to the ints that we use as IDs."""
+    return int(key_str)
+
+  def _ValueToStr(self, value):
+    """Serialize an application object so that it can be stored in memcache."""
+    if not self.pb_class:
+      return value
+    elif self.pb_class == int:
+      return str(value)
+    else:
+      return protobuf.encode_message(value)
+
+  def _StrToValue(self, serialized_value):
+    """Deserialize an application object that was stored in memcache."""
+    if not self.pb_class:
+      return serialized_value
+    elif self.pb_class == int:
+      return int(serialized_value)
+    else:
+      return protobuf.decode_message(self.pb_class, serialized_value)
+
+  def InvalidateKeys(self, cnxn, keys):
+    """Drop the given keys from both RAM and memcache."""
+    self.cache.InvalidateKeys(cnxn, keys)
+    memcache.delete_multi(
+        [self._KeyToStr(key) for key in keys], key_prefix=self.memcache_prefix)
+
+  def InvalidateAllKeys(self, cnxn, keys):
+    """Drop the given keys from memcache and invalidate all keys in RAM.
+
+    Useful for avoiding inserting many rows into the Invalidate table when
+    invalidating a large group of keys all at once.  Only use when necessary.
+    """
+    self.cache.InvalidateAll(cnxn)
+    memcache.delete_multi(
+        [self._KeyToStr(key) for key in keys], key_prefix=self.memcache_prefix)
+
+  def GetAllAlreadyInRam(self, keys):
+    """Look only in RAM to return {key: values}, missed_keys."""
+    result_dict, missed_keys = self.cache.GetAll(keys)
+    return result_dict, missed_keys
+
+  def InvalidateAllRamEntries(self, cnxn):
+    """Drop all RAM cache entries.  It will refill as needed from memcache."""
+    self.cache.InvalidateAll(cnxn)
+
+  def FetchItems(self, cnxn, keys, **kwargs):
+    """On RAM and memcache miss, hit the database."""
+    raise NotImplementedError()
diff --git a/appengine/monorail/services/client_config_svc.py b/appengine/monorail/services/client_config_svc.py
new file mode 100644
index 0000000..4a92cdc
--- /dev/null
+++ b/appengine/monorail/services/client_config_svc.py
@@ -0,0 +1,134 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+import base64
+import json
+import logging
+import os
+import time
+import urllib
+
+from google import protobuf
+from google.appengine.api import app_identity
+from google.appengine.api import urlfetch
+
+import settings
+from framework import framework_constants
+from proto import api_clients_config_pb2
+
+
+# Path of the local config file used in dev mode and in unit tests.
+CONFIG_FILE_PATH = os.path.join(
+    os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+    'testing', 'api_clients.cfg')
+# luci-config config-set name for this app, URL-escaped for use in the URL.
+MONORAIL_CONFIG_SET = urllib.quote(
+    'services/%s' % app_identity.get_application_id(), safe='')
+LUCI_CONFIG_URL = (
+    'https://luci-config.appspot.com/_ah/api/config/v1/config_sets'
+    '/%s/config/api_clients.cfg') % MONORAIL_CONFIG_SET
+
+
+# Module-level singletons, created lazily by the getter functions below.
+client_config_svc = None
+service_account_map = None
+
+
+class ClientConfigService(object):
+  """The persistence layer for client config data."""
+
+  # One hour
+  EXPIRES_IN = 3600
+
+  def __init__(self):
+    # Parsed api_clients_config_pb2.ClientCfg, or None until the first load.
+    self.client_configs = None
+    # Epoch seconds of the last successful load; 0 forces an initial load.
+    self.load_time = 0
+
+  def GetConfigs(self, use_cache=True, cur_time=None):
+    """Read client configs.
+
+    Args:
+      use_cache: set to False to force a reload even if not yet expired.
+      cur_time: optional current time in epoch seconds, for testing;
+          defaults to the real current time.
+
+    Returns:
+      The cached or freshly loaded ClientCfg, or None if no load has
+      ever succeeded.
+    """
+
+    cur_time = cur_time or int(time.time())
+    # Reload when we have nothing yet, when the caller opts out of caching,
+    # or when the cached copy is older than EXPIRES_IN seconds.
+    force_load = False
+    if not self.client_configs:
+      force_load = True
+    elif not use_cache:
+      force_load = True
+    elif cur_time - self.load_time > self.EXPIRES_IN:
+      force_load = True
+
+    if force_load:
+      if settings.dev_mode or settings.unit_test_mode:
+        self._ReadFromLocal()
+      else:
+        self._ReadFromLuciConfig()
+
+    return self.client_configs
+
+  def _ReadFromLocal(self):
+    """Load configs from the checked-in api_clients.cfg text-format proto."""
+    try:
+      with open(CONFIG_FILE_PATH, 'r') as f:
+        content_text = f.read()
+      logging.info('Read client configs from local file.')
+      cfg = api_clients_config_pb2.ClientCfg()
+      protobuf.text_format.Merge(content_text, cfg)
+      self.client_configs = cfg
+      self.load_time = int(time.time())
+    except Exception as ex:
+      # Deliberately broad: on any failure, keep serving the previously
+      # loaded configs (if any) rather than failing the request.
+      logging.exception(
+          'Failed to read client configs: %s',
+          str(ex))
+
+  def _ReadFromLuciConfig(self):
+    """Fetch and parse configs from the luci-config service."""
+    try:
+      authorization_token, _ = app_identity.get_access_token(
+          framework_constants.OAUTH_SCOPE)
+      response = urlfetch.fetch(
+          LUCI_CONFIG_URL,
+          method=urlfetch.GET,
+          follow_redirects=False,
+          headers={'Content-Type': 'application/json; charset=UTF-8',
+                   'Authorization': 'Bearer ' + authorization_token})
+      if response.status_code == 200:
+        content = json.loads(response.content)
+        # luci-config wraps the file in JSON with base64-encoded content.
+        config_content = content['content']
+        content_text = base64.b64decode(config_content)
+        logging.info('luci-config content decoded: %r.', content_text)
+        cfg = api_clients_config_pb2.ClientCfg()
+        protobuf.text_format.Merge(content_text, cfg)
+        self.client_configs = cfg
+        self.load_time = int(time.time())
+      else:
+        logging.error('Invalid response from luci-config: %r', response)
+    except Exception as ex:
+      # Deliberately broad: keep serving previous configs on any failure.
+      logging.exception(
+          'Failed to retrieve client configs from luci-config: %s',
+          str(ex))
+
+  def GetClientIDEmails(self):
+    """Get client IDs and Emails.
+
+    Returns:
+      A pair of parallel lists: (client_ids, client_emails).
+    """
+    self.GetConfigs(use_cache=True)
+    client_ids = [c.client_id for c in self.client_configs.clients]
+    client_emails = [c.client_email for c in self.client_configs.clients]
+    return client_ids, client_emails
+
+  def GetDisplayNames(self):
+    """Get client display names.
+
+    Returns:
+      A dict {client_email: display_name} for clients that have one set.
+    """
+    self.GetConfigs(use_cache=True)
+    names_dict = {}
+    for client in self.client_configs.clients:
+      if client.display_name:
+        names_dict[client.client_email] = client.display_name
+    return names_dict
+
+
+def GetClientConfigSvc():
+  """Return the process-wide ClientConfigService, creating it on first use."""
+  global client_config_svc
+  if client_config_svc is None:
+    client_config_svc = ClientConfigService()
+  return client_config_svc
+
+def GetServiceAccountMap():
+  """Return a cached {client_email: display_name} dict of known clients."""
+  global service_account_map
+  if service_account_map is None:
+    # Computed once per process; not refreshed when configs later reload.
+    service_account_map = GetClientConfigSvc().GetDisplayNames()
+  return service_account_map
+
\ No newline at end of file
diff --git a/appengine/monorail/services/config_svc.py b/appengine/monorail/services/config_svc.py
new file mode 100644
index 0000000..1cd27a2
--- /dev/null
+++ b/appengine/monorail/services/config_svc.py
@@ -0,0 +1,1348 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes and functions for persistence of issue tracker configuration.
+
+This module provides functions to get, update, create, and (in some
+cases) delete each type of business object. It provides a logical
+persistence layer on top of an SQL database.
+
+Business objects are described in tracker_pb2.py and tracker_bizobj.py.
+"""
+
+import collections
+import logging
+
+from google.appengine.api import memcache
+
+import settings
+from framework import sql
+from proto import tracker_pb2
+from services import caches
+from tracker import tracker_bizobj
+
+
+# Names of the SQL tables that hold issue tracker configuration data.
+TEMPLATE_TABLE_NAME = 'Template'
+TEMPLATE2LABEL_TABLE_NAME = 'Template2Label'
+TEMPLATE2ADMIN_TABLE_NAME = 'Template2Admin'
+TEMPLATE2COMPONENT_TABLE_NAME = 'Template2Component'
+TEMPLATE2FIELDVALUE_TABLE_NAME = 'Template2FieldValue'
+PROJECTISSUECONFIG_TABLE_NAME = 'ProjectIssueConfig'
+LABELDEF_TABLE_NAME = 'LabelDef'
+FIELDDEF_TABLE_NAME = 'FieldDef'
+FIELDDEF2ADMIN_TABLE_NAME = 'FieldDef2Admin'
+COMPONENTDEF_TABLE_NAME = 'ComponentDef'
+COMPONENT2ADMIN_TABLE_NAME = 'Component2Admin'
+COMPONENT2CC_TABLE_NAME = 'Component2Cc'
+STATUSDEF_TABLE_NAME = 'StatusDef'
+
+# Column lists for each table; result rows are unpacked in this exact order.
+TEMPLATE_COLS = [
+    'id', 'project_id', 'name', 'content', 'summary', 'summary_must_be_edited',
+    'owner_id', 'status', 'members_only', 'owner_defaults_to_member',
+    'component_required']
+TEMPLATE2LABEL_COLS = ['template_id', 'label']
+TEMPLATE2COMPONENT_COLS = ['template_id', 'component_id']
+TEMPLATE2ADMIN_COLS = ['template_id', 'admin_id']
+TEMPLATE2FIELDVALUE_COLS = [
+    'template_id', 'field_id', 'int_value', 'str_value', 'user_id']
+PROJECTISSUECONFIG_COLS = [
+    'project_id', 'statuses_offer_merge', 'exclusive_label_prefixes',
+    'default_template_for_developers', 'default_template_for_users',
+    'default_col_spec', 'default_sort_spec', 'default_x_attr',
+    'default_y_attr', 'custom_issue_entry_url']
+STATUSDEF_COLS = [
+    'id', 'project_id', 'rank', 'status', 'means_open', 'docstring',
+    'deprecated']
+LABELDEF_COLS = [
+    'id', 'project_id', 'rank', 'label', 'docstring', 'deprecated']
+FIELDDEF_COLS = [
+    'id', 'project_id', 'rank', 'field_name', 'field_type', 'applicable_type',
+    'applicable_predicate', 'is_required', 'is_multivalued',
+    'min_value', 'max_value', 'regex', 'needs_member', 'needs_perm',
+    'grants_perm', 'notify_on', 'docstring', 'is_deleted']
+FIELDDEF2ADMIN_COLS = ['field_id', 'admin_id']
+COMPONENTDEF_COLS = ['id', 'project_id', 'path', 'docstring', 'deprecated',
+    'created', 'creator_id', 'modified', 'modifier_id']
+COMPONENT2ADMIN_COLS = ['component_id', 'admin_id']
+COMPONENT2CC_COLS = ['component_id', 'cc_id']
+
+# String values allowed in the FieldDef.notify_on DB column.
+NOTIFY_ON_ENUM = ['never', 'any_comment']
+
+
+class LabelRowTwoLevelCache(caches.AbstractTwoLevelCache):
+ """Class to manage RAM and memcache for label rows.
+
+ Label rows exist for every label used in a project, even those labels
+ that were added to issues in an ad hoc way without being defined in the
+ config ahead of time.
+ """
+
+ def __init__(self, cache_manager, config_service):
+ super(LabelRowTwoLevelCache, self).__init__(
+ cache_manager, 'project', 'label_rows:', None)
+ self.config_service = config_service
+
+ def _DeserializeLabelRows(self, label_def_rows):
+ """Convert DB result rows into a dict {project_id: [row, ...]}."""
+ result_dict = collections.defaultdict(list)
+ for label_id, project_id, rank, label, docstr, deprecated in label_def_rows:
+ result_dict[project_id].append(
+ (label_id, project_id, rank, label, docstr, deprecated))
+
+ return result_dict
+
+ def FetchItems(self, cnxn, keys):
+ """On RAM and memcache miss, hit the database."""
+ label_def_rows = self.config_service.labeldef_tbl.Select(
+ cnxn, cols=LABELDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('label DESC', [])])
+ label_rows_dict = self._DeserializeLabelRows(label_def_rows)
+
+ # Make sure that every requested project is represented in the result
+ for project_id in keys:
+ label_rows_dict.setdefault(project_id, [])
+
+ return label_rows_dict
+
+
+class StatusRowTwoLevelCache(caches.AbstractTwoLevelCache):
+ """Class to manage RAM and memcache for status rows."""
+
+ def __init__(self, cache_manager, config_service):
+ super(StatusRowTwoLevelCache, self).__init__(
+ cache_manager, 'project', 'status_rows:', None)
+ self.config_service = config_service
+
+ def _DeserializeStatusRows(self, def_rows):
+ """Convert status definition rows into {project_id: [row, ...]}."""
+ result_dict = collections.defaultdict(list)
+ for (status_id, project_id, rank, status,
+ means_open, docstr, deprecated) in def_rows:
+ result_dict[project_id].append(
+ (status_id, project_id, rank, status, means_open, docstr, deprecated))
+
+ return result_dict
+
+ def FetchItems(self, cnxn, keys):
+ """On cache miss, get status definition rows from the DB."""
+ status_def_rows = self.config_service.statusdef_tbl.Select(
+ cnxn, cols=STATUSDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('status DESC', [])])
+ status_rows_dict = self._DeserializeStatusRows(status_def_rows)
+
+ # Make sure that every requested project is represented in the result
+ for project_id in keys:
+ status_rows_dict.setdefault(project_id, [])
+
+ return status_rows_dict
+
+
+class FieldRowTwoLevelCache(caches.AbstractTwoLevelCache):
+ """Class to manage RAM and memcache for field rows.
+
+ Field rows exist for every field used in a project, since they cannot be
+ created through ad-hoc means.
+ """
+
+ def __init__(self, cache_manager, config_service):
+ super(FieldRowTwoLevelCache, self).__init__(
+ cache_manager, 'project', 'field_rows:', None)
+ self.config_service = config_service
+
+ def _DeserializeFieldRows(self, field_def_rows):
+ """Convert DB result rows into a dict {project_id: [row, ...]}."""
+ result_dict = collections.defaultdict(list)
+ # TODO(agable): Actually process the rest of the items.
+ for (field_id, project_id, rank, field_name, _field_type, _applicable_type,
+ _applicable_predicate, _is_required, _is_multivalued, _min_value,
+ _max_value, _regex, _needs_member, _needs_perm, _grants_perm,
+ _notify_on, docstring, _is_deleted) in field_def_rows:
+ result_dict[project_id].append(
+ (field_id, project_id, rank, field_name, docstring))
+
+ return result_dict
+
+ def FetchItems(self, cnxn, keys):
+ """On RAM and memcache miss, hit the database."""
+ field_def_rows = self.config_service.fielddef_tbl.Select(
+ cnxn, cols=FIELDDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('field_name DESC', [])])
+ field_rows_dict = self._DeserializeFieldRows(field_def_rows)
+
+ # Make sure that every requested project is represented in the result
+ for project_id in keys:
+ field_rows_dict.setdefault(project_id, [])
+
+ return field_rows_dict
+
+
+class ConfigTwoLevelCache(caches.AbstractTwoLevelCache):
+  """Class to manage RAM and memcache for IssueProjectConfig PBs."""
+
+  def __init__(self, cache_manager, config_service):
+    super(ConfigTwoLevelCache, self).__init__(
+        cache_manager, 'project', 'config:', tracker_pb2.ProjectIssueConfig)
+    self.config_service = config_service
+
+  def _UnpackProjectIssueConfig(self, config_row):
+    """Partially construct a config object using info from a DB row."""
+    (project_id, statuses_offer_merge, exclusive_label_prefixes,
+     default_template_for_developers, default_template_for_users,
+     default_col_spec, default_sort_spec, default_x_attr, default_y_attr,
+     custom_issue_entry_url) = config_row
+    config = tracker_pb2.ProjectIssueConfig()
+    config.project_id = project_id
+    config.statuses_offer_merge.extend(statuses_offer_merge.split())
+    config.exclusive_label_prefixes.extend(exclusive_label_prefixes.split())
+    config.default_template_for_developers = default_template_for_developers
+    config.default_template_for_users = default_template_for_users
+    config.default_col_spec = default_col_spec
+    config.default_sort_spec = default_sort_spec
+    config.default_x_attr = default_x_attr
+    config.default_y_attr = default_y_attr
+    # custom_issue_entry_url is nullable in the DB; only set it when present.
+    if custom_issue_entry_url is not None:
+      config.custom_issue_entry_url = custom_issue_entry_url
+
+    return config
+
+  def _UnpackTemplate(self, template_row):
+    """Partially construct a template object using info from a DB row."""
+    (template_id, project_id, name, content, summary,
+     summary_must_be_edited, owner_id, status,
+     members_only, owner_defaults_to_member, component_required) = template_row
+    template = tracker_pb2.TemplateDef()
+    template.template_id = template_id
+    template.name = name
+    template.content = content
+    template.summary = summary
+    template.summary_must_be_edited = bool(
+        summary_must_be_edited)
+    template.owner_id = owner_id or 0
+    template.status = status
+    template.members_only = bool(members_only)
+    template.owner_defaults_to_member = bool(owner_defaults_to_member)
+    template.component_required = bool(component_required)
+
+    return template, project_id
+
+  def _UnpackFieldDef(self, fielddef_row):
+    """Partially construct a FieldDef object using info from a DB row."""
+    (field_id, project_id, _rank, field_name, field_type,
+     applic_type, applic_pred, is_required, is_multivalued,
+     min_value, max_value, regex, needs_member, needs_perm,
+     grants_perm, notify_on_str, docstring, is_deleted) = fielddef_row
+    # The DB stores notify_on as a string; see NOTIFY_ON_ENUM.
+    if notify_on_str == 'any_comment':
+      notify_on = tracker_pb2.NotifyTriggers.ANY_COMMENT
+    else:
+      notify_on = tracker_pb2.NotifyTriggers.NEVER
+
+    return tracker_bizobj.MakeFieldDef(
+        field_id, project_id, field_name,
+        tracker_pb2.FieldTypes(field_type.upper()), applic_type, applic_pred,
+        is_required, is_multivalued, min_value, max_value, regex,
+        needs_member, needs_perm, grants_perm, notify_on, docstring,
+        is_deleted)
+
+  def _UnpackComponentDef(
+      self, cd_row, component2admin_rows, component2cc_rows):
+    """Partially construct a ComponentDef object using info from a DB row."""
+    (component_id, project_id, path, docstring, deprecated, created,
+     creator_id, modified, modifier_id) = cd_row
+    cd = tracker_bizobj.MakeComponentDef(
+        component_id, project_id, path, docstring, deprecated,
+        [admin_id for comp_id, admin_id in component2admin_rows
+         if comp_id == component_id],
+        [cc_id for comp_id, cc_id in component2cc_rows
+         if comp_id == component_id],
+        created, creator_id, modified, modifier_id)
+
+    return cd
+
+  def _DeserializeIssueConfigs(
+      self, config_rows, template_rows, template2label_rows,
+      template2component_rows, template2admin_rows, template2fieldvalue_rows,
+      statusdef_rows, labeldef_rows, fielddef_rows, fielddef2admin_rows,
+      componentdef_rows, component2admin_rows, component2cc_rows):
+    """Convert the given row tuples into a dict of ProjectIssueConfig PBs."""
+    result_dict = {}
+    template_dict = {}
+    fielddef_dict = {}
+
+    for config_row in config_rows:
+      config = self._UnpackProjectIssueConfig(config_row)
+      result_dict[config.project_id] = config
+
+    for template_row in template_rows:
+      template, project_id = self._UnpackTemplate(template_row)
+      if project_id in result_dict:
+        result_dict[project_id].templates.append(template)
+      template_dict[template.template_id] = template
+
+    for template2label_row in template2label_rows:
+      template_id, label = template2label_row
+      template = template_dict.get(template_id)
+      if template:
+        template.labels.append(label)
+
+    for template2component_row in template2component_rows:
+      template_id, component_id = template2component_row
+      template = template_dict.get(template_id)
+      if template:
+        template.component_ids.append(component_id)
+
+    for template2admin_row in template2admin_rows:
+      template_id, admin_id = template2admin_row
+      template = template_dict.get(template_id)
+      if template:
+        template.admin_ids.append(admin_id)
+
+    for fv_row in template2fieldvalue_rows:
+      template_id, field_id, int_value, str_value, user_id = fv_row
+      fv = tracker_bizobj.MakeFieldValue(
+          field_id, int_value, str_value, user_id, False)
+      template = template_dict.get(template_id)
+      if template:
+        template.field_values.append(fv)
+
+    for statusdef_row in statusdef_rows:
+      (_, project_id, _rank, status,
+       means_open, docstring, deprecated) = statusdef_row
+      if project_id in result_dict:
+        wks = tracker_pb2.StatusDef(
+            status=status, means_open=bool(means_open),
+            status_docstring=docstring or '', deprecated=bool(deprecated))
+        result_dict[project_id].well_known_statuses.append(wks)
+
+    for labeldef_row in labeldef_rows:
+      _, project_id, _rank, label, docstring, deprecated = labeldef_row
+      if project_id in result_dict:
+        wkl = tracker_pb2.LabelDef(
+            label=label, label_docstring=docstring or '',
+            deprecated=bool(deprecated))
+        result_dict[project_id].well_known_labels.append(wkl)
+
+    for fd_row in fielddef_rows:
+      fd = self._UnpackFieldDef(fd_row)
+      # NOTE(review): unlike the status/label loops above, this does not guard
+      # with "if project_id in result_dict", so a FieldDef (or ComponentDef,
+      # below) row whose project lacks a ProjectIssueConfig row would raise
+      # KeyError -- presumably the DB guarantees the config row exists;
+      # confirm.
+      result_dict[fd.project_id].field_defs.append(fd)
+      fielddef_dict[fd.field_id] = fd
+
+    for fd2admin_row in fielddef2admin_rows:
+      field_id, admin_id = fd2admin_row
+      fd = fielddef_dict.get(field_id)
+      if fd:
+        fd.admin_ids.append(admin_id)
+
+    for cd_row in componentdef_rows:
+      cd = self._UnpackComponentDef(
+          cd_row, component2admin_rows, component2cc_rows)
+      result_dict[cd.project_id].component_defs.append(cd)
+
+    return result_dict
+
+  def _FetchConfigs(self, cnxn, project_ids):
+    """On RAM and memcache miss, hit the database."""
+    config_rows = self.config_service.projectissueconfig_tbl.Select(
+        cnxn, cols=PROJECTISSUECONFIG_COLS, project_id=project_ids)
+    template_rows = self.config_service.template_tbl.Select(
+        cnxn, cols=TEMPLATE_COLS, project_id=project_ids,
+        order_by=[('name', [])])
+    template_ids = [row[0] for row in template_rows]
+    template2label_rows = self.config_service.template2label_tbl.Select(
+        cnxn, cols=TEMPLATE2LABEL_COLS, template_id=template_ids)
+    template2component_rows = self.config_service.template2component_tbl.Select(
+        cnxn, cols=TEMPLATE2COMPONENT_COLS, template_id=template_ids)
+    template2admin_rows = self.config_service.template2admin_tbl.Select(
+        cnxn, cols=TEMPLATE2ADMIN_COLS, template_id=template_ids)
+    template2fv_rows = self.config_service.template2fieldvalue_tbl.Select(
+        cnxn, cols=TEMPLATE2FIELDVALUE_COLS, template_id=template_ids)
+    logging.info('t2fv is %r', template2fv_rows)
+    statusdef_rows = self.config_service.statusdef_tbl.Select(
+        cnxn, cols=STATUSDEF_COLS, project_id=project_ids,
+        where=[('rank IS NOT NULL', [])], order_by=[('rank', [])])
+    labeldef_rows = self.config_service.labeldef_tbl.Select(
+        cnxn, cols=LABELDEF_COLS, project_id=project_ids,
+        where=[('rank IS NOT NULL', [])], order_by=[('rank', [])])
+    # TODO(jrobbins): For now, sort by field name, but someday allow admins
+    # to adjust the rank to group and order field definitions logically.
+    fielddef_rows = self.config_service.fielddef_tbl.Select(
+        cnxn, cols=FIELDDEF_COLS, project_id=project_ids,
+        order_by=[('field_name', [])])
+    field_ids = [row[0] for row in fielddef_rows]
+    fielddef2admin_rows = self.config_service.fielddef2admin_tbl.Select(
+        cnxn, cols=FIELDDEF2ADMIN_COLS, field_id=field_ids)
+    componentdef_rows = self.config_service.componentdef_tbl.Select(
+        cnxn, cols=COMPONENTDEF_COLS, project_id=project_ids,
+        order_by=[('LOWER(path)', [])])
+    component_ids = [cd_row[0] for cd_row in componentdef_rows]
+    component2admin_rows = self.config_service.component2admin_tbl.Select(
+        cnxn, cols=COMPONENT2ADMIN_COLS, component_id=component_ids)
+    component2cc_rows = self.config_service.component2cc_tbl.Select(
+        cnxn, cols=COMPONENT2CC_COLS, component_id=component_ids)
+
+    retrieved_dict = self._DeserializeIssueConfigs(
+        config_rows, template_rows, template2label_rows,
+        template2component_rows, template2admin_rows,
+        template2fv_rows, statusdef_rows, labeldef_rows,
+        fielddef_rows, fielddef2admin_rows, componentdef_rows,
+        component2admin_rows, component2cc_rows)
+    return retrieved_dict
+
+  def FetchItems(self, cnxn, keys):
+    """On RAM and memcache miss, hit the database."""
+    retrieved_dict = self._FetchConfigs(cnxn, keys)
+
+    # Any projects which don't have stored configs should use a default
+    # config instead.
+    for project_id in keys:
+      if project_id not in retrieved_dict:
+        config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
+        retrieved_dict[project_id] = config
+
+    return retrieved_dict
+
+
+class ConfigService(object):
+ """The persistence layer for Monorail's issue tracker configuration data."""
+
+  def __init__(self, cache_manager):
+    """Initialize this object so that it is ready to use.
+
+    Args:
+      cache_manager: manages local caches with distributed invalidation.
+    """
+    self.template_tbl = sql.SQLTableManager(TEMPLATE_TABLE_NAME)
+    self.template2label_tbl = sql.SQLTableManager(TEMPLATE2LABEL_TABLE_NAME)
+    self.template2component_tbl = sql.SQLTableManager(
+        TEMPLATE2COMPONENT_TABLE_NAME)
+    self.template2admin_tbl = sql.SQLTableManager(TEMPLATE2ADMIN_TABLE_NAME)
+    self.template2fieldvalue_tbl = sql.SQLTableManager(
+        TEMPLATE2FIELDVALUE_TABLE_NAME)
+    self.projectissueconfig_tbl = sql.SQLTableManager(
+        PROJECTISSUECONFIG_TABLE_NAME)
+    self.statusdef_tbl = sql.SQLTableManager(STATUSDEF_TABLE_NAME)
+    self.labeldef_tbl = sql.SQLTableManager(LABELDEF_TABLE_NAME)
+    self.fielddef_tbl = sql.SQLTableManager(FIELDDEF_TABLE_NAME)
+    self.fielddef2admin_tbl = sql.SQLTableManager(FIELDDEF2ADMIN_TABLE_NAME)
+    self.componentdef_tbl = sql.SQLTableManager(COMPONENTDEF_TABLE_NAME)
+    self.component2admin_tbl = sql.SQLTableManager(COMPONENT2ADMIN_TABLE_NAME)
+    self.component2cc_tbl = sql.SQLTableManager(COMPONENT2CC_TABLE_NAME)
+
+    # Each *_row_2lc caches raw DB rows keyed by project_id; the companion
+    # *_cache holds mappings derived from those rows (see _DeserializeLabels).
+    self.config_2lc = ConfigTwoLevelCache(cache_manager, self)
+    self.label_row_2lc = LabelRowTwoLevelCache(cache_manager, self)
+    self.label_cache = cache_manager.MakeCache('project')
+    self.status_row_2lc = StatusRowTwoLevelCache(cache_manager, self)
+    self.status_cache = cache_manager.MakeCache('project')
+    self.field_row_2lc = FieldRowTwoLevelCache(cache_manager, self)
+    self.field_cache = cache_manager.MakeCache('project')
+
+ ### Label lookups
+
+ def GetLabelDefRows(self, cnxn, project_id):
+ """Get SQL result rows for all labels used in the specified project."""
+ pids_to_label_rows, misses = self.label_row_2lc.GetAll(cnxn, [project_id])
+ assert not misses
+ return pids_to_label_rows[project_id]
+
+ def GetLabelDefRowsAnyProject(self, cnxn, where=None):
+ """Get all LabelDef rows for the whole site. Used in whole-site search."""
+ # TODO(jrobbins): maybe add caching for these too.
+ label_def_rows = self.labeldef_tbl.Select(
+ cnxn, cols=LABELDEF_COLS, where=where,
+ order_by=[('rank DESC', []), ('label DESC', [])])
+ return label_def_rows
+
+ def _DeserializeLabels(self, def_rows):
+ """Convert label defs into bi-directional mappings of names and IDs."""
+ label_id_to_name = {
+ label_id: label for
+ label_id, _pid, _rank, label, _doc, _deprecated
+ in def_rows}
+ label_name_to_id = {
+ label.lower(): label_id
+ for label_id, label in label_id_to_name.iteritems()}
+
+ return label_id_to_name, label_name_to_id
+
+ def _EnsureLabelCacheEntry(self, cnxn, project_id):
+ """Make sure that self.label_cache has an entry for project_id."""
+ if not self.label_cache.HasItem(project_id):
+ def_rows = self.GetLabelDefRows(cnxn, project_id)
+ self.label_cache.CacheItem(project_id, self._DeserializeLabels(def_rows))
+
+ def LookupLabel(self, cnxn, project_id, label_id):
+ """Lookup a label string given the label_id.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the label is defined or used.
+ label_id: int label ID.
+
+ Returns:
+ Label name string for the given label_id, or None.
+ """
+ self._EnsureLabelCacheEntry(cnxn, project_id)
+ label_id_to_name, _label_name_to_id = self.label_cache.GetItem(
+ project_id)
+ return label_id_to_name.get(label_id)
+
+ def LookupLabelID(self, cnxn, project_id, label, autocreate=True):
+ """Look up a label ID, optionally interning it.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ label: label string.
+ autocreate: if not already in the DB, store it and generate a new ID.
+
+ Returns:
+ The label ID for the given label string.
+ """
+ self._EnsureLabelCacheEntry(cnxn, project_id)
+ _label_id_to_name, label_name_to_id = self.label_cache.GetItem(
+ project_id)
+ if label.lower() in label_name_to_id:
+ return label_name_to_id[label.lower()]
+
+ if autocreate:
+ logging.info('No label %r is known in project %d, so intern it.',
+ label, project_id)
+ label_id = self.labeldef_tbl.InsertRow(
+ cnxn, project_id=project_id, label=label)
+ self.label_row_2lc.InvalidateKeys(cnxn, [project_id])
+ self.label_cache.Invalidate(cnxn, project_id)
+ return label_id
+
+ return None # It was not found and we don't want to create it.
+
+ def LookupLabelIDs(self, cnxn, project_id, labels, autocreate=False):
+ """Look up several label IDs.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ labels: list of label strings.
+ autocreate: if not already in the DB, store it and generate a new ID.
+
+ Returns:
+ Returns a list of int label IDs for the given label strings.
+ """
+ result = []
+ for lab in labels:
+ label_id = self.LookupLabelID(
+ cnxn, project_id, lab, autocreate=autocreate)
+ if label_id is not None:
+ result.append(label_id)
+
+ return result
+
+ def LookupIDsOfLabelsMatching(self, cnxn, project_id, regex):
+ """Look up the IDs of all labels in a project that match the regex.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ regex: regular expression object to match against the label strings.
+
+ Returns:
+ List of label IDs for labels that match the regex.
+ """
+ self._EnsureLabelCacheEntry(cnxn, project_id)
+ label_id_to_name, _label_name_to_id = self.label_cache.GetItem(
+ project_id)
+ result = [label_id for label_id, label in label_id_to_name.iteritems()
+ if regex.match(label)]
+
+ return result
+
+ def LookupLabelIDsAnyProject(self, cnxn, label):
+ """Return the IDs of labels with the given name in any project.
+
+ Args:
+ cnxn: connection to SQL database.
+ label: string label to look up. Case sensitive.
+
+ Returns:
+ A list of int label IDs of all labels matching the given string.
+ """
+ # TODO(jrobbins): maybe add caching for these too.
+ label_id_rows = self.labeldef_tbl.Select(
+ cnxn, cols=['id'], label=label)
+ label_ids = [row[0] for row in label_id_rows]
+ return label_ids
+
+ def LookupIDsOfLabelsMatchingAnyProject(self, cnxn, regex):
+ """Return the IDs of matching labels in any project."""
+ label_rows = self.labeldef_tbl.Select(
+ cnxn, cols=['id', 'label'])
+ matching_ids = [
+ label_id for label_id, label in label_rows if regex.match(label)]
+ return matching_ids
+
+ ### Status lookups
+
+ def GetStatusDefRows(self, cnxn, project_id):
+ """Return a list of status definition rows for the specified project."""
+ pids_to_status_rows, misses = self.status_row_2lc.GetAll(
+ cnxn, [project_id])
+ assert not misses
+ return pids_to_status_rows[project_id]
+
+ def GetStatusDefRowsAnyProject(self, cnxn):
+ """Return all status definition rows on the whole site."""
+ # TODO(jrobbins): maybe add caching for these too.
+ status_def_rows = self.statusdef_tbl.Select(
+ cnxn, cols=STATUSDEF_COLS,
+ order_by=[('rank DESC', []), ('status DESC', [])])
+ return status_def_rows
+
+ def _DeserializeStatuses(self, def_rows):
+ """Convert status defs into bi-directional mappings of names and IDs."""
+ status_id_to_name = {
+ status_id: status
+ for (status_id, _pid, _rank, status, _means_open,
+ _doc, _deprecated) in def_rows}
+ status_name_to_id = {
+ status.lower(): status_id
+ for status_id, status in status_id_to_name.iteritems()}
+ closed_status_ids = [
+ status_id
+ for (status_id, _pid, _rank, _status, means_open,
+ _doc, _deprecated) in def_rows
+ if means_open == 0] # Only 0 means closed. NULL/None means open.
+
+ return status_id_to_name, status_name_to_id, closed_status_ids
+
+ def _EnsureStatusCacheEntry(self, cnxn, project_id):
+ """Make sure that self.status_cache has an entry for project_id."""
+ if not self.status_cache.HasItem(project_id):
+ def_rows = self.GetStatusDefRows(cnxn, project_id)
+ self.status_cache.CacheItem(
+ project_id, self._DeserializeStatuses(def_rows))
+
+ def LookupStatus(self, cnxn, project_id, status_id):
+ """Look up a status string for the given status ID.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ status_id: int ID of the status value.
+
+ Returns:
+ A status string, or None.
+ """
+ if status_id == 0:
+ return ''
+
+ self._EnsureStatusCacheEntry(cnxn, project_id)
+ (status_id_to_name, _status_name_to_id,
+ _closed_status_ids) = self.status_cache.GetItem(project_id)
+
+ return status_id_to_name.get(status_id)
+
+ def LookupStatusID(self, cnxn, project_id, status, autocreate=True):
+ """Look up a status ID for the given status string.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ status: status string.
+ autocreate: if not already in the DB, store it and generate a new ID.
+
+ Returns:
+ The status ID for the given status string, or None.
+ """
+ if not status:
+ return None
+
+ self._EnsureStatusCacheEntry(cnxn, project_id)
+ (_status_id_to_name, status_name_to_id,
+ _closed_status_ids) = self.status_cache.GetItem(project_id)
+ if status.lower() in status_name_to_id:
+ return status_name_to_id[status.lower()]
+
+ if autocreate:
+ logging.info('No status %r is known in project %d, so intern it.',
+ status, project_id)
+ status_id = self.statusdef_tbl.InsertRow(
+ cnxn, project_id=project_id, status=status)
+ self.status_row_2lc.InvalidateKeys(cnxn, [project_id])
+ self.status_cache.Invalidate(cnxn, project_id)
+ return status_id
+
+ return None # It was not found and we don't want to create it.
+
+ def LookupStatusIDs(self, cnxn, project_id, statuses):
+ """Look up several status IDs for the given status strings.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the statuses are defined.
+ statuses: list of status strings.
+
+ Returns:
+ A list of int status IDs.
+ """
+ result = []
+ for stat in statuses:
+ status_id = self.LookupStatusID(cnxn, project_id, stat, autocreate=False)
+ if status_id:
+ result.append(status_id)
+
+ return result
+
+ def LookupClosedStatusIDs(self, cnxn, project_id):
+ """Return the IDs of closed statuses defined in the given project."""
+ self._EnsureStatusCacheEntry(cnxn, project_id)
+ (_status_id_to_name, _status_name_to_id,
+ closed_status_ids) = self.status_cache.GetItem(project_id)
+
+ return closed_status_ids
+
+ def LookupClosedStatusIDsAnyProject(self, cnxn):
+ """Return the IDs of closed statuses defined in any project."""
+ status_id_rows = self.statusdef_tbl.Select(
+ cnxn, cols=['id'], means_open=False)
+ status_ids = [row[0] for row in status_id_rows]
+ return status_ids
+
+ def LookupStatusIDsAnyProject(self, cnxn, status):
+ """Return the IDs of statues with the given name in any project."""
+ status_id_rows = self.statusdef_tbl.Select(
+ cnxn, cols=['id'], status=status)
+ status_ids = [row[0] for row in status_id_rows]
+ return status_ids
+
+ # TODO(jrobbins): regex matching for status values.
+
+ ### Issue tracker configuration objects
+
+ def GetProjectConfigs(self, cnxn, project_ids, use_cache=True):
+ """Get several project issue config objects."""
+ config_dict, missed_ids = self.config_2lc.GetAll(
+ cnxn, project_ids, use_cache=use_cache)
+ assert not missed_ids
+ return config_dict
+
+ def GetProjectConfig(self, cnxn, project_id, use_cache=True):
+ """Load a ProjectIssueConfig for the specified project from the database.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the current project.
+ use_cache: if False, always hit the database.
+
+ Returns:
+ A ProjectIssueConfig describing how the issue tracker in the specified
+ project is configured. Projects only have a stored ProjectIssueConfig if
+ a project owner has edited the configuration. Other projects use a
+ default configuration.
+ """
+ config_dict = self.GetProjectConfigs(
+ cnxn, [project_id], use_cache=use_cache)
+ return config_dict[project_id]
+
+ def TemplatesWithComponent(self, cnxn, component_id, config):
+ """Returns all templates with the specified component.
+
+ Args:
+ cnxn: connection to SQL database.
+ component_id: int component id.
+ config: ProjectIssueConfig instance.
+ """
+ template2component_rows = self.template2component_tbl.Select(
+ cnxn, cols=['template_id'], component_id=component_id)
+ template_ids = [r[0] for r in template2component_rows]
+ return [t for t in config.templates if t.template_id in template_ids]
+
  def StoreConfig(self, cnxn, config):
    """Update an issue config in the database.

    Args:
      cnxn: connection to SQL database.
      config: ProjectIssueConfig PB to update.
    """
    # TODO(jrobbins): Convert default template index values into foreign
    # key references. Updating an entire config might require (1) adding
    # new templates, (2) updating the config with new foreign key values,
    # and finally (3) deleting only the specific templates that should be
    # deleted.
    # All writes below pass commit=False so that the config row, templates,
    # labels, and statuses are written in one transaction, committed at the
    # end of this method.
    self.projectissueconfig_tbl.InsertRow(
        cnxn, replace=True,
        project_id=config.project_id,
        # Multi-valued string fields are flattened to space-separated strings.
        statuses_offer_merge=' '.join(config.statuses_offer_merge),
        exclusive_label_prefixes=' '.join(config.exclusive_label_prefixes),
        default_template_for_developers=config.default_template_for_developers,
        default_template_for_users=config.default_template_for_users,
        default_col_spec=config.default_col_spec,
        default_sort_spec=config.default_sort_spec,
        default_x_attr=config.default_x_attr,
        default_y_attr=config.default_y_attr,
        custom_issue_entry_url=config.custom_issue_entry_url,
        commit=False)

    self._UpdateTemplates(cnxn, config)
    self._UpdateWellKnownLabels(cnxn, config)
    self._UpdateWellKnownStatuses(cnxn, config)
    cnxn.Commit()
+
  def _UpdateTemplates(self, cnxn, config):
    """Update the templates part of a project's issue configuration.

    Args:
      cnxn: connection to SQL database.
      config: ProjectIssueConfig PB to update in the DB.
    """
    # Delete dependent rows of existing templates. It is all rewritten below.
    template_id_rows = self.template_tbl.Select(
        cnxn, cols=['id'], project_id=config.project_id)
    template_ids = [row[0] for row in template_id_rows]
    self.template2label_tbl.Delete(
        cnxn, template_id=template_ids, commit=False)
    self.template2component_tbl.Delete(
        cnxn, template_id=template_ids, commit=False)
    self.template2admin_tbl.Delete(
        cnxn, template_id=template_ids, commit=False)
    self.template2fieldvalue_tbl.Delete(
        cnxn, template_id=template_ids, commit=False)
    self.template_tbl.Delete(
        cnxn, project_id=config.project_id, commit=False)

    # Now, update existing ones and add new ones.
    # Templates with template_id 0/unset are new and will get generated IDs.
    template_rows = []
    for template in config.templates:
      row = (template.template_id,
             config.project_id,
             template.name,
             template.content,
             template.summary,
             template.summary_must_be_edited,
             template.owner_id or None,  # 0 is stored as NULL
             template.status,
             template.members_only,
             template.owner_defaults_to_member,
             template.component_required)
      template_rows.append(row)

    # Maybe first insert ones that have a template_id and then insert new ones
    # separately.
    generated_ids = self.template_tbl.InsertRows(
        cnxn, TEMPLATE_COLS, template_rows, replace=True, commit=False,
        return_generated_ids=True)
    logging.info('generated_ids is %r', generated_ids)
    # NOTE(review): this assumes generated_ids line up with the new templates
    # when popped from the end -- TODO confirm against sql.InsertRows.
    for template in config.templates:
      if not template.template_id:
        # Get IDs from the back of the list because the original template IDs
        # have already been added to template_rows.
        template.template_id = generated_ids.pop()

    # Rebuild all of the dependent rows now that every template has an ID.
    template2label_rows = []
    template2component_rows = []
    template2admin_rows = []
    template2fieldvalue_rows = []
    for template in config.templates:
      for label in template.labels:
        if label:  # skip empty label strings
          template2label_rows.append((template.template_id, label))
      for component_id in template.component_ids:
        template2component_rows.append((template.template_id, component_id))
      for admin_id in template.admin_ids:
        template2admin_rows.append((template.template_id, admin_id))
      for fv in template.field_values:
        template2fieldvalue_rows.append(
            (template.template_id, fv.field_id, fv.int_value, fv.str_value,
             fv.user_id or None))

    self.template2label_tbl.InsertRows(
        cnxn, TEMPLATE2LABEL_COLS, template2label_rows, ignore=True,
        commit=False)
    self.template2component_tbl.InsertRows(
        cnxn, TEMPLATE2COMPONENT_COLS, template2component_rows, commit=False)
    self.template2admin_tbl.InsertRows(
        cnxn, TEMPLATE2ADMIN_COLS, template2admin_rows, commit=False)
    self.template2fieldvalue_tbl.InsertRows(
        cnxn, TEMPLATE2FIELDVALUE_COLS, template2fieldvalue_rows, commit=False)
+
  def _UpdateWellKnownLabels(self, cnxn, config):
    """Update the labels part of a project's issue configuration.

    Args:
      cnxn: connection to SQL database.
      config: ProjectIssueConfig PB to update in the DB.
    """
    update_labeldef_rows = []
    new_labeldef_rows = []
    for rank, wkl in enumerate(config.well_known_labels):
      # We must specify label ID when replacing, otherwise a new ID is made.
      label_id = self.LookupLabelID(
          cnxn, config.project_id, wkl.label, autocreate=False)
      if label_id:
        row = (label_id, config.project_id, rank, wkl.label,
               wkl.label_docstring, wkl.deprecated)
        update_labeldef_rows.append(row)
      else:
        # No stored ID yet: insert without one so the DB generates it.
        row = (
            config.project_id, rank, wkl.label, wkl.label_docstring,
            wkl.deprecated)
        new_labeldef_rows.append(row)

    # Clear rank on all of the project's labels first; ranks are re-written
    # below for the labels that are currently well-known.
    self.labeldef_tbl.Update(
        cnxn, {'rank': None}, project_id=config.project_id, commit=False)
    self.labeldef_tbl.InsertRows(
        cnxn, LABELDEF_COLS, update_labeldef_rows, replace=True, commit=False)
    # LABELDEF_COLS[1:] omits the leading ID column for brand-new rows.
    self.labeldef_tbl.InsertRows(
        cnxn, LABELDEF_COLS[1:], new_labeldef_rows, commit=False)
    self.label_row_2lc.InvalidateKeys(cnxn, [config.project_id])
    self.label_cache.Invalidate(cnxn, config.project_id)
+
  def _UpdateWellKnownStatuses(self, cnxn, config):
    """Update the status part of a project's issue configuration.

    Args:
      cnxn: connection to SQL database.
      config: ProjectIssueConfig PB to update in the DB.
    """
    update_statusdef_rows = []
    new_statusdef_rows = []
    for rank, wks in enumerate(config.well_known_statuses):
      # We must specify status ID when replacing, otherwise a new ID is made.
      status_id = self.LookupStatusID(cnxn, config.project_id, wks.status,
                                      autocreate=False)
      if status_id is not None:
        row = (status_id, config.project_id, rank, wks.status,
               bool(wks.means_open), wks.status_docstring, wks.deprecated)
        update_statusdef_rows.append(row)
      else:
        # No stored ID yet: insert without one so the DB generates it.
        row = (config.project_id, rank, wks.status,
               bool(wks.means_open), wks.status_docstring, wks.deprecated)
        new_statusdef_rows.append(row)

    # Clear rank on all of the project's statuses first; ranks are re-written
    # below for the statuses that are currently well-known.
    self.statusdef_tbl.Update(
        cnxn, {'rank': None}, project_id=config.project_id, commit=False)
    self.statusdef_tbl.InsertRows(
        cnxn, STATUSDEF_COLS, update_statusdef_rows, replace=True,
        commit=False)
    # STATUSDEF_COLS[1:] omits the leading ID column for brand-new rows.
    self.statusdef_tbl.InsertRows(
        cnxn, STATUSDEF_COLS[1:], new_statusdef_rows, commit=False)
    self.status_row_2lc.InvalidateKeys(cnxn, [config.project_id])
    self.status_cache.Invalidate(cnxn, config.project_id)
+
  def UpdateConfig(
      self, cnxn, project, well_known_statuses=None,
      statuses_offer_merge=None, well_known_labels=None,
      excl_label_prefixes=None, templates=None,
      default_template_for_developers=None, default_template_for_users=None,
      list_prefs=None, restrict_to_known=None):
    """Update project's issue tracker configuration with the given info.

    Args:
      cnxn: connection to SQL database.
      project: the project in which to update the issue tracker config.
      well_known_statuses: [(status_name, docstring, means_open, deprecated),..]
      statuses_offer_merge: list of status values that trigger UI to merge.
      well_known_labels: [(label_name, docstring, deprecated),...]
      excl_label_prefixes: list of prefix strings.  Each issue should
          have only one label with each of these prefixed.
      templates: List of PBs for issue templates.
      default_template_for_developers: int ID of template to use for devs.
      default_template_for_users: int ID of template to use for non-members.
      list_prefs: defaults for columns and sorting.
      restrict_to_known: optional bool to allow project owners
          to limit issue status and label values to only the well-known ones.

    Returns:
      The updated ProjectIssueConfig PB.
    """
    project_id = project.project_id
    # Read the current stored config (bypassing caches) and overlay only the
    # aspects that the caller actually passed; None means "leave unchanged".
    project_config = self.GetProjectConfig(cnxn, project_id, use_cache=False)

    if well_known_statuses is not None:
      tracker_bizobj.SetConfigStatuses(project_config, well_known_statuses)

    if statuses_offer_merge is not None:
      project_config.statuses_offer_merge = statuses_offer_merge

    if well_known_labels is not None:
      tracker_bizobj.SetConfigLabels(project_config, well_known_labels)

    if excl_label_prefixes is not None:
      project_config.exclusive_label_prefixes = excl_label_prefixes

    if templates is not None:
      project_config.templates = templates

    if default_template_for_developers is not None:
      project_config.default_template_for_developers = (
          default_template_for_developers)
    if default_template_for_users is not None:
      project_config.default_template_for_users = default_template_for_users

    if list_prefs:
      default_col_spec, default_sort_spec, x_attr, y_attr = list_prefs
      project_config.default_col_spec = default_col_spec
      project_config.default_sort_spec = default_sort_spec
      project_config.default_x_attr = x_attr
      project_config.default_y_attr = y_attr

    if restrict_to_known is not None:
      project_config.restrict_to_known = restrict_to_known

    # Persist the merged config, then drop every cache that could now be
    # stale: the config cache, this project's memcache entries, and
    # frontend issue caches (sort orders may have changed).
    self.StoreConfig(cnxn, project_config)
    self.config_2lc.InvalidateKeys(cnxn, [project_id])
    self.InvalidateMemcacheForEntireProject(project_id)
    # Invalidate all issue caches in all frontends to clear out
    # sorting.art_values_cache which now has wrong sort orders.
    cache_manager = self.config_2lc.cache.cache_manager
    cache_manager.StoreInvalidateAll(cnxn, 'issue')

    return project_config
+
+ def ExpungeConfig(self, cnxn, project_id):
+ """Completely delete the specified project config from the database."""
+ logging.info('expunging the config for %r', project_id)
+ template_id_rows = self.template_tbl.Select(
+ cnxn, cols=['id'], project_id=project_id)
+ template_ids = [row[0] for row in template_id_rows]
+ self.template2label_tbl.Delete(cnxn, template_id=template_ids)
+ self.template2component_tbl.Delete(cnxn, template_id=template_ids)
+ self.template_tbl.Delete(cnxn, project_id=project_id)
+ self.statusdef_tbl.Delete(cnxn, project_id=project_id)
+ self.labeldef_tbl.Delete(cnxn, project_id=project_id)
+ self.projectissueconfig_tbl.Delete(cnxn, project_id=project_id)
+
+ self.config_2lc.InvalidateKeys(cnxn, [project_id])
+
+ ### Custom field definitions
+
  def CreateFieldDef(
      self, cnxn, project_id, field_name, field_type_str, applic_type,
      applic_pred, is_required, is_multivalued,
      min_value, max_value, regex, needs_member, needs_perm,
      grants_perm, notify_on, docstring, admin_ids):
    """Create a new field definition with the given info.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the current project.
      field_name: name of the new custom field.
      field_type_str: string identifying the type of the custom field.
      applic_type: string specifying issue type the field is applicable to.
      applic_pred: string condition to test if the field is applicable.
      is_required: True if the field should be required on issues.
      is_multivalued: True if the field can occur multiple times on one issue.
      min_value: optional validation for int_type fields.
      max_value: optional validation for int_type fields.
      regex: optional validation for str_type fields.
      needs_member: optional validation for user_type fields.
      needs_perm: optional validation for user_type fields.
      grants_perm: optional string for perm to grant any user named in field.
      notify_on: int enum of when to notify users named in field.
      docstring: string describing this field.
      admin_ids: list of additional user IDs who can edit this field def.

    Returns:
      Integer field_id of the new field definition.
    """
    # The field row and its admin rows are written in one transaction:
    # both inserts use commit=False, followed by a single Commit().
    field_id = self.fielddef_tbl.InsertRow(
        cnxn, project_id=project_id,
        field_name=field_name, field_type=field_type_str,
        applicable_type=applic_type, applicable_predicate=applic_pred,
        is_required=is_required, is_multivalued=is_multivalued,
        min_value=min_value, max_value=max_value, regex=regex,
        needs_member=needs_member, needs_perm=needs_perm,
        # NOTIFY_ON_ENUM maps the int enum to its stored DB value.
        grants_perm=grants_perm, notify_on=NOTIFY_ON_ENUM[notify_on],
        docstring=docstring, commit=False)
    self.fielddef2admin_tbl.InsertRows(
        cnxn, FIELDDEF2ADMIN_COLS,
        [(field_id, admin_id) for admin_id in admin_ids],
        commit=False)
    cnxn.Commit()
    # Stored config changed: drop the cached config and memcache entries.
    self.config_2lc.InvalidateKeys(cnxn, [project_id])
    self.InvalidateMemcacheForEntireProject(project_id)
    return field_id
+
+ def _DeserializeFields(self, def_rows):
+ """Convert field defs into bi-directional mappings of names and IDs."""
+ field_id_to_name = {
+ field_id: field
+ for field_id, _pid, _rank, field, _doc in def_rows}
+ field_name_to_id = {
+ field.lower(): field_id
+ for field_id, field in field_id_to_name.iteritems()}
+
+ return field_id_to_name, field_name_to_id
+
+ def GetFieldDefRows(self, cnxn, project_id):
+ """Get SQL result rows for all fields used in the specified project."""
+ pids_to_field_rows, misses = self.field_row_2lc.GetAll(cnxn, [project_id])
+ assert not misses
+ return pids_to_field_rows[project_id]
+
+ def _EnsureFieldCacheEntry(self, cnxn, project_id):
+ """Make sure that self.field_cache has an entry for project_id."""
+ if not self.field_cache.HasItem(project_id):
+ def_rows = self.GetFieldDefRows(cnxn, project_id)
+ self.field_cache.CacheItem(
+ project_id, self._DeserializeFields(def_rows))
+
+ def LookupField(self, cnxn, project_id, field_id):
+ """Lookup a field string given the field_id.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the label is defined or used.
+ field_id: int field ID.
+
+ Returns:
+ Field name string for the given field_id, or None.
+ """
+ self._EnsureFieldCacheEntry(cnxn, project_id)
+ field_id_to_name, _field_name_to_id = self.field_cache.GetItem(
+ project_id)
+ return field_id_to_name.get(field_id)
+
+ def LookupFieldID(self, cnxn, project_id, field):
+ """Look up a field ID.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project where the fields are defined.
+ field: field string.
+
+ Returns:
+ The field ID for the given field string.
+ """
+ self._EnsureFieldCacheEntry(cnxn, project_id)
+ _field_id_to_name, field_name_to_id = self.field_cache.GetItem(
+ project_id)
+ return field_name_to_id.get(field.lower())
+
+ def SoftDeleteFieldDef(self, cnxn, project_id, field_id):
+ """Mark the specified field as deleted, it will be reaped later."""
+ self.fielddef_tbl.Update(cnxn, {'is_deleted': True}, id=field_id)
+ self.config_2lc.InvalidateKeys(cnxn, [project_id])
+ self.InvalidateMemcacheForEntireProject(project_id)
+
+ # TODO(jrobbins): GC deleted field defs after field values are gone.
+
+ def UpdateFieldDef(
+ self, cnxn, project_id, field_id, field_name=None,
+ applicable_type=None, applicable_predicate=None, is_required=None,
+ is_multivalued=None, min_value=None, max_value=None, regex=None,
+ needs_member=None, needs_perm=None, grants_perm=None, notify_on=None,
+ docstring=None, admin_ids=None):
+ """Update the specified field definition."""
+ new_values = {}
+ if field_name is not None:
+ new_values['field_name'] = field_name
+ if applicable_type is not None:
+ new_values['applicable_type'] = applicable_type
+ if applicable_predicate is not None:
+ new_values['applicable_predicate'] = applicable_predicate
+ if is_required is not None:
+ new_values['is_required'] = bool(is_required)
+ if is_multivalued is not None:
+ new_values['is_multivalued'] = bool(is_multivalued)
+ if min_value is not None:
+ new_values['min_value'] = min_value
+ if max_value is not None:
+ new_values['max_value'] = max_value
+ if regex is not None:
+ new_values['regex'] = regex
+ if needs_member is not None:
+ new_values['needs_member'] = needs_member
+ if needs_perm is not None:
+ new_values['needs_perm'] = needs_perm
+ if grants_perm is not None:
+ new_values['grants_perm'] = grants_perm
+ if notify_on is not None:
+ new_values['notify_on'] = NOTIFY_ON_ENUM[notify_on]
+ if docstring is not None:
+ new_values['docstring'] = docstring
+
+ self.fielddef_tbl.Update(cnxn, new_values, id=field_id, commit=False)
+ self.fielddef2admin_tbl.Delete(cnxn, field_id=field_id, commit=False)
+ self.fielddef2admin_tbl.InsertRows(
+ cnxn, FIELDDEF2ADMIN_COLS,
+ [(field_id, admin_id) for admin_id in admin_ids],
+ commit=False)
+ cnxn.Commit()
+ self.config_2lc.InvalidateKeys(cnxn, [project_id])
+ self.InvalidateMemcacheForEntireProject(project_id)
+
+ ### Component definitions
+
+ def FindMatchingComponentIDsAnyProject(self, cnxn, path_list, exact=True):
+ """Look up component IDs across projects.
+
+ Args:
+ cnxn: connection to SQL database.
+ path_list: list of component path prefixes.
+ exact: set to False to include all components which have one of the
+ given paths as their ancestor, instead of exact matches.
+
+ Returns:
+ A list of component IDs of component's whose paths match path_list.
+ """
+ or_terms = []
+ args = []
+ for path in path_list:
+ or_terms.append('path = %s')
+ args.append(path)
+
+ if not exact:
+ for path in path_list:
+ or_terms.append('path LIKE %s')
+ args.append(path + '>%')
+
+ cond_str = '(' + ' OR '.join(or_terms) + ')'
+ rows = self.componentdef_tbl.Select(
+ cnxn, cols=['id'], where=[(cond_str, args)])
+ return [row[0] for row in rows]
+
  def CreateComponentDef(
      self, cnxn, project_id, path, docstring, deprecated, admin_ids, cc_ids,
      created, creator_id):
    """Create a new component definition with the given info.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the current project.
      path: string pathname of the new component.
      docstring: string describing this field.
      deprecated: whether or not this should be autocompleted
      admin_ids: list of int IDs of users who can administer.
      cc_ids: list of int IDs of users to notify when an issue in
          this component is updated.
      created: timestamp this component was created at.
      creator_id: int ID of user who created this component.

    Returns:
      Integer component_id of the new component definition.
    """
    # The component row and its admin/cc rows are written in one
    # transaction: all inserts use commit=False, then a single Commit().
    component_id = self.componentdef_tbl.InsertRow(
        cnxn, project_id=project_id, path=path, docstring=docstring,
        deprecated=deprecated, created=created, creator_id=creator_id,
        commit=False)
    self.component2admin_tbl.InsertRows(
        cnxn, COMPONENT2ADMIN_COLS,
        [(component_id, admin_id) for admin_id in admin_ids],
        commit=False)
    self.component2cc_tbl.InsertRows(
        cnxn, COMPONENT2CC_COLS,
        [(component_id, cc_id) for cc_id in cc_ids],
        commit=False)
    cnxn.Commit()
    # Stored config changed: drop the cached config and memcache entries.
    self.config_2lc.InvalidateKeys(cnxn, [project_id])
    self.InvalidateMemcacheForEntireProject(project_id)
    return component_id
+
  def UpdateComponentDef(
      self, cnxn, project_id, component_id, path=None, docstring=None,
      deprecated=None, admin_ids=None, cc_ids=None, created=None,
      creator_id=None, modified=None, modifier_id=None):
    """Update the specified component definition.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project that owns the component.
      component_id: int ID of the ComponentDef row to update.
      path..modifier_id: optional new column values; a None value means
          "leave that column unchanged".  admin_ids and cc_ids, when given,
          completely replace the existing user lists.
    """
    new_values = {}
    if path is not None:
      assert path  # An empty path would make the component unnamable.
      new_values['path'] = path
    if docstring is not None:
      new_values['docstring'] = docstring
    if deprecated is not None:
      new_values['deprecated'] = deprecated
    if created is not None:
      new_values['created'] = created
    if creator_id is not None:
      new_values['creator_id'] = creator_id
    if modified is not None:
      new_values['modified'] = modified
    if modifier_id is not None:
      new_values['modifier_id'] = modifier_id

    # Replace-all semantics for the user lists: delete then re-insert.
    if admin_ids is not None:
      self.component2admin_tbl.Delete(
          cnxn, component_id=component_id, commit=False)
      self.component2admin_tbl.InsertRows(
          cnxn, COMPONENT2ADMIN_COLS,
          [(component_id, admin_id) for admin_id in admin_ids],
          commit=False)

    if cc_ids is not None:
      self.component2cc_tbl.Delete(
          cnxn, component_id=component_id, commit=False)
      self.component2cc_tbl.InsertRows(
          cnxn, COMPONENT2CC_COLS,
          [(component_id, cc_id) for cc_id in cc_ids],
          commit=False)

    self.componentdef_tbl.Update(
        cnxn, new_values, id=component_id, commit=False)
    cnxn.Commit()
    # Stored config changed: drop the cached config and memcache entries.
    self.config_2lc.InvalidateKeys(cnxn, [project_id])
    self.InvalidateMemcacheForEntireProject(project_id)
+
  def DeleteComponentDef(self, cnxn, project_id, component_id):
    """Delete the specified component definition."""
    # Delete the dependent cc and admin rows first, then the component
    # itself, all committed as one transaction.
    self.component2cc_tbl.Delete(
        cnxn, component_id=component_id, commit=False)
    self.component2admin_tbl.Delete(
        cnxn, component_id=component_id, commit=False)
    self.componentdef_tbl.Delete(cnxn, id=component_id, commit=False)
    cnxn.Commit()
    # Stored config changed: drop the cached config and memcache entries.
    self.config_2lc.InvalidateKeys(cnxn, [project_id])
    self.InvalidateMemcacheForEntireProject(project_id)
+
+ ### Memcache management
+
  def InvalidateMemcache(self, issues, key_prefix=''):
    """Delete the memcache entries for issues and their project-shard pairs."""
    # Individual issue entries always live under the 'issue:' prefix;
    # key_prefix applies only to the search-shard entries deleted below.
    memcache.delete_multi(
        [str(issue.issue_id) for issue in issues], key_prefix='issue:')
    # Compute the (project, shard) pair for each issue and invalidate the
    # cached search results for those shards.
    project_shards = set(
        (issue.project_id, issue.issue_id % settings.num_logical_shards)
        for issue in issues)
    self._InvalidateMemcacheShards(project_shards, key_prefix=key_prefix)
+
+ def _InvalidateMemcacheShards(self, project_shards, key_prefix=''):
+ """Delete the memcache entries for the given project-shard pairs.
+
+ Deleting these rows does not delete the actual cached search results
+ but it does mean that they will be considered stale and thus not used.
+
+ Args:
+ project_shards: list of (pid, sid) pairs.
+ key_prefix: string to pass as memcache key prefix.
+ """
+ cache_entries = ['%d;%d' % ps for ps in project_shards]
+ # Whenever any project is invalidated, also invalidate the 'all'
+ # entry that is used in site-wide searches.
+ shard_id_set = {sid for _pid, sid in project_shards}
+ cache_entries.extend(('all;%d' % sid) for sid in shard_id_set)
+
+ memcache.delete_multi(cache_entries, key_prefix=key_prefix)
+
  def InvalidateMemcacheForEntireProject(self, project_id):
    """Delete the memcache entries for all searches in a project."""
    # Invalidate every logical search shard of this project.
    project_shards = set((project_id, shard_id)
                         for shard_id in range(settings.num_logical_shards))
    self._InvalidateMemcacheShards(project_shards)
    # Also drop the cached config and raw definition rows for the project.
    memcache.delete_multi([str(project_id)], key_prefix='config:')
    memcache.delete_multi([str(project_id)], key_prefix='label_rows:')
    memcache.delete_multi([str(project_id)], key_prefix='status_rows:')
    memcache.delete_multi([str(project_id)], key_prefix='field_rows:')
+
+
class Error(Exception):
  """Base class for errors from this module."""
+
+
class NoSuchComponentException(Error):
  """No component with the specified name exists."""
+
+
class InvalidComponentNameException(Error):
  """The component name is invalid."""
diff --git a/appengine/monorail/services/features_svc.py b/appengine/monorail/services/features_svc.py
new file mode 100644
index 0000000..2fc2c65
--- /dev/null
+++ b/appengine/monorail/services/features_svc.py
@@ -0,0 +1,384 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class that provides persistence for Monorail's additional features.
+
+Business objects are described in tracker_pb2.py and tracker_bizobj.py.
+"""
+
+import collections
+import logging
+
+from features import filterrules_helpers
+from framework import sql
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
# Names of the SQL tables used by FeaturesService.
QUICKEDITHISTORY_TABLE_NAME = 'QuickEditHistory'
QUICKEDITMOSTRECENT_TABLE_NAME = 'QuickEditMostRecent'
SAVEDQUERY_TABLE_NAME = 'SavedQuery'
PROJECT2SAVEDQUERY_TABLE_NAME = 'Project2SavedQuery'
SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME = 'SavedQueryExecutesInProject'
USER2SAVEDQUERY_TABLE_NAME = 'User2SavedQuery'
FILTERRULE_TABLE_NAME = 'FilterRule'
# Columns of the FilterRule table, in SELECT/INSERT order.
FILTERRULE_COLS = ['project_id', 'rank', 'predicate', 'consequence']


# Columns of the quick-edit and saved-query tables, in SELECT/INSERT order.
QUICKEDITHISTORY_COLS = [
    'user_id', 'project_id', 'slot_num', 'command', 'comment']
QUICKEDITMOSTRECENT_COLS = ['user_id', 'project_id', 'slot_num']
SAVEDQUERY_COLS = ['id', 'name', 'base_query_id', 'query']
PROJECT2SAVEDQUERY_COLS = ['project_id', 'rank', 'query_id']
SAVEDQUERYEXECUTESINPROJECT_COLS = ['query_id', 'project_id']
USER2SAVEDQUERY_COLS = ['user_id', 'rank', 'query_id', 'subscription_mode']
+
+
+class FeaturesService(object):
+ """The persistence layer for servlets in the features directory."""
+
  def __init__(self, cache_manager):
    """Initialize this object so that it is ready to use.

    Args:
      cache_manager: local cache with distributed invalidation.
    """
    # Quick-edit command history tables.
    self.quickedithistory_tbl = sql.SQLTableManager(QUICKEDITHISTORY_TABLE_NAME)
    self.quickeditmostrecent_tbl = sql.SQLTableManager(
        QUICKEDITMOSTRECENT_TABLE_NAME)

    # Saved/canned query tables and their join tables.
    self.savedquery_tbl = sql.SQLTableManager(SAVEDQUERY_TABLE_NAME)
    self.project2savedquery_tbl = sql.SQLTableManager(
        PROJECT2SAVEDQUERY_TABLE_NAME)
    self.savedqueryexecutesinproject_tbl = sql.SQLTableManager(
        SAVEDQUERYEXECUTESINPROJECT_TABLE_NAME)
    self.user2savedquery_tbl = sql.SQLTableManager(USER2SAVEDQUERY_TABLE_NAME)

    # Filter rule table.
    self.filterrule_tbl = sql.SQLTableManager(FILTERRULE_TABLE_NAME)

    # RAM cache of saved queries, keyed by user_id.
    self.saved_query_cache = cache_manager.MakeCache('user', max_size=1000)
+
+ ### QuickEdit command history
+
+ def GetRecentCommands(self, cnxn, user_id, project_id):
+ """Return recent command items for the "Redo" menu.
+
+ Args:
+ cnxn: Connection to SQL database.
+ user_id: int ID of the current user.
+ project_id: int ID of the current project.
+
+ Returns:
+ A pair (cmd_slots, recent_slot_num). cmd_slots is a list of
+ 3-tuples that can be used to populate the "Redo" menu of the
+ quick-edit dialog. recent_slot_num indicates which of those
+ slots should initially populate the command and comment fields.
+ """
+ # Always start with the standard 5 commands.
+ history = tracker_constants.DEFAULT_RECENT_COMMANDS[:]
+ # If the user has modified any, then overwrite some standard ones.
+ history_rows = self.quickedithistory_tbl.Select(
+ cnxn, cols=['slot_num', 'command', 'comment'],
+ user_id=user_id, project_id=project_id)
+ for slot_num, command, comment in history_rows:
+ if slot_num < len(history):
+ history[slot_num - 1] = (command, comment)
+
+ slots = []
+ for idx, (command, comment) in enumerate(history):
+ slots.append((idx + 1, command, comment))
+
+ recent_slot_num = self.quickeditmostrecent_tbl.SelectValue(
+ cnxn, 'slot_num', default=1, user_id=user_id, project_id=project_id)
+
+ return slots, recent_slot_num
+
+ def StoreRecentCommand(
+ self, cnxn, user_id, project_id, slot_num, command, comment):
+ """Store the given command and comment in the user's command history."""
+ self.quickedithistory_tbl.InsertRow(
+ cnxn, replace=True, user_id=user_id, project_id=project_id,
+ slot_num=slot_num, command=command, comment=comment)
+ self.quickeditmostrecent_tbl.InsertRow(
+ cnxn, replace=True, user_id=user_id, project_id=project_id,
+ slot_num=slot_num)
+
+ def ExpungeQuickEditHistory(self, cnxn, project_id):
+ """Completely delete every users' quick edit history for this project."""
+ self.quickeditmostrecent_tbl.Delete(cnxn, project_id=project_id)
+ self.quickedithistory_tbl.Delete(cnxn, project_id=project_id)
+
+ ### Saved User and Project Queries
+
+ def GetSavedQueries(self, cnxn, query_ids):
+ """Retrieve the specified SaveQuery PBs."""
+ # TODO(jrobbins): RAM cache
+ saved_queries = {}
+ savedquery_rows = self.savedquery_tbl.Select(
+ cnxn, cols=SAVEDQUERY_COLS, id=query_ids)
+ for saved_query_tuple in savedquery_rows:
+ qid, name, base_id, query = saved_query_tuple
+ saved_queries[qid] = tracker_bizobj.MakeSavedQuery(
+ qid, name, base_id, query)
+
+ sqeip_rows = self.savedqueryexecutesinproject_tbl.Select(
+ cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS,
+ query_id=query_ids)
+ for query_id, project_id in sqeip_rows:
+ saved_queries[query_id].executes_in_project_ids.append(project_id)
+
+ return saved_queries
+
+ def GetSavedQuery(self, cnxn, query_id):
+ """Retrieve the specified SaveQuery PB."""
+ saved_queries = self.GetSavedQueries(cnxn, [query_id])
+ return saved_queries[query_id]
+
  def _GetUsersSavedQueriesDict(self, cnxn, user_ids):
    """Return a dict of all SavedQuery PBs for the specified users.

    Results are served from the RAM cache when possible; only cache-miss
    users are fetched from the DB, and those results are then cached.

    Args:
      cnxn: connection to SQL database.
      user_ids: list of int user IDs to look up.

    Returns:
      A dict {user_id: [SavedQuery PB, ...]}; users with no saved
      queries may be absent from the dict.
    """
    results_dict, missed_uids = self.saved_query_cache.GetAll(user_ids)

    if missed_uids:
      # Join User2SavedQuery to SavedQuery to get each missed user's
      # queries, ordered by their per-user rank.
      savedquery_rows = self.user2savedquery_tbl.Select(
          cnxn, cols=SAVEDQUERY_COLS + ['user_id', 'subscription_mode'],
          left_joins=[('SavedQuery ON query_id = id', [])],
          order_by=[('rank', [])], user_id=missed_uids)
      sqeip_rows = self.savedqueryexecutesinproject_tbl.Select(
          cnxn, cols=SAVEDQUERYEXECUTESINPROJECT_COLS,
          query_id={row[0] for row in savedquery_rows})
      # Map query_id -> [project_id] so each query knows where it executes.
      sqeip_dict = {}
      for qid, pid in sqeip_rows:
        sqeip_dict.setdefault(qid, []).append(pid)

      for saved_query_tuple in savedquery_rows:
        query_id, name, base_id, query, uid, sub_mode = saved_query_tuple
        sq = tracker_bizobj.MakeSavedQuery(
            query_id, name, base_id, query, subscription_mode=sub_mode,
            executes_in_project_ids=sqeip_dict.get(query_id, []))
        results_dict.setdefault(uid, []).append(sq)

    # NOTE(review): this re-caches entries that were already cached, not
    # just the missed ones -- presumably harmless; verify cache semantics.
    self.saved_query_cache.CacheAll(results_dict)
    return results_dict
+
+  # TODO(jrobbins): change this terminology to "canned query" rather than
+  # "saved" throughout the application.
+ def GetSavedQueriesByUserID(self, cnxn, user_id):
+ """Return a list of SavedQuery PBs for the specified user."""
+ saved_queries_dict = self._GetUsersSavedQueriesDict(cnxn, [user_id])
+ saved_queries = saved_queries_dict.get(user_id, [])
+ return saved_queries[:]
+
+ def GetCannedQueriesForProjects(self, cnxn, project_ids):
+ """Return a dict {project_id: [saved_query]} for the specified projects."""
+ # TODO(jrobbins): caching
+ cannedquery_rows = self.project2savedquery_tbl.Select(
+ cnxn, cols=['project_id'] + SAVEDQUERY_COLS,
+ left_joins=[('SavedQuery ON query_id = id', [])],
+ order_by=[('rank', [])], project_id=project_ids)
+
+ result_dict = collections.defaultdict(list)
+ for cq_row in cannedquery_rows:
+ project_id = cq_row[0]
+ canned_query_tuple = cq_row[1:]
+ result_dict[project_id].append(
+ tracker_bizobj.MakeSavedQuery(*canned_query_tuple))
+
+ return result_dict
+
+ def GetCannedQueriesByProjectID(self, cnxn, project_id):
+ """Return the list of SavedQueries for the specified project."""
+ project_ids_to_canned_queries = self.GetCannedQueriesForProjects(
+ cnxn, [project_id])
+ return project_ids_to_canned_queries.get(project_id, [])
+
  def _UpdateSavedQueries(self, cnxn, saved_queries, commit=True):
    """Store the given SavedQueries to the DB.

    Args:
      cnxn: connection to SQL database.
      saved_queries: list of SavedQuery PBs; PBs without a query_id are
        assigned one from the IDs generated by the INSERT.
      commit: pass False when this runs inside a larger transaction.
    """
    savedquery_rows = [
        (sq.query_id or None, sq.name, sq.base_query_id, sq.query)
        for sq in saved_queries]
    # Delete-then-insert: rows with known IDs are replaced wholesale.
    existing_query_ids = [sq.query_id for sq in saved_queries if sq.query_id]
    if existing_query_ids:
      self.savedquery_tbl.Delete(cnxn, id=existing_query_ids, commit=commit)

    generated_ids = self.savedquery_tbl.InsertRows(
        cnxn, SAVEDQUERY_COLS, savedquery_rows, commit=commit,
        return_generated_ids=True)
    if generated_ids:
      logging.info('generated_ids are %r', generated_ids)
      # NOTE(review): this pairing assumes InsertRows returns one ID per
      # inserted row, in order, even for rows inserted with explicit IDs
      # -- TODO confirm against sql.SQLTableManager.InsertRows.
      for sq in saved_queries:
        generated_id = generated_ids.pop(0)
        if not sq.query_id:
          sq.query_id = generated_id
+
+ def UpdateCannedQueries(self, cnxn, project_id, canned_queries):
+ """Update the canned queries for a project.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int project ID of the project that contains these queries.
+ canned_queries: list of SavedQuery PBs to update.
+ """
+ self.project2savedquery_tbl.Delete(
+ cnxn, project_id=project_id, commit=False)
+ self._UpdateSavedQueries(cnxn, canned_queries, commit=False)
+ project2savedquery_rows = [
+ (project_id, rank, sq.query_id)
+ for rank, sq in enumerate(canned_queries)]
+ self.project2savedquery_tbl.InsertRows(
+ cnxn, PROJECT2SAVEDQUERY_COLS, project2savedquery_rows,
+ commit=False)
+ cnxn.Commit()
+
  def UpdateUserSavedQueries(self, cnxn, user_id, saved_queries):
    """Store the given saved_queries for the given user.

    All writes happen in one transaction: old rows are deleted, queries
    are upserted, and the per-user ranking and subscription info is
    rewritten.

    Args:
      cnxn: connection to SQL database.
      user_id: int ID of the user who owns these saved queries.
      saved_queries: list of SavedQuery PBs to store.
    """
    saved_query_ids = [sq.query_id for sq in saved_queries if sq.query_id]
    self.savedqueryexecutesinproject_tbl.Delete(
        cnxn, query_id=saved_query_ids, commit=False)
    self.user2savedquery_tbl.Delete(cnxn, user_id=user_id, commit=False)

    # This assigns query_ids to any new queries; the rows below need them.
    self._UpdateSavedQueries(cnxn, saved_queries, commit=False)
    user2savedquery_rows = []
    for rank, sq in enumerate(saved_queries):
      user2savedquery_rows.append(
          (user_id, rank, sq.query_id, sq.subscription_mode or 'noemail'))

    self.user2savedquery_tbl.InsertRows(
        cnxn, USER2SAVEDQUERY_COLS, user2savedquery_rows, commit=False)

    # Rewrite the query -> project subscription mapping.
    sqeip_rows = []
    for sq in saved_queries:
      for pid in sq.executes_in_project_ids:
        sqeip_rows.append((sq.query_id, pid))

    self.savedqueryexecutesinproject_tbl.InsertRows(
        cnxn, SAVEDQUERYEXECUTESINPROJECT_COLS, sqeip_rows, commit=False)
    cnxn.Commit()

    # Invalidate after commit so other instances re-read the new rows.
    self.saved_query_cache.Invalidate(cnxn, user_id)
+
+ ### Subscriptions
+
+ def GetSubscriptionsInProjects(self, cnxn, project_ids):
+ """Return all saved queries for users that have any subscription there.
+
+ Args:
+ cnxn: Connection to SQL database.
+ project_ids: list of int project IDs that contain the modified issues.
+
+ Returns:
+ A dict {user_id: all_saved_queries, ...} for all users that have any
+ subscription in any of the specified projects.
+ """
+ join_str = (
+ 'SavedQueryExecutesInProject ON '
+ 'SavedQueryExecutesInProject.query_id = User2SavedQuery.query_id')
+ # TODO(jrobbins): cache this since it rarely changes.
+ subscriber_rows = self.user2savedquery_tbl.Select(
+ cnxn, cols=['user_id'], distinct=True,
+ joins=[(join_str, [])],
+ subscription_mode='immediate', project_id=project_ids)
+ subscriber_ids = [row[0] for row in subscriber_rows]
+ logging.info('subscribers relevant to projects %r are %r',
+ project_ids, subscriber_ids)
+ user_ids_to_saved_queries = self._GetUsersSavedQueriesDict(
+ cnxn, subscriber_ids)
+ return user_ids_to_saved_queries
+
+ def ExpungeSavedQueriesExecuteInProject(self, cnxn, project_id):
+ """Remove any references from saved queries to projects in the database."""
+ self.savedqueryexecutesinproject_tbl.Delete(cnxn, project_id=project_id)
+
+ savedquery_rows = self.project2savedquery_tbl.Select(
+ cnxn, cols=['query_id'], project_id=project_id)
+ savedquery_ids = [row[0] for row in savedquery_rows]
+ self.project2savedquery_tbl.Delete(cnxn, project_id=project_id)
+ self.savedquery_tbl.Delete(cnxn, id=savedquery_ids)
+
+ ### Filter rules
+
+ def _DeserializeFilterRules(self, filterrule_rows):
+ """Convert the given DB row tuples into PBs."""
+ result_dict = collections.defaultdict(list)
+
+ for filterrule_row in sorted(filterrule_rows):
+ project_id, _rank, predicate, consequence = filterrule_row
+ (default_status, default_owner_id, add_cc_ids, add_labels,
+ add_notify) = self._DeserializeRuleConsequence(consequence)
+ rule = filterrules_helpers.MakeRule(
+ predicate, default_status=default_status,
+ default_owner_id=default_owner_id, add_cc_ids=add_cc_ids,
+ add_labels=add_labels, add_notify=add_notify)
+ result_dict[project_id].append(rule)
+
+ return result_dict
+
+ def _DeserializeRuleConsequence(self, consequence):
+ """Decode the THEN-part of a filter rule."""
+ (default_status, default_owner_id, add_cc_ids, add_labels,
+ add_notify) = None, None, [], [], []
+ for action in consequence.split():
+ verb, noun = action.split(':')
+ if verb == 'default_status':
+ default_status = noun
+ elif verb == 'default_owner_id':
+ default_owner_id = int(noun)
+ elif verb == 'add_cc_id':
+ add_cc_ids.append(int(noun))
+ elif verb == 'add_label':
+ add_labels.append(noun)
+ elif verb == 'add_notify':
+ add_notify.append(noun)
+
+ return (default_status, default_owner_id, add_cc_ids, add_labels,
+ add_notify)
+
+ def _GetFilterRulesByProjectIDs(self, cnxn, project_ids):
+ """Return {project_id: [FilterRule, ...]} for the specified projects."""
+ # TODO(jrobbins): caching
+ filterrule_rows = self.filterrule_tbl.Select(
+ cnxn, cols=FILTERRULE_COLS, project_id=project_ids)
+ return self._DeserializeFilterRules(filterrule_rows)
+
+ def GetFilterRules(self, cnxn, project_id):
+ """Return a list of FilterRule PBs for the specified project."""
+ rules_by_project_id = self._GetFilterRulesByProjectIDs(cnxn, [project_id])
+ return rules_by_project_id[project_id]
+
+ def _SerializeRuleConsequence(self, rule):
+ """Put all actions of a filter rule into one string."""
+ assignments = []
+ for add_lab in rule.add_labels:
+ assignments.append('add_label:%s' % add_lab)
+ if rule.default_status:
+ assignments.append('default_status:%s' % rule.default_status)
+ if rule.default_owner_id:
+ assignments.append('default_owner_id:%d' % rule.default_owner_id)
+ for add_cc_id in rule.add_cc_ids:
+ assignments.append('add_cc_id:%d' % add_cc_id)
+ for add_notify in rule.add_notify_addrs:
+ assignments.append('add_notify:%s' % add_notify)
+
+ return ' '.join(assignments)
+
+ def UpdateFilterRules(self, cnxn, project_id, rules):
+ """Update the filter rules part of a project's issue configuration.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the current project.
+ rules: a list of FilterRule PBs.
+ """
+ rows = []
+ for rank, rule in enumerate(rules):
+ predicate = rule.predicate
+ consequence = self._SerializeRuleConsequence(rule)
+ if predicate and consequence:
+ rows.append((project_id, rank, predicate, consequence))
+
+ self.filterrule_tbl.Delete(cnxn, project_id=project_id)
+ self.filterrule_tbl.InsertRows(cnxn, FILTERRULE_COLS, rows)
+
+ def ExpungeFilterRules(self, cnxn, project_id):
+ """Completely destroy filter rule info for the specified project."""
+ self.filterrule_tbl.Delete(cnxn, project_id=project_id)
diff --git a/appengine/monorail/services/fulltext_helpers.py b/appengine/monorail/services/fulltext_helpers.py
new file mode 100644
index 0000000..99cd4b7
--- /dev/null
+++ b/appengine/monorail/services/fulltext_helpers.py
@@ -0,0 +1,122 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of helpers functions for fulltext search."""
+
+import logging
+
+from google.appengine.api import search
+
+import settings
+from proto import ast_pb2
+from proto import tracker_pb2
+
# GAE search API can only respond with 500 results per call.
_SEARCH_RESULT_CHUNK_SIZE = 500

# Do not treat strings that start with the below as key:value search terms.
# Matched case-sensitively with str.startswith in _BuildFTSCondition.
# See bugs.chromium.org/p/monorail/issues/detail?id=419 for more detail.
NON_OP_PREFIXES = (
    'http:',
    'https:',
)
+
+
def BuildFTSQuery(query_ast_conj, fulltext_fields):
  """Convert a Monorail query AST into a GAE search query string.

  Args:
    query_ast_conj: a Conjunction PB with a list of Comparison PBs that each
      have operator, field definitions, string values, and int values.
      All Conditions should be AND'd together.
    fulltext_fields: a list of string names of fields that may exist in the
      fulltext documents. E.g., issue fulltext documents have a "summary"
      field.

  Returns:
    A string that can be passed to AppEngine's search API. Or, None if there
    were no fulltext conditions, so no fulltext search should be done.
  """
  fulltext_parts = []
  for cond in query_ast_conj.conds:
    fulltext_parts.append(_BuildFTSCondition(cond, fulltext_fields))
  if not any(fulltext_parts):
    # Every condition is handled via SQL instead; skip the fulltext search.
    return None
  return ' '.join(fulltext_parts)
+
+
def _BuildFTSCondition(cond, fulltext_fields):
  """Convert one query AST condition into a GAE search query string."""
  # FTS only handles (possibly negated) text-containment operators.
  if cond.op == ast_pb2.QueryOp.NOT_TEXT_HAS:
    neg = 'NOT '
  elif cond.op == ast_pb2.QueryOp.TEXT_HAS:
    neg = ''
  else:
    return ''

  parts = []

  for fd in cond.field_defs:
    # Decide how this field is addressed in the fulltext document, or
    # skip it if it is searched via SQL instead.
    if fd.field_name in fulltext_fields:
      pattern = fd.field_name + ':"%s"'
    elif fd.field_name == ast_pb2.ANY_FIELD:
      pattern = '"%s"'
    elif fd.field_id and fd.field_type == tracker_pb2.FieldTypes.STR_TYPE:
      pattern = 'custom_' + str(fd.field_id) + ':"%s"'
    else:
      continue

    for value in cond.str_values:
      # Strip out quotes around the value.
      value = value.strip('"')
      if not any(value.startswith(p) for p in NON_OP_PREFIXES):
        # Embedded colons would otherwise parse as field:value operators.
        value = value.replace(':', ' ')
      # NOTE(review): assert is stripped under -O; internal quotes should
      # be impossible after the strip above, but confirm upstream parsing.
      assert ('"' not in value), 'Value %r has a quote in it' % value
      parts.append(pattern % value)

  if parts:
    return neg + '(%s)' % ' OR '.join(parts)
  return ''  # None of the fields were fulltext fields.
+
+
def ComprehensiveSearch(fulltext_query, index_name):
  """Call the GAE search API, and keep calling it to get all results.

  Args:
    fulltext_query: string in the GAE search API query language.
    index_name: string name of the GAE fulltext index to hit.

  Returns:
    A list of integer issue IIDs or project IDs.
  """
  search_index = search.Index(name=index_name)

  response = search_index.search(search.Query(
      fulltext_query,
      options=search.QueryOptions(
          limit=_SEARCH_RESULT_CHUNK_SIZE, returned_fields=[], ids_only=True,
          cursor=search.Cursor())))
  logging.info('got %d initial results', len(response.results))
  ids = [int(result.doc_id) for result in response]

  # The call above already fetched one chunk, so allow enough additional
  # calls to cover settings.fulltext_limit_per_shard total results.
  # BUG FIX: the original expression,
  #     int(settings.fulltext_limit_per_shard - 1 / _SEARCH_RESULT_CHUNK_SIZE)
  # divided before subtracting, yielding roughly the raw limit (a huge
  # iteration count) instead of a number of chunks.
  remaining_iterations = (
      (settings.fulltext_limit_per_shard - 1) // _SEARCH_RESULT_CHUNK_SIZE)
  for _ in range(remaining_iterations):
    if not response.cursor:
      break  # No more results to fetch.
    response = search_index.search(search.Query(
        fulltext_query,
        options=search.QueryOptions(
            limit=_SEARCH_RESULT_CHUNK_SIZE, returned_fields=[], ids_only=True,
            cursor=response.cursor)))
    logging.info(
        'got %d more results: %r', len(response.results), response.results)
    ids.extend(int(result.doc_id) for result in response)

  logging.info('FTS result ids %d', len(ids))
  return ids
diff --git a/appengine/monorail/services/issue_svc.py b/appengine/monorail/services/issue_svc.py
new file mode 100644
index 0000000..6a98c45
--- /dev/null
+++ b/appengine/monorail/services/issue_svc.py
@@ -0,0 +1,2557 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide persistence for Monorail issue tracking.
+
+This module provides functions to get, update, create, and (in some
+cases) delete each type of business object. It provides a logical
+persistence layer on top of an SQL database.
+
+Business objects are described in tracker_pb2.py and tracker_bizobj.py.
+"""
+
+import collections
+import json
+import logging
+import os
+import time
+import uuid
+
+from google.appengine.api import app_identity
+from google.appengine.api import images
+from third_party import cloudstorage
+
+import settings
+from features import filterrules_helpers
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import gcs_helpers
+from framework import permissions
+from framework import sql
+from infra_libs import ts_mon
+from proto import project_pb2
+from proto import tracker_pb2
+from services import caches
+from services import tracker_fulltext
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+
+
# Names of the SQL tables that make up an issue and its metadata.
ISSUE_TABLE_NAME = 'Issue'
ISSUESUMMARY_TABLE_NAME = 'IssueSummary'
ISSUE2LABEL_TABLE_NAME = 'Issue2Label'
ISSUE2COMPONENT_TABLE_NAME = 'Issue2Component'
ISSUE2CC_TABLE_NAME = 'Issue2Cc'
ISSUE2NOTIFY_TABLE_NAME = 'Issue2Notify'
ISSUE2FIELDVALUE_TABLE_NAME = 'Issue2FieldValue'
COMMENT_TABLE_NAME = 'Comment'
ATTACHMENT_TABLE_NAME = 'Attachment'
ISSUERELATION_TABLE_NAME = 'IssueRelation'
DANGLINGRELATION_TABLE_NAME = 'DanglingIssueRelation'
ISSUEUPDATE_TABLE_NAME = 'IssueUpdate'
ISSUEFORMERLOCATIONS_TABLE_NAME = 'IssueFormerLocations'
REINDEXQUEUE_TABLE_NAME = 'ReindexQueue'
LOCALIDCOUNTER_TABLE_NAME = 'LocalIDCounter'

# Columns of each table, in the order used by SELECT and INSERT statements.
# Some column names are table-qualified because those queries join tables
# that have columns with the same name.
ISSUE_COLS = [
    'id', 'project_id', 'local_id', 'status_id', 'owner_id', 'reporter_id',
    'opened', 'closed', 'modified', 'derived_owner_id', 'derived_status_id',
    'deleted', 'star_count', 'attachment_count', 'is_spam']
ISSUESUMMARY_COLS = ['issue_id', 'summary']
ISSUE2LABEL_COLS = ['issue_id', 'label_id', 'derived']
ISSUE2COMPONENT_COLS = ['issue_id', 'component_id', 'derived']
ISSUE2CC_COLS = ['issue_id', 'cc_id', 'derived']
ISSUE2NOTIFY_COLS = ['issue_id', 'email']
ISSUE2FIELDVALUE_COLS = [
    'issue_id', 'field_id', 'int_value', 'str_value', 'user_id', 'derived']
COMMENT_COLS = [
    'Comment.id', 'issue_id', 'created', 'Comment.project_id', 'commenter_id',
    'content', 'inbound_message', 'was_escaped', 'deleted_by',
    'Comment.is_spam']
ABBR_COMMENT_COLS = ['Comment.id', 'commenter_id', 'deleted_by']
ATTACHMENT_COLS = [
    'id', 'issue_id', 'comment_id', 'filename', 'filesize', 'mimetype',
    'deleted', 'gcs_object_id']
ISSUERELATION_COLS = ['issue_id', 'dst_issue_id', 'kind']
DANGLINGRELATION_COLS = [
    'issue_id', 'dst_issue_project', 'dst_issue_local_id', 'kind']
ISSUEUPDATE_COLS = [
    'id', 'issue_id', 'comment_id', 'field', 'old_value', 'new_value',
    'added_user_id', 'removed_user_id', 'custom_field_name']
ISSUEFORMERLOCATIONS_COLS = ['issue_id', 'project_id', 'local_id']
REINDEXQUEUE_COLS = ['issue_id', 'created']

# Number of rows to process per batch in chunked operations.
CHUNK_SIZE = 1000
+
+
class IssueIDTwoLevelCache(caches.AbstractTwoLevelCache):
  """Class to manage RAM and memcache for Issue IDs.

  Maps (project_id, local_id) pairs to global int issue IDs.
  """

  def __init__(self, cache_manager, issue_service):
    super(IssueIDTwoLevelCache, self).__init__(
        cache_manager, 'issue_id', 'issue_id:', int,
        max_size=settings.issue_cache_max_size, use_value_centric_cache=True)
    # Kept so FetchItems can query the Issue table on cache misses.
    self.issue_service = issue_service

  def _DeserializeIssueIDs(self, project_local_issue_ids):
    """Convert database rows into a dict {(project_id, local_id): issue_id}."""
    return {(project_id, local_id): issue_id
            for (project_id, local_id, issue_id) in project_local_issue_ids}

  def FetchItems(self, cnxn, keys):
    """On RAM and memcache miss, hit the database."""
    # Group the requested local IDs by project so one query with an
    # OR-of-ANDs WHERE clause can cover all requested pairs.
    local_ids_by_pid = collections.defaultdict(list)
    for project_id, local_id in keys:
      local_ids_by_pid[project_id].append(local_id)

    where = []  # We OR per-project pairs of conditions together.
    # NOTE(review): .iteritems() is Python 2 only.
    for project_id, local_ids_in_project in local_ids_by_pid.iteritems():
      term_str = ('(Issue.project_id = %%s AND Issue.local_id IN (%s))' %
                  sql.PlaceHolders(local_ids_in_project))
      where.append((term_str, [project_id] + local_ids_in_project))

    rows = self.issue_service.issue_tbl.Select(
        cnxn, cols=['project_id', 'local_id', 'id'],
        where=where, or_where_conds=True)
    return self._DeserializeIssueIDs(rows)

  def _KeyToStr(self, key):
    """This cache uses pairs of ints as keys. Convert them to strings."""
    return '%d,%d' % key

  def _StrToKey(self, key_str):
    """This cache uses pairs of ints as keys. Convert them from strings."""
    project_id_str, local_id_str = key_str.split(',')
    return int(project_id_str), int(local_id_str)
+
+
class IssueTwoLevelCache(caches.AbstractTwoLevelCache):
  """Class to manage RAM and memcache for Issue PBs."""

  def __init__(
      self, cache_manager, issue_service, project_service, config_service):
    super(IssueTwoLevelCache, self).__init__(
        cache_manager, 'issue', 'issue:', tracker_pb2.Issue,
        max_size=settings.issue_cache_max_size)
    # Kept so FetchItems can query issue tables and resolve names/labels.
    self.issue_service = issue_service
    self.project_service = project_service
    self.config_service = config_service

  def _UnpackIssue(self, cnxn, issue_row):
    """Partially construct an issue object using info from a DB row."""
    (issue_id, project_id, local_id, status_id, owner_id, reporter_id,
     opened, closed, modified, derived_owner_id, derived_status_id,
     deleted, star_count, attachment_count, is_spam) = issue_row

    issue = tracker_pb2.Issue()
    project = self.project_service.GetProject(cnxn, project_id)
    issue.project_name = project.project_name
    issue.issue_id = issue_id
    issue.project_id = project_id
    issue.local_id = local_id
    # Status IDs are translated back to status name strings.
    if status_id is not None:
      status = self.config_service.LookupStatus(cnxn, project_id, status_id)
      issue.status = status
    issue.owner_id = owner_id or 0
    issue.reporter_id = reporter_id or 0
    issue.derived_owner_id = derived_owner_id or 0
    if derived_status_id is not None:
      derived_status = self.config_service.LookupStatus(
          cnxn, project_id, derived_status_id)
      issue.derived_status = derived_status
    issue.deleted = bool(deleted)
    # Timestamps are only set when present; 0/NULL means unset.
    if opened:
      issue.opened_timestamp = opened
    if closed:
      issue.closed_timestamp = closed
    if modified:
      issue.modified_timestamp = modified
    issue.star_count = star_count
    issue.attachment_count = attachment_count
    issue.is_spam = bool(is_spam)
    return issue

  def _UnpackFieldValue(self, fv_row):
    """Construct a field value object from a DB row."""
    (issue_id, field_id, int_value, str_value, user_id, derived) = fv_row
    fv = tracker_bizobj.MakeFieldValue(
        field_id, int_value, str_value, user_id, bool(derived))
    return fv, issue_id

  def _DeserializeIssues(
      self, cnxn, issue_rows, summary_rows, label_rows, component_rows,
      cc_rows, notify_rows, fieldvalue_rows, relation_rows,
      dangling_relation_rows):
    """Convert the given DB rows into a dict of Issue PBs.

    Args:
      cnxn: connection to SQL database.
      issue_rows: rows from the Issue table.
      summary_rows..dangling_relation_rows: rows from the auxiliary
        tables, keyed by issue_id, that flesh out each Issue PB.

    Returns:
      A dict {issue_id: Issue PB}.
    """
    results_dict = {}
    for issue_row in issue_rows:
      issue = self._UnpackIssue(cnxn, issue_row)
      results_dict[issue.issue_id] = issue

    for issue_id, summary in summary_rows:
      results_dict[issue_id].summary = summary

    # TODO(jrobbins): it would be nice to order labels by rank and name.
    for issue_id, label_id, derived in label_rows:
      issue = results_dict.get(issue_id)
      if not issue:
        logging.info('Got label for an unknown issue: %r %r',
                     label_rows, issue_rows)
        continue
      label = self.config_service.LookupLabel(cnxn, issue.project_id, label_id)
      assert label, ('Label ID %r on IID %r not found in project %r' %
                     (label_id, issue_id, issue.project_id))
      if derived:
        results_dict[issue_id].derived_labels.append(label)
      else:
        results_dict[issue_id].labels.append(label)

    for issue_id, component_id, derived in component_rows:
      if derived:
        results_dict[issue_id].derived_component_ids.append(component_id)
      else:
        results_dict[issue_id].component_ids.append(component_id)

    for issue_id, user_id, derived in cc_rows:
      if derived:
        results_dict[issue_id].derived_cc_ids.append(user_id)
      else:
        results_dict[issue_id].cc_ids.append(user_id)

    for issue_id, email in notify_rows:
      results_dict[issue_id].derived_notify_addrs.append(email)

    for fv_row in fieldvalue_rows:
      fv, issue_id = self._UnpackFieldValue(fv_row)
      results_dict[issue_id].field_values.append(fv)

    # Relation rows may reference issues on either end that are not part
    # of this fetch; each side is filled in only when present.
    for issue_id, dst_issue_id, kind in relation_rows:
      src_issue = results_dict.get(issue_id)
      dst_issue = results_dict.get(dst_issue_id)
      assert src_issue or dst_issue, (
          'Neither source issue %r nor dest issue %r was found' %
          (issue_id, dst_issue_id))
      if src_issue:
        if kind == 'blockedon':
          src_issue.blocked_on_iids.append(dst_issue_id)
        elif kind == 'mergedinto':
          src_issue.merged_into = dst_issue_id
        else:
          logging.info('unknown relation kind %r', kind)
          continue

      if dst_issue:
        if kind == 'blockedon':
          dst_issue.blocking_iids.append(issue_id)

    for issue_id, dst_issue_proj, dst_issue_id, kind in dangling_relation_rows:
      src_issue = results_dict.get(issue_id)
      if not src_issue:
        # BUG FIX: previously an unknown issue here caused an
        # AttributeError on None; log and skip like the label case above.
        logging.info('Got dangling relation for an unknown issue: %r %r',
                     dangling_relation_rows, issue_rows)
        continue
      if kind == 'blockedon':
        src_issue.dangling_blocked_on_refs.append(
            tracker_bizobj.MakeDanglingIssueRef(dst_issue_proj, dst_issue_id))
      elif kind == 'blocking':
        src_issue.dangling_blocking_refs.append(
            tracker_bizobj.MakeDanglingIssueRef(dst_issue_proj, dst_issue_id))
      else:
        # Also fixed: logging.warn is a deprecated alias, and the message
        # misspelled "dangling".
        logging.warning('unhandled dangling relation kind %r', kind)
        continue

    return results_dict

  # Note: sharding is used here to allow us to load issues from the replicas
  # without placing load on the master. Writes are not sharded.
  # pylint: disable=arguments-differ
  def FetchItems(self, cnxn, issue_ids, shard_id=None):
    """Retrieve and deserialize issues."""
    issue_rows = self.issue_service.issue_tbl.Select(
        cnxn, cols=ISSUE_COLS, id=issue_ids, shard_id=shard_id)

    summary_rows = self.issue_service.issuesummary_tbl.Select(
        cnxn, cols=ISSUESUMMARY_COLS, shard_id=shard_id, issue_id=issue_ids)
    label_rows = self.issue_service.issue2label_tbl.Select(
        cnxn, cols=ISSUE2LABEL_COLS, shard_id=shard_id, issue_id=issue_ids)
    component_rows = self.issue_service.issue2component_tbl.Select(
        cnxn, cols=ISSUE2COMPONENT_COLS, shard_id=shard_id, issue_id=issue_ids)
    cc_rows = self.issue_service.issue2cc_tbl.Select(
        cnxn, cols=ISSUE2CC_COLS, shard_id=shard_id, issue_id=issue_ids)
    notify_rows = self.issue_service.issue2notify_tbl.Select(
        cnxn, cols=ISSUE2NOTIFY_COLS, shard_id=shard_id, issue_id=issue_ids)
    fieldvalue_rows = self.issue_service.issue2fieldvalue_tbl.Select(
        cnxn, cols=ISSUE2FIELDVALUE_COLS, shard_id=shard_id,
        issue_id=issue_ids)
    if issue_ids:
      # Relations are fetched for both directions in a single query.
      ph = sql.PlaceHolders(issue_ids)
      relation_rows = self.issue_service.issuerelation_tbl.Select(
          cnxn, cols=ISSUERELATION_COLS,
          where=[('(issue_id IN (%s) OR dst_issue_id IN (%s))' % (ph, ph),
                  issue_ids + issue_ids)])
      dangling_relation_rows = self.issue_service.danglingrelation_tbl.Select(
          cnxn, cols=DANGLINGRELATION_COLS, issue_id=issue_ids)
    else:
      relation_rows = []
      dangling_relation_rows = []

    return self._DeserializeIssues(
        cnxn, issue_rows, summary_rows, label_rows, component_rows, cc_rows,
        notify_rows, fieldvalue_rows, relation_rows, dangling_relation_rows)
+
+
+class IssueService(object):
+ """The persistence layer for Monorail's issues, comments, and attachments."""
+ spam_labels = ts_mon.CounterMetric('monorail/issue_svc/spam_label')
+
  def __init__(self, project_service, config_service, cache_manager):
    """Initialize this object so that it is ready to use.

    Args:
      project_service: services object for project info.
      config_service: services object for tracker configuration info.
      cache_manager: local cache with distributed invalidation.
    """
    # Tables that represent issue data.
    self.issue_tbl = sql.SQLTableManager(ISSUE_TABLE_NAME)
    self.issuesummary_tbl = sql.SQLTableManager(ISSUESUMMARY_TABLE_NAME)
    self.issue2label_tbl = sql.SQLTableManager(ISSUE2LABEL_TABLE_NAME)
    self.issue2component_tbl = sql.SQLTableManager(ISSUE2COMPONENT_TABLE_NAME)
    self.issue2cc_tbl = sql.SQLTableManager(ISSUE2CC_TABLE_NAME)
    self.issue2notify_tbl = sql.SQLTableManager(ISSUE2NOTIFY_TABLE_NAME)
    self.issue2fieldvalue_tbl = sql.SQLTableManager(ISSUE2FIELDVALUE_TABLE_NAME)
    self.issuerelation_tbl = sql.SQLTableManager(ISSUERELATION_TABLE_NAME)
    self.danglingrelation_tbl = sql.SQLTableManager(DANGLINGRELATION_TABLE_NAME)
    self.issueformerlocations_tbl = sql.SQLTableManager(
        ISSUEFORMERLOCATIONS_TABLE_NAME)

    # Tables that represent comments.
    self.comment_tbl = sql.SQLTableManager(COMMENT_TABLE_NAME)
    self.issueupdate_tbl = sql.SQLTableManager(ISSUEUPDATE_TABLE_NAME)
    self.attachment_tbl = sql.SQLTableManager(ATTACHMENT_TABLE_NAME)

    # Tables for cron tasks.
    self.reindexqueue_tbl = sql.SQLTableManager(REINDEXQUEUE_TABLE_NAME)

    # Tables for generating sequences of local IDs.
    self.localidcounter_tbl = sql.SQLTableManager(LOCALIDCOUNTER_TABLE_NAME)

    # Like a dictionary {(project_id, local_id): issue_id}
    # Use value centric cache here because we cannot store a tuple in the
    # Invalidate table.
    self.issue_id_2lc = IssueIDTwoLevelCache(cache_manager, self)
    # Like a dictionary {issue_id: issue}
    self.issue_2lc = IssueTwoLevelCache(
        cache_manager, self, project_service, config_service)

    # Kept for status/label lookups while deserializing issues.
    self._config_service = config_service
+
+ ### Issue ID lookups
+
+ def LookupIssueIDs(self, cnxn, project_local_id_pairs):
+ """Find the global issue IDs given the project ID and local ID of each."""
+ issue_id_dict, _misses = self.issue_id_2lc.GetAll(
+ cnxn, project_local_id_pairs)
+
+ # Put the Issue IDs in the order specified by project_local_id_pairs
+ issue_ids = [issue_id_dict[pair] for pair in project_local_id_pairs
+ if pair in issue_id_dict]
+
+ return issue_ids
+
+ def LookupIssueID(self, cnxn, project_id, local_id):
+ """Find the global issue ID given the project ID and local ID."""
+ issue_ids = self.LookupIssueIDs(cnxn, [(project_id, local_id)])
+ try:
+ return issue_ids[0]
+ except IndexError:
+ raise NoSuchIssueException()
+
+ def ResolveIssueRefs(
+ self, cnxn, ref_projects, default_project_name, refs):
+ """Look up all the referenced issues and return their issue_ids.
+
+ Args:
+ cnxn: connection to SQL database.
+ ref_projects: pre-fetched dict {project_name: project} of all projects
+ mentioned in the refs as well as the default project.
+ default_project_name: string name of the current project, this is used
+ when the project_name in a ref is None.
+ refs: list of (project_name, local_id) pairs. These are parsed from
+ textual references in issue descriptions, comments, and the input
+ in the blocked-on field.
+
+ Returns:
+ A list of issue_ids for all the referenced issues. References to issues
+ in deleted projects and any issues not found are simply ignored.
+ """
+ if not refs:
+ return []
+
+ project_local_id_pairs = []
+ for project_name, local_id in refs:
+ project = ref_projects.get(project_name or default_project_name)
+ if not project or project.state == project_pb2.ProjectState.DELETABLE:
+ continue # ignore any refs to issues in deleted projects
+ project_local_id_pairs.append((project.project_id, local_id))
+
+ issue_ids = self.LookupIssueIDs(cnxn, project_local_id_pairs)
+ return issue_ids
+
+ ### Issue objects
+
+  def CreateIssue(
+      self, cnxn, services, project_id, summary, status,
+      owner_id, cc_ids, labels, field_values, component_ids, reporter_id,
+      marked_description, blocked_on=None, blocking=None, attachments=None,
+      timestamp=None, index_now=True):
+    """Create and store a new issue with all the given information.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: persistence layer for users, issues, and projects.
+      project_id: int ID for the current project.
+      summary: one-line summary string summarizing this issue.
+      status: string issue status value. E.g., 'New'.
+      owner_id: user ID of the issue owner.
+      cc_ids: list of user IDs for users to be CC'd on changes.
+      labels: list of label strings. E.g., 'Priority-High'.
+      field_values: list of FieldValue PBs.
+      component_ids: list of int component IDs.
+      reporter_id: user ID of the user who reported the issue.
+      marked_description: issue description with initial HTML markup.
+      blocked_on: list of issue_ids that this issue is blocked on.
+      blocking: list of issue_ids that this issue blocks.
+      attachments: [(filename, contents, mimetype),...] attachments uploaded at
+          the time the comment was made.
+      timestamp: time that the issue was entered, defaults to now.
+      index_now: True if the issue should be updated in the full text index.
+
+    Returns:
+      The integer local ID of the new issue.
+    """
+    config = self._config_service.GetProjectConfig(cnxn, project_id)
+    iids_to_invalidate = set()
+
+    # Make user-supplied status and labels safe and canonical; drop labels
+    # that canonicalize to nothing.
+    status = framework_bizobj.CanonicalizeLabel(status)
+    labels = [framework_bizobj.CanonicalizeLabel(l) for l in labels]
+    labels = [l for l in labels if l]
+
+    issue = tracker_pb2.Issue()
+    issue.project_id = project_id
+    issue.summary = summary
+    issue.status = status
+    issue.owner_id = owner_id
+    issue.cc_ids.extend(cc_ids)
+    issue.labels.extend(labels)
+    issue.field_values.extend(field_values)
+    issue.component_ids.extend(component_ids)
+    issue.reporter_id = reporter_id
+    if blocked_on is not None:
+      iids_to_invalidate.update(blocked_on)
+      issue.blocked_on_iids = blocked_on
+    if blocking is not None:
+      iids_to_invalidate.update(blocking)
+      issue.blocking_iids = blocking
+    if attachments:
+      issue.attachment_count = len(attachments)
+    timestamp = timestamp or int(time.time())
+    issue.opened_timestamp = timestamp
+    issue.modified_timestamp = timestamp
+
+    # The initial description is stored as the issue's first comment.
+    comment = self._MakeIssueComment(
+        project_id, reporter_id, marked_description,
+        attachments=attachments, timestamp=timestamp, was_escaped=True)
+
+    # Set the closed_timestamp both before and after filter rules.
+    if not tracker_helpers.MeansOpenInProject(
+        tracker_bizobj.GetStatus(issue), config):
+      issue.closed_timestamp = timestamp
+    filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
+    if not tracker_helpers.MeansOpenInProject(
+        tracker_bizobj.GetStatus(issue), config):
+      issue.closed_timestamp = timestamp
+
+    # Run the spam classifier over the new issue and its description.
+    classification = services.spam.ClassifyIssue(issue, comment)
+
+    label = classification['outputLabel']
+    logging.info('issue/comment classification: %s' % classification)
+    # Find the score that was reported for the winning label.
+    score = 0
+    for output in classification['outputMulti']:
+      if output['label'] == label:
+        score = float(output['score'])
+
+    self.spam_labels.increment({'type': label})
+
+    if label == 'spam' and score > settings.classifier_spam_thresh:
+      # Must be negative so as not to use up actual local_ids.
+      # This can be fixed later if a human declares it to be ham.
+      issue.local_id = self.AllocateNextSpamLocalID(cnxn, project_id)
+      issue.is_spam = True
+    else:
+      issue.local_id = self.AllocateNextLocalID(cnxn, project_id)
+
+    # Persist the issue and its initial description comment.
+    issue_id = self.InsertIssue(cnxn, issue)
+    comment.issue_id = issue_id
+    self.InsertComment(cnxn, comment)
+
+    issue.issue_id = issue_id
+    services.spam.RecordClassifierIssueVerdict(
+        cnxn, issue, label=='spam', score)
+
+    if permissions.HasRestrictions(issue, 'view'):
+      self._config_service.InvalidateMemcache(
+          [issue], key_prefix='nonviewable:')
+
+    # Add a comment to existing issues saying they are now blocking or
+    # blocked on this issue.
+    blocked_add_issues = self.GetIssues(cnxn, blocked_on or [])
+    for add_issue in blocked_add_issues:
+      self.CreateIssueComment(
+          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockingAmendment(
+              [(issue.project_name, issue.local_id)], [],
+              default_project_name=add_issue.project_name)])
+    blocking_add_issues = self.GetIssues(cnxn, blocking or [])
+    for add_issue in blocking_add_issues:
+      self.CreateIssueComment(
+          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
+              [(issue.project_name, issue.local_id)], [],
+              default_project_name=add_issue.project_name)])
+
+    self._UpdateIssuesModified(
+        cnxn, iids_to_invalidate, modified_timestamp=timestamp)
+
+    if index_now:
+      tracker_fulltext.IndexIssues(
+          cnxn, [issue], services.user, self, self._config_service)
+
+    return issue.local_id
+
+ def AllocateNewLocalIDs(self, cnxn, issues):
+ # Filter to just the issues that need new local IDs.
+ issues = [issue for issue in issues if issue.local_id < 0]
+
+ for issue in issues:
+ if issue.local_id < 0:
+ issue.local_id = self.AllocateNextLocalID(cnxn, issue.project_id)
+
+ self.UpdateIssues(cnxn, issues)
+
+ logging.info("AllocateNewLocalIDs")
+
+ def GetAllIssuesInProject(self, cnxn, project_id, min_local_id=None):
+ """Special query to efficiently get ALL issues in a project.
+
+ This is not done while the user is waiting, only by backround tasks.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: the ID of the project.
+ min_local_id: optional int to start at.
+
+ Returns:
+ A list of Issue protocol buffers for all issues.
+ """
+ all_local_ids = self.GetAllLocalIDsInProject(
+ cnxn, project_id, min_local_id=min_local_id)
+ return self.GetIssuesByLocalIDs(cnxn, project_id, all_local_ids)
+
+ def GetAnyOnHandIssue(self, issue_ids, start=None, end=None):
+ """Get any one issue from RAM or memcache, otherwise return None."""
+ return self.issue_2lc.GetAnyOnHandItem(issue_ids, start=start, end=end)
+
+ def GetIssuesDict(self, cnxn, issue_ids, use_cache=True, shard_id=None):
+ """Get a dict {iid: issue} from the DB or cache."""
+ issue_dict, _missed_iids = self.issue_2lc.GetAll(
+ cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id)
+ return issue_dict
+
+ def GetIssues(self, cnxn, issue_ids, use_cache=True, shard_id=None):
+ """Get a list of Issue PBs from the DB or cache.
+
+ Args:
+ cnxn: connection to SQL database.
+ issue_ids: integer global issue IDs of the issues.
+ use_cache: optional boolean to turn off using the cache.
+ shard_id: optional int shard_id to limit retrieval.
+
+ Returns:
+ A list of Issue PBs in the same order as the given issue_ids.
+ """
+ issue_dict = self.GetIssuesDict(
+ cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id)
+
+ # Return a list that is ordered the same as the given issue_ids.
+ issue_list = [issue_dict[issue_id] for issue_id in issue_ids
+ if issue_id in issue_dict]
+
+ return issue_list
+
+ def GetIssue(self, cnxn, issue_id):
+ """Get one Issue PB from the DB.
+
+ Args:
+ cnxn: connection to SQL database.
+ issue_id: integer global issue ID of the issue.
+
+ Returns:
+ The requested Issue protocol buffer.
+
+ Raises:
+ NoSuchIssueException: the issue was not found.
+ """
+ issues = self.GetIssues(cnxn, [issue_id])
+ try:
+ return issues[0]
+ except IndexError:
+ raise NoSuchIssueException()
+
+ def GetIssuesByLocalIDs(
+ self, cnxn, project_id, local_id_list, shard_id=None):
+ """Get all the requested issues.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int ID of the project to which the issues belong.
+ local_id_list: list of integer local IDs for the requested issues.
+ shard_id: optional int shard_id to choose a replica.
+
+ Returns:
+ List of Issue PBs for the requested issues. The result Issues
+ will be ordered in the same order as local_id_list.
+ """
+ issue_ids_to_fetch = self.LookupIssueIDs(
+ cnxn, [(project_id, local_id) for local_id in local_id_list])
+ issues = self.GetIssues(cnxn, issue_ids_to_fetch, shard_id=shard_id)
+ return issues
+
+ def GetIssueByLocalID(self, cnxn, project_id, local_id):
+ """Get one Issue PB from the DB.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: the ID of the project to which the issue belongs.
+ local_id: integer local ID of the issue.
+
+ Returns:
+ The requested Issue protocol buffer.
+ """
+ issues = self.GetIssuesByLocalIDs(cnxn, project_id, [local_id])
+ try:
+ return issues[0]
+ except IndexError:
+ raise NoSuchIssueException('The issue %s:%d does not exist.' % (
+ project_id, local_id))
+
+ def GetOpenAndClosedIssues(self, cnxn, issue_ids):
+ """Return the requested issues in separate open and closed lists.
+
+ Args:
+ cnxn: connection to SQL database.
+ issue_ids: list of int issue issue_ids.
+
+ Returns:
+ A pair of lists, the first with open issues, second with closed issues.
+ """
+ if not issue_ids:
+ return [], [] # make one common case efficient
+
+ issues = self.GetIssues(cnxn, issue_ids)
+ project_ids = {issue.project_id for issue in issues}
+ configs = self._config_service.GetProjectConfigs(cnxn, project_ids)
+ open_issues = []
+ closed_issues = []
+ for issue in issues:
+ config = configs[issue.project_id]
+ if tracker_helpers.MeansOpenInProject(
+ tracker_bizobj.GetStatus(issue), config):
+ open_issues.append(issue)
+ else:
+ closed_issues.append(issue)
+
+ return open_issues, closed_issues
+
+ def GetCurrentLocationOfMovedIssue(self, cnxn, project_id, local_id):
+ """Return the current location of a moved issue based on old location."""
+ issue_id = int(self.issueformerlocations_tbl.SelectValue(
+ cnxn, 'issue_id', default=0, project_id=project_id, local_id=local_id))
+ if not issue_id:
+ return None, None
+ project_id, local_id = self.issue_tbl.SelectRow(
+ cnxn, cols=['project_id', 'local_id'], id=issue_id)
+ return project_id, local_id
+
+ def GetPreviousLocations(self, cnxn, issue):
+ """Get all the previous locations of an issue."""
+ location_rows = self.issueformerlocations_tbl.Select(
+ cnxn, cols=['project_id', 'local_id'], issue_id=issue.issue_id)
+ locations = [(pid, local_id) for (pid, local_id) in location_rows
+ if pid != issue.project_id or local_id != issue.local_id]
+ return locations
+
+  def InsertIssue(self, cnxn, issue):
+    """Store the given issue in SQL.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue: Issue PB to insert into the database.
+
+    Returns:
+      The int issue_id of the newly created issue.
+    """
+    status_id = self._config_service.LookupStatusID(
+        cnxn, issue.project_id, issue.status)
+    # Row tuple must stay in the same order as ISSUE_COLS[1:] below.
+    row = (issue.project_id, issue.local_id, status_id,
+           issue.owner_id or None,
+           issue.reporter_id,
+           issue.opened_timestamp,
+           issue.closed_timestamp,
+           issue.modified_timestamp,
+           issue.derived_owner_id or None,
+           self._config_service.LookupStatusID(
+               cnxn, issue.project_id, issue.derived_status),
+           bool(issue.deleted),
+           issue.star_count, issue.attachment_count,
+           issue.is_spam)
+    # ISSUE_COLs[1:] to skip setting the ID
+    # Insert into the Master DB.
+    generated_ids = self.issue_tbl.InsertRows(
+        cnxn, ISSUE_COLS[1:], [row], commit=False, return_generated_ids=True)
+    issue_id = generated_ids[0]
+    issue.issue_id = issue_id
+    # The shard depends on the generated ID, so it must be set in a second
+    # statement after the insert.
+    self.issue_tbl.Update(
+        cnxn, {'shard': issue_id % settings.num_logical_shards},
+        id=issue.issue_id, commit=False)
+
+    # All the related-table writes use commit=False so that the issue and
+    # its labels, fields, etc. land in one transaction, committed below.
+    self._UpdateIssuesSummary(cnxn, [issue], commit=False)
+    self._UpdateIssuesLabels(
+        cnxn, [issue], issue.project_id, commit=False)
+    self._UpdateIssuesFields(cnxn, [issue], commit=False)
+    self._UpdateIssuesComponents(cnxn, [issue], commit=False)
+    self._UpdateIssuesCc(cnxn, [issue], commit=False)
+    self._UpdateIssuesNotify(cnxn, [issue], commit=False)
+    self._UpdateIssuesRelation(cnxn, [issue], commit=False)
+    cnxn.Commit()
+    self._config_service.InvalidateMemcache([issue])
+
+    return issue_id
+
+  def UpdateIssues(
+      self, cnxn, issues, update_cols=None, just_derived=False, commit=True,
+      invalidate=True):
+    """Update the given issues in SQL.
+
+    Args:
+      cnxn: connection to SQL database.
+      issues: list of issues to update. All must be in the same project.
+      update_cols: optional list of just the field names to update.
+      just_derived: set to True when only updating derived fields.
+      commit: set to False to skip the DB commit and do it in the caller.
+      invalidate: set to False to leave cache invalidatation to the caller.
+    """
+    if not issues:
+      return
+
+    project_id = issues[0].project_id  # All must be in the same project.
+    assert all(issue.project_id == project_id for issue in issues)
+
+    for issue in issues:  # slow, but mysql will not allow REPLACE rows.
+      delta = {
+          'project_id': issue.project_id,
+          'local_id': issue.local_id,
+          'owner_id': issue.owner_id or None,
+          'status_id': self._config_service.LookupStatusID(
+              cnxn, issue.project_id, issue.status) or None,
+          'opened': issue.opened_timestamp,
+          'closed': issue.closed_timestamp,
+          'modified': issue.modified_timestamp,
+          'derived_owner_id': issue.derived_owner_id or None,
+          'derived_status_id': self._config_service.LookupStatusID(
+              cnxn, issue.project_id, issue.derived_status) or None,
+          'deleted': bool(issue.deleted),
+          'star_count': issue.star_count,
+          'attachment_count': issue.attachment_count,
+          'is_spam': issue.is_spam,
+          }
+      if update_cols is not None:
+        # Restrict the write to only the columns the caller asked for.
+        delta = {key: val for key, val in delta.iteritems()
+                 if key in update_cols}
+      self.issue_tbl.Update(cnxn, delta, id=issue.issue_id, commit=False)
+
+    # Related tables are only rewritten on a full update; when update_cols
+    # was given, only the main Issue table columns above are touched.
+    if not update_cols:
+      self._UpdateIssuesLabels(
+          cnxn, issues, project_id, commit=False)
+      self._UpdateIssuesCc(cnxn, issues, commit=False)
+      self._UpdateIssuesFields(cnxn, issues, commit=False)
+      self._UpdateIssuesComponents(cnxn, issues, commit=False)
+      self._UpdateIssuesNotify(cnxn, issues, commit=False)
+      if not just_derived:
+        self._UpdateIssuesSummary(cnxn, issues, commit=False)
+        self._UpdateIssuesRelation(cnxn, issues, commit=False)
+
+    iids_to_invalidate = [issue.issue_id for issue in issues]
+    # NOTE(review): derived-only changes use InvalidateAllKeys rather than
+    # InvalidateKeys — presumably a broader invalidation; confirm in the
+    # caches module.
+    if just_derived and invalidate:
+      self.issue_2lc.InvalidateAllKeys(cnxn, iids_to_invalidate)
+    elif invalidate:
+      self.issue_2lc.InvalidateKeys(cnxn, iids_to_invalidate)
+    if commit:
+      cnxn.Commit()
+    if invalidate:
+      self._config_service.InvalidateMemcache(issues)
+
+ def UpdateIssue(
+ self, cnxn, issue, update_cols=None, just_derived=False, commit=True,
+ invalidate=True):
+ """Update the given issue in SQL.
+
+ Args:
+ cnxn: connection to SQL database.
+ issue: the issue to update.
+ update_cols: optional list of just the field names to update.
+ just_derived: set to True when only updating derived fields.
+ commit: set to False to skip the DB commit and do it in the caller.
+ invalidate: set to False to leave cache invalidatation to the caller.
+ """
+ self.UpdateIssues(
+ cnxn, [issue], update_cols=update_cols, just_derived=just_derived,
+ commit=commit, invalidate=invalidate)
+
+ def _UpdateIssuesSummary(self, cnxn, issues, commit=True):
+ """Update the IssueSummary table rows for the given issues."""
+ self.issuesummary_tbl.InsertRows(
+ cnxn, ISSUESUMMARY_COLS,
+ [(issue.issue_id, issue.summary) for issue in issues],
+ replace=True, commit=commit)
+
+ def _UpdateIssuesLabels(self, cnxn, issues, project_id, commit=True):
+ """Update the Issue2Label table rows for the given issues."""
+ label_rows = []
+ for issue in issues:
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ # TODO(jrobbins): If the user adds many novel labels in one issue update,
+ # that could be slow. Solution is to add all new labels in a batch first.
+ label_rows.extend(
+ (issue.issue_id,
+ self._config_service.LookupLabelID(cnxn, project_id, label), False,
+ issue_shard)
+ for label in issue.labels)
+ label_rows.extend(
+ (issue.issue_id,
+ self._config_service.LookupLabelID(cnxn, project_id, label), True,
+ issue_shard)
+ for label in issue.derived_labels)
+
+ self.issue2label_tbl.Delete(
+ cnxn, issue_id=[issue.issue_id for issue in issues],
+ commit=False)
+ self.issue2label_tbl.InsertRows(
+ cnxn, ISSUE2LABEL_COLS + ['issue_shard'],
+ label_rows, ignore=True, commit=commit)
+
+ def _UpdateIssuesFields(self, cnxn, issues, commit=True):
+ """Update the Issue2FieldValue table rows for the given issues."""
+ fieldvalue_rows = []
+ for issue in issues:
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ for fv in issue.field_values:
+ fieldvalue_rows.append(
+ (issue.issue_id, fv.field_id, fv.int_value, fv.str_value,
+ fv.user_id or None, fv.derived, issue_shard))
+
+ self.issue2fieldvalue_tbl.Delete(
+ cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+ self.issue2fieldvalue_tbl.InsertRows(
+ cnxn, ISSUE2FIELDVALUE_COLS + ['issue_shard'],
+ fieldvalue_rows, commit=commit)
+
+ def _UpdateIssuesComponents(self, cnxn, issues, commit=True):
+ """Update the Issue2Component table rows for the given issues."""
+ issue2component_rows = []
+ for issue in issues:
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ issue2component_rows.extend(
+ (issue.issue_id, component_id, False, issue_shard)
+ for component_id in issue.component_ids)
+ issue2component_rows.extend(
+ (issue.issue_id, component_id, True, issue_shard)
+ for component_id in issue.derived_component_ids)
+
+ self.issue2component_tbl.Delete(
+ cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+ self.issue2component_tbl.InsertRows(
+ cnxn, ISSUE2COMPONENT_COLS + ['issue_shard'],
+ issue2component_rows, ignore=True, commit=commit)
+
+ def _UpdateIssuesCc(self, cnxn, issues, commit=True):
+ """Update the Issue2Cc table rows for the given issues."""
+ cc_rows = []
+ for issue in issues:
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ cc_rows.extend(
+ (issue.issue_id, cc_id, False, issue_shard)
+ for cc_id in issue.cc_ids)
+ cc_rows.extend(
+ (issue.issue_id, cc_id, True, issue_shard)
+ for cc_id in issue.derived_cc_ids)
+
+ self.issue2cc_tbl.Delete(
+ cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+ self.issue2cc_tbl.InsertRows(
+ cnxn, ISSUE2CC_COLS + ['issue_shard'],
+ cc_rows, ignore=True, commit=commit)
+
+ def _UpdateIssuesNotify(self, cnxn, issues, commit=True):
+ """Update the Issue2Notify table rows for the given issues."""
+ notify_rows = []
+ for issue in issues:
+ derived_rows = [[issue.issue_id, email]
+ for email in issue.derived_notify_addrs]
+ notify_rows.extend(derived_rows)
+
+ self.issue2notify_tbl.Delete(
+ cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+ self.issue2notify_tbl.InsertRows(
+ cnxn, ISSUE2NOTIFY_COLS, notify_rows, ignore=True, commit=commit)
+
+  def _UpdateIssuesRelation(self, cnxn, issues, commit=True):
+    """Update the IssueRelation table rows for the given issues."""
+    relation_rows = []
+    dangling_relation_rows = []
+    for issue in issues:
+      # Both directions are stored as 'blockedon' rows: a blocking
+      # relationship is recorded as the other issue being blocked on this one.
+      for dst_issue_id in issue.blocked_on_iids:
+        relation_rows.append((issue.issue_id, dst_issue_id, 'blockedon'))
+      for dst_issue_id in issue.blocking_iids:
+        relation_rows.append((dst_issue_id, issue.issue_id, 'blockedon'))
+      # Dangling refs point at issues outside this instance, identified by
+      # project name and local issue number rather than a global issue_id.
+      for dst_ref in issue.dangling_blocked_on_refs:
+        dangling_relation_rows.append((
+            issue.issue_id, dst_ref.project, dst_ref.issue_id, 'blockedon'))
+      for dst_ref in issue.dangling_blocking_refs:
+        dangling_relation_rows.append((
+            issue.issue_id, dst_ref.project, dst_ref.issue_id, 'blocking'))
+      if issue.merged_into:
+        relation_rows.append((issue.issue_id, issue.merged_into, 'mergedinto'))
+
+    # Delete rows where these issues appear on either side of a 'blockedon'
+    # relation, then re-insert the current state.
+    self.issuerelation_tbl.Delete(
+        cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+    self.issuerelation_tbl.Delete(
+        cnxn, dst_issue_id=[issue.issue_id for issue in issues],
+        kind='blockedon', commit=False)
+    self.issuerelation_tbl.InsertRows(
+        cnxn, ISSUERELATION_COLS, relation_rows, ignore=True, commit=commit)
+    self.danglingrelation_tbl.Delete(
+        cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
+    self.danglingrelation_tbl.InsertRows(
+        cnxn, DANGLINGRELATION_COLS, dangling_relation_rows, ignore=True,
+        commit=commit)
+
+ def _UpdateIssuesModified(
+ self, cnxn, iids, modified_timestamp=None, invalidate=True):
+ """Store a modified timestamp for each of the specified issues."""
+ delta = {'modified': modified_timestamp or int(time.time())}
+ self.issue_tbl.Update(cnxn, delta, id=iids, commit=False)
+ if invalidate:
+ self.InvalidateIIDs(cnxn, iids)
+
+  def DeltaUpdateIssue(
+      self, cnxn, services, reporter_id, project_id,
+      config, issue, status, owner_id, cc_add, cc_remove, comp_ids_add,
+      comp_ids_remove, labels_add, labels_remove, field_vals_add,
+      field_vals_remove, fields_clear, blocked_on_add=None,
+      blocked_on_remove=None, blocking_add=None, blocking_remove=None,
+      merged_into=None, index_now=False, comment=None, summary=None,
+      iids_to_invalidate=None, rules=None, predicate_asts=None,
+      timestamp=None):
+    """Update the issue in the database and return a set of update tuples.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: connections to persistence layer.
+      reporter_id: user ID of the user making this change.
+      project_id: int ID for the current project.
+      config: ProjectIssueConfig PB for this project.
+      issue: Issue PB of issue to update.
+      status: new issue status string, if a change is desired.
+      owner_id: user ID of the new issue owner, if a change is desired.
+      cc_add: list of user IDs of users to add to CC list.
+      cc_remove: list of user IDs of users to remove from CC list.
+      comp_ids_add: list of component IDs to add to the issue.
+      comp_ids_remove: list of component IDs to remove from the issue.
+      labels_add: list of issue label strings to add.
+      labels_remove: list of issue label strings to remove.
+      field_vals_add: dict of FieldValue PBs to add.
+      field_vals_remove: list of FieldValue PBs to remove.
+      fields_clear: list of custom field IDs to clear.
+      blocked_on_add: list of IIDs that this issue is now blocked on.
+      blocked_on_remove: list of IIDs that this issue is no longer blocked on.
+      blocking_add: list of IIDs that this issue is blocking.
+      blocking_remove: list of IIDs that this issue is no longer blocking.
+      merged_into: IID of issue that this issue was merged into, 0 to clear,
+          or None for no change.
+      index_now: True if the issue should be updated in the full text index.
+      comment: This should be the content of the comment
+          corresponding to this change.
+      summary: new issue summary, currently only used by GData API.
+      iids_to_invalidate: optional set of issue IDs to accumulate for later
+          invalidation by the caller; if None, this method invalidates
+          caches itself.
+      rules: optional list of preloaded FilterRule PBs for this project.
+      predicate_asts: optional list of QueryASTs for the rules. If rules are
+          provided, then predicate_asts should also be provided.
+      timestamp: int timestamp set during testing, otherwise defaults to
+          int(time.time()).
+
+    Returns:
+      A pair (amendments, comment_pb): the list of Amendment PBs that
+      describe the set of metadata updates that the user made, and the
+      IssueComment PB created for this change. Returns ([], None) if the
+      change was a no-op with no comment.
+    """
+    old_effective_status = tracker_bizobj.GetStatus(issue)
+
+    # Make all user input safe to echo out again later.
+    status = framework_bizobj.CanonicalizeLabel(status)
+    labels_add = [framework_bizobj.CanonicalizeLabel(l) for l in labels_add]
+    labels_add = [l for l in labels_add if l]
+    labels_remove = [framework_bizobj.CanonicalizeLabel(l)
+                     for l in labels_remove]
+    labels_remove = [l for l in labels_remove if l]
+
+    logging.info(
+        'Bulk edit to project_id %s issue.local_id %s',
+        project_id, issue.local_id)
+    if iids_to_invalidate is None:
+      iids_to_invalidate = set([issue.issue_id])
+      invalidate = True
+    else:
+      iids_to_invalidate.add(issue.issue_id)
+      invalidate = False  # Caller will do it.
+
+    # Store each updated value in the issue PB, and compute Update PBs
+    amendments = []
+    if status is not None and status != issue.status:
+      amendments.append(tracker_bizobj.MakeStatusAmendment(
+          status, issue.status))
+      issue.status = status
+    if owner_id is not None and owner_id != issue.owner_id:
+      amendments.append(tracker_bizobj.MakeOwnerAmendment(
+          owner_id, issue.owner_id))
+      issue.owner_id = owner_id
+
+    # compute the set of cc'd users added and removed
+    cc_add = [cc for cc in cc_add if cc not in issue.cc_ids]
+    cc_remove = [cc for cc in cc_remove if cc in issue.cc_ids]
+    if cc_add or cc_remove:
+      cc_ids = [cc for cc in list(issue.cc_ids) + cc_add
+                if cc not in cc_remove]
+      issue.cc_ids = cc_ids
+      amendments.append(tracker_bizobj.MakeCcAmendment(cc_add, cc_remove))
+
+    # compute the set of components added and removed
+    comp_ids_add = [c for c in comp_ids_add if c not in issue.component_ids]
+    comp_ids_remove = [c for c in comp_ids_remove if c in issue.component_ids]
+    if comp_ids_add or comp_ids_remove:
+      comp_ids = [cid for cid in list(issue.component_ids) + comp_ids_add
+                  if cid not in comp_ids_remove]
+      issue.component_ids = comp_ids
+      amendments.append(tracker_bizobj.MakeComponentsAmendment(
+          comp_ids_add, comp_ids_remove, config))
+
+    # compute the set of labels added and removed
+    (labels, update_labels_add,
+     update_labels_remove) = framework_bizobj.MergeLabels(
+         issue.labels, labels_add, labels_remove,
+         config.exclusive_label_prefixes)
+
+    if update_labels_add or update_labels_remove:
+      issue.labels = labels
+      amendments.append(tracker_bizobj.MakeLabelsAmendment(
+          update_labels_add, update_labels_remove))
+
+    # compute the set of custom fields added and removed
+    (field_vals, update_fields_add,
+     update_fields_remove) = tracker_bizobj.MergeFields(
+         issue.field_values, field_vals_add, field_vals_remove,
+         config.field_defs)
+
+    if update_fields_add or update_fields_remove:
+      issue.field_values = field_vals
+      # Emit one amendment per field that gained or lost values.
+      for fd in config.field_defs:
+        added_values_this_field = [
+            fv for fv in update_fields_add if fv.field_id == fd.field_id]
+        if added_values_this_field:
+          amendments.append(tracker_bizobj.MakeFieldAmendment(
+              fd.field_id, config,
+              [tracker_bizobj.GetFieldValue(fv, {})
+               for fv in added_values_this_field],
+              old_values=[]))
+        removed_values_this_field = [
+            fv for fv in update_fields_remove if fv.field_id == fd.field_id]
+        if removed_values_this_field:
+          amendments.append(tracker_bizobj.MakeFieldAmendment(
+              fd.field_id, config, [],
+              old_values=[tracker_bizobj.GetFieldValue(fv, {})
+                          for fv in removed_values_this_field]))
+
+    if fields_clear:
+      field_clear_set = set(fields_clear)
+      revised_fields = []
+      for fd in config.field_defs:
+        if fd.field_id not in field_clear_set:
+          revised_fields.extend(
+              fv for fv in issue.field_values if fv.field_id == fd.field_id)
+        else:
+          amendments.append(
+              tracker_bizobj.MakeFieldClearedAmendment(fd.field_id, config))
+          # Enum fields are stored as labels with a "fieldname-" prefix, so
+          # clearing one also strips its labels.
+          if fd.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
+            prefix = fd.field_name.lower() + '-'
+            filtered_labels = [
+                lab for lab in issue.labels
+                if not lab.lower().startswith(prefix)]
+            issue.labels = filtered_labels
+
+      issue.field_values = revised_fields
+
+    if blocked_on_add or blocked_on_remove:
+      old_blocked_on = issue.blocked_on_iids
+      blocked_on_add = [iid for iid in blocked_on_add
+                        if iid not in old_blocked_on]
+      add_refs = [(ref_issue.project_name, ref_issue.local_id)
+                  for ref_issue in self.GetIssues(cnxn, blocked_on_add)]
+      blocked_on_rm = [iid for iid in blocked_on_remove
+                       if iid in old_blocked_on]
+      remove_refs = [
+          (ref_issue.project_name, ref_issue.local_id)
+          for ref_issue in self.GetIssues(cnxn, blocked_on_rm)]
+      amendments.append(tracker_bizobj.MakeBlockedOnAmendment(
+          add_refs, remove_refs, default_project_name=issue.project_name))
+      blocked_on = [iid for iid in old_blocked_on + blocked_on_add
+                    if iid not in blocked_on_remove]
+      issue.blocked_on_iids = blocked_on
+      # The other issues in the relationship must be invalidated too.
+      iids_to_invalidate.update(blocked_on_add + blocked_on_remove)
+
+    if blocking_add or blocking_remove:
+      old_blocking = issue.blocking_iids
+      blocking_add = [iid for iid in blocking_add
+                      if iid not in old_blocking]
+      add_refs = [(ref_issue.project_name, ref_issue.local_id)
+                  for ref_issue in self.GetIssues(cnxn, blocking_add)]
+      blocking_remove = [iid for iid in blocking_remove
+                         if iid in old_blocking]
+      remove_refs = [
+          (ref_issue.project_name, ref_issue.local_id)
+          for ref_issue in self.GetIssues(cnxn, blocking_remove)]
+      amendments.append(tracker_bizobj.MakeBlockingAmendment(
+          add_refs, remove_refs, default_project_name=issue.project_name))
+      blocking_refs = [iid for iid in old_blocking + blocking_add
+                       if iid not in blocking_remove]
+      issue.blocking_iids = blocking_refs
+      iids_to_invalidate.update(blocking_add + blocking_remove)
+
+    if merged_into is not None and merged_into != issue.merged_into:
+      merged_remove = issue.merged_into
+      merged_add = merged_into
+      issue.merged_into = merged_into
+      # Either endpoint of the merge may no longer exist; tolerate that.
+      try:
+        remove_issue = self.GetIssue(cnxn, merged_remove)
+        remove_ref = remove_issue.project_name, remove_issue.local_id
+        iids_to_invalidate.add(merged_remove)
+      except NoSuchIssueException:
+        remove_ref = None
+
+      try:
+        add_issue = self.GetIssue(cnxn, merged_add)
+        add_ref = add_issue.project_name, add_issue.local_id
+        iids_to_invalidate.add(merged_add)
+      except NoSuchIssueException:
+        add_ref = None
+
+      amendments.append(tracker_bizobj.MakeMergedIntoAmendment(
+          add_ref, remove_ref, default_project_name=issue.project_name))
+
+    if summary and summary != issue.summary:
+      amendments.append(tracker_bizobj.MakeSummaryAmendment(
+          summary, issue.summary))
+      issue.summary = summary
+
+    # If this was a no-op with no comment, bail out and don't save,
+    # invalidate, or re-index anything.
+    if not amendments and (not comment or not comment.strip()):
+      return [], None
+
+    # Note: no need to check for collisions when the user is doing a delta.
+
+    # update the modified_timestamp for any comment added, even if it was
+    # just a text comment with no issue fields changed.
+    issue.modified_timestamp = timestamp or int(time.time())
+
+    # Update the closed timestamp before filter rules so that rules
+    # can test for closed_timestamp, and also after filter rules
+    # so that closed_timestamp will be set if the issue is closed by the rule.
+    _UpdateClosedTimestamp(config, issue, old_effective_status)
+    if rules is None:
+      logging.info('Rules were not given')
+      rules = services.features.GetFilterRules(cnxn, config.project_id)
+      predicate_asts = filterrules_helpers.ParsePredicateASTs(
+          rules, config, None)
+
+    filterrules_helpers.ApplyGivenRules(
+        cnxn, services, issue, config, rules, predicate_asts)
+    _UpdateClosedTimestamp(config, issue, old_effective_status)
+
+    # Store the issue in SQL.
+    self.UpdateIssue(cnxn, issue, commit=False, invalidate=False)
+
+    comment_pb = self.CreateIssueComment(
+        cnxn, project_id, issue.local_id, reporter_id, comment,
+        amendments=amendments, commit=False)
+    self._UpdateIssuesModified(
+        cnxn, iids_to_invalidate, modified_timestamp=issue.modified_timestamp,
+        invalidate=invalidate)
+
+    # If the caller will invalidate, commit here since
+    # _UpdateIssuesModified did not.
+    if not invalidate:
+      cnxn.Commit()
+
+    if index_now:
+      # NOTE(review): CreateIssue passes services.user here, not
+      # services.user_service — confirm which attribute is correct.
+      tracker_fulltext.IndexIssues(
+          cnxn, [issue], services.user_service, self, self._config_service)
+
+    return amendments, comment_pb
+
+ def InvalidateIIDs(self, cnxn, iids_to_invalidate):
+    """Invalidate the specified issues in the Invalidate table and memcache.
+
+    Args:
+      cnxn: connection to SQL database.
+      iids_to_invalidate: iterable of global issue IDs to invalidate.
+    """
+    # Fetch the issues first: the config-service memcache invalidation
+    # below needs the issue PBs themselves, not just their IDs.
+    issues_to_invalidate = self.GetIssues(cnxn, iids_to_invalidate)
+    self.issue_2lc.InvalidateKeys(cnxn, iids_to_invalidate)
+    self._config_service.InvalidateMemcache(issues_to_invalidate)
+
+ def ApplyIssueComment(
+      self, cnxn, services, reporter_id, project_id,
+      local_id, summary, status, owner_id, cc_ids, labels, field_values,
+      component_ids, blocked_on, blocking, dangling_blocked_on_refs,
+      dangling_blocking_refs, merged_into, index_now=True,
+      page_gen_ts=None, comment=None, inbound_message=None, attachments=None,
+      timestamp=None):
+    """Update the issue in the database and return info for notifications.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: connection to persistence layer.
+      reporter_id: user ID of the user making this change.
+      project_id: int Project ID for the current project.
+      local_id: integer local ID of the issue to update.
+      summary: new issue summary string.
+      status: new issue status string.
+      owner_id: user ID of the new issue owner.
+      cc_ids: list of user IDs of users to CC when the issue changes.
+      labels: list of new issue label strings.
+      field_values: list of FieldValue PBs.
+      component_ids: list of int component IDs.
+      blocked_on: list of IIDs that this issue is blocked on.
+      blocking: list of IIDs that this issue is blocking.
+      dangling_blocked_on_refs: list of Codesite issues this is blocked on.
+      dangling_blocking_refs: list of Codesite issues this is blocking.
+      merged_into: IID of issue that this issue was merged into, 0 to clear.
+      index_now: True if the issue should be updated in the full text index.
+      page_gen_ts: time at which the issue HTML page was generated,
+        used in detecting mid-air collisions.
+      comment: This should be the content of the comment
+        corresponding to this change.
+      inbound_message: optional string full text of an email that caused
+        this comment to be added.
+      attachments: This should be a list of
+        [(filename, contents, mimetype),...] attachments uploaded at
+        the time the comment was made.
+      timestamp: int timestamp set during testing, otherwise defaults to
+        int(time.time()).
+
+    Returns:
+      (amendments, comment_pb). Amendments is a list of Amendment PBs
+      that describe the set of metadata updates that the user made.
+      Comment_pb is the IssueComment for the change.
+
+    Raises:
+      MidAirCollisionException: indicates that the issue has been
+      changed since the user loaded the page.
+    """
+    status = framework_bizobj.CanonicalizeLabel(status)
+    labels = [framework_bizobj.CanonicalizeLabel(l) for l in labels]
+    labels = [l for l in labels if l]
+
+    # Use canonical label names
+    label_ids = self._config_service.LookupLabelIDs(
+        cnxn, project_id, labels, autocreate=True)
+    labels = [self._config_service.LookupLabel(cnxn, project_id, l_id)
+              for l_id in label_ids]
+
+    # Get the issue and project configurations.
+    config = self._config_service.GetProjectConfig(cnxn, project_id)
+    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
+
+    # Store each updated value in the issue PB, and compute amendments
+    amendments = []
+    iids_to_invalidate = set()
+
+    if summary and summary != issue.summary:
+      amendments.append(tracker_bizobj.MakeSummaryAmendment(
+          summary, issue.summary))
+      issue.summary = summary
+
+    # Capture the effective status before any edits so that the
+    # _UpdateClosedTimestamp calls below can detect open <-> closed
+    # transitions.
+    old_effective_status = tracker_bizobj.GetStatus(issue)
+    if status != issue.status:
+      amendments.append(tracker_bizobj.MakeStatusAmendment(
+          status, issue.status))
+      issue.status = status
+
+    if owner_id != issue.owner_id:
+      amendments.append(tracker_bizobj.MakeOwnerAmendment(
+          owner_id, issue.owner_id))
+      if owner_id == framework_constants.NO_USER_SPECIFIED:
+        issue.reset('owner_id')
+      else:
+        issue.owner_id = owner_id
+
+    # TODO(jrobbins): factor the CC code into a method and add a test
+    # compute the set of cc'd users added and removed
+    cc_added = [cc for cc in cc_ids if cc not in issue.cc_ids]
+    cc_removed = [cc for cc in issue.cc_ids if cc not in cc_ids]
+    if cc_added or cc_removed:
+      amendments.append(tracker_bizobj.MakeCcAmendment(cc_added, cc_removed))
+      issue.cc_ids = cc_ids
+
+    # TODO(jrobbins): factor the labels code into a method and add a test
+    # compute the set of labels added and removed
+    labels_added = [lab for lab in labels
+                    if lab not in issue.labels]
+    labels_removed = [lab for lab in issue.labels
+                      if lab not in labels]
+    if labels_added or labels_removed:
+      amendments.append(tracker_bizobj.MakeLabelsAmendment(
+          labels_added, labels_removed))
+      issue.labels = labels
+
+    # Compare old and new custom field values per field ID, with values
+    # sorted so that ordering differences do not count as changes.
+    old_field_values = collections.defaultdict(list)
+    for ofv in issue.field_values:
+      # Passing {} because I just want the user_id, not the email address.
+      old_field_values[ofv.field_id].append(
+          tracker_bizobj.GetFieldValue(ofv, {}))
+    for field_id, values in old_field_values.iteritems():
+      old_field_values[field_id] = sorted(values)
+
+    new_field_values = collections.defaultdict(list)
+    for nfv in field_values:
+      new_field_values[nfv.field_id].append(
+          tracker_bizobj.GetFieldValue(nfv, {}))
+    for field_id, values in new_field_values.iteritems():
+      new_field_values[field_id] = sorted(values)
+
+    field_ids_added = {fv.field_id for fv in field_values
+                       if fv.field_id not in old_field_values}
+    field_ids_removed = {ofv.field_id for ofv in issue.field_values
+                         if ofv.field_id not in new_field_values}
+    field_ids_changed = {
+        fv.field_id for fv in field_values
+        if (fv.field_id in old_field_values and
+            old_field_values[fv.field_id] != new_field_values[fv.field_id])}
+
+    if field_ids_added or field_ids_removed or field_ids_changed:
+      amendments.extend(
+          tracker_bizobj.MakeFieldAmendment(fid, config, new_field_values[fid])
+          for fid in field_ids_added)
+      amendments.extend(
+          tracker_bizobj.MakeFieldAmendment(
+              fid, config, new_field_values[fid],
+              old_values=old_field_values[fid])
+          for fid in field_ids_changed)
+      amendments.extend(
+          tracker_bizobj.MakeFieldAmendment(fid, config, [])
+          for fid in field_ids_removed)
+
+      issue.field_values = field_values
+
+    comps_added = [comp for comp in component_ids
+                   if comp not in issue.component_ids]
+    comps_removed = [comp for comp in issue.component_ids
+                     if comp not in component_ids]
+    if comps_added or comps_removed:
+      amendments.append(tracker_bizobj.MakeComponentsAmendment(
+          comps_added, comps_removed, config))
+      issue.component_ids = component_ids
+
+    if merged_into != issue.merged_into:
+      # TODO(jrobbins): refactor this into LookupIssueRefByIssueID().
+      try:
+        merged_remove = self.GetIssue(cnxn, issue.merged_into)
+        remove_ref = merged_remove.project_name, merged_remove.local_id
+        iids_to_invalidate.add(issue.merged_into)
+      except NoSuchIssueException:
+        remove_ref = None
+
+      try:
+        merged_add = self.GetIssue(cnxn, merged_into)
+        add_ref = merged_add.project_name, merged_add.local_id
+        iids_to_invalidate.add(merged_into)
+      except NoSuchIssueException:
+        add_ref = None
+
+      issue.merged_into = merged_into
+      amendments.append(tracker_bizobj.MakeMergedIntoAmendment(
+          add_ref, remove_ref, default_project_name=issue.project_name))
+
+    blockers_added, blockers_removed = framework_helpers.ComputeListDeltas(
+        issue.blocked_on_iids, blocked_on)
+    danglers_added, danglers_removed = framework_helpers.ComputeListDeltas(
+        issue.dangling_blocked_on_refs, dangling_blocked_on_refs)
+    blocked_add_issues = []
+    blocked_remove_issues = []
+    if blockers_added or blockers_removed or danglers_added or danglers_removed:
+      blocked_add_issues = self.GetIssues(cnxn, blockers_added)
+      add_refs = [(ref_issue.project_name, ref_issue.local_id)
+                  for ref_issue in blocked_add_issues]
+      add_refs.extend([(ref.project, ref.issue_id) for ref in danglers_added])
+      blocked_remove_issues = self.GetIssues(cnxn, blockers_removed)
+      remove_refs = [
+          (ref_issue.project_name, ref_issue.local_id)
+          for ref_issue in blocked_remove_issues]
+      remove_refs.extend([(ref.project, ref.issue_id)
+                          for ref in danglers_removed])
+      amendments.append(tracker_bizobj.MakeBlockedOnAmendment(
+          add_refs, remove_refs, default_project_name=issue.project_name))
+      issue.blocked_on_iids = blocked_on
+      issue.dangling_blocked_on_refs = dangling_blocked_on_refs
+      iids_to_invalidate.update(blockers_added + blockers_removed)
+
+    blockers_added, blockers_removed = framework_helpers.ComputeListDeltas(
+        issue.blocking_iids, blocking)
+    danglers_added, danglers_removed = framework_helpers.ComputeListDeltas(
+        issue.dangling_blocking_refs, dangling_blocking_refs)
+    blocking_add_issues = []
+    blocking_remove_issues = []
+    if blockers_added or blockers_removed or danglers_added or danglers_removed:
+      blocking_add_issues = self.GetIssues(cnxn, blockers_added)
+      add_refs = [(ref_issue.project_name, ref_issue.local_id)
+                  for ref_issue in blocking_add_issues]
+      add_refs.extend([(ref.project, ref.issue_id) for ref in danglers_added])
+      blocking_remove_issues = self.GetIssues(cnxn, blockers_removed)
+      remove_refs = [
+          (ref_issue.project_name, ref_issue.local_id)
+          for ref_issue in blocking_remove_issues]
+      remove_refs.extend([(ref.project, ref.issue_id)
+                          for ref in danglers_removed])
+      amendments.append(tracker_bizobj.MakeBlockingAmendment(
+          add_refs, remove_refs, default_project_name=issue.project_name))
+      issue.blocking_iids = blocking
+      issue.dangling_blocking_refs = dangling_blocking_refs
+      iids_to_invalidate.update(blockers_added + blockers_removed)
+
+    logging.info('later amendments so far is %r', amendments)
+
+    # Raise an exception if the issue was changed by another user
+    # while this user was viewing/editing the issue.
+    if page_gen_ts and amendments:
+      # The issue timestamp is stored in seconds, convert to microseconds to
+      # match the page_gen_ts.
+      issue_ts = issue.modified_timestamp * 1000000
+      if issue_ts > page_gen_ts:
+        logging.info('%d > %d', issue_ts, page_gen_ts)
+        logging.info('amendments: %s', amendments)
+        # Forget all the modifications made to this issue in RAM.
+        self.issue_2lc.InvalidateKeys(cnxn, [issue.issue_id])
+        raise MidAirCollisionException('issue %d' % local_id, local_id)
+
+    # update the modified_timestamp for any comment added, even if it was
+    # just a text comment with no issue fields changed.
+    issue.modified_timestamp = timestamp or int(time.time())
+
+    # Update closed_timestamp both before and after filter rules.
+    _UpdateClosedTimestamp(config, issue, old_effective_status)
+    filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
+    _UpdateClosedTimestamp(config, issue, old_effective_status)
+
+    self.UpdateIssue(cnxn, issue)
+    # TODO(jrobbins): only invalidate nonviewable if the following changed:
+    # restriction label, owner, cc, or user-type custom field.
+    self._config_service.InvalidateMemcache([issue], key_prefix='nonviewable:')
+
+    # NOTE(review): comment may be None here (it defaults to None);
+    # presumably ClassifyComment tolerates that -- confirm with spam service.
+    classification = services.spam.ClassifyComment(comment)
+
+    label = classification['outputLabel']
+    logging.info('comment classification: %s' % classification)
+    score = 0
+    is_spam = False
+    for output in classification['outputMulti']:
+      if output['label'] == label:
+        score = float(output['score'])
+    if label == 'spam' and score > settings.classifier_spam_thresh:
+      logging.info('spam comment: %s' % comment)
+      is_spam = True
+
+    if amendments or (comment and comment.strip()) or attachments:
+      logging.info('amendments = %r', amendments)
+      comment_pb = self.CreateIssueComment(
+          cnxn, project_id, local_id, reporter_id, comment,
+          amendments=amendments, attachments=attachments,
+          inbound_message=inbound_message, is_spam=is_spam)
+      services.spam.RecordClassifierCommentVerdict(
+          cnxn, comment_pb, is_spam, score)
+    else:
+      comment_pb = None
+
+    # Add a comment to the newly added issues saying they are now blocking
+    # this issue.
+    for add_issue in blocked_add_issues:
+      self.CreateIssueComment(
+          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockingAmendment(
+              [(issue.project_name, issue.local_id)], [],
+              default_project_name=add_issue.project_name)])
+    # Add a comment to the newly removed issues saying they are no longer
+    # blocking this issue.
+    for remove_issue in blocked_remove_issues:
+      self.CreateIssueComment(
+          cnxn, remove_issue.project_id, remove_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockingAmendment(
+              [], [(issue.project_name, issue.local_id)],
+              default_project_name=remove_issue.project_name)])
+
+    # Add a comment to the newly added issues saying they are now blocked on
+    # this issue.
+    for add_issue in blocking_add_issues:
+      self.CreateIssueComment(
+          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
+              [(issue.project_name, issue.local_id)], [],
+              default_project_name=add_issue.project_name)])
+    # Add a comment to the newly removed issues saying they are no longer
+    # blocked on this issue.
+    for remove_issue in blocking_remove_issues:
+      self.CreateIssueComment(
+          cnxn, remove_issue.project_id, remove_issue.local_id, reporter_id,
+          content='',
+          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
+              [], [(issue.project_name, issue.local_id)],
+              default_project_name=remove_issue.project_name)])
+
+    self._UpdateIssuesModified(
+        cnxn, iids_to_invalidate, modified_timestamp=issue.modified_timestamp)
+
+    # NOTE(review): this passes services.user while the delta-update path
+    # above passes services.user_service -- confirm which attribute exists.
+    if index_now:
+      tracker_fulltext.IndexIssues(
+          cnxn, [issue], services.user, self, self._config_service)
+
+    if is_spam:
+      sequence_num = len(self.GetCommentsForIssue(cnxn, issue.issue_id)) - 1
+      # Soft-deletes have to have a user ID, so spam comments are
+      # just "deleted" by the commenter.
+      self.SoftDeleteComment(cnxn, project_id, local_id, sequence_num,
+          reporter_id, services.user, is_spam=True)
+    return amendments, comment_pb
+
+ def RelateIssues(self, cnxn, issue_relation_dict, commit=True):
+    """Update the IssueRelation table rows for the given relationships.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_relation_dict: mapping of 'source' issue IDs to lists of
+        (destination issue ID, kind) pairs, where kind is one of
+        'blocking', 'blockedon', or 'mergedinto'.
+      commit: True to commit the inserts immediately.
+    """
+    relation_rows = []
+    for src_iid, dests in issue_relation_dict.iteritems():
+      for dst_iid, kind in dests:
+        # 'blocking' is stored as the inverse 'blockedon' row; any other
+        # unknown kind is silently dropped.
+        if kind == 'blocking':
+          relation_rows.append((dst_iid, src_iid, 'blockedon'))
+        elif kind == 'blockedon' or kind == 'mergedinto':
+          relation_rows.append((src_iid, dst_iid, kind))
+
+    self.issuerelation_tbl.InsertRows(
+        cnxn, ISSUERELATION_COLS, relation_rows, ignore=True, commit=commit)
+
+ def CopyIssues(self, cnxn, dest_project, issues, user_service, copier_id):
+    """Copy the given issues into the destination project.
+
+    Args:
+      cnxn: connection to SQL database.
+      dest_project: Project PB of the destination project.
+      issues: list of Issue PBs to copy.
+      user_service: persistence layer for users, used when re-indexing.
+      copier_id: user ID of the user making the copies; the new issues
+        are reported by this user.
+
+    Returns:
+      A list of the newly created Issue PBs.
+    """
+    created_issues = []
+    iids_to_invalidate = set()
+
+    for target_issue in issues:
+      new_issue = tracker_pb2.Issue()
+      new_issue.project_id = dest_project.project_id
+      new_issue.project_name = dest_project.project_name
+      new_issue.summary = target_issue.summary
+      new_issue.labels.extend(target_issue.labels)
+      new_issue.field_values.extend(target_issue.field_values)
+      new_issue.reporter_id = copier_id
+
+      timestamp = int(time.time())
+      new_issue.opened_timestamp = timestamp
+      new_issue.modified_timestamp = timestamp
+
+      target_comments = self.GetCommentsForIssue(cnxn, target_issue.issue_id)
+      initial_summary_comment = target_comments[0]
+
+      # Note that blocking and merge_into are not copied.
+      if target_issue.blocked_on_iids:
+        blocked_on = target_issue.blocked_on_iids
+        iids_to_invalidate.update(blocked_on)
+        new_issue.blocked_on_iids = blocked_on
+
+      # Gather list of attachments from the target issue's summary comment.
+      # MakeIssueComments expects a list of [(filename, contents, mimetype),...]
+      attachments = []
+      for attachment in initial_summary_comment.attachments:
+        object_path = ('/' + app_identity.get_default_gcs_bucket_name() +
+                       attachment.gcs_object_id)
+        with cloudstorage.open(object_path, 'r') as f:
+          content = f.read()
+          attachments.append(
+              [attachment.filename, content, attachment.mimetype])
+
+      if attachments:
+        new_issue.attachment_count = len(attachments)
+
+      # Create the same summary comment as the target issue.
+      comment = self._MakeIssueComment(
+          dest_project.project_id, copier_id, initial_summary_comment.content,
+          attachments=attachments, timestamp=timestamp, was_escaped=True)
+
+      new_issue.local_id = self.AllocateNextLocalID(
+          cnxn, dest_project.project_id)
+      issue_id = self.InsertIssue(cnxn, new_issue)
+      comment.issue_id = issue_id
+      self.InsertComment(cnxn, comment)
+
+      if permissions.HasRestrictions(new_issue, 'view'):
+        self._config_service.InvalidateMemcache(
+            [new_issue], key_prefix='nonviewable:')
+
+      tracker_fulltext.IndexIssues(
+          cnxn, [new_issue], user_service, self, self._config_service)
+      created_issues.append(new_issue)
+
+    # The referenced issues are all modified when the relationship is added.
+    # NOTE(review): timestamp here is the value from the last loop iteration;
+    # this raises NameError if issues is empty -- confirm callers never
+    # pass an empty list.
+    self._UpdateIssuesModified(
+        cnxn, iids_to_invalidate, modified_timestamp=timestamp)
+
+    return created_issues
+
+ def MoveIssues(self, cnxn, dest_project, issues, user_service):
+ """Move the given issues into the destination project."""
+ old_location_rows = [
+ (issue.issue_id, issue.project_id, issue.local_id)
+ for issue in issues]
+ moved_back_iids = set()
+
+ former_locations_in_project = self.issueformerlocations_tbl.Select(
+ cnxn, cols=ISSUEFORMERLOCATIONS_COLS,
+ project_id=dest_project.project_id,
+ issue_id=[issue.issue_id for issue in issues])
+ former_locations = {
+ issue_id: local_id
+ for issue_id, project_id, local_id in former_locations_in_project}
+
+ # Remove the issue id from issue_id_2lc so that it does not stay
+ # around in cache and memcache.
+ # The Key of IssueIDTwoLevelCache is (project_id, local_id).
+ issue_id_2lc_key = (issues[0].project_id, issues[0].local_id)
+ self.issue_id_2lc.InvalidateKeys(cnxn, [issue_id_2lc_key])
+
+ for issue in issues:
+ if issue.issue_id in former_locations:
+ dest_id = former_locations[issue.issue_id]
+ moved_back_iids.add(issue.issue_id)
+ else:
+ dest_id = self.AllocateNextLocalID(cnxn, dest_project.project_id)
+
+ issue.local_id = dest_id
+ issue.project_id = dest_project.project_id
+ issue.project_name = dest_project.project_name
+
+ # Rewrite each whole issue so that status and label IDs are looked up
+ # in the context of the destination project.
+ self.UpdateIssues(cnxn, issues)
+
+ # Comments also have the project_id because it is needed for an index.
+ self.comment_tbl.Update(
+ cnxn, {'project_id': dest_project.project_id},
+ issue_id=[issue.issue_id for issue in issues], commit=False)
+
+ # Record old locations so that we can offer links if the user looks there.
+ self.issueformerlocations_tbl.InsertRows(
+ cnxn, ISSUEFORMERLOCATIONS_COLS, old_location_rows, ignore=True,
+ commit=False)
+ cnxn.Commit()
+
+ tracker_fulltext.IndexIssues(
+ cnxn, issues, user_service, self, self._config_service)
+
+ return moved_back_iids
+
+ def ExpungeFormerLocations(self, cnxn, project_id):
+    """Delete history of issues that were in this project but moved out.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project whose former-location rows are
+        deleted.
+    """
+    self.issueformerlocations_tbl.Delete(cnxn, project_id=project_id)
+
+ def ExpungeIssues(self, cnxn, issue_ids):
+    """Completely delete the specified issues from the database.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_ids: list of int global issue IDs to delete permanently.
+    """
+    logging.info('expunging the issues %r', issue_ids)
+    tracker_fulltext.UnindexIssues(issue_ids)
+
+    remaining_iids = issue_ids[:]
+
+    # Note: these are purposely not done in a transaction to allow
+    # incremental progress in what might be a very large change.
+    # We are not concerned about non-atomic deletes because all
+    # this data will be gone eventually anyway.
+    while remaining_iids:
+      iids_in_chunk = remaining_iids[:CHUNK_SIZE]
+      remaining_iids = remaining_iids[CHUNK_SIZE:]
+      self.issuesummary_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issue2label_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issue2component_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issue2cc_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issue2notify_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issueupdate_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.attachment_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.comment_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      # Relation rows are deleted whether the issue is source or target.
+      self.issuerelation_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issuerelation_tbl.Delete(cnxn, dst_issue_id=iids_in_chunk)
+      self.danglingrelation_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issueformerlocations_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.reindexqueue_tbl.Delete(cnxn, issue_id=iids_in_chunk)
+      self.issue_tbl.Delete(cnxn, id=iids_in_chunk)
+
+ def SoftDeleteIssue(self, cnxn, project_id, local_id, deleted, user_service):
+    """Set the deleted boolean on the indicated issue and store it.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int project ID for the current project.
+      local_id: int local ID of the issue to freeze/unfreeze.
+      deleted: boolean, True to soft-delete, False to undelete.
+      user_service: persistence layer for users, used to lookup user IDs.
+    """
+    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
+    issue.deleted = deleted
+    # Only the deleted column needs to be written back.
+    self.UpdateIssue(cnxn, issue, update_cols=['deleted'])
+    # Re-index so the issue appears or disappears in search results.
+    tracker_fulltext.IndexIssues(
+        cnxn, [issue], user_service, self, self._config_service)
+
+ def DeleteComponentReferences(self, cnxn, component_id):
+    """Delete any references to the specified component.
+
+    Args:
+      cnxn: connection to SQL database.
+      component_id: int ID of the component being removed.
+    """
+    # TODO(jrobbins): add tasks to re-index any affected issues.
+    # Note: if this call fails, some data could be left
+    # behind, but it would not be displayed, and it could always be
+    # GC'd from the DB later.
+    self.issue2component_tbl.Delete(cnxn, component_id=component_id)
+
+ ### Local ID generation
+
+ def InitializeLocalID(self, cnxn, project_id):
+    """Initialize the local ID counter for the specified project to zero.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+    """
+    # Both counters start at zero, so the first allocations return 1.
+    self.localidcounter_tbl.InsertRow(
+        cnxn, project_id=project_id, used_local_id=0, used_spam_id=0)
+
+ def SetUsedLocalID(self, cnxn, project_id):
+    """Set the local ID counter based on existing issues.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+
+    Returns:
+      The highest local ID currently used in the project, which the
+      counter was just set to.
+    """
+    highest_id = self.GetHighestLocalID(cnxn, project_id)
+    self.localidcounter_tbl.Update(
+        cnxn, {'used_local_id': highest_id}, project_id=project_id)
+    return highest_id
+
+ def AllocateNextLocalID(self, cnxn, project_id):
+    """Return the next available issue ID in the specified project.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+
+    Returns:
+      The next local ID.
+    """
+    try:
+      next_local_id = self.localidcounter_tbl.IncrementCounterValue(
+          cnxn, 'used_local_id', project_id=project_id)
+    except AssertionError:
+      # Presumably the counter row was missing or stale; rebuild it from
+      # the existing issues and hand out the next ID after the highest.
+      next_local_id = self.SetUsedLocalID(cnxn, project_id) + 1
+    return next_local_id
+
+ def SetUsedSpamID(self, cnxn, project_id):
+    """Advance the used spam ID counter by one and return the new value.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+
+    Returns:
+      The incremented spam ID counter value.
+    """
+    current_id = self.localidcounter_tbl.SelectValue(
+        cnxn, 'used_spam_id', project_id=project_id)
+    current_id = current_id or 0 # Will be None if project has no issues.
+
+    self.localidcounter_tbl.Update(
+        cnxn, {'used_spam_id': current_id + 1}, project_id=project_id)
+    return current_id + 1
+
+ def AllocateNextSpamLocalID(self, cnxn, project_id):
+    """Return the next available spam issue ID in the specified project.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+
+    Returns:
+      The next spam local ID, negated to keep spam issues out of the
+      normal local ID space.
+    """
+    try:
+      next_spam_id = self.localidcounter_tbl.IncrementCounterValue(
+          cnxn, 'used_spam_id', project_id=project_id)
+    except AssertionError:
+      # Presumably the counter row was missing; rebuild and take the next.
+      next_spam_id = self.SetUsedSpamID(cnxn, project_id) + 1
+    return -next_spam_id
+
+ def GetHighestLocalID(self, cnxn, project_id):
+    """Return the highest used issue ID in the specified project.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project.
+
+    Returns:
+      The highest local ID among active and moved issues.
+    """
+    highest = self.issue_tbl.SelectValue(
+        cnxn, 'MAX(local_id)', project_id=project_id)
+    highest = highest or 0 # It will be None if the project has no issues.
+    # Also consider issues that were moved out, so their old local IDs
+    # are never reused.
+    highest_former = self.issueformerlocations_tbl.SelectValue(
+        cnxn, 'MAX(local_id)', project_id=project_id)
+    highest_former = highest_former or 0
+    return max(highest, highest_former)
+
+ def GetAllLocalIDsInProject(self, cnxn, project_id, min_local_id=None):
+    """Return the list of local IDs only, not the actual issues.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: the ID of the project to which the issue belongs.
+      min_local_id: point to start at.
+
+    Returns:
+      A list of local IDs from 1 to N, or from min_local_id to N. It
+      may be the case that some of those local IDs are no longer used, e.g.,
+      if some issues were moved out of this project.
+    """
+    if not min_local_id:
+      min_local_id = 1
+    highest_local_id = self.GetHighestLocalID(cnxn, project_id)
+    return range(min_local_id, highest_local_id + 1)
+
+ def ExpungeLocalIDCounters(self, cnxn, project_id):
+    """Delete history of local ids that were in this project.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project whose counter row is deleted.
+    """
+    self.localidcounter_tbl.Delete(cnxn, project_id=project_id)
+
+ ### Comments
+
+ def _UnpackComment(self, comment_row):
+    """Partially construct a Comment PB from a DB row.
+
+    Args:
+      comment_row: single row from the Comment table.
+
+    Returns:
+      An IssueComment PB with the scalar fields filled in; amendments and
+      attachments are joined in later by _DeserializeComments.
+    """
+    (comment_id, issue_id, created, project_id, commenter_id, content,
+     inbound_message, was_escaped, deleted_by, is_spam) = comment_row
+    comment = tracker_pb2.IssueComment()
+    comment.id = comment_id
+    comment.issue_id = issue_id
+    comment.timestamp = created
+    comment.project_id = project_id
+    comment.user_id = commenter_id
+    # Normalize NULL text columns to empty strings.
+    comment.content = content or ''
+    comment.inbound_message = inbound_message or ''
+    comment.was_escaped = bool(was_escaped)
+    comment.deleted_by = deleted_by or 0
+    comment.is_spam = bool(is_spam)
+    return comment
+
+ def _UnpackAmendment(self, amendment_row):
+    """Construct an Amendment PB from a DB row.
+
+    Args:
+      amendment_row: single row from the IssueUpdate table.
+
+    Returns:
+      An (Amendment PB, comment_id) pair; comment_id lets the caller
+      join the amendment to its comment.
+    """
+    (_id, _issue_id, comment_id, field_name,
+     old_value, new_value, added_user_id, removed_user_id,
+     custom_field_name) = amendment_row
+    amendment = tracker_pb2.Amendment()
+    field_enum = tracker_pb2.FieldID(field_name.upper())
+    amendment.field = field_enum
+
+    # TODO(jrobbins): display old values in more cases.
+    if new_value is not None:
+      amendment.newvalue = new_value
+    if old_value is not None:
+      amendment.oldvalue = old_value
+    if added_user_id:
+      amendment.added_user_ids.append(added_user_id)
+    if removed_user_id:
+      amendment.removed_user_ids.append(removed_user_id)
+    if custom_field_name:
+      amendment.custom_field_name = custom_field_name
+    return amendment, comment_id
+
+ def _ConsolidateAmendments(self, amendments):
+ """Consoliodate amendments of the same field in one comment into one
+ amendment PB."""
+
+ fields_dict = {}
+ result = []
+
+ for amendment in amendments:
+ fields_dict.setdefault(amendment.field, []).append(amendment)
+ for field, amendments in fields_dict.iteritems():
+ new_amendment = tracker_pb2.Amendment()
+ new_amendment.field = field
+ for amendment in amendments:
+ if amendment.newvalue is not None:
+ new_amendment.newvalue = amendment.newvalue
+ if amendment.oldvalue is not None:
+ new_amendment.oldvalue = amendment.oldvalue
+ if amendment.added_user_ids:
+ new_amendment.added_user_ids.extend(amendment.added_user_ids)
+ if amendment.removed_user_ids:
+ new_amendment.removed_user_ids.extend(amendment.removed_user_ids)
+ if amendment.custom_field_name:
+ new_amendment.custom_field_name = amendment.custom_field_name
+ result.append(new_amendment)
+ return result
+
+ def _UnpackAttachment(self, attachment_row):
+    """Construct an Attachment PB from a DB row.
+
+    Args:
+      attachment_row: single row from the Attachment table.
+
+    Returns:
+      An (Attachment PB, comment_id) pair; comment_id lets the caller
+      join the attachment to its comment.
+    """
+    (attachment_id, _issue_id, comment_id, filename, filesize, mimetype,
+     deleted, gcs_object_id) = attachment_row
+    attach = tracker_pb2.Attachment()
+    attach.attachment_id = attachment_id
+    attach.filename = filename
+    attach.filesize = filesize
+    attach.mimetype = mimetype
+    attach.deleted = bool(deleted)
+    attach.gcs_object_id = gcs_object_id
+    return attach, comment_id
+
+ def _DeserializeComments(
+      self, comment_rows, amendment_rows, attachment_rows):
+    """Turn rows into IssueComment PBs.
+
+    Args:
+      comment_rows: rows from the Comment table.
+      amendment_rows: rows from the IssueUpdate table.
+      attachment_rows: rows from the Attachment table.
+
+    Returns:
+      A list of IssueComment PBs, in the same order as comment_rows, with
+      their amendments consolidated and their attachments joined in.
+    """
+    results = [] # keep objects in the same order as the rows
+    results_dict = {} # for fast access when joining.
+
+    for comment_row in comment_rows:
+      comment = self._UnpackComment(comment_row)
+      results.append(comment)
+      results_dict[comment.id] = comment
+
+    # Join amendments and attachments onto their parent comments; orphan
+    # rows are logged rather than raised so one bad row cannot break
+    # the whole page.
+    for amendment_row in amendment_rows:
+      amendment, comment_id = self._UnpackAmendment(amendment_row)
+      try:
+        results_dict[comment_id].amendments.extend([amendment])
+      except KeyError:
+        logging.error('Found amendment for missing comment: %r', comment_id)
+
+    for attachment_row in attachment_rows:
+      attach, comment_id = self._UnpackAttachment(attachment_row)
+      try:
+        results_dict[comment_id].attachments.append(attach)
+      except KeyError:
+        logging.error('Found attachment for missing comment: %r', comment_id)
+
+    for c in results:
+      c.amendments = self._ConsolidateAmendments(c.amendments)
+
+    return results
+
+ # TODO(jrobbins): make this a private method and expose just the interface
+ # needed by activities.py.
+ def GetComments(self, cnxn, where=None, order_by=None, **kwargs):
+    """Retrieve comments from SQL.
+
+    Args:
+      cnxn: connection to SQL database.
+      where: optional SQL WHERE clauses.
+      order_by: optional SQL ORDER BY clauses; defaults to creation time.
+      **kwargs: additional column=value filters on the Comment table.
+
+    Returns:
+      A list of IssueComment PBs with amendments and attachments joined in.
+    """
+    # Explicitly specify column Comment.id to allow joins on other tables that
+    # have an id column.
+    order_by = order_by or [('created', [])]
+    comment_rows = self.comment_tbl.Select(
+        cnxn, cols=COMMENT_COLS, where=where,
+        order_by=order_by, **kwargs)
+    cids = [row[0] for row in comment_rows]
+    amendment_rows = self.issueupdate_tbl.Select(
+        cnxn, cols=ISSUEUPDATE_COLS, comment_id=cids)
+    attachment_rows = self.attachment_tbl.Select(
+        cnxn, cols=ATTACHMENT_COLS, comment_id=cids)
+
+    comments = self._DeserializeComments(
+        comment_rows, amendment_rows, attachment_rows)
+    return comments
+
+ def GetComment(self, cnxn, comment_id):
+    """Get the requested comment, or raise an exception.
+
+    Args:
+      cnxn: connection to SQL database.
+      comment_id: int ID of the comment to fetch.
+
+    Returns:
+      The IssueComment PB with the given ID.
+
+    Raises:
+      NoSuchCommentException: no comment has that ID.
+    """
+    comments = self.GetComments(cnxn, id=comment_id)
+    try:
+      return comments[0]
+    except IndexError:
+      raise NoSuchCommentException()
+
+ def GetCommentsForIssue(self, cnxn, issue_id):
+    """Return all IssueComment PBs for the specified issue.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_id: int global ID of the issue.
+
+    Returns:
+      A list of the IssueComment protocol buffers for the description
+      and comments on this issue.
+    """
+    comments = self.GetComments(cnxn, issue_id=[issue_id])
+    # Number the comments in creation order so the UI can refer to them.
+    for i, comment in enumerate(comments):
+      comment.sequence = i
+
+    return comments
+
+ def GetCommentsByID(self, cnxn, comment_ids, sequences):
+ """Return all IssueComment PBs by comment ids.
+
+ Args:
+ cnxn: connection to SQL database.
+ comment_ids: a list of comment ids.
+ sequences: sequence of the comments.
+
+ Returns:
+ A list of the IssueComment protocol buffers for the description
+ and comments on this issue.
+ """
+ order_by = [('created ASC', [])]
+ comment_rows = self.comment_tbl.Select(
+ cnxn, cols=COMMENT_COLS, order_by=order_by, id=comment_ids)
+ amendment_rows = self.issueupdate_tbl.Select(
+ cnxn, cols=ISSUEUPDATE_COLS, comment_id=comment_ids)
+ attachment_rows = self.attachment_tbl.Select(
+ cnxn, cols=ATTACHMENT_COLS, comment_id=comment_ids)
+
+ comments = self._DeserializeComments(
+ comment_rows, amendment_rows, attachment_rows)
+
+ for i in xrange(len(comment_ids)):
+ comments[i].sequence = sequences[i]
+
+ return comments
+
+ def GetAbbrCommentsForIssue(self, cnxn, issue_id):
+ """Get all abbreviated comments for the specified issue."""
+ order_by = [('created ASC', [])]
+ comment_rows = self.comment_tbl.Select(
+ cnxn, cols=ABBR_COMMENT_COLS, issue_id=[issue_id], order_by=order_by)
+
+ return comment_rows
+
+  # TODO(jrobbins): remove this method because it is too slow when an issue
+  # has a huge number of comments.
+  def GetCommentsForIssues(self, cnxn, issue_ids):
+    """Return all IssueComment PBs for each issue ID in the given list.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_ids: list of integer global issue IDs.
+
+    Returns:
+      Dict {issue_id: [IssueComment, ...]} with IssueComment protocol
+      buffers for the description and comments on each issue.
+    """
+    comments = self.GetComments(cnxn, issue_id=issue_ids)
+
+    # Group comments by issue; each comment's sequence number is its
+    # position within its own issue's list at the time it is appended.
+    comments_dict = collections.defaultdict(list)
+    for comment in comments:
+      comment.sequence = len(comments_dict[comment.issue_id])
+      comments_dict[comment.issue_id].append(comment)
+
+    return comments_dict
+
+  def InsertComment(self, cnxn, comment, commit=True):
+    """Store the given issue comment in SQL.
+
+    Also stores the comment's amendments and attachment metadata rows.
+
+    Args:
+      cnxn: connection to SQL database.
+      comment: IssueComment PB to insert into the database.  Its id field
+        is filled in with the newly assigned comment ID.
+      commit: set to False to avoid doing the commit for now.
+    """
+    comment_id = self.comment_tbl.InsertRow(
+        cnxn, issue_id=comment.issue_id, created=comment.timestamp,
+        project_id=comment.project_id,
+        commenter_id=comment.user_id, content=comment.content,
+        inbound_message=comment.inbound_message,
+        was_escaped=comment.was_escaped,
+        deleted_by=comment.deleted_by or None,
+        is_spam=comment.is_spam,
+        commit=commit)
+    # Record the auto-increment ID back onto the PB for the caller.
+    comment.id = comment_id
+
+    # Each amendment becomes one or more rows; tuple order must match
+    # ISSUEUPDATE_COLS[1:] below.
+    amendment_rows = []
+    for amendment in comment.amendments:
+      field_enum = str(amendment.field).lower()
+      # A plain value change (no user-ID changes) is stored as one row
+      # with oldvalue/newvalue set.
+      if (amendment.get_assigned_value('newvalue') is not None and
+          not amendment.added_user_ids and not amendment.removed_user_ids):
+        amendment_rows.append((
+            comment.issue_id, comment_id, field_enum,
+            amendment.oldvalue, amendment.newvalue,
+            None, None, amendment.custom_field_name))
+      # User-ID changes get one row per added or removed user.
+      for added_user_id in amendment.added_user_ids:
+        amendment_rows.append((
+            comment.issue_id, comment_id, field_enum, None, None,
+            added_user_id, None, amendment.custom_field_name))
+      for removed_user_id in amendment.removed_user_ids:
+        amendment_rows.append((
+            comment.issue_id, comment_id, field_enum, None, None,
+            None, removed_user_id, amendment.custom_field_name))
+    # ISSUEUPDATE_COLS[1:] to skip id column.
+    self.issueupdate_tbl.InsertRows(
+        cnxn, ISSUEUPDATE_COLS[1:], amendment_rows, commit=commit)
+
+    # Attachment content is assumed to already be in GCS; only metadata
+    # rows are written here.
+    attachment_rows = []
+    for attach in comment.attachments:
+      attachment_rows.append([
+          comment.issue_id, comment.id, attach.filename, attach.filesize,
+          attach.mimetype, attach.deleted, attach.gcs_object_id])
+    self.attachment_tbl.InsertRows(
+        cnxn, ATTACHMENT_COLS[1:], attachment_rows, commit=commit)
+
+  def _UpdateComment(self, cnxn, comment, update_cols=None):
+    """Update the given issue comment in SQL.
+
+    Args:
+      cnxn: connection to SQL database.
+      comment: IssueComment PB to update in the database.
+      update_cols: optional list of just the field names to update; all
+        four mutable columns are updated when this is None.
+    """
+    delta = {
+        'commenter_id': comment.user_id,
+        'content': comment.content,
+        'deleted_by': comment.deleted_by or None,
+        'is_spam': comment.is_spam,
+        }
+    if update_cols is not None:
+      # Restrict the update to only the requested columns.
+      delta = {key: val for key, val in delta.iteritems()
+               if key in update_cols}
+
+    self.comment_tbl.Update(cnxn, delta, id=comment.id)
+
+ def _MakeIssueComment(
+ self, project_id, user_id, content, inbound_message=None,
+ amendments=None, attachments=None, timestamp=None, was_escaped=False,
+ is_spam=False):
+ """Create in IssueComment protocol buffer in RAM.
+
+ Args:
+ project_id: Project with the issue.
+ user_id: the user ID of the user who entered the comment.
+ content: string body of the comment.
+ inbound_message: optional string full text of an email that
+ caused this comment to be added.
+ amendments: list of Amendment PBs describing the
+ metadata changes that the user made along w/ comment.
+ attachments: [(filename, contents, mimetype),...] attachments uploaded at
+ the time the comment was made.
+ timestamp: time at which the comment was made, defaults to now.
+ was_escaped: True if the comment was HTML escaped already.
+ is_spam: True if the comment was classified as spam.
+ Returns:
+ The new IssueComment protocol buffer.
+
+ The content may have some markup done during input processing.
+
+ Any attachments are immediately stored.
+ """
+ comment = tracker_pb2.IssueComment()
+ comment.project_id = project_id
+ comment.user_id = user_id
+ comment.content = content or ''
+ comment.was_escaped = was_escaped
+ comment.is_spam = is_spam
+ if not timestamp:
+ timestamp = int(time.time())
+ comment.timestamp = int(timestamp)
+ if inbound_message:
+ comment.inbound_message = inbound_message
+ if amendments:
+ logging.info('amendments is %r', amendments)
+ comment.amendments.extend(amendments)
+
+ if attachments:
+ for filename, body, mimetype in attachments:
+ gcs_object_id = gcs_helpers.StoreObjectInGCS(body, mimetype, project_id)
+ attach = tracker_pb2.Attachment()
+ # attachment id is determined later by the SQL DB.
+ attach.filename = filename
+ attach.filesize = len(body)
+ attach.mimetype = mimetype
+ attach.gcs_object_id = gcs_object_id
+ comment.attachments.extend([attach])
+ logging.info("Save attachment with object_id: %s" % gcs_object_id)
+
+ return comment
+
+  def CreateIssueComment(
+      self, cnxn, project_id, local_id, user_id, content, inbound_message=None,
+      amendments=None, attachments=None, timestamp=None, is_spam=False,
+      commit=True):
+    """Create and store a new comment on the specified issue.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the current Project.
+      local_id: the issue on which to add the comment.
+      user_id: the user ID of the user who entered the comment.
+      content: string body of the comment.
+      inbound_message: optional string full text of an email that caused
+        this comment to be added.
+      amendments: list of Amendment PBs describing the
+        metadata changes that the user made along w/ comment.
+      attachments: [(filename, contents, mimetype),...] attachments uploaded at
+        the time the comment was made.
+      timestamp: time at which the comment was made, defaults to now.
+      is_spam: True if the comment is classified as spam.
+      commit: set to False to not commit to DB yet.
+
+    Returns:
+      The new IssueComment protocol buffer.
+
+    Note that we assume that the content is safe to echo out
+    again. The content may have some markup done during input
+    processing.
+    """
+    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
+
+    comment = self._MakeIssueComment(
+        issue.project_id, user_id, content, amendments=amendments,
+        inbound_message=inbound_message, attachments=attachments,
+        timestamp=timestamp, is_spam=is_spam)
+    comment.issue_id = issue.issue_id
+
+    # Keep the issue's attachment count in sync with the new attachments.
+    if attachments:
+      issue.attachment_count = issue.attachment_count + len(attachments)
+      self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+
+    self.InsertComment(cnxn, comment, commit=commit)
+
+    return comment
+
+  def SoftDeleteComment(
+      self, cnxn, project_id, local_id, sequence_num, deleted_by_user_id,
+      user_service, delete=True, reindex=True, is_spam=False):
+    """Mark comment as un/deleted, which shows/hides it from average users.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the current Project.
+      local_id: issue that the comment belongs to.
+      sequence_num: sequence number of the comment within the issue.
+      deleted_by_user_id: user ID recorded as having deleted the comment.
+      user_service: persistence layer for users, needed for reindexing.
+      delete: True to delete; False to undelete.
+      reindex: pass False to skip the full-text reindex.
+      is_spam: new spam classification to record on the comment.
+    """
+    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
+
+    all_comments = self.GetCommentsForIssue(cnxn, issue.issue_id)
+    try:
+      issue_comment = all_comments[sequence_num]
+    except IndexError:
+      # Silently ignore a bad sequence number rather than crashing.
+      logging.warning(
+          'Tried to (un)delete non-existent comment #%s in issue %s:%s',
+          sequence_num, project_id, local_id)
+      return
+
+    # Update number of attachments: count this comment's non-deleted
+    # attachments so the issue's total can be adjusted below.
+    attachments = 0
+    if issue_comment.attachments:
+      for attachment in issue_comment.attachments:
+        if not attachment.deleted:
+          attachments += 1
+
+    # Delete only if it's not in deleted state, so repeated delete requests
+    # do not decrement the attachment count more than once.
+    if delete:
+      if not issue_comment.deleted_by:
+        issue_comment.deleted_by = deleted_by_user_id
+        issue.attachment_count = issue.attachment_count - attachments
+
+    # Undelete only if it's in deleted state
+    elif issue_comment.deleted_by:
+      issue_comment.deleted_by = 0
+      issue.attachment_count = issue.attachment_count + attachments
+
+    issue_comment.is_spam = is_spam
+    self._UpdateComment(
+        cnxn, issue_comment, update_cols=['deleted_by', 'is_spam'])
+    self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+
+    # Reindex the issue to take the comment deletion/undeletion into account.
+    if reindex:
+      tracker_fulltext.IndexIssues(
+          cnxn, [issue], user_service, self, self._config_service)
+
+ ### Attachments
+
+  def GetAttachmentAndContext(self, cnxn, attachment_id):
+    """Load a IssueAttachment from database, and its comment ID and IID.
+
+    Args:
+      cnxn: connection to SQL database.
+      attachment_id: long integer unique ID of desired issue attachment.
+
+    Returns:
+      An Attachment protocol buffer that contains metadata about the attached
+      file, along with the comment ID and issue IID of the comment and issue
+      that contain this attachment.  This method never returns None; it
+      raises instead.
+
+    Raises:
+      NoSuchAttachmentException: attachment_id was None, the attachment was
+        not found, or the attachment is marked deleted.
+    """
+    if attachment_id is None:
+      raise NoSuchAttachmentException()
+
+    attachment_row = self.attachment_tbl.SelectRow(
+        cnxn, cols=ATTACHMENT_COLS, id=attachment_id)
+    if attachment_row:
+      # Row layout matches ATTACHMENT_COLS.
+      (attach_id, issue_id, comment_id, filename, filesize, mimetype,
+       deleted, gcs_object_id) = attachment_row
+      if not deleted:
+        attachment = tracker_pb2.Attachment(
+            attachment_id=attach_id, filename=filename, filesize=filesize,
+            mimetype=mimetype, deleted=bool(deleted),
+            gcs_object_id=gcs_object_id)
+        return attachment, comment_id, issue_id
+
+    raise NoSuchAttachmentException()
+
+  def _UpdateAttachment(self, cnxn, attach, update_cols=None):
+    """Update attachment metadata in the DB.
+
+    Args:
+      cnxn: connection to SQL database.
+      attach: IssueAttachment PB to update in the DB.
+      update_cols: optional list of just the field names to update; all
+        four mutable columns are updated when this is None.
+    """
+    delta = {
+        'filename': attach.filename,
+        'filesize': attach.filesize,
+        'mimetype': attach.mimetype,
+        'deleted': bool(attach.deleted),
+        }
+    if update_cols is not None:
+      # Restrict the update to only the requested columns.
+      delta = {key: val for key, val in delta.iteritems()
+               if key in update_cols}
+
+    self.attachment_tbl.Update(cnxn, delta, id=attach.attachment_id)
+
+  def SoftDeleteAttachment(
+      self, cnxn, project_id, local_id, seq_num, attach_id, user_service,
+      delete=True, index_now=True):
+    """Mark attachment as un/deleted, which shows/hides it from avg users.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the current Project.
+      local_id: issue that the comment belongs to.
+      seq_num: sequence number of the comment holding the attachment.
+      attach_id: int ID of the attachment to (un)delete.
+      user_service: persistence layer for users, needed for reindexing.
+      delete: True to delete; False to undelete.
+      index_now: pass False to skip the full-text reindex.
+    """
+    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
+    all_comments = self.GetCommentsForIssue(cnxn, issue.issue_id)
+    try:
+      issue_comment = all_comments[seq_num]
+    except IndexError:
+      # Silently ignore a bad sequence number rather than crashing.
+      logging.warning(
+          'Tried to (un)delete attachment on non-existent comment #%s in '
+          'issue %s:%s', seq_num, project_id, local_id)
+      return
+
+    # Find the attachment on this comment by its ID.
+    attachment = None
+    for attach in issue_comment.attachments:
+      if attach.attachment_id == attach_id:
+        attachment = attach
+
+    if not attachment:
+      logging.warning(
+          'Tried to (un)delete non-existent attachment #%s in project '
+          '%s issue %s', attach_id, project_id, local_id)
+      return
+
+    # Only adjust the issue's attachment count if the containing comment is
+    # itself visible (not deleted).
+    if not issue_comment.deleted_by:
+      # Decrement attachment count only if it's not in deleted state
+      if delete:
+        if not attachment.deleted:
+          issue.attachment_count = issue.attachment_count - 1
+
+      # Increment attachment count only if it's in deleted state
+      elif attachment.deleted:
+        issue.attachment_count = issue.attachment_count + 1
+
+    attachment.deleted = delete
+
+    self._UpdateAttachment(cnxn, attachment, update_cols=['deleted'])
+    self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])
+
+    if index_now:
+      tracker_fulltext.IndexIssues(
+          cnxn, [issue], user_service, self, self._config_service)
+
+ ### Reindex queue
+
+ def EnqueueIssuesForIndexing(self, cnxn, issue_ids):
+ """Add the given issue IDs to the ReindexQueue table."""
+ reindex_rows = [(issue_id,) for issue_id in issue_ids]
+ self.reindexqueue_tbl.InsertRows(
+ cnxn, ['issue_id'], reindex_rows, ignore=True)
+
+  def ReindexIssues(self, cnxn, num_to_reindex, user_service):
+    """Reindex some issues specified in the IndexQueue table.
+
+    Args:
+      cnxn: connection to SQL database.
+      num_to_reindex: max number of issues to process in this batch.
+      user_service: persistence layer for users, needed for indexing.
+
+    Returns:
+      The number of issues that were reindexed in this batch.
+    """
+    # Process the oldest queue entries first.
+    rows = self.reindexqueue_tbl.Select(
+        cnxn, order_by=[('created', [])], limit=num_to_reindex)
+    issue_ids = [row[0] for row in rows]
+
+    if issue_ids:
+      issues = self.GetIssues(cnxn, issue_ids)
+      tracker_fulltext.IndexIssues(
+          cnxn, issues, user_service, self, self._config_service)
+      # Dequeue only after indexing succeeded.
+      self.reindexqueue_tbl.Delete(cnxn, issue_id=issue_ids)
+
+    return len(issue_ids)
+
+ ### Search functions
+
+  def RunIssueQuery(
+      self, cnxn, left_joins, where, order_by, shard_id=None, limit=None):
+    """Run a SQL query to find matching issue IDs.
+
+    Args:
+      cnxn: connection to SQL database.
+      left_joins: list of SQL LEFT JOIN clauses.
+      where: list of SQL WHERE clauses.
+      order_by: list of SQL ORDER BY clauses.
+      shard_id: int shard ID to focus the search.
+      limit: int maximum number of results, defaults to
+        settings.search_limit_per_shard.
+
+    Returns:
+      (issue_ids, capped) where issue_ids is a list of the result issue IDs,
+      and capped is True if the number of results reached the limit.
+    """
+    limit = limit or settings.search_limit_per_shard
+    # Always exclude deleted issues from search results.
+    where = where + [('Issue.deleted = %s', [False])]
+    rows = self.issue_tbl.Select(
+        cnxn, shard_id=shard_id, distinct=True, cols=['Issue.id'],
+        left_joins=left_joins, where=where, order_by=order_by,
+        limit=limit)
+    issue_ids = [row[0] for row in rows]
+    # Hitting the limit exactly is treated as capped, since more results
+    # may exist beyond it.
+    capped = len(issue_ids) >= limit
+    return issue_ids, capped
+
+ def GetIIDsByLabelIDs(self, cnxn, label_ids, project_id, shard_id):
+ """Return a list of IIDs for issues with any of the given label IDs."""
+ where = []
+ if shard_id is not None:
+ slice_term = ('shard = %s', [shard_id])
+ where.append(slice_term)
+
+ rows = self.issue_tbl.Select(
+ cnxn, shard_id=shard_id, cols=['id'],
+ left_joins=[('Issue2Label ON Issue.id = Issue2Label.issue_id', [])],
+ label_id=label_ids, project_id=project_id, where=where)
+
+ return [row[0] for row in rows]
+
+  def GetIIDsByParticipant(self, cnxn, user_ids, project_ids, shard_id):
+    """Return IIDs for issues where any of the given users participate.
+
+    Args:
+      cnxn: connection to SQL database.
+      user_ids: list of user IDs to match as participants.
+      project_ids: optional list of project IDs to restrict the search.
+      shard_id: optional int shard ID to focus the search.
+
+    Returns:
+      A list of issue IDs; may contain duplicates if a user participates
+      in the same issue in multiple roles.
+    """
+    iids = []
+    where = []
+    if shard_id is not None:
+      where.append(('shard = %s', [shard_id]))
+    if project_ids:
+      cond_str = 'Issue.project_id IN (%s)' % sql.PlaceHolders(project_ids)
+      where.append((cond_str, project_ids))
+
+    # TODO(jrobbins): Combine these queries into one with ORs.  It currently
+    # is not the bottleneck.
+    # Issues reported by any of the users.
+    rows = self.issue_tbl.Select(
+        cnxn, cols=['id'], reporter_id=user_ids,
+        where=where, shard_id=shard_id)
+    for row in rows:
+      iids.append(row[0])
+
+    # Issues explicitly owned by any of the users.
+    rows = self.issue_tbl.Select(
+        cnxn, cols=['id'], owner_id=user_ids,
+        where=where, shard_id=shard_id)
+    for row in rows:
+      iids.append(row[0])
+
+    # Issues whose derived (rule-assigned) owner is any of the users.
+    rows = self.issue_tbl.Select(
+        cnxn, cols=['id'], derived_owner_id=user_ids,
+        where=where, shard_id=shard_id)
+    for row in rows:
+      iids.append(row[0])
+
+    # Issues where any of the users is CC'd.
+    rows = self.issue_tbl.Select(
+        cnxn, cols=['id'],
+        left_joins=[('Issue2Cc ON Issue2Cc.issue_id = Issue.id', [])],
+        cc_id=user_ids,
+        where=where + [('cc_id IS NOT NULL', [])],
+        shard_id=shard_id)
+    for row in rows:
+      iids.append(row[0])
+
+    # Issues where a user-type custom field that grants the View permission
+    # names any of the users.
+    rows = self.issue_tbl.Select(
+        cnxn, cols=['Issue.id'],
+        left_joins=[
+            ('Issue2FieldValue ON Issue.id = Issue2FieldValue.issue_id', []),
+            ('FieldDef ON Issue2FieldValue.field_id = FieldDef.id', [])],
+        user_id=user_ids, grants_perm='View',
+        where=where + [('user_id IS NOT NULL', [])],
+        shard_id=shard_id)
+    for row in rows:
+      iids.append(row[0])
+
+    return iids
+
+
+def _UpdateClosedTimestamp(config, issue, old_effective_status):
+  """Sets or unsets the closed_timestamp based on status changes.
+
+  If the status is changing from open to closed, the closed_timestamp is set
+  to the current time.
+
+  If the status is changing from closed to open, the closed_timestamp is
+  unset.
+
+  If the status is changing from one closed to another closed, or from one
+  open to another open, no operations are performed.
+
+  Args:
+    config: the project configuration
+    issue: the issue being updated (a protocol buffer)
+    old_effective_status: the old issue status string. E.g., 'New'
+  """
+  # open -> closed
+  if (tracker_helpers.MeansOpenInProject(old_effective_status, config)
+      and not tracker_helpers.MeansOpenInProject(
+          tracker_bizobj.GetStatus(issue), config)):
+
+    logging.info('setting closed_timestamp on issue: %d', issue.local_id)
+
+    issue.closed_timestamp = int(time.time())
+    return
+
+  # closed -> open
+  if (not tracker_helpers.MeansOpenInProject(old_effective_status, config)
+      and tracker_helpers.MeansOpenInProject(
+          tracker_bizobj.GetStatus(issue), config)):
+
+    logging.info('clearing closed_timestamp on issue: %s', issue.local_id)
+
+    issue.reset('closed_timestamp')
+    return
+
+
+class Error(Exception):
+  """Base exception class for this package."""
+
+
+class NoSuchIssueException(Error):
+  """The requested issue was not found."""
+
+
+class NoSuchAttachmentException(Error):
+  """The requested attachment was not found."""
+
+
+class NoSuchCommentException(Error):
+  """The requested comment was not found."""
+
+
+class MidAirCollisionException(Error):
+  """The item was updated by another user at the same time.
+
+  Attributes:
+    name: human-readable name for the artifact being edited.
+    continue_issue_id: int ID of the issue to use when starting over.
+  """
+
+  def __init__(self, name, continue_issue_id):
+    super(MidAirCollisionException, self).__init__()
+    self.name = name  # human-readable name for the artifact being edited.
+    self.continue_issue_id = continue_issue_id  # ID of issue to start over.
diff --git a/appengine/monorail/services/project_svc.py b/appengine/monorail/services/project_svc.py
new file mode 100644
index 0000000..41cf127
--- /dev/null
+++ b/appengine/monorail/services/project_svc.py
@@ -0,0 +1,648 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide persistence for projects.
+
+This module provides functions to get, update, create, and (in some
+cases) delete each type of project business object. It provides
+a logical persistence layer on top of the database.
+
+Business objects are described in project_pb2.py.
+"""
+
+import collections
+import logging
+import time
+
+import settings
+from framework import framework_bizobj
+from framework import permissions
+from framework import sql
+from services import caches
+from proto import project_pb2
+
+
+# Names of the SQL tables that hold project data.
+PROJECT_TABLE_NAME = 'Project'
+USER2PROJECT_TABLE_NAME = 'User2Project'
+EXTRAPERM_TABLE_NAME = 'ExtraPerm'
+MEMBERNOTES_TABLE_NAME = 'MemberNotes'
+USERGROUPPROJECTS_TABLE_NAME = 'Group2Project'
+
+# Column lists used when SELECTing from those tables; deserialization code
+# depends on these exact orderings.
+PROJECT_COLS = [
+    'project_id', 'project_name', 'summary', 'description', 'state', 'access',
+    'read_only_reason', 'state_reason', 'delete_time', 'issue_notify_address',
+    'attachment_bytes_used', 'attachment_quota',
+    'cached_content_timestamp', 'recent_activity_timestamp', 'moved_to',
+    'process_inbound_email', 'only_owners_remove_restrictions',
+    'only_owners_see_contributors', 'revision_url_format',
+    'home_page', 'docs_url', 'logo_gcs_id', 'logo_file_name']
+USER2PROJECT_COLS = ['project_id', 'user_id', 'role_name']
+EXTRAPERM_COLS = ['project_id', 'user_id', 'perm']
+MEMBERNOTES_COLS = ['project_id', 'user_id', 'notes']
+
+
+class ProjectTwoLevelCache(caches.AbstractTwoLevelCache):
+  """Class to manage both RAM and memcache for Project PBs."""
+
+  def __init__(self, cachemanager, project_service):
+    super(ProjectTwoLevelCache, self).__init__(
+        cachemanager, 'project', 'project:', project_pb2.Project)
+    # Kept so FetchItems can query the service's SQL tables on cache miss.
+    self.project_service = project_service
+
+  def _DeserializeProjects(
+      self, project_rows, role_rows, extraperm_rows):
+    """Convert database rows into a dictionary of Project PB keyed by ID.
+
+    Args:
+      project_rows: rows shaped like PROJECT_COLS.
+      role_rows: (project_id, user_id, role_name) rows.
+      extraperm_rows: rows shaped like EXTRAPERM_COLS.
+
+    Returns:
+      A dict {project_id: Project PB}.
+    """
+    project_dict = {}
+
+    for project_row in project_rows:
+      # Unpacking order must match PROJECT_COLS.
+      (project_id, project_name, summary, description, state_name,
+       access_name, read_only_reason, state_reason, delete_time,
+       issue_notify_address, attachment_bytes_used, attachment_quota, cct,
+       recent_activity_timestamp, moved_to, process_inbound_email,
+       oorr, oosc, revision_url_format, home_page, docs_url,
+       logo_gcs_id, logo_file_name) = project_row
+      project = project_pb2.Project()
+      project.project_id = project_id
+      project.project_name = project_name
+      project.summary = summary
+      project.description = description
+      # The DB stores enum names as lowercase strings.
+      project.state = project_pb2.ProjectState(state_name.upper())
+      project.state_reason = state_reason or ''
+      project.access = project_pb2.ProjectAccess(access_name.upper())
+      project.read_only_reason = read_only_reason or ''
+      project.issue_notify_address = issue_notify_address or ''
+      project.attachment_bytes_used = attachment_bytes_used or 0
+      project.attachment_quota = attachment_quota
+      project.recent_activity = recent_activity_timestamp or 0
+      project.cached_content_timestamp = cct or 0
+      project.delete_time = delete_time or 0
+      project.moved_to = moved_to or ''
+      project.process_inbound_email = bool(process_inbound_email)
+      project.only_owners_remove_restrictions = bool(oorr)
+      project.only_owners_see_contributors = bool(oosc)
+      project.revision_url_format = revision_url_format or ''
+      project.home_page = home_page or ''
+      project.docs_url = docs_url or ''
+      project.logo_gcs_id = logo_gcs_id or ''
+      project.logo_file_name = logo_file_name or ''
+      project_dict[project_id] = project
+
+    # Fan each membership row out into the appropriate role list.
+    for project_id, user_id, role_name in role_rows:
+      project = project_dict[project_id]
+      if role_name == 'owner':
+        project.owner_ids.append(user_id)
+      elif role_name == 'committer':
+        project.committer_ids.append(user_id)
+      elif role_name == 'contributor':
+        project.contributor_ids.append(user_id)
+
+    # Accumulate extra permissions per (project, member), creating the
+    # ExtraPerms entry on first sight of each member.
+    for project_id, user_id, perm in extraperm_rows:
+      project = project_dict[project_id]
+      extra_perms = permissions.FindExtraPerms(project, user_id)
+      if not extra_perms:
+        extra_perms = project_pb2.Project.ExtraPerms(
+            member_id=user_id)
+        project.extra_perms.append(extra_perms)
+
+      extra_perms.perms.append(perm)
+
+    return project_dict
+
+  def FetchItems(self, cnxn, keys):
+    """On RAM and memcache miss, hit the database to get missing projects."""
+    project_rows = self.project_service.project_tbl.Select(
+        cnxn, cols=PROJECT_COLS, project_id=keys)
+    role_rows = self.project_service.user2project_tbl.Select(
+        cnxn, cols=['project_id', 'user_id', 'role_name'],
+        project_id=keys)
+    extraperm_rows = self.project_service.extraperm_tbl.Select(
+        cnxn, cols=EXTRAPERM_COLS, project_id=keys)
+    retrieved_dict = self._DeserializeProjects(
+        project_rows, role_rows, extraperm_rows)
+    return retrieved_dict
+
+
+class ProjectService(object):
+ """The persistence layer for project data."""
+
+  def __init__(self, cache_manager):
+    """Initialize this module so that it is ready to use.
+
+    Args:
+      cache_manager: local cache with distributed invalidation.
+    """
+    # One SQLTableManager per table this service persists to.
+    self.project_tbl = sql.SQLTableManager(PROJECT_TABLE_NAME)
+    self.user2project_tbl = sql.SQLTableManager(USER2PROJECT_TABLE_NAME)
+    self.extraperm_tbl = sql.SQLTableManager(EXTRAPERM_TABLE_NAME)
+    self.membernotes_tbl = sql.SQLTableManager(MEMBERNOTES_TABLE_NAME)
+    self.usergroupprojects_tbl = sql.SQLTableManager(
+        USERGROUPPROJECTS_TABLE_NAME)
+
+    # Like a dictionary {project_id: project}
+    self.project_2lc = ProjectTwoLevelCache(cache_manager, self)
+
+    # The project name to ID cache can never be invalidated by individual
+    # project changes because it is keyed by strings instead of ints. In
+    # the case of rare operations like deleting a project (or a future
+    # project renaming feature), we just InvalidateAll().
+    self.project_names_to_ids = cache_manager.MakeCache('project')
+
+ ### Creating projects
+
+  def CreateProject(
+      self, cnxn, project_name, owner_ids, committer_ids, contributor_ids,
+      summary, description, state=project_pb2.ProjectState.LIVE,
+      access=None, read_only=None, home_page=None, docs_url=None,
+      logo_gcs_id=None, logo_file_name=None):
+    """Create and store a Project with the given attributes.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_name: a valid project name, all lower case.
+      owner_ids: a list of user IDs for the project owners.
+      committer_ids: a list of user IDs for the project members.
+      contributor_ids: a list of user IDs for the project contributors.
+      summary: one-line explanation of the project.
+      description: one-page explanation of the project.
+      state: a project state enum defined in project_pb2.
+      access: optional project access enum defined in project.proto.
+      read_only: if given, provides a status message and marks the project as
+        read-only.
+      home_page: home page of the project
+      docs_url: url to redirect to for wiki/documentation links
+      logo_gcs_id: google storage object id of the project's logo
+      logo_file_name: uploaded file name of the project's logo
+
+    Returns:
+      The int project_id of the new project.
+
+    Raises:
+      ProjectAlreadyExists: if a project with that name already exists.
+    """
+    # NOTE(review): assert is skipped under python -O; presumably callers
+    # validate the name too -- confirm.
+    assert framework_bizobj.IsValidProjectName(project_name)
+    if self.LookupProjectIDs(cnxn, [project_name]):
+      raise ProjectAlreadyExists()
+
+    project = project_pb2.MakeProject(
+        project_name, state=state, access=access,
+        description=description, summary=summary,
+        owner_ids=owner_ids, committer_ids=committer_ids,
+        contributor_ids=contributor_ids, read_only=read_only,
+        home_page=home_page, docs_url=docs_url, logo_gcs_id=logo_gcs_id,
+        logo_file_name=logo_file_name)
+
+    project.project_id = self._InsertProject(cnxn, project)
+    return project.project_id
+
+  def _InsertProject(self, cnxn, project):
+    """Insert the given project into the database.
+
+    Args:
+      cnxn: connection to SQL database.
+      project: Project PB to persist.
+
+    Returns:
+      The int project_id assigned by the database.
+    """
+    # Note: project_id is not specified because it is auto_increment.
+    project_id = self.project_tbl.InsertRow(
+        cnxn, project_name=project.project_name,
+        summary=project.summary, description=project.description,
+        state=str(project.state), access=str(project.access),
+        home_page=project.home_page, docs_url=project.docs_url,
+        logo_gcs_id=project.logo_gcs_id, logo_file_name=project.logo_file_name)
+    logging.info('stored project was given project_id %d', project_id)
+
+    # Write one membership row per user in each role.
+    self.user2project_tbl.InsertRows(
+        cnxn, ['project_id', 'user_id', 'role_name'],
+        [(project_id, user_id, 'owner')
+         for user_id in project.owner_ids] +
+        [(project_id, user_id, 'committer')
+         for user_id in project.committer_ids] +
+        [(project_id, user_id, 'contributor')
+         for user_id in project.contributor_ids])
+
+    return project_id
+
+ ### Lookup project names and IDs
+
+  def LookupProjectIDs(self, cnxn, project_names):
+    """Return a list of project IDs for the specified projects.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_names: list of project name strings.
+
+    Returns:
+      A dict {project_name: project_id}; names not found are omitted.
+    """
+    id_dict, missed_names = self.project_names_to_ids.GetAll(project_names)
+    if missed_names:
+      rows = self.project_tbl.Select(
+          cnxn, cols=['project_name', 'project_id'], project_name=missed_names)
+      retrieved_dict = dict(rows)
+      # Cache the freshly looked-up names for next time.
+      self.project_names_to_ids.CacheAll(retrieved_dict)
+      id_dict.update(retrieved_dict)
+
+    return id_dict
+
+ def LookupProjectNames(self, cnxn, project_ids):
+ """Lookup the names of the projects with the given IDs."""
+ projects_dict = self.GetProjects(cnxn, project_ids)
+ return {p.project_id: p.project_name
+ for p in projects_dict.itervalues()}
+
+ ### Retrieving projects
+
+  def GetAllProjects(self, cnxn, use_cache=True):
+    """Return a dict mapping IDs to all live Project PBs.
+
+    Args:
+      cnxn: connection to SQL database.
+      use_cache: pass False to force a database query for the Project PBs;
+        the list of live project IDs is always read from the database.
+
+    Returns:
+      A dict {project_id: Project} for every project in the LIVE state.
+    """
+    project_rows = self.project_tbl.Select(
+        cnxn, cols=['project_id'], state=project_pb2.ProjectState.LIVE)
+    project_ids = [row[0] for row in project_rows]
+    projects_dict = self.GetProjects(cnxn, project_ids, use_cache=use_cache)
+
+    return projects_dict
+
+  def GetVisibleLiveProjects(self, cnxn, logged_in_user, effective_ids,
+                             use_cache=True):
+    """Return all user visible live project ids.
+
+    Args:
+      cnxn: connection to SQL database.
+      logged_in_user: protocol buffer of the logged in user. Can be None.
+      effective_ids: set of user IDs for this user. Can be None.
+      use_cache: pass False to force database query to find Project protocol
+        buffers.
+
+    Returns:
+      A list of project ids of user visible live projects sorted by the names
+      of the projects.
+    """
+    project_rows = self.project_tbl.Select(
+        cnxn, cols=['project_id'], state=project_pb2.ProjectState.LIVE)
+    project_ids = [row[0] for row in project_rows]
+    projects_dict = self.GetProjects(cnxn, project_ids, use_cache=use_cache)
+
+    # Apply the permission check per project, then sort by name so the
+    # returned IDs are in a stable, user-friendly order.
+    visible_projects = [project for project in projects_dict.values()
+                        if permissions.UserCanViewProject(
+                            logged_in_user, effective_ids, project)]
+    visible_projects.sort(key=lambda p: p.project_name)
+
+    return [project.project_id for project in visible_projects]
+
+  def GetProjects(self, cnxn, project_ids, use_cache=True):
+    """Load all the Project PBs for the given projects.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_ids: list of int project IDs
+      use_cache: pass False to force database query.
+
+    Returns:
+      A dict mapping IDs to the corresponding Project protocol buffers.
+
+    Raises:
+      NoSuchProjectException: if any of the projects was not found.
+    """
+    project_dict, missed_ids = self.project_2lc.GetAll(
+        cnxn, project_ids, use_cache=use_cache)
+
+    # Also, update the project name cache.
+    self.project_names_to_ids.CacheAll(
+        {p.project_name: p.project_id for p in project_dict.itervalues()})
+
+    # Any miss means some requested project does not exist; raise even
+    # though the other projects were found.
+    if missed_ids:
+      raise NoSuchProjectException()
+
+    return project_dict
+
+ def GetProject(self, cnxn, project_id, use_cache=True):
+ """Load the specified project from the database."""
+ project_id_dict = self.GetProjects(cnxn, [project_id], use_cache=use_cache)
+ return project_id_dict[project_id]
+
+  def GetProjectsByName(self, cnxn, project_names, use_cache=True):
+    """Load all the Project PBs for the given projects.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_names: list of project names.
+      use_cache: specify False to force database query.
+
+    Returns:
+      A dict mapping names to the corresponding Project protocol buffers.
+      Names that do not match any project are silently omitted.
+    """
+    project_ids = self.LookupProjectIDs(cnxn, project_names).values()
+    projects = self.GetProjects(cnxn, project_ids, use_cache=use_cache)
+    return {p.project_name: p for p in projects.itervalues()}
+
+  def GetProjectByName(self, cnxn, project_name, use_cache=True):
+    """Load the specified project from the database, None if does not exist."""
+    project_dict = self.GetProjectsByName(
+        cnxn, [project_name], use_cache=use_cache)
+    # .get() returns None for an unknown name rather than raising.
+    return project_dict.get(project_name)
+
+ ### Deleting projects
+
+  def ExpungeProject(self, cnxn, project_id):
+    """Wipes a project from the system.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project to permanently delete.
+    """
+    logging.info('expunging project %r', project_id)
+    # Delete the child rows first, then the Project row itself.
+    self.user2project_tbl.Delete(cnxn, project_id=project_id)
+    self.usergroupprojects_tbl.Delete(cnxn, project_id=project_id)
+    self.extraperm_tbl.Delete(cnxn, project_id=project_id)
+    self.membernotes_tbl.Delete(cnxn, project_id=project_id)
+    self.project_tbl.Delete(cnxn, project_id=project_id)
+
+ ### Updating projects
+
+ def UpdateProject(
+ self, cnxn, project_id, summary=None, description=None,
+ state=None, state_reason=None, access=None, issue_notify_address=None,
+ attachment_bytes_used=None, attachment_quota=None, moved_to=None,
+ process_inbound_email=None, only_owners_remove_restrictions=None,
+ read_only_reason=None, cached_content_timestamp=None,
+ only_owners_see_contributors=None, delete_time=None,
+ recent_activity=None, revision_url_format=None, home_page=None,
+ docs_url=None, logo_gcs_id=None, logo_file_name=None):
+ """Update the DB with the given project information."""
+ # This will be a newly constructed object, not from the cache and not
+ # shared with any other thread.
+ project = self.GetProject(cnxn, project_id, use_cache=False)
+ if not project:
+ raise NoSuchProjectException()
+
+ delta = {}
+ if summary is not None:
+ delta['summary'] = summary
+ if description is not None:
+ delta['description'] = description
+ if state is not None:
+ delta['state'] = str(state).lower()
+ if state is not None:
+ delta['state_reason'] = state_reason
+ if access is not None:
+ delta['access'] = str(access).lower()
+ if read_only_reason is not None:
+ delta['read_only_reason'] = read_only_reason
+ if issue_notify_address is not None:
+ delta['issue_notify_address'] = issue_notify_address
+ if attachment_bytes_used is not None:
+ delta['attachment_bytes_used'] = attachment_bytes_used
+ if attachment_quota is not None:
+ delta['attachment_quota'] = attachment_quota
+ if moved_to is not None:
+ delta['moved_to'] = moved_to
+ if process_inbound_email is not None:
+ delta['process_inbound_email'] = process_inbound_email
+ if only_owners_remove_restrictions is not None:
+ delta['only_owners_remove_restrictions'] = (
+ only_owners_remove_restrictions)
+ if only_owners_see_contributors is not None:
+ delta['only_owners_see_contributors'] = only_owners_see_contributors
+ if delete_time is not None:
+ delta['delete_time'] = delete_time
+ if recent_activity is not None:
+ delta['recent_activity_timestamp'] = recent_activity
+ if revision_url_format is not None:
+ delta['revision_url_format'] = revision_url_format
+ if home_page is not None:
+ delta['home_page'] = home_page
+ if docs_url is not None:
+ delta['docs_url'] = docs_url
+ if logo_gcs_id is not None:
+ delta['logo_gcs_id'] = logo_gcs_id
+ if logo_file_name is not None:
+ delta['logo_file_name'] = logo_file_name
+ if cached_content_timestamp is not None:
+ delta['cached_content_timestamp'] = cached_content_timestamp
+ self.project_tbl.Update(cnxn, delta, project_id=project_id)
+
+ self.project_2lc.InvalidateKeys(cnxn, [project_id])
+
+ # Now update the full-text index.
+ if summary is not None:
+ project.summary = summary
+ if description is not None:
+ project.description = description
+ if state is not None:
+ project.state = state
+ if access is not None:
+ project.access = access
+ if only_owners_remove_restrictions is not None:
+ project.only_owners_remove_restrictions = (
+ only_owners_remove_restrictions)
+ if only_owners_see_contributors is not None:
+ project.only_owners_see_contributors = only_owners_see_contributors
+
+  def UpdateProjectRoles(
+      self, cnxn, project_id, owner_ids, committer_ids, contributor_ids,
+      now=None):
+    """Store the project's roles in the DB and set cached_content_timestamp.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project to update.
+      owner_ids: list of int user IDs that should be owners.
+      committer_ids: list of int user IDs that should be committers.
+      contributor_ids: list of int user IDs that should be contributors.
+      now: fake int(time.time()) value passed in during unit testing.
+
+    Raises:
+      NoSuchProjectException: the project does not exist.
+    """
+    # This will be a newly constructed object, not from the cache and not
+    # shared with any other thread.
+    project = self.GetProject(cnxn, project_id, use_cache=False)
+    if not project:
+      raise NoSuchProjectException()
+
+    now = now or int(time.time())
+    self.project_tbl.Update(
+        cnxn, {'cached_content_timestamp': now},
+        project_id=project_id)
+
+    # Replace all role rows: delete the old ones and insert the new ones
+    # without committing, then commit everything at once below.
+    self.user2project_tbl.Delete(
+        cnxn, project_id=project_id, role_name='owner', commit=False)
+    self.user2project_tbl.Delete(
+        cnxn, project_id=project_id, role_name='committer', commit=False)
+    self.user2project_tbl.Delete(
+        cnxn, project_id=project_id, role_name='contributor', commit=False)
+
+    self.user2project_tbl.InsertRows(
+        cnxn, ['project_id', 'user_id', 'role_name'],
+        [(project_id, user_id, 'owner') for user_id in owner_ids],
+        commit=False)
+    self.user2project_tbl.InsertRows(
+        cnxn, ['project_id', 'user_id', 'role_name'],
+        [(project_id, user_id, 'committer')
+         for user_id in committer_ids], commit=False)
+
+    self.user2project_tbl.InsertRows(
+        cnxn, ['project_id', 'user_id', 'role_name'],
+        [(project_id, user_id, 'contributor')
+         for user_id in contributor_ids], commit=False)
+
+    cnxn.Commit()
+    self.project_2lc.InvalidateKeys(cnxn, [project_id])
+
+    # Also update the in-RAM object so this request sees the new roles.
+    project.owner_ids = owner_ids
+    project.committer_ids = committer_ids
+    project.contributor_ids = contributor_ids
+
+  def MarkProjectDeletable(self, cnxn, project_id, config_service):
+    """Update the project's state to make it DELETABLE and free up the name.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the project that will be deleted soon.
+      config_service: issue tracker configuration persistence service, needed
+          to invalidate cached issue tracker results.
+    """
+    # Renaming to a generated placeholder frees the original project name
+    # for reuse before the project rows are actually expunged.
+    generated_name = 'DELETABLE_%d' % project_id
+    delta = {'project_name': generated_name, 'state': 'deletable'}
+    self.project_tbl.Update(cnxn, delta, project_id=project_id)
+
+    self.project_2lc.InvalidateKeys(cnxn, [project_id])
+    # We cannot invalidate a specific part of the name->proj cache by name,
+    # So, tell every job to just drop the whole cache.  It should refill
+    # efficiently and incrementally from memcache.
+    self.project_2lc.InvalidateAllRamEntries(cnxn)
+    config_service.InvalidateMemcacheForEntireProject(project_id)
+
+ def UpdateRecentActivity(self, cnxn, project_id, now=None):
+ """Set the project's recent_activity to the current time."""
+ now = now or int(time.time())
+ self.UpdateProject(cnxn, project_id, recent_activity=now)
+
+ ### Roles and extra perms
+
+  def GetUserRolesInAllProjects(self, cnxn, effective_ids):
+    """Return three sets of project IDs where the user has a role.
+
+    Args:
+      cnxn: connection to SQL database.
+      effective_ids: collection of int user IDs to match against role rows
+          (presumably the user plus their user groups -- confirm w/ callers).
+
+    Returns:
+      A tuple (owned_project_ids, membered_project_ids, contrib_project_ids),
+      each a set of int project IDs.
+    """
+    owned_project_ids = set()
+    membered_project_ids = set()
+    contrib_project_ids = set()
+
+    rows = self.user2project_tbl.Select(
+        cnxn, cols=['project_id', 'role_name'], user_id=effective_ids)
+
+    for project_id, role_name in rows:
+      if role_name == 'owner':
+        owned_project_ids.add(project_id)
+      elif role_name == 'committer':
+        membered_project_ids.add(project_id)
+      elif role_name == 'contributor':
+        contrib_project_ids.add(project_id)
+      else:
+        # Unknown role rows are logged and skipped rather than failing.
+        logging.warn('Unexpected role name %r', role_name)
+
+    return owned_project_ids, membered_project_ids, contrib_project_ids
+
+  def UpdateExtraPerms(
+      self, cnxn, project_id, member_id, extra_perms, now=None):
+    """Load the project, update the member's extra perms, and store.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the current project.
+      member_id: int user id of the user that was edited.
+      extra_perms: list of strings for perms that the member
+          should have over-and-above what their role gives them.
+      now: fake int(time.time()) value passed in during unit testing.
+    """
+    # This will be a newly constructed object, not from the cache and not
+    # shared with any other thread.
+    project = self.GetProject(cnxn, project_id, use_cache=False)
+
+    member_extra_perms = permissions.FindExtraPerms(project, member_id)
+    # Nothing stored and nothing requested: no work to do.
+    if not member_extra_perms and not extra_perms:
+      return
+    # Requested perms are identical to what is already stored: no work.
+    if member_extra_perms and list(member_extra_perms.perms) == extra_perms:
+      return
+
+    if member_extra_perms:
+      member_extra_perms.perms = extra_perms
+    else:
+      member_extra_perms = project_pb2.Project.ExtraPerms(
+          member_id=member_id, perms=extra_perms)
+      project.extra_perms.append(member_extra_perms)
+
+    # Replace the member's perm rows and bump cached_content_timestamp,
+    # all in one commit so other jobs never see a partial update.
+    self.extraperm_tbl.Delete(
+        cnxn, project_id=project_id, user_id=member_id, commit=False)
+    self.extraperm_tbl.InsertRows(
+        cnxn, EXTRAPERM_COLS,
+        [(project_id, member_id, perm) for perm in extra_perms],
+        commit=False)
+    now = now or int(time.time())
+    project.cached_content_timestamp = now
+    self.project_tbl.Update(
+        cnxn, {'cached_content_timestamp': project.cached_content_timestamp},
+        project_id=project_id, commit=False)
+    cnxn.Commit()
+
+    self.project_2lc.InvalidateKeys(cnxn, [project_id])
+
+ ### Project Commitments
+
+ def GetProjectCommitments(self, cnxn, project_id):
+ """Get the project commitments (notes) from the DB.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_id: int project ID.
+
+ Returns:
+ A the specified project's ProjectCommitments instance, or an empty one,
+ if the project doesn't exist, or has not documented member
+ commitments.
+ """
+ # Get the notes. Don't get the project_id column
+ # since we already know that value.
+ notes_rows = self.membernotes_tbl.Select(
+ cnxn, cols=['user_id', 'notes'], project_id=project_id)
+ notes_dict = dict(notes_rows)
+
+ project_commitments = project_pb2.ProjectCommitments()
+ project_commitments.project_id = project_id
+ for user_id in notes_dict.keys():
+ commitment = project_pb2.ProjectCommitments.MemberCommitment(
+ member_id=user_id,
+ notes=notes_dict.get(user_id, ''))
+ project_commitments.commitments.append(commitment)
+
+ return project_commitments
+
+ def _StoreProjectCommitments(self, cnxn, project_commitments):
+ """Store an updated set of project commitments in the DB.
+
+ Args:
+ cnxn: connection to SQL database.
+ project_commitments: ProjectCommitments PB
+ """
+ project_id = project_commitments.project_id
+ notes_rows = []
+ for commitment in project_commitments.commitments:
+ notes_rows.append(
+ (project_id, commitment.member_id, commitment.notes))
+
+ # TODO(jrobbins): this should be in a transaction.
+ self.membernotes_tbl.Delete(cnxn, project_id=project_id)
+ self.membernotes_tbl.InsertRows(
+ cnxn, MEMBERNOTES_COLS, notes_rows, ignore=True)
+
+  def UpdateCommitments(self, cnxn, project_id, member_id, notes):
+    """Update the member's commitments in the specified project.
+
+    Args:
+      cnxn: connection to SQL database.
+      project_id: int ID of the current project.
+      member_id: int user ID of the user that was edited.
+      notes: further notes on the member's expected involvement
+          in the project.
+    """
+    project_commitments = self.GetProjectCommitments(cnxn, project_id)
+
+    # Find the member's existing commitment, or make a new one (the `else`
+    # clause runs only when the loop completes without a `break`).
+    commitment = None
+    for c in project_commitments.commitments:
+      if c.member_id == member_id:
+        commitment = c
+        break
+    else:
+      commitment = project_pb2.ProjectCommitments.MemberCommitment(
+          member_id=member_id)
+      project_commitments.commitments.append(commitment)
+
+    dirty = False
+
+    if commitment.notes != notes:
+      commitment.notes = notes
+      dirty = True
+
+    # Only hit the DB if something actually changed.
+    if dirty:
+      self._StoreProjectCommitments(cnxn, project_commitments)
+
+
+class Error(Exception):
+  """Base exception class for this package."""
+
+
+class ProjectAlreadyExists(Error):
+  """Tried to create a project that already exists."""
+
+
+class NoSuchProjectException(Error):
+  """No project with the specified name exists."""
diff --git a/appengine/monorail/services/secrets_svc.py b/appengine/monorail/services/secrets_svc.py
new file mode 100644
index 0000000..cfb5df2
--- /dev/null
+++ b/appengine/monorail/services/secrets_svc.py
@@ -0,0 +1,110 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide persistence for secret keys.
+
+These keys are used in generating XSRF tokens, calling the CAPTCHA API,
+and validating that inbound emails are replies to notifications that
+we sent.
+
+Unlike other data stored in Monorail, this is kept in the GAE
+datastore rather than SQL because (1) it never needs to be used in
+combination with other SQL data, and (2) we may want to replicate
+issue content for various off-line reporting functionality, but we
+will never want to do that with these keys. A copy is also kept in
+memcache for faster access.
+
+When no secrets are found, a new Secrets entity is created and initialized
+with randomly generated values for XSRF and email keys.
+
+If these secret values ever need to change:
+(1) Make the change on the Google Cloud Console in the Cloud Datastore tab.
+(2) Flush memcache.
+"""
+
+import logging
+
+from google.appengine.api import memcache
+from google.appengine.ext import ndb
+
+import settings
+from framework import framework_helpers
+
+
+GLOBAL_KEY = 'secrets_singleton_key'
+
+
+class Secrets(ndb.Model):
+  """Model for representing secret keys."""
+  # Keys we use to generate tokens.  Both are required, so an entity
+  # cannot be stored without them (see MakeSecrets()).
+  xsrf_key = ndb.StringProperty(required=True)
+  email_key = ndb.StringProperty(required=True)
+
+  # Keys for other APIs that we use.  These are optional because an admin
+  # must set them manually via the Google Cloud Console.
+  recaptcha_public_key = ndb.StringProperty()
+  recaptcha_private_key = ndb.StringProperty()
+
+
+def MakeSecrets():
+  """Make a new Secrets model with random values for keys.
+
+  Returns:
+    A new, unsaved Secrets entity; the caller is responsible for put().
+  """
+  secrets = Secrets(id=GLOBAL_KEY)
+  secrets.xsrf_key = framework_helpers.MakeRandomKey()
+  secrets.email_key = framework_helpers.MakeRandomKey()
+  # Note that recaptcha keys are not generated.  An admin
+  # will need to set them via the Google Cloud Console.
+  return secrets
+
+
+def GetSecrets():
+  """Get secret keys from memcache or datastore. Or, make new ones.
+
+  Returns:
+    The singleton Secrets entity, creating and storing it on first use.
+  """
+  # Fast path: memcache holds a copy of the singleton entity.
+  secrets = memcache.get(GLOBAL_KEY)
+  if secrets:
+    return secrets
+
+  secrets = Secrets.get_by_id(GLOBAL_KEY)
+  if not secrets:
+    # NOTE(review): two concurrent requests could both miss and each
+    # put() a new entity with different random keys; last write wins.
+    # Confirm this race is acceptable at first-deployment time.
+    secrets = MakeSecrets()
+    secrets.put()
+
+  memcache.set(GLOBAL_KEY, secrets)
+  return secrets
+
+
+def GetXSRFKey():
+ """Return a secret key string used to generate XSRF tokens."""
+ return GetSecrets().xsrf_key
+
+
+def GetEmailKey():
+ """Return a secret key string used to generate email tokens."""
+ return GetSecrets().email_key
+
+
+def GetRecaptchaPublicKey():
+  """Return our public API key for reCAPTCHA.
+
+  Returns:
+    The key string, or None if it has not been configured yet.
+  """
+  # Hard-coded key used only when running locally (settings.dev_mode).
+  if settings.dev_mode:
+    return '6LebzNMSAAAAAMY8b_FaZvp8wymUO5Jsa0pIX7HO'
+
+  result = GetSecrets().recaptcha_public_key
+  if not result:
+    logging.warn('No recaptcha_public_key set. Get one at recaptcha.net.')
+    logging.warn('Store it in Cloud Datastore via the Google Cloud Console.')
+
+  return result
+
+
+def GetRecaptchaPrivateKey():
+  """Return our private API key for reCAPTCHA.
+
+  Returns:
+    The key string, or None if it has not been configured yet.
+  """
+  # Hard-coded key used only when running locally (settings.dev_mode).
+  if settings.dev_mode:
+    return '6LebzNMSAAAAAHNVNiP2I7aNMv2AmxY5nReE2LZ4'
+
+  result = GetSecrets().recaptcha_private_key
+  if not result:
+    logging.warn('No recaptcha_private_key set. Get one at recaptcha.net.')
+    logging.warn('Store it in Cloud Datastore via the Google Cloud Console.')
+
+  return result
+
+
diff --git a/appengine/monorail/services/service_manager.py b/appengine/monorail/services/service_manager.py
new file mode 100644
index 0000000..7ab96bc6
--- /dev/null
+++ b/appengine/monorail/services/service_manager.py
@@ -0,0 +1,68 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Service manager to initialize all services."""
+
+from features import autolink
+from services import cachemanager_svc
+from services import config_svc
+from services import features_svc
+from services import issue_svc
+from services import project_svc
+from services import spam_svc
+from services import star_svc
+from services import user_svc
+from services import usergroup_svc
+
+
+svcs = None
+
+
+class Services(object):
+  """A simple container for widely-used service objects.
+
+  Each constructor argument simply becomes an attribute; all default to
+  None so unit tests can supply only the fakes they need.
+  """
+
+  def __init__(
+      self, project=None, user=None, issue=None, config=None,
+      inboundemail=None, usergroup=None, cache_manager=None, autolink_obj=None,
+      user_star=None, project_star=None, issue_star=None, features=None,
+      spam=None):
+    # Persistence services
+    self.project = project
+    self.user = user
+    self.usergroup = usergroup
+    self.issue = issue
+    self.config = config
+    self.user_star = user_star
+    self.project_star = project_star
+    self.issue_star = issue_star
+    self.features = features
+    self.spam = spam
+
+    # Misc. services
+    self.cache_manager = cache_manager
+    self.inboundemail = inboundemail
+    self.autolink = autolink_obj
+
+
+def set_up_services():
+  """Set up all services.
+
+  Returns:
+    The module-level Services singleton, creating and populating it on the
+    first call; later calls return the already-initialized instance.
+  """
+
+  global svcs
+  if svcs is None:
+    # Construction order matters: cache_manager is a dependency of most
+    # services, and issue_svc needs the project and config services.
+    svcs = Services()
+    svcs.autolink = autolink.Autolink()
+    svcs.cache_manager = cachemanager_svc.CacheManager()
+    svcs.user = user_svc.UserService(svcs.cache_manager)
+    svcs.user_star = star_svc.UserStarService(svcs.cache_manager)
+    svcs.project_star = star_svc.ProjectStarService(svcs.cache_manager)
+    svcs.issue_star = star_svc.IssueStarService(svcs.cache_manager)
+    svcs.project = project_svc.ProjectService(svcs.cache_manager)
+    svcs.usergroup = usergroup_svc.UserGroupService(svcs.cache_manager)
+    svcs.config = config_svc.ConfigService(svcs.cache_manager)
+    svcs.issue = issue_svc.IssueService(
+        svcs.project, svcs.config, svcs.cache_manager)
+    svcs.features = features_svc.FeaturesService(svcs.cache_manager)
+    svcs.spam = spam_svc.SpamService()
+  return svcs
diff --git a/appengine/monorail/services/spam_svc.py b/appengine/monorail/services/spam_svc.py
new file mode 100644
index 0000000..2823f13
--- /dev/null
+++ b/appengine/monorail/services/spam_svc.py
@@ -0,0 +1,391 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+""" Set of functions for dealing with spam reports.
+"""
+
+import collections
+import httplib2
+import logging
+import settings
+import sys
+import settings
+
+from features import filterrules_helpers
+from framework import sql
+from infra_libs import ts_mon
+
+from apiclient.discovery import build
+from oauth2client.client import GoogleCredentials
+from apiclient.errors import Error as ApiClientError
+from oauth2client.client import Error as Oauth2ClientError
+
+SPAMREPORT_TABLE_NAME = 'SpamReport'
+SPAMVERDICT_TABLE_NAME = 'SpamVerdict'
+ISSUE_TABLE = 'Issue'
+
+REASON_MANUAL = 'manual'
+REASON_THRESHOLD = 'threshold'
+REASON_CLASSIFIER = 'classifier'
+
+SPAMREPORT_COLS = ['issue_id', 'reported_user_id', 'user_id']
+MANUALVERDICT_COLS = ['user_id', 'issue_id', 'is_spam', 'reason', 'project_id']
+THRESHVERDICT_COLS = ['issue_id', 'is_spam', 'reason', 'project_id']
+
+
+class SpamService(object):
+  """The persistence layer for spam reports."""
+  # ts_mon counters shared by all instances; labeled by action 'type'.
+  issue_actions = ts_mon.CounterMetric('monorail/spam_svc/issue')
+  comment_actions = ts_mon.CounterMetric('monorail/spam_svc/comment')
+
+  def __init__(self):
+    self.report_tbl = sql.SQLTableManager(SPAMREPORT_TABLE_NAME)
+    self.verdict_tbl = sql.SQLTableManager(SPAMVERDICT_TABLE_NAME)
+    self.issue_tbl = sql.SQLTableManager(ISSUE_TABLE)
+
+    # If credentials are unavailable, leave prediction_service as None so
+    # classification degrades gracefully instead of failing startup.
+    self.prediction_service = None
+    try:
+      credentials = GoogleCredentials.get_application_default()
+      self.prediction_service = build('prediction', 'v1.6',
+          http=httplib2.Http(),
+          credentials=credentials)
+    except (Oauth2ClientError, ApiClientError):
+      logging.error("Error getting GoogleCredentials: %s" % sys.exc_info()[0])
+
+ def LookupFlaggers(self, cnxn, issue_id):
+ """Returns users who've reported the issue or its comments as spam.
+
+ Returns a tuple. First element is a list of users who flagged the issue;
+ second element is a dictionary of comment id to a list of users who flagged
+ that comment.
+ """
+ rows = self.report_tbl.Select(
+ cnxn, cols=['user_id', 'comment_id'],
+ issue_id=issue_id)
+
+ issue_reporters = []
+ comment_reporters = collections.defaultdict(list)
+ for row in rows:
+ if row[1]:
+ comment_reporters[row[1]].append(row[0])
+ else:
+ issue_reporters.append(row[0])
+
+ return issue_reporters, comment_reporters
+
+ def LookUpFlagCounts(self, cnxn, issue_ids):
+ """Returns a map of issue_id to flag counts"""
+ rows = self.report_tbl.Select(cnxn, cols=['issue_id', 'COUNT(*)'],
+ issue_id=issue_ids, group_by=['issue_id'])
+ counts = {}
+ for row in rows:
+ counts[long(row[0])] = row[1]
+ return counts
+
+  def LookUpIssueVerdicts(self, cnxn, issue_ids):
+    """Returns a map of issue_id to most recent spam verdicts.
+
+    Returns:
+      A dict {issue_id: reason} for issues that have any verdict.
+    """
+    # NOTE(review): with GROUP BY issue_id, the 'reason' value is not
+    # guaranteed by SQL to come from the row holding MAX(created); this
+    # relies on MySQL's permissive nonaggregated-column behavior. Confirm.
+    rows = self.verdict_tbl.Select(cnxn,
+        cols=['issue_id', 'reason', 'MAX(created)'],
+        issue_id=issue_ids, group_by=['issue_id'])
+    counts = {}
+    for row in rows:
+      counts[long(row[0])] = row[1]
+    return counts
+
+  def LookUpIssueVerdictHistory(self, cnxn, issue_ids):
+    """Returns the full verdict history for the given issues.
+
+    Returns:
+      A list of verdict dicts, ordered by issue_id then creation time.
+    """
+    rows = self.verdict_tbl.Select(cnxn, cols=[
+        'issue_id', 'reason', 'created', 'is_spam', 'classifier_confidence',
+        'user_id', 'overruled'],
+        issue_id=issue_ids, order_by=[('issue_id', []), ('created', [])])
+
+    # TODO: group by issue_id, make class instead of dict for verdict.
+    verdicts = []
+    for row in rows:
+      verdicts.append({
+          'issue_id': row[0],
+          'reason': row[1],
+          'created': row[2],
+          'is_spam': row[3],
+          'classifier_confidence': row[4],
+          'user_id': row[5],
+          'overruled': row[6],
+      })
+
+    return verdicts
+
+  def FlagIssues(self, cnxn, issue_service, issues, reporting_user_id,
+      flagged_spam):
+    """Creates or deletes a spam report on an issue.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_service: used to store updated is_spam bits on the issues.
+      issues: list of Issue PBs to flag or un-flag; all are assumed to
+          belong to the same project (project_id is taken from the first).
+      reporting_user_id: int user ID of the user doing the flagging.
+      flagged_spam: True to add spam reports, False to retract this
+          user's reports.
+    """
+    verdict_updates = []
+    if flagged_spam:
+      rows = [(issue.issue_id, issue.reporter_id, reporting_user_id)
+          for issue in issues]
+      self.report_tbl.InsertRows(cnxn, SPAMREPORT_COLS, rows, ignore=True)
+    else:
+      issue_ids = [issue.issue_id for issue in issues]
+      self.report_tbl.Delete(
+          cnxn, issue_id=issue_ids, user_id=reporting_user_id,
+          comment_id=None)
+
+    project_id = issues[0].project_id
+
+    # Now record new verdicts and update issue.is_spam, if they've changed.
+    ids = [issue.issue_id for issue in issues]
+    counts = self.LookUpFlagCounts(cnxn, ids)
+    previous_verdicts = self.LookUpIssueVerdicts(cnxn, ids)
+
+    for issue_id in counts:
+      # If the flag counts changed enough to toggle the is_spam bit, need to
+      # record a new verdict and update the Issue.  Issues with a manual
+      # verdict are never overridden by threshold-based flagging.
+      if ((flagged_spam and counts[issue_id] >= settings.spam_flag_thresh or
+          not flagged_spam and counts[issue_id] < settings.spam_flag_thresh)
+          and (previous_verdicts[issue_id] != REASON_MANUAL if issue_id in
+          previous_verdicts else True)):
+        verdict_updates.append(issue_id)
+
+    if len(verdict_updates) == 0:
+      return
+
+    # Some of the issues may have exceeded the flag threshold, so issue
+    # verdicts and mark as spam in those cases.
+    rows = [(issue_id, flagged_spam, REASON_THRESHOLD, project_id)
+        for issue_id in verdict_updates]
+    self.verdict_tbl.InsertRows(cnxn, THRESHVERDICT_COLS, rows, ignore=True)
+    update_issues = []
+    for issue in issues:
+      if issue.issue_id in verdict_updates:
+        issue.is_spam = flagged_spam
+        update_issues.append(issue)
+
+    if flagged_spam:
+      self.issue_actions.increment_by(len(update_issues), {'type': 'flag'})
+
+    issue_service.UpdateIssues(cnxn, update_issues, update_cols=['is_spam'])
+
+  def FlagComment(self, cnxn, issue_id, comment_id, reported_user_id,
+      reporting_user_id, flagged_spam):
+    """Creates or deletes a spam report on a comment.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_id: int ID of the issue that the comment belongs to.
+      comment_id: int ID of the comment being flagged or un-flagged.
+      reported_user_id: int user ID of the comment's author.
+      reporting_user_id: int user ID of the user doing the flagging.
+      flagged_spam: True to add a spam report, False to retract it.
+    """
+    # TODO(seanmccullough): Bulk comment flagging? There's no UI for that.
+    if flagged_spam:
+      self.report_tbl.InsertRow(
+          cnxn, ignore=True, issue_id=issue_id,
+          comment_id=comment_id, reported_user_id=reported_user_id,
+          user_id=reporting_user_id)
+      self.comment_actions.increment({'type': 'flag'})
+    else:
+      self.report_tbl.Delete(
+          cnxn, issue_id=issue_id, comment_id=comment_id,
+          user_id=reporting_user_id)
+
+  def RecordClassifierIssueVerdict(self, cnxn, issue, is_spam, confidence):
+    """Record a classifier verdict for a newly created issue.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue: the Issue PB that was classified.
+      is_spam: bool classifier decision.
+      confidence: float classifier confidence for the decision.
+    """
+    self.verdict_tbl.InsertRow(cnxn, issue_id=issue.issue_id, is_spam=is_spam,
+        reason=REASON_CLASSIFIER, classifier_confidence=confidence)
+    if is_spam:
+      self.issue_actions.increment({'type': 'classifier'})
+    # This is called at issue creation time, so there's nothing else to do here.
+
+  def RecordManualIssueVerdicts(self, cnxn, issue_service, issues, user_id,
+      is_spam):
+    """Record a human moderator's spam/ham decision for the given issues.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_service: used to store updated issues and allocate local IDs.
+      issues: list of Issue PBs being moderated.
+      user_id: int user ID of the moderator.
+      is_spam: bool verdict to apply to every issue in the list.
+    """
+    rows = [(user_id, issue.issue_id, is_spam, REASON_MANUAL, issue.project_id)
+        for issue in issues]
+    issue_ids = [issue.issue_id for issue in issues]
+
+    # Overrule all previous verdicts.
+    self.verdict_tbl.Update(cnxn, {'overruled': True}, [
+        ('issue_id IN (%s)' % sql.PlaceHolders(issue_ids), issue_ids)
+        ], commit=False)
+
+    self.verdict_tbl.InsertRows(cnxn, MANUALVERDICT_COLS, rows, ignore=True)
+
+    for issue in issues:
+      issue.is_spam = is_spam
+
+    if is_spam:
+      self.issue_actions.increment_by(len(issues), {'type': 'manual'})
+    else:
+      # Issues marked as ham get real local IDs so they become visible.
+      issue_service.AllocateNewLocalIDs(cnxn, issues)
+
+    # This will commit the transaction.
+    issue_service.UpdateIssues(cnxn, issues, update_cols=['is_spam'])
+
+  def RecordManualCommentVerdict(self, cnxn, issue_service, user_service,
+      comment_id, sequence_num, user_id, is_spam):
+    """Record a human moderator's spam/ham decision for one comment.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_service: used to load the comment/issue and soft-delete.
+      user_service: passed through to SoftDeleteComment.
+      comment_id: int ID of the comment being moderated.
+      sequence_num: int sequence number of the comment within its issue.
+      user_id: int user ID of the moderator.
+      is_spam: bool verdict; spam comments are also soft-deleted.
+    """
+    # TODO(seanmccullough): Bulk comment verdicts? There's no UI for that.
+    self.verdict_tbl.InsertRow(cnxn, ignore=True,
+        user_id=user_id, comment_id=comment_id, is_spam=is_spam,
+        reason=REASON_MANUAL)
+    comment = issue_service.GetComment(cnxn, comment_id)
+    comment.is_spam = is_spam
+    issue = issue_service.GetIssue(cnxn, comment.issue_id)
+    issue_service.SoftDeleteComment(cnxn, comment.project_id, issue.local_id,
+        sequence_num, user_id, user_service,
+        is_spam, True, is_spam)
+    if is_spam:
+      self.comment_actions.increment({'type': 'manual'})
+
+  def RecordClassifierCommentVerdict(self, cnxn, comment, is_spam, confidence):
+    """Record a classifier verdict for a newly posted comment.
+
+    Args:
+      cnxn: connection to SQL database.
+      comment: the comment PB that was classified.
+      is_spam: bool classifier decision.
+      confidence: float classifier confidence for the decision.
+    """
+    self.verdict_tbl.InsertRow(cnxn, comment_id=comment.id, is_spam=is_spam,
+        reason=REASON_CLASSIFIER, classifier_confidence=confidence,
+        project_id=comment.project_id)
+    if is_spam:
+      self.comment_actions.increment({'type': 'classifier'})
+
+  def ClassifyIssue(self, issue, firstComment):
+    """Classify an issue as either spam or ham.
+
+    Args:
+      issue: the Issue.
+      firstComment: the first Comment on issue.
+
+    Returns a JSON dict of classifier prediction results from
+    the Cloud Prediction API, or a fail-safe 'ham' result if the
+    prediction service is unavailable or keeps failing.
+    """
+    # Fail-safe: not spam.
+    result = {'outputLabel': 'ham',
+              'outputMulti': [{'label':'ham', 'score': '1.0'}]}
+    if not self.prediction_service:
+      logging.error("prediction_service not initialized.")
+      return result
+
+    remaining_retries = 3
+    while remaining_retries > 0:
+      try:
+        result = self.prediction_service.trainedmodels().predict(
+            project=settings.classifier_project_id,
+            id=settings.classifier_model_id,
+            body={'input': {
+                'csvInstance': [issue.summary, firstComment.content]}}
+            ).execute()
+        return result
+      # Deliberately broad: any API failure counts against the retry
+      # budget, and we fall back to the 'ham' result after 3 attempts.
+      except Exception:
+        remaining_retries = remaining_retries - 1
+        logging.error('Error calling prediction API: %s' % sys.exc_info()[0])
+
+    return result
+
+  def ClassifyComment(self, comment_content):
+    """Classify a comment as either spam or ham.
+
+    Args:
+      comment_content: the comment text.
+
+    Returns a JSON dict of classifier prediction results from
+    the Cloud Prediction API, or a fail-safe 'ham' result if the
+    prediction service is unavailable or keeps failing.
+    """
+    # Fail-safe: not spam.
+    result = {'outputLabel': 'ham',
+              'outputMulti': [{'label':'ham', 'score': '1.0'}]}
+    if not self.prediction_service:
+      logging.error("prediction_service not initialized.")
+      return result
+
+    remaining_retries = 3
+    while remaining_retries > 0:
+      try:
+        result = self.prediction_service.trainedmodels().predict(
+            project=settings.classifier_project_id,
+            id=settings.classifier_model_id,
+            # We re-use the issue classifier here, with a blank
+            # description and use the comment content as the body.
+            body={'input': {'csvInstance': ['', comment_content]}}
+            ).execute()
+        return result
+      # Deliberately broad: any API failure counts against the retry
+      # budget, and we fall back to the 'ham' result after 3 attempts.
+      except Exception:
+        remaining_retries = remaining_retries - 1
+        logging.error('Error calling prediction API: %s' % sys.exc_info()[0])
+
+    return result
+
+  def GetModerationQueue(
+      self, cnxn, _issue_service, project_id, offset=0, limit=10):
+    """Returns list of recent issues with spam verdicts,
+    ranked in ascending order of confidence (so uncertain items are first).
+
+    Returns:
+      A pair (items, count): a page of ModerationItem objects and the
+      total number of matching verdicts in the project.
+    """
+    # TODO(seanmccullough): Optimize pagination. This query probably gets
+    # slower as the number of SpamVerdicts grows, regardless of offset
+    # and limit values used here. Using offset,limit in general may not
+    # be the best way to do this.
+    results = self.verdict_tbl.Select(cnxn,
+        cols=['issue_id', 'is_spam', 'reason', 'classifier_confidence',
+              'created'],
+        where=[
+            ('project_id = %s', [project_id]),
+            ('classifier_confidence <= %s',
+             [settings.classifier_moderation_thresh]),
+            ('overruled = %s', [False]),
+            ('issue_id IS NOT NULL', []),
+        ],
+        order_by=[
+            ('classifier_confidence ASC', []),
+            ('created ASC', []),
+        ],
+        group_by=['issue_id'],
+        offset=offset,
+        limit=limit,
+        )
+
+    ret = []
+    for row in results:
+      ret.append(ModerationItem(
+          issue_id=long(row[0]),
+          is_spam=row[1] == 1,
+          reason=row[2],
+          classifier_confidence=row[3],
+          verdict_time='%s' % row[4],
+          ))
+
+    # Separate unpaginated count so the UI can show total queue size.
+    count = self.verdict_tbl.SelectValue(cnxn,
+        col='COUNT(*)',
+        where=[
+            ('project_id = %s', [project_id]),
+            ('classifier_confidence <= %s',
+             [settings.classifier_moderation_thresh]),
+            ('overruled = %s', [False]),
+            ('issue_id IS NOT NULL', []),
+        ])
+
+    return ret, count
+
+  def GetTrainingIssues(self, cnxn, issue_service, since, offset=0, limit=100):
+    """Returns issues with manual spam verdicts, for classifier training.
+
+    Args:
+      cnxn: connection to SQL database.
+      issue_service: used to load the issues and their comments.
+      since: datetime; only verdicts created after this time are included.
+      offset: optional pagination offset.
+      limit: optional maximum number of verdicts to consider.
+
+    Returns:
+      A tuple (issues, first_comments, count) where first_comments maps
+      issue_id to the text of each issue's first comment.
+    """
+
+    # Get all of the manual verdicts created after `since`.
+    results = self.verdict_tbl.Select(cnxn,
+        cols=['issue_id'],
+        where=[
+            ('overruled = %s', [False]),
+            ('reason = %s', ['manual']),
+            ('issue_id IS NOT NULL', []),
+            ('created > %s', [since.isoformat()]),
+        ],
+        offset=offset,
+        limit=limit,
+        )
+
+    issue_ids = [long(row[0]) for row in results if row[0]]
+    issues = issue_service.GetIssues(cnxn, issue_ids)
+    comments = issue_service.GetCommentsForIssues(cnxn, issue_ids)
+    first_comments = {}
+    for issue in issues:
+      first_comments[issue.issue_id] = (comments[issue.issue_id][0].content
+          if issue.issue_id in comments else "[Empty]")
+
+    count = self.verdict_tbl.SelectValue(cnxn,
+        col='COUNT(*)',
+        where=[
+            ('overruled = %s', [False]),
+            ('reason = %s', ['manual']),
+            ('issue_id IS NOT NULL', []),
+            ('created > %s', [since.isoformat()]),
+        ])
+
+    return issues, first_comments, count
+
+class ModerationItem:
+ def __init__(self, **kwargs):
+ self.__dict__ = kwargs
diff --git a/appengine/monorail/services/star_svc.py b/appengine/monorail/services/star_svc.py
new file mode 100644
index 0000000..9b16653
--- /dev/null
+++ b/appengine/monorail/services/star_svc.py
@@ -0,0 +1,190 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide persistence for stars.
+
+Stars can be on users, projects, or issues.
+"""
+
+import logging
+
+import settings
+from features import filterrules_helpers
+from framework import sql
+
+
+USERSTAR_TABLE_NAME = 'UserStar'
+PROJECTSTAR_TABLE_NAME = 'ProjectStar'
+ISSUESTAR_TABLE_NAME = 'IssueStar'
+
+# TODO(jrobbins): Consider adding memcache here if performance testing shows
+# that stars are a bottleneck. Keep in mind that issue star counts are
+# already denormalized and stored in the Issue, which is cached in memcache.
+
+
class AbstractStarService(object):
  """The persistence layer for any kind of star data."""

  def __init__(self, cache_manager, tbl, item_col, user_col, cache_kind):
    """Constructor.

    Args:
      cache_manager: local cache with distributed invalidation.
      tbl: SQL table that stores star data.
      item_col: string SQL column name that holds int item IDs.
      user_col: string SQL column name that holds int user IDs
          of the user who starred the item.
      cache_kind: string saying the kind of RAM cache.
    """
    self.tbl = tbl
    self.item_col = item_col
    self.user_col = user_col

    # Items starred by users, keyed by user who did the starring.
    self.star_cache = cache_manager.MakeCache('user')
    # Users that starred an item, keyed by item ID.
    self.starrer_cache = cache_manager.MakeCache(cache_kind)
    # Counts of the users that starred an item, keyed by item ID.
    self.star_count_cache = cache_manager.MakeCache(cache_kind)

  def ExpungeStars(self, cnxn, item_id):
    """Wipes an item's stars from the system."""
    self.tbl.Delete(cnxn, **{self.item_col: item_id})

  def LookupItemStarrers(self, cnxn, item_id):
    """Returns list of users having stars on the specified item."""
    starrer_list_dict = self.LookupItemsStarrers(cnxn, [item_id])
    return starrer_list_dict[item_id]

  def LookupItemsStarrers(self, cnxn, items_ids):
    """Returns {item_id: [uid, ...]} of users who starred these items."""
    starrer_list_dict, missed_ids = self.starrer_cache.GetAll(items_ids)

    if missed_ids:
      rows = self.tbl.Select(
          cnxn, cols=[self.item_col, self.user_col],
          **{self.item_col: missed_ids})
      # Ensure that every requested item_id has an entry so that even
      # zero-star items get cached.
      retrieved_starrers = {item_id: [] for item_id in missed_ids}
      for item_id, starrer_id in rows:
        retrieved_starrers[item_id].append(starrer_id)
      starrer_list_dict.update(retrieved_starrers)
      self.starrer_cache.CacheAll(retrieved_starrers)

    return starrer_list_dict

  def LookupStarredItemIDs(self, cnxn, starrer_user_id):
    """Returns list of item IDs that were starred by the specified user."""
    if not starrer_user_id:
      return []  # Anon user cannot star anything.

    cached_item_ids = self.star_cache.GetItem(starrer_user_id)
    if cached_item_ids is not None:
      return cached_item_ids

    rows = self.tbl.Select(cnxn, cols=[self.item_col], user_id=starrer_user_id)
    starred_ids = [row[0] for row in rows]
    self.star_cache.CacheItem(starrer_user_id, starred_ids)
    return starred_ids

  def IsItemStarredBy(self, cnxn, item_id, starrer_user_id):
    """Return True if the given issue is starred by the given user."""
    starred_ids = self.LookupStarredItemIDs(cnxn, starrer_user_id)
    return item_id in starred_ids

  def CountItemStars(self, cnxn, item_id):
    """Returns the number of stars on the specified item."""
    count_dict = self.CountItemsStars(cnxn, [item_id])
    return count_dict.get(item_id, 0)

  def CountItemsStars(self, cnxn, item_ids):
    """Get a dict {item_id: count} for the given items."""
    item_count_dict, missed_ids = self.star_count_cache.GetAll(item_ids)

    if missed_ids:
      rows = self.tbl.Select(
          cnxn, cols=[self.item_col, 'COUNT(%s)' % self.user_col],
          group_by=[self.item_col],
          **{self.item_col: missed_ids})
      # Ensure that every requested item_id has an entry so that even
      # zero-star items get cached.
      retrieved_counts = {item_id: 0 for item_id in missed_ids}
      retrieved_counts.update(rows)
      item_count_dict.update(retrieved_counts)
      self.star_count_cache.CacheAll(retrieved_counts)

    return item_count_dict

  def SetStar(self, cnxn, item_id, starrer_user_id, starred):
    """Sets or unsets a star for the specified item and user."""
    if starred:
      self.tbl.InsertRow(
          cnxn, ignore=True,
          **{self.item_col: item_id, self.user_col: starrer_user_id})
    else:
      self.tbl.Delete(
          cnxn, **{self.item_col: item_id, self.user_col: starrer_user_id})

    self.star_cache.Invalidate(cnxn, starrer_user_id)
    self.starrer_cache.Invalidate(cnxn, item_id)
    # Fix: also drop the cached star count.  Without this, CountItemsStars
    # could keep serving a stale count after a star was added or removed.
    self.star_count_cache.Invalidate(cnxn, item_id)
+
+
class UserStarService(AbstractStarService):
  """Persistence layer for stars that users place on other users."""

  def __init__(self, cache_manager):
    super(UserStarService, self).__init__(
        cache_manager, sql.SQLTableManager(USERSTAR_TABLE_NAME),
        'starred_user_id', 'user_id', 'user')
+
+
class ProjectStarService(AbstractStarService):
  """Persistence layer for stars that users place on projects."""

  def __init__(self, cache_manager):
    super(ProjectStarService, self).__init__(
        cache_manager, sql.SQLTableManager(PROJECTSTAR_TABLE_NAME),
        'project_id', 'user_id', 'project')
+
+
class IssueStarService(AbstractStarService):
  """Star service for stars on issues."""

  def __init__(self, cache_manager):
    tbl = sql.SQLTableManager(ISSUESTAR_TABLE_NAME)
    super(IssueStarService, self).__init__(
        cache_manager, tbl, 'issue_id', 'user_id', 'issue')

  # pylint: disable=arguments-differ
  def SetStar(
      self, cnxn, services, config, issue_id, starrer_user_id, starred):
    # TODO(agable): The number of arguments required by this function is
    # crazy. Find a way to simplify it so that it only needs the same
    # arguments as AbstractSetStar above.
    """Add or remove a star on the given issue for the given user.

    Args:
      cnxn: connection to SQL database.
      services: connections to persistence layer.
      config: ProjectIssueConfig PB for the project containing the issue.
      issue_id: integer global ID of an issue.
      starrer_user_id: user ID of the user who starred the issue.
      starred: boolean True for adding a star, False when removing one.
    """
    logging.info(
        'SetIssueStar:%06d, %s, %s', issue_id, starrer_user_id, starred)
    super(IssueStarService, self).SetStar(
        cnxn, issue_id, starrer_user_id, starred)

    # Fix: drop any cached count before recounting, otherwise the
    # CountItemStars call below could read a stale star_count_cache entry
    # and store a wrong denormalized count on the issue.
    self.star_count_cache.Invalidate(cnxn, issue_id)

    issue = services.issue.GetIssue(cnxn, issue_id)
    issue.star_count = self.CountItemStars(cnxn, issue_id)
    filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
    # Note: only star_count could change due to the starring, but any
    # field could have changed as a result of filter rules.
    services.issue.UpdateIssue(cnxn, issue)

    self.star_cache.Invalidate(cnxn, starrer_user_id)
    self.starrer_cache.Invalidate(cnxn, issue_id)
diff --git a/appengine/monorail/services/test/__init__.py b/appengine/monorail/services/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/services/test/__init__.py
diff --git a/appengine/monorail/services/test/api_pb2_v1_helpers_test.py b/appengine/monorail/services/test/api_pb2_v1_helpers_test.py
new file mode 100644
index 0000000..1a64d9c
--- /dev/null
+++ b/appengine/monorail/services/test/api_pb2_v1_helpers_test.py
@@ -0,0 +1,333 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the API v1 helpers."""
+
+import datetime
+import mock
+import unittest
+
+from framework import permissions
+from services import api_pb2_v1_helpers
+from services import service_manager
+from proto import api_pb2_v1
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import usergroup_pb2
+from testing import fake
+
+
def MakeTemplate(prefix):
  """Return a TemplateDef whose string fields are derived from prefix."""
  return tracker_pb2.TemplateDef(
      name=prefix + '-template',
      content=prefix + '-content',
      summary=prefix + '-summary',
      summary_must_be_edited=True,
      status='New',
      labels=[prefix + '-label1', prefix + '-label2'],
      members_only=True,
      owner_defaults_to_member=True,
      component_required=True)
+
+
def MakeLabel(prefix):
  """Return a LabelDef whose fields are derived from prefix."""
  return tracker_pb2.LabelDef(
      label=prefix + '-label',
      label_docstring=prefix + '-description')
+
+
def MakeStatus(prefix):
  """Return an open StatusDef whose fields are derived from prefix."""
  return tracker_pb2.StatusDef(
      status=prefix + '-New',
      means_open=True,
      status_docstring=prefix + '-status')
+
+
def MakeProjectIssueConfig(prefix):
  """Return a ProjectIssueConfig with two statuses, labels, and templates."""
  return tracker_pb2.ProjectIssueConfig(
      restrict_to_known=True,
      default_col_spec='ID Type Priority Summary',
      default_sort_spec='ID Priority',
      well_known_statuses=[
          MakeStatus('%s-status%d' % (prefix, n)) for n in (1, 2)],
      well_known_labels=[
          MakeLabel('%s-label%d' % (prefix, n)) for n in (1, 2)],
      templates=[
          MakeTemplate('%s-template%d' % (prefix, n)) for n in (1, 2)],
      default_template_for_developers=1,
      default_template_for_users=2)
+
+
def MakeProject(prefix):
  """Return a Project PB whose fields are derived from prefix."""
  return project_pb2.MakeProject(
      project_name=prefix + '-project',
      summary=prefix + '-summary',
      description=prefix + '-description')
+
+
+class ApiV1HelpersTest(unittest.TestCase):
+
+ def testConvertTemplate(self):
+ """Test convert_template."""
+ template = MakeTemplate('test')
+ prompt = api_pb2_v1_helpers.convert_template(template)
+ self.assertEquals(template.name, prompt.name)
+ self.assertEquals(template.summary, prompt.title)
+ self.assertEquals(template.content, prompt.description)
+ self.assertEquals(
+ template.summary_must_be_edited, prompt.titleMustBeEdited)
+ self.assertEquals(template.status, prompt.status)
+ self.assertEquals(template.labels, prompt.labels)
+ self.assertEquals(template.members_only, prompt.membersOnly)
+ self.assertEquals(
+ template.owner_defaults_to_member, prompt.defaultToMember)
+ self.assertEquals(template.component_required, prompt.componentRequired)
+
+ def testConvertLabel(self):
+ """Test convert_label."""
+ labeldef = MakeLabel('test')
+ label = api_pb2_v1_helpers.convert_label(labeldef)
+ self.assertEquals(labeldef.label, label.label)
+ self.assertEquals(labeldef.label_docstring, label.description)
+
+ def testConvertStatus(self):
+ """Test convert_status."""
+ statusdef = MakeStatus('test')
+ status = api_pb2_v1_helpers.convert_status(statusdef)
+ self.assertEquals(statusdef.status, status.status)
+ self.assertEquals(statusdef.means_open, status.meansOpen)
+ self.assertEquals(statusdef.status_docstring, status.description)
+
+ def testConvertProjectIssueConfig(self):
+ """Test convert_project_config."""
+ config = MakeProjectIssueConfig('test')
+ config_api = api_pb2_v1_helpers.convert_project_config(config)
+ self.assertEquals(config.restrict_to_known, config_api.restrictToKnown)
+ self.assertEquals(
+ config.default_col_spec.split(), config_api.defaultColumns)
+ self.assertEquals(
+ config.default_sort_spec.split(), config_api.defaultSorting)
+ self.assertEquals(2, len(config_api.statuses))
+ self.assertEquals(2, len(config_api.labels))
+ self.assertEquals(2, len(config_api.prompts))
+ self.assertEquals(
+ config.default_template_for_developers,
+ config_api.defaultPromptForMembers)
+ self.assertEquals(
+ config.default_template_for_users,
+ config_api.defaultPromptForNonMembers)
+
+ def testConvertProject(self):
+ """Test convert_project."""
+ project = MakeProject('testprj')
+ config = MakeProjectIssueConfig('testconfig')
+ role = api_pb2_v1.Role.owner
+ project_api = api_pb2_v1_helpers.convert_project(project, config, role)
+ self.assertEquals(project.project_name, project_api.name)
+ self.assertEquals(project.project_name, project_api.externalId)
+ self.assertEquals('/p/%s/' % project.project_name, project_api.htmlLink)
+ self.assertEquals(project.summary, project_api.summary)
+ self.assertEquals(project.description, project_api.description)
+ self.assertEquals(role, project_api.role)
+ self.assertIsInstance(
+ project_api.issuesConfig, api_pb2_v1.ProjectIssueConfig)
+
+ def testConvertPerson(self):
+ """Test convert_person."""
+ svcs = service_manager.Services()
+ svcs.user = fake.UserService()
+ svcs.user.TestAddUser('user@example.com', 1)
+ result = api_pb2_v1_helpers.convert_person(1, None, svcs)
+ self.assertIsInstance(result, api_pb2_v1.AtomPerson)
+ self.assertEquals('user@example.com', result.name)
+
+ def testConvertIssueIDs(self):
+ """Test convert_issue_ids."""
+ svcs = service_manager.Services()
+ svcs.issue = fake.IssueService()
+ issue1 = fake.MakeTestIssue(789, 1, 'one', 'New', 111L)
+ svcs.issue.TestAddIssue(issue1)
+ issue_ids = [100001]
+ mar = mock.Mock()
+ mar.cnxn = None
+ mar.project_name = 'test-project'
+ result = api_pb2_v1_helpers.convert_issue_ids(issue_ids, mar, svcs)
+ self.assertEquals(1, len(result))
+ self.assertEquals(1, result[0].issueId)
+
+ def testConvertIssueRef(self):
+ """Test convert_issueref_pbs."""
+ svcs = service_manager.Services()
+ svcs.issue = fake.IssueService()
+ issue1 = fake.MakeTestIssue(12345, 1, 'one', 'New', 111L)
+ svcs.issue.TestAddIssue(issue1)
+ svcs.project = fake.ProjectService()
+ svcs.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ mar = mock.Mock()
+ mar.cnxn = None
+ mar.project_name = 'test-project'
+ mar.project_id = 12345
+ ir = api_pb2_v1.IssueRef(
+ issueId=1,
+ projectId='test-project'
+ )
+ result = api_pb2_v1_helpers.convert_issueref_pbs([ir], mar, svcs)
+ self.assertEquals(1, len(result))
+ self.assertEquals(100001, result[0])
+
+ def testConvertAttachment(self):
+ """Test convert_attachment."""
+
+ attachment = tracker_pb2.Attachment(
+ attachment_id=1,
+ filename='stats.txt',
+ filesize=12345,
+ mimetype='text/plain',
+ deleted=False)
+
+ result = api_pb2_v1_helpers.convert_attachment(attachment)
+ self.assertEquals(attachment.attachment_id, result.attachmentId)
+ self.assertEquals(attachment.filename, result.fileName)
+ self.assertEquals(attachment.filesize, result.fileSize)
+ self.assertEquals(attachment.mimetype, result.mimetype)
+ self.assertEquals(attachment.deleted, result.isDeleted)
+
+ def testConvertAmendments(self):
+ """Test convert_amendments."""
+
+ svcs = service_manager.Services()
+ svcs.user = fake.UserService()
+ svcs.user.TestAddUser('user@example.com', 1)
+ svcs.user.TestAddUser('user2@example.com', 2)
+ mar = mock.Mock()
+ mar.cnxn = None
+ issue = mock.Mock()
+ issue.project_name = 'test-project'
+
+ amendment_summary = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.SUMMARY,
+ newvalue='new summary')
+ amendment_status = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.STATUS,
+ newvalue='new status')
+ amendment_owner = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.OWNER,
+ added_user_ids=[1])
+ amendment_labels = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.LABELS,
+ newvalue='label1 -label2')
+ amendment_cc_add = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.CC,
+ added_user_ids=[1])
+ amendment_cc_remove = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.CC,
+ removed_user_ids=[2])
+ amendment_blockedon = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.BLOCKEDON,
+ newvalue='1')
+ amendment_blocking = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.BLOCKING,
+ newvalue='other:2 -3')
+ amendment_mergedinto = tracker_pb2.Amendment(
+ field=tracker_pb2.FieldID.MERGEDINTO,
+ newvalue='4')
+ amendments = [
+ amendment_summary, amendment_status, amendment_owner,
+ amendment_labels, amendment_cc_add, amendment_cc_remove,
+ amendment_blockedon, amendment_blocking, amendment_mergedinto]
+
+ result = api_pb2_v1_helpers.convert_amendments(
+ issue, amendments, mar, svcs)
+ self.assertEquals(amendment_summary.newvalue, result.summary)
+ self.assertEquals(amendment_status.newvalue, result.status)
+ self.assertEquals('user@example.com', result.owner)
+ self.assertEquals(['label1', '-label2'], result.labels)
+ self.assertEquals(['user@example.com', '-user2@example.com'], result.cc)
+ self.assertEquals(['test-project:1'], result.blockedOn)
+ self.assertEquals(['other:2', '-test-project:3'], result.blocking)
+ self.assertEquals(amendment_mergedinto.newvalue, result.mergedInto)
+
+ def testConvertComment(self):
+ """Test convert_comment."""
+
+ svcs = service_manager.Services()
+ svcs.user = fake.UserService()
+ svcs.user.TestAddUser('user@example.com', 1)
+ mar = mock.Mock()
+ mar.cnxn = None
+ issue = fake.MakeTestIssue(project_id=12345, local_id=1, summary='sum',
+ status='New', owner_id=1001)
+
+ comment = tracker_pb2.IssueComment(
+ user_id=1,
+ content='test content',
+ sequence=1,
+ deleted_by=1,
+ timestamp=1437700000,
+ )
+ result = api_pb2_v1_helpers.convert_comment(
+ issue, comment, mar, svcs, None)
+ self.assertEquals('user@example.com', result.author.name)
+ self.assertEquals(comment.content, result.content)
+ self.assertEquals('user@example.com', result.deletedBy.name)
+ self.assertEquals(1, result.id)
+ # Ensure that the published timestamp falls in a timestamp range to account
+ # for the test being run in different timezones.
+ # Using "Fri, 23 Jul 2015 00:00:00" and "Fri, 25 Jul 2015 00:00:00".
+ self.assertTrue(
+ datetime.datetime(2015, 7, 23, 0, 0, 0) <= result.published <=
+ datetime.datetime(2015, 7, 25, 0, 0, 0))
+
+ def testSplitRemoveAdd(self):
+ """Test split_remove_add."""
+
+ items = ['1', '-2', '-3', '4']
+ list_to_add, list_to_remove = api_pb2_v1_helpers.split_remove_add(items)
+
+ self.assertEquals(['1', '4'], list_to_add)
+ self.assertEquals(['2', '3'], list_to_remove)
+
+ def testIssueGlobalIDs(self):
+ """Test issue_global_ids."""
+
+ svcs = service_manager.Services()
+ svcs.issue = fake.IssueService()
+ issue1 = fake.MakeTestIssue(12345, 1, 'one', 'New', 111L)
+ svcs.issue.TestAddIssue(issue1)
+ svcs.project = fake.ProjectService()
+ svcs.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ mar = mock.Mock()
+ mar.cnxn = None
+ mar.project_name = 'test-project'
+ mar.project_id = 12345
+ pairs = ['test-project:1']
+ result = api_pb2_v1_helpers.issue_global_ids(pairs, 12345, mar, svcs)
+ self.assertEquals(100001, result[0])
+
+ def testConvertGroupSettings(self):
+ """Test convert_group_settings."""
+
+ setting = usergroup_pb2.MakeSettings('owners', 'mdb', 0)
+ result = api_pb2_v1_helpers.convert_group_settings('test-group', setting)
+ self.assertEquals('test-group', result.groupName)
+ self.assertEquals(
+ setting.who_can_view_members, result.who_can_view_members)
+ self.assertEquals(setting.ext_group_type, result.ext_group_type)
+ self.assertEquals(setting.last_sync_time, result.last_sync_time)
diff --git a/appengine/monorail/services/test/api_svc_v1_test.py b/appengine/monorail/services/test/api_svc_v1_test.py
new file mode 100644
index 0000000..8eda278
--- /dev/null
+++ b/appengine/monorail/services/test/api_svc_v1_test.py
@@ -0,0 +1,986 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the API v1."""
+
+import endpoints
+import unittest
+import webtest
+from google.appengine.api import oauth
+from mock import Mock
+from protorpc import messages
+from protorpc import message_types
+
+from framework import monorailrequest
+from framework import permissions
+from framework import template_helpers
+from proto import project_pb2
+from proto import tracker_pb2
+from search import frontendsearchpipeline
+from services import api_svc_v1
+from services import issue_svc
+from services import project_svc
+from services import service_manager
+from services import user_svc
+from testing import fake
+from testing_utils import testing
+from tracker import tracker_bizobj
+
+
def MakeFakeServiceManager():
  """Return a Services object backed entirely by in-memory fakes."""
  fake_services = {
      'user': fake.UserService(),
      'usergroup': fake.UserGroupService(),
      'project': fake.ProjectService(),
      'config': fake.ConfigService(),
      'issue': fake.IssueService(),
      'issue_star': fake.IssueStarService(),
      'features': fake.FeaturesService(),
      'cache_manager': fake.CacheManager(),
  }
  return service_manager.Services(**fake_services)
+
+
class FakeMonorailApiRequest(object):
  """Test double for a Monorail API request context.

  Builds the same fields as the real request object, but pulls its inputs
  from a plain `request` dict instead of an HTTP request.
  """

  def __init__(self, request, services, perms=None):
    self.cnxn = None
    # Requester must be resolved first; perms below depend on self.auth.
    self.auth = monorailrequest.AuthData.FromEmail(
        self.cnxn, request['requester'], services)
    self.me_user_id = self.auth.user_id
    self.project_name = None
    self.project = None
    self.viewed_username = None
    self.viewed_user_auth = None
    self.config = None
    if 'userId' in request:
      self.viewed_username = request['userId']
      self.viewed_user_auth = monorailrequest.AuthData.FromEmail(
          self.cnxn, self.viewed_username, services)
    elif 'groupName' in request:
      self.viewed_username = request['groupName']
      # A group name may not correspond to any user account.
      try:
        self.viewed_user_auth = monorailrequest.AuthData.FromEmail(
            self.cnxn, self.viewed_username, services)
      except user_svc.NoSuchUserException:
        self.viewed_user_auth = None
    if 'projectId' in request:
      self.project_name = request['projectId']
      self.project = services.project.GetProjectByName(
          self.cnxn, self.project_name)
      self.config = services.config.GetProjectConfig(
          self.cnxn, self.project_id)
    # self.project must already be set: permissions are project-scoped.
    self.perms = perms or permissions.GetPermissions(
        self.auth.user_pb, self.auth.effective_ids, self.project)
    self.granted_perms = set()

    # Search parameters with the same defaults as the real request parser.
    self.params = {
        'can': request.get('can', 1),
        'start': request.get('startIndex', 0),
        'num': request.get('maxResults', 100),
        'q': request.get('q', ''),
        'sort': request.get('sort', ''),
        'groupby': '',
        'projects': request.get('additionalProject', []) + [self.project_name]}
    self.use_cached_searches = True
    self.errors = template_helpers.EZTError()
    self.mode = None

    # Mirror the params into the attributes the search pipeline reads.
    self.query_project_names = self.GetParam('projects')
    self.group_by_spec = self.GetParam('groupby')
    self.sort_spec = self.GetParam('sort')
    self.query = self.GetParam('q')
    self.can = self.GetParam('can')
    self.start = self.GetParam('start')
    self.num = self.GetParam('num')

  @property
  def project_id(self):
    # Derived rather than stored so it always tracks self.project.
    return self.project.project_id if self.project else None

  def GetParam(self, query_param_name, default_value=None,
               _antitamper_re=None):
    # _antitamper_re is accepted for interface parity and ignored here.
    return self.params.get(query_param_name, default_value)

  def GetPositiveIntParam(self, query_param_name, default_value=None):
    """Returns 0 if the user-provided value is less than 0."""
    return max(self.GetParam(query_param_name, default_value=default_value),
               0)
+
+
class FakeFrontendSearchPipeline(object):
  """Stand-in search pipeline that returns two canned issues.

  Both issues are "allowed" results; only the first is "visible".
  """

  def __init__(self):
    canned_issues = [
        fake.MakeTestIssue(
            project_id=12345, local_id=local_id, owner_id=2, status='New',
            summary='sum')
        for local_id in (1, 2)]
    self.allowed_results = canned_issues
    self.visible_results = canned_issues[:1]
    self.total_count = len(self.allowed_results)
    self.config = None
    self.projectId = 0

  def SearchForIIDs(self):
    """No-op: results are fixed in the constructor."""
    pass

  def MergeAndSortIssues(self):
    """No-op: results are fixed in the constructor."""
    pass

  def Paginate(self):
    """No-op: results are fixed in the constructor."""
    pass
+
+
+class MonorailApiTest(testing.EndpointsTestCase):
+
+ api_service_cls = api_svc_v1.MonorailApi
+
+ def makeMar(self, request):
+ return FakeMonorailApiRequest(request, self.services)
+
+ def setUp(self):
+ super(MonorailApiTest, self).setUp()
+ self.requester = RequesterMock(email='requester@example.com')
+ self.mock(endpoints, 'get_current_user', lambda: self.requester)
+ self.config = None
+ self.services = MakeFakeServiceManager()
+ self.mock(api_svc_v1.MonorailApi, '_services', self.services)
+ self.services.user.TestAddUser('requester@example.com', 1)
+ self.services.user.TestAddUser('user@example.com', 2)
+ self.services.user.TestAddUser('group@example.com', 123)
+ self.services.usergroup.TestAddGroupSettings(123, 'group@example.com')
+ self.request = {
+ 'userId': 'user@example.com',
+ 'ownerProjectsOnly': False,
+ 'requester': 'requester@example.com',
+ 'projectId': 'test-project',
+ 'issueId': 1}
+ self.mock(api_svc_v1.MonorailApi, 'mar_factory',
+ lambda x, y: FakeMonorailApiRequest(self.request, self.services))
+
+ # api_base_checks is tested in AllBaseChecksTest,
+ # so mock it to reduce noise.
+ self.mock(api_svc_v1, 'api_base_checks', lambda x, y, z, u, v, w: None)
+
+ def SetUpComponents(
+ self, project_id, component_id, component_name, component_doc='doc',
+ deprecated=False, admin_ids=None, cc_ids=None, created=100000, creator=1):
+ admin_ids = admin_ids or []
+ cc_ids = cc_ids or []
+ self.config = self.services.config.GetProjectConfig(
+ 'fake cnxn', project_id)
+ self.services.config.StoreConfig('fake cnxn', self.config)
+ cd = tracker_bizobj.MakeComponentDef(
+ component_id, project_id, component_name, component_doc, deprecated,
+ admin_ids, cc_ids, created, creator)
+ self.config.component_defs.append(cd)
+
+ def SetUpFieldDefs(
+ self, field_id, project_id, field_name, field_type_int,
+ min_value=0, max_value=100, needs_member=False, docstring='doc'):
+ self.config = self.services.config.GetProjectConfig(
+ 'fake cnxn', project_id)
+ self.services.config.StoreConfig('fake cnxn', self.config)
+ fd = tracker_bizobj.MakeFieldDef(
+ field_id, project_id, field_name, field_type_int, '',
+ '', False, False, min_value, max_value, None, needs_member, None, '',
+ tracker_pb2.NotifyTriggers.NEVER, docstring, False)
+ self.config.field_defs.append(fd)
+
+ def testUsersGet_NoProject(self):
+ """The viewed user has no projects."""
+
+ self.services.project.TestAddProject(
+ 'public-project', owner_ids=[1])
+ resp = self.call_api('users_get', self.request).json_body
+ expected = {
+ 'id': '2',
+ 'kind': 'monorail#user'}
+ self.assertEqual(expected, resp)
+
+ def testUsersGet_PublicProject(self):
+ """The viewed user has one public project."""
+
+ self.services.project.TestAddProject(
+ 'public-project', owner_ids=[2])
+ resp = self.call_api('users_get', self.request).json_body
+
+ self.assertEqual(1, len(resp['projects']))
+ self.assertEqual('public-project', resp['projects'][0]['name'])
+
+ def testUsersGet_PrivateProject(self):
+ """The viewed user has one project but the requester cannot view."""
+
+ self.services.project.TestAddProject(
+ 'private-project', owner_ids=[2],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY)
+ resp = self.call_api('users_get', self.request).json_body
+ self.assertNotIn('projects', resp)
+
+ def testUsersGet_OwnerProjectOnly(self):
+ """The viewed user has different roles of projects."""
+
+ self.services.project.TestAddProject(
+ 'owner-project', owner_ids=[2])
+ self.services.project.TestAddProject(
+ 'member-project', owner_ids=[1], committer_ids=[2])
+ resp = self.call_api('users_get', self.request).json_body
+ self.assertEqual(2, len(resp['projects']))
+
+ self.request['ownerProjectsOnly'] = True
+ resp = self.call_api('users_get', self.request).json_body
+ self.assertEqual(1, len(resp['projects']))
+ self.assertEqual('owner-project', resp['projects'][0]['name'])
+
+ def testIssuesGet_GetIssue(self):
+ """Get the requested issue."""
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+ self.SetUpFieldDefs(1, 12345, 'Field1', tracker_pb2.FieldTypes.INT_TYPE)
+
+ fv = tracker_pb2.FieldValue(
+ field_id=1,
+ int_value=11)
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=2, reporter_id=1, status='New',
+ summary='sum', component_ids=[1], field_values=[fv])
+ self.services.issue.TestAddIssue(issue1)
+
+ resp = self.call_api('issues_get', self.request).json_body
+ self.assertEqual(1, resp['id'])
+ self.assertEqual('New', resp['status'])
+ self.assertEqual('open', resp['state'])
+ self.assertFalse(resp['canEdit'])
+ self.assertTrue(resp['canComment'])
+ self.assertEqual('requester@example.com', resp['author']['name'])
+ self.assertEqual('user@example.com', resp['owner']['name'])
+ self.assertEqual('API', resp['components'][0])
+ self.assertEqual('Field1', resp['fieldValues'][0]['fieldName'])
+ self.assertEqual('11', resp['fieldValues'][0]['fieldValue'])
+
+ def testIssuesInsert_BadRequest(self):
+ """The request does not specify summary or status."""
+
+ with self.assertRaises(webtest.AppError):
+ self.call_api('issues_insert', self.request)
+
+ issue_dict = {
+ 'status': 'New',
+ 'summary': 'Test issue',
+ 'owner': {'name': 'notexist@example.com'}}
+ self.request.update(issue_dict)
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ with self.call_should_fail(400):
+ self.call_api('issues_insert', self.request)
+
+ # Invalid field value
+ self.SetUpFieldDefs(1, 12345, 'Field1', tracker_pb2.FieldTypes.INT_TYPE)
+ issue_dict = {
+ 'status': 'New',
+ 'summary': 'Test issue',
+ 'owner': {'name': 'requester@example.com'},
+ 'fieldValues': [{'fieldName': 'Field1', 'fieldValue': '111'}]}
+ self.request.update(issue_dict)
+ with self.call_should_fail(400):
+ self.call_api('issues_insert', self.request)
+
+ def testIssuesInsert_NoPermission(self):
+ """The requester has no permission to create issues."""
+
+ issue_dict = {
+ 'status': 'New',
+ 'summary': 'Test issue'}
+ self.request.update(issue_dict)
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY,
+ project_id=12345)
+ with self.call_should_fail(403):
+ self.call_api('issues_insert', self.request)
+
+ def testIssuesInsert_CreateIssue(self):
+ """Create an issue as requested."""
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpFieldDefs(1, 12345, 'Field1', tracker_pb2.FieldTypes.INT_TYPE)
+
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, owner_id=2, reporter_id=1, status='New',
+ summary='Test issue')
+ self.services.issue.TestAddIssue(issue1)
+
+ issue_dict = {
+ 'blockedOn': [{'issueId': 1}],
+ 'cc': [{'name': 'user@example.com'}],
+ 'description': 'description',
+ 'labels': ['label1', 'label2'],
+ 'owner': {'name': 'requester@example.com'},
+ 'status': 'New',
+ 'summary': 'Test issue',
+ 'fieldValues': [{'fieldName': 'Field1', 'fieldValue': '11'}]}
+ self.request.update(issue_dict)
+
+ resp = self.call_api('issues_insert', self.request).json_body
+ self.assertEqual('New', resp['status'])
+ self.assertEqual('requester@example.com', resp['author']['name'])
+ self.assertEqual('requester@example.com', resp['owner']['name'])
+ self.assertEqual('user@example.com', resp['cc'][0]['name'])
+ self.assertEqual(1, resp['blockedOn'][0]['issueId'])
+ self.assertEqual([u'label1', u'label2'], resp['labels'])
+ self.assertEqual('Test issue', resp['summary'])
+ self.assertEqual('Field1', resp['fieldValues'][0]['fieldName'])
+ self.assertEqual('11', resp['fieldValues'][0]['fieldValue'])
+
+ def testIssuesList_NoPermission(self):
+ """No permission for additional projects."""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+
+ self.services.project.TestAddProject(
+ 'test-project2', owner_ids=[2],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY,
+ project_id=123456)
+ self.request['additionalProject'] = ['test-project2']
+ with self.call_should_fail(403):
+ self.call_api('issues_list', self.request)
+
+ def testIssuesList_SearchIssues(self):
+ """Find issues of one project."""
+
+ self.mock(frontendsearchpipeline, 'FrontendSearchPipeline',
+ lambda x, y, z, w: FakeFrontendSearchPipeline())
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY,
+ project_id=12345)
+ resp = self.call_api('issues_list', self.request).json_body
+ self.assertEqual(2, int(resp['totalResults']))
+ self.assertEqual(1, len(resp['items']))
+ self.assertEqual(1, resp['items'][0]['id'])
+
+ def testIssuesCommentsList_GetComments(self):
+ """Get comments of requested issue."""
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, summary='test summary', status='New',
+ issue_id=10001, owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue1)
+
+ comment = tracker_pb2.IssueComment(
+ id=123, issue_id=10001,
+ project_id=12345, user_id=2,
+ content='this is a comment',
+ timestamp=1437700000)
+ self.services.issue.TestAddComment(comment, 1)
+
+ resp = self.call_api('issues_comments_list', self.request).json_body
+ self.assertEqual(2, resp['totalResults'])
+ comment1 = resp['items'][0]
+ comment2 = resp['items'][1]
+ self.assertEqual('requester@example.com', comment1['author']['name'])
+ self.assertEqual('test summary', comment1['content'])
+ self.assertEqual('user@example.com', comment2['author']['name'])
+ self.assertEqual('this is a comment', comment2['content'])
+
+ def testIssuesCommentsInsert_NoCommentPermission(self):
+ """No permission to comment an issue."""
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY,
+ project_id=12345)
+
+ issue1 = fake.MakeTestIssue(
+ 12345, 1, 'Issue 1', 'New', 2)
+ self.services.issue.TestAddIssue(issue1)
+
+ with self.call_should_fail(403):
+ self.call_api('issues_comments_insert', self.request)
+
+ def testIssuesCommentsInsert_Amendments(self):
+ """Insert comments with amendments."""
+
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+
+ issue1 = fake.MakeTestIssue(
+ 12345, 1, 'Issue 1', 'New', 2)
+ issue2 = fake.MakeTestIssue(
+ 12345, 2, 'Issue 2', 'New', 2)
+ issue3 = fake.MakeTestIssue(
+ 12345, 3, 'Issue 3', 'New', 2)
+ issue4 = fake.MakeTestIssue(
+ 12345, 4, 'Issue 4', 'New', 2)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue1)
+ self.services.issue.TestAddIssue(issue2)
+ self.services.issue.TestAddIssue(issue3)
+ self.services.issue.TestAddIssue(issue4)
+
+ self.request['updates'] = {
+ 'summary': 'new summary',
+ 'status': 'Duplicate',
+ 'owner': 'requester@example.com',
+ 'cc': ['user@example.com'],
+ 'labels': ['add_label', '-remove_label'],
+ 'blockedOn': ['2'],
+ 'blocking': ['3'],
+ 'merged_into': 4}
+ resp = self.call_api('issues_comments_insert', self.request).json_body
+ self.assertEqual('requester@example.com', resp['author']['name'])
+ self.assertEqual('Updated', resp['updates']['status'])
+
+ def testIssuesCommentInsert_CustomFields(self):
+ """Update custom field values."""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(
+ 12345, 1, 'Issue 1', 'New', 2,
+ project_name='test-project')
+ self.services.issue.TestAddIssue(issue1)
+ self.SetUpFieldDefs(
+ 1, 12345, 'Field_int', tracker_pb2.FieldTypes.INT_TYPE)
+ self.SetUpFieldDefs(
+ 2, 12345, 'Field_enum', tracker_pb2.FieldTypes.ENUM_TYPE)
+
+ self.request['updates'] = {
+ 'fieldValues': [{'fieldName': 'Field_int', 'fieldValue': '11'},
+ {'fieldName': 'Field_enum', 'fieldValue': 'str'}]}
+ resp = self.call_api('issues_comments_insert', self.request).json_body
+ self.assertEqual('Updated', resp['updates']['status'])
+
+ def testIssuesCommentInsert_MoveToProject_Fail(self):
+ """Move issue to a different project and failed."""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(
+ 12345, 1, 'Issue 1', 'New', 2, labels=['Restrict-View-Google'],
+ project_name='test-project')
+ self.services.issue.TestAddIssue(issue1)
+
+ self.services.project.TestAddProject(
+ 'test-project2', owner_ids=[1],
+ project_id=12346)
+ issue2 = fake.MakeTestIssue(
+ 12346, 1, 'Issue 1', 'New', 2, project_name='test-project2')
+ self.services.issue.TestAddIssue(issue2)
+
+ # Project doesn't exist
+ self.request['updates'] = {
+ 'moveToProject': 'not exist'}
+ with self.call_should_fail(400):
+ self.call_api('issues_comments_insert', self.request)
+
+ # The issue is already in destination
+ self.request['updates'] = {
+ 'moveToProject': 'test-project'}
+ with self.call_should_fail(400):
+ self.call_api('issues_comments_insert', self.request)
+
+ # The user has no permission in test-project
+ self.request['projectId'] = 'test-project2'
+ self.request['updates'] = {
+ 'moveToProject': 'test-project'}
+ with self.call_should_fail(400):
+ self.call_api('issues_comments_insert', self.request)
+
+ # Restrict labels
+ self.request['projectId'] = 'test-project'
+ self.request['updates'] = {
+ 'moveToProject': 'test-project2'}
+ with self.call_should_fail(400):
+ self.call_api('issues_comments_insert', self.request)
+
+ def testIssuesCommentInsert_MoveToProject_Normal(self):
+ """Move issue."""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1, 2],
+ project_id=12345)
+ self.services.project.TestAddProject(
+ 'test-project2', owner_ids=[1, 2],
+ project_id=12346)
+ issue1 = fake.MakeTestIssue(
+ 12345, 1, 'Issue 1', 'New', 2, project_name='test-project')
+ self.services.issue.TestAddIssue(issue1)
+ issue2 = fake.MakeTestIssue(
+ 12346, 1, 'Issue 1', 'New', 2, project_name='test-project2')
+ self.services.issue.TestAddIssue(issue2)
+
+ self.request['updates'] = {
+ 'moveToProject': 'test-project2'}
+ resp = self.call_api('issues_comments_insert', self.request).json_body
+
+ self.assertEqual(
+ 'Moved issue test-project:1 to now be issue test-project:2.',
+ resp['content'])
+
+ def testIssuesCommentsDelete_NoComment(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, summary='test summary',
+ issue_id=10001, status='New', owner_id=2, reporter_id=2)
+ self.services.issue.TestAddIssue(issue1)
+ self.request['commentId'] = 1
+ with self.call_should_fail(404):
+ self.call_api('issues_comments_delete', self.request)
+
+ def testIssuesCommentsDelete_NoDeletePermission(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, summary='test summary',
+ issue_id=10001, status='New', owner_id=2, reporter_id=2)
+ self.services.issue.TestAddIssue(issue1)
+ self.request['commentId'] = 0
+ with self.call_should_fail(403):
+ self.call_api('issues_comments_delete', self.request)
+
+ def testIssuesCommentsDelete_DeleteUndelete(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ issue1 = fake.MakeTestIssue(
+ project_id=12345, local_id=1, summary='test summary',
+ issue_id=10001, status='New', owner_id=2, reporter_id=1)
+ self.services.issue.TestAddIssue(issue1)
+ comment = tracker_pb2.IssueComment(
+ id=123, issue_id=10001,
+ project_id=12345, user_id=1,
+ content='this is a comment',
+ timestamp=1437700000)
+ self.services.issue.TestAddComment(comment, 1)
+ self.request['commentId'] = 1
+
+ comments = self.services.issue.GetCommentsForIssue(None, 10001)
+
+ self.call_api('issues_comments_delete', self.request)
+ self.assertEqual(1, comments[1].deleted_by)
+
+ self.call_api('issues_comments_undelete', self.request)
+ self.assertIsNone(comments[1].deleted_by)
+
+ def testGroupsSettingsList_AllSettings(self):
+ resp = self.call_api('groups_settings_list', self.request).json_body
+ all_settings = resp['groupSettings']
+ self.assertEqual(1, len(all_settings))
+ self.assertEqual('group@example.com', all_settings[0]['groupName'])
+
+ def testGroupsSettingsList_ImportedSettings(self):
+ self.services.user.TestAddUser('imported@example.com', 234)
+ self.services.usergroup.TestAddGroupSettings(
+ 234, 'imported@example.com', external_group_type='mdb')
+ self.request['importedGroupsOnly'] = True
+ resp = self.call_api('groups_settings_list', self.request).json_body
+ all_settings = resp['groupSettings']
+ self.assertEqual(1, len(all_settings))
+ self.assertEqual('imported@example.com', all_settings[0]['groupName'])
+
+ def testGroupsCreate_NoPermission(self):
+ self.request['groupName'] = 'group'
+ with self.call_should_fail(403):
+ self.call_api('groups_create', self.request)
+
+ def SetUpGroupRequest(self, group_name, who_can_view_members='MEMBERS',
+ ext_group_type=None, perms=None,
+ requester='requester@example.com'):
+ request = {
+ 'groupName': group_name,
+ 'requester': requester,
+ 'who_can_view_members': who_can_view_members,
+ 'ext_group_type': ext_group_type}
+ self.request.pop("userId", None)
+ self.mock(api_svc_v1.MonorailApi, 'mar_factory',
+ lambda x, y: FakeMonorailApiRequest(
+ request, self.services, perms))
+ return request
+
+ def testGroupsCreate_Normal(self):
+ request = self.SetUpGroupRequest('newgroup@example.com', 'MEMBERS',
+ 'MDB', permissions.ADMIN_PERMISSIONSET)
+
+ resp = self.call_api('groups_create', request).json_body
+ self.assertIn('groupID', resp)
+
+ def testGroupsGet_NoPermission(self):
+ request = self.SetUpGroupRequest('group@example.com')
+ with self.call_should_fail(403):
+ self.call_api('groups_get', request)
+
+ def testGroupsGet_Normal(self):
+ request = self.SetUpGroupRequest('group@example.com',
+ perms=permissions.ADMIN_PERMISSIONSET)
+ self.services.usergroup.TestAddMembers(123, [1], 'member')
+ self.services.usergroup.TestAddMembers(123, [2], 'owner')
+ resp = self.call_api('groups_get', request).json_body
+ self.assertEqual(123, resp['groupID'])
+ self.assertEqual(['requester@example.com'], resp['groupMembers'])
+ self.assertEqual(['user@example.com'], resp['groupOwners'])
+ self.assertEqual('group@example.com', resp['groupSettings']['groupName'])
+
+ def testGroupsUpdate_NoPermission(self):
+ request = self.SetUpGroupRequest('group@example.com')
+ with self.call_should_fail(403):
+ self.call_api('groups_update', request)
+
+ def testGroupsUpdate_Normal(self):
+ request = self.SetUpGroupRequest('group@example.com')
+ request = self.SetUpGroupRequest('group@example.com',
+ perms=permissions.ADMIN_PERMISSIONSET)
+ request['last_sync_time'] = 123456789
+ request['groupOwners'] = ['requester@example.com']
+ request['groupMembers'] = ['user@example.com']
+ resp = self.call_api('groups_update', request).json_body
+ self.assertFalse(resp.get('error'))
+
+ def testComponentsList(self):
+ """Get components for a project."""
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+ resp = self.call_api('components_list', self.request).json_body
+
+ self.assertEqual(1, len(resp['components']))
+ cd = resp['components'][0]
+ self.assertEqual(1, cd['componentId'])
+ self.assertEqual('API', cd['componentPath'])
+ self.assertEqual(1, cd['componentId'])
+ self.assertEqual('test-project', cd['projectName'])
+
+ def testComponentsCreate_NoPermission(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+
+ cd_dict = {
+ 'componentName': 'Test'}
+ self.request.update(cd_dict)
+
+ with self.call_should_fail(403):
+ self.call_api('components_create', self.request)
+
+ def testComponentsCreate_Invalid(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+
+ # Component with invalid name
+ cd_dict = {
+ 'componentName': 'c>d>e'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(400):
+ self.call_api('components_create', self.request)
+
+ # Name already in use
+ cd_dict = {
+ 'componentName': 'API'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(400):
+ self.call_api('components_create', self.request)
+
+ # Parent component does not exist
+ cd_dict = {
+ 'componentName': 'test',
+ 'parentPath': 'NotExist'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(404):
+ self.call_api('components_create', self.request)
+
+
+ def testComponentsCreate_Normal(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+
+ cd_dict = {
+ 'componentName': 'Test',
+ 'description':'test comp',
+ 'cc': ['requester@example.com']}
+ self.request.update(cd_dict)
+
+ resp = self.call_api('components_create', self.request).json_body
+ self.assertEqual('test comp', resp['description'])
+ self.assertEqual('requester@example.com', resp['creator'])
+ self.assertEqual([u'requester@example.com'], resp['cc'])
+ self.assertEqual('Test', resp['componentPath'])
+
+ cd_dict = {
+ 'componentName': 'TestChild',
+ 'parentPath': 'API'}
+ self.request.update(cd_dict)
+ resp = self.call_api('components_create', self.request).json_body
+
+ self.assertEqual('API>TestChild', resp['componentPath'])
+
+ def testComponentsDelete_Invalid(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+
+ # Fail to delete a non-existent component
+ cd_dict = {
+ 'componentPath': 'NotExist'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(404):
+ self.call_api('components_delete', self.request)
+
+ # The user has no permission to delete component
+ cd_dict = {
+ 'componentPath': 'API'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(403):
+ self.call_api('components_delete', self.request)
+
+    # The user tries to delete a component that has subcomponents
+ self.services.project.TestAddProject(
+ 'test-project2', owner_ids=[1],
+ project_id=123456)
+ self.SetUpComponents(123456, 1, 'Parent')
+ self.SetUpComponents(123456, 2, 'Parent>Child')
+ cd_dict = {
+ 'componentPath': 'Parent',
+ 'projectId': 'test-project2',}
+ self.request.update(cd_dict)
+ with self.call_should_fail(403):
+ self.call_api('components_delete', self.request)
+
+ def testComponentsDelete_Normal(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+
+ cd_dict = {
+ 'componentPath': 'API'}
+ self.request.update(cd_dict)
+ _ = self.call_api('components_delete', self.request).json_body
+ self.assertEqual(0, len(self.config.component_defs))
+
+ def testComponentsUpdate_Invalid(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[2],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+ self.SetUpComponents(12345, 2, 'Test', admin_ids=[1])
+
+ # Fail to update a non-existent component
+ cd_dict = {
+ 'componentPath': 'NotExist'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(404):
+ self.call_api('components_update', self.request)
+
+ # The user has no permission to edit component
+ cd_dict = {
+ 'componentPath': 'API'}
+ self.request.update(cd_dict)
+ with self.call_should_fail(403):
+ self.call_api('components_update', self.request)
+
+ # The user tries an invalid component name
+ cd_dict = {
+ 'componentPath': 'Test',
+ 'updates': [{'field': 'LEAF_NAME', 'leafName': 'c>e'}]}
+ self.request.update(cd_dict)
+ with self.call_should_fail(400):
+ self.call_api('components_update', self.request)
+
+ # The user tries a name already in use
+ cd_dict = {
+ 'componentPath': 'Test',
+ 'updates': [{'field': 'LEAF_NAME', 'leafName': 'API'}]}
+ self.request.update(cd_dict)
+ with self.call_should_fail(400):
+ self.call_api('components_update', self.request)
+
+ def testComponentsUpdate_Normal(self):
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1],
+ project_id=12345)
+ self.SetUpComponents(12345, 1, 'API')
+ self.SetUpComponents(12345, 2, 'Parent')
+ self.SetUpComponents(12345, 3, 'Parent>Child')
+
+ cd_dict = {
+ 'componentPath': 'API',
+ 'updates': [
+ {'field': 'DESCRIPTION', 'description': ''},
+ {'field': 'CC', 'cc': ['requester@example.com', 'user@example.com']},
+ {'field': 'DEPRECATED', 'deprecated': True}]}
+ self.request.update(cd_dict)
+ _ = self.call_api('components_update', self.request).json_body
+ component_def = tracker_bizobj.FindComponentDef(
+ 'API', self.config)
+ self.assertIsNotNone(component_def)
+ self.assertEqual('', component_def.docstring)
+ self.assertEqual([1L, 2L], component_def.cc_ids)
+ self.assertTrue(component_def.deprecated)
+
+ cd_dict = {
+ 'componentPath': 'Parent',
+ 'updates': [
+ {'field': 'LEAF_NAME', 'leafName': 'NewParent'}]}
+ self.request.update(cd_dict)
+ _ = self.call_api('components_update', self.request).json_body
+ cd_parent = tracker_bizobj.FindComponentDef(
+ 'NewParent', self.config)
+ cd_child = tracker_bizobj.FindComponentDef(
+ 'NewParent>Child', self.config)
+ self.assertIsNotNone(cd_parent)
+ self.assertIsNotNone(cd_child)
+
+
+class RequestMock(object):
+
+ def __init__(self):
+ self.projectId = None
+ self.issueId = None
+
+
+class RequesterMock(object):
+
+ def __init__(self, email=None):
+ self._email = email
+
+ def email(self):
+ return self._email
+
+
+class AllBaseChecksTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = MakeFakeServiceManager()
+ self.services.user.TestAddUser('test@example.com', 1)
+ self.services.project.TestAddProject(
+ 'test-project', owner_ids=[1], project_id=123,
+ access=project_pb2.ProjectAccess.MEMBERS_ONLY)
+ self.auth_client_ids = ['123456789.apps.googleusercontent.com']
+ oauth.get_client_id = Mock(return_value=self.auth_client_ids[0])
+ oauth.get_current_user = Mock(
+ return_value=RequesterMock(email='test@example.com'))
+
+ def testUnauthorizedRequester(self):
+ with self.assertRaises(endpoints.UnauthorizedException):
+ api_svc_v1.api_base_checks(None, None, None, None, [], [])
+
+ def testNoUser(self):
+ requester = RequesterMock(email='notexist@example.com')
+ with self.assertRaises(user_svc.NoSuchUserException):
+ api_svc_v1.api_base_checks(
+ None, requester, self.services, None, self.auth_client_ids, [])
+
+ def testNoOauthUser(self):
+ oauth.get_current_user.side_effect = oauth.Error()
+ with self.assertRaises(endpoints.UnauthorizedException):
+ api_svc_v1.api_base_checks(
+ None, None, self.services, None, [], [])
+
+ def testBannedUser(self):
+ banned_email = 'banned@example.com'
+ self.services.user.TestAddUser(banned_email, 2, banned=True)
+ requester = RequesterMock(email=banned_email)
+ with self.assertRaises(permissions.BannedUserException):
+ api_svc_v1.api_base_checks(
+ None, requester, self.services, None, self.auth_client_ids, [])
+
+ def testNoProject(self):
+ request = RequestMock()
+ request.projectId = 'notexist-project'
+ requester = RequesterMock(email='test@example.com')
+ with self.assertRaises(project_svc.NoSuchProjectException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
+ def testNonLiveProject(self):
+ archived_project = 'archived-project'
+ self.services.project.TestAddProject(
+ archived_project, owner_ids=[1],
+ state=project_pb2.ProjectState.ARCHIVED)
+ request = RequestMock()
+ request.projectId = archived_project
+ requester = RequesterMock(email='test@example.com')
+ with self.assertRaises(permissions.PermissionException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
+ def testNoViewProjectPermission(self):
+ nonmember_email = 'nonmember@example.com'
+ self.services.user.TestAddUser(nonmember_email, 2)
+ requester = RequesterMock(email=nonmember_email)
+ request = RequestMock()
+ request.projectId = 'test-project'
+ with self.assertRaises(permissions.PermissionException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
+ def testAllPass(self):
+ requester = RequesterMock(email='test@example.com')
+ request = RequestMock()
+ request.projectId = 'test-project'
+ try:
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+ except Exception as e:
+ self.fail('Unexpected exception: %s' % str(e))
+
+ def testNoIssue(self):
+ requester = RequesterMock(email='test@example.com')
+ request = RequestMock()
+ request.projectId = 'test-project'
+ request.issueId = 12345
+ with self.assertRaises(issue_svc.NoSuchIssueException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
+ def testNoViewIssuePermission(self):
+ requester = RequesterMock(email='test@example.com')
+ request = RequestMock()
+ request.projectId = 'test-project'
+ request.issueId = 1
+ issue1 = fake.MakeTestIssue(
+ project_id=123, local_id=1, summary='test summary',
+ status='New', owner_id=1, reporter_id=1)
+ issue1.deleted = True
+ self.services.issue.TestAddIssue(issue1)
+ with self.assertRaises(permissions.PermissionException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, self.auth_client_ids, [])
+
+ def testAnonymousClients(self):
+ oauth.get_client_id = Mock(return_value='anonymous')
+ requester = RequesterMock(email='test@example.com')
+ request = RequestMock()
+ request.projectId = 'test-project'
+ try:
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, [], ['test@example.com'])
+ except Exception as e:
+ self.fail('Unexpected exception: %s' % str(e))
+
+ with self.assertRaises(endpoints.UnauthorizedException):
+ api_svc_v1.api_base_checks(
+ request, requester, self.services, None, [], [])
diff --git a/appengine/monorail/services/test/cachemanager_svc_test.py b/appengine/monorail/services/test/cachemanager_svc_test.py
new file mode 100644
index 0000000..984633b
--- /dev/null
+++ b/appengine/monorail/services/test/cachemanager_svc_test.py
@@ -0,0 +1,183 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the cachemanager service."""
+
+import unittest
+
+import mox
+
+from framework import sql
+from services import cachemanager_svc
+from services import caches
+from services import service_manager
+from testing import testing_helpers
+
+
+class CacheManagerServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.cache_manager = cachemanager_svc.CacheManager()
+ self.cache_manager.invalidate_tbl = self.mox.CreateMock(
+ sql.SQLTableManager)
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testMakeCache(self):
+ ram_cache = self.cache_manager.MakeCache('issue')
+ self.assertTrue(isinstance(ram_cache, caches.RamCache))
+ self.assertTrue(ram_cache in self.cache_manager.cache_registry['issue'])
+
+ def testMakeCache_UnknownKind(self):
+ self.assertRaises(AssertionError, self.cache_manager.MakeCache, 'foo')
+
+ def testProcessInvalidateRows_Empty(self):
+ rows = []
+ self.cache_manager._ProcessInvalidationRows(rows)
+ self.assertEqual(0, self.cache_manager.processed_invalidations_up_to)
+
+ def testProcessInvalidateRows_Some(self):
+ ram_cache = self.cache_manager.MakeCache('issue')
+ ram_cache.CacheAll({
+ 33: 'issue 33',
+ 34: 'issue 34',
+ })
+ rows = [(1, 'issue', 34),
+ (2, 'project', 789),
+ (3, 'issue', 39)]
+ self.cache_manager._ProcessInvalidationRows(rows)
+ self.assertEqual(3, self.cache_manager.processed_invalidations_up_to)
+ self.assertTrue(ram_cache.HasItem(33))
+ self.assertFalse(ram_cache.HasItem(34))
+
+ def testProcessInvalidateRows_All(self):
+ ram_cache = self.cache_manager.MakeCache('issue')
+ ram_cache.CacheAll({
+ 33: 'issue 33',
+ 34: 'issue 34',
+ })
+ rows = [(991, 'issue', 34),
+ (992, 'project', 789),
+ (993, 'issue', cachemanager_svc.INVALIDATE_ALL_KEYS)]
+ self.cache_manager._ProcessInvalidationRows(rows)
+ self.assertEqual(993, self.cache_manager.processed_invalidations_up_to)
+ self.assertEqual({}, ram_cache.cache)
+
+ def SetUpDoDistributedInvalidation(self, rows):
+ self.cache_manager.invalidate_tbl.Select(
+ self.cnxn, cols=['timestep', 'kind', 'cache_key'],
+ where=[('timestep > %s', [0])],
+ order_by=[('timestep DESC', [])],
+ limit=cachemanager_svc.MAX_INVALIDATE_ROWS_TO_CONSIDER
+ ).AndReturn(rows)
+
+ def testDoDistributedInvalidation_Empty(self):
+ rows = []
+ self.SetUpDoDistributedInvalidation(rows)
+ self.mox.ReplayAll()
+ self.cache_manager.DoDistributedInvalidation(self.cnxn)
+ self.mox.VerifyAll()
+ self.assertEqual(0, self.cache_manager.processed_invalidations_up_to)
+
+ def testDoDistributedInvalidation_Some(self):
+ ram_cache = self.cache_manager.MakeCache('issue')
+ ram_cache.CacheAll({
+ 33: 'issue 33',
+ 34: 'issue 34',
+ })
+ rows = [(1, 'issue', 34),
+ (2, 'project', 789),
+ (3, 'issue', 39)]
+ self.SetUpDoDistributedInvalidation(rows)
+ self.mox.ReplayAll()
+ self.cache_manager.DoDistributedInvalidation(self.cnxn)
+ self.mox.VerifyAll()
+ self.assertEqual(3, self.cache_manager.processed_invalidations_up_to)
+ self.assertTrue(ram_cache.HasItem(33))
+ self.assertFalse(ram_cache.HasItem(34))
+
+ def testStoreInvalidateRows_UnknownKind(self):
+ self.assertRaises(
+ AssertionError,
+ self.cache_manager.StoreInvalidateRows, self.cnxn, 'foo', [1, 2])
+
+ def SetUpStoreInvalidateRows(self, rows):
+ self.cache_manager.invalidate_tbl.InsertRows(
+ self.cnxn, ['kind', 'cache_key'], rows)
+
+ def testStoreInvalidateRows(self):
+ rows = [('issue', 1), ('issue', 2)]
+ self.SetUpStoreInvalidateRows(rows)
+ self.mox.ReplayAll()
+ self.cache_manager.StoreInvalidateRows(self.cnxn, 'issue', [1, 2])
+ self.mox.VerifyAll()
+
+ def SetUpStoreInvalidateAll(self, kind):
+ self.cache_manager.invalidate_tbl.InsertRow(
+ self.cnxn, kind=kind, cache_key=cachemanager_svc.INVALIDATE_ALL_KEYS,
+ ).AndReturn(44)
+ self.cache_manager.invalidate_tbl.Delete(
+ self.cnxn, kind=kind, where=[('timestep < %s', [44])])
+
+ def testStoreInvalidateAll(self):
+ self.SetUpStoreInvalidateAll('issue')
+ self.mox.ReplayAll()
+ self.cache_manager.StoreInvalidateAll(self.cnxn, 'issue')
+ self.mox.VerifyAll()
+
+
+class RamCacheConsolidateTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.cache_manager = cachemanager_svc.CacheManager()
+ self.cache_manager.invalidate_tbl = self.mox.CreateMock(
+ sql.SQLTableManager)
+ self.services = service_manager.Services(
+ cache_manager=self.cache_manager)
+ self.servlet = cachemanager_svc.RamCacheConsolidate(
+ 'req', 'res', services=self.services)
+
+ def testHandleRequest_NothingToDo(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ self.cache_manager.invalidate_tbl.SelectValue(
+ mr.cnxn, 'COUNT(*)').AndReturn(112)
+ self.cache_manager.invalidate_tbl.SelectValue(
+ mr.cnxn, 'COUNT(*)').AndReturn(112)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(mr)
+ self.mox.VerifyAll()
+ self.assertEqual(json_data['old_count'], 112)
+ self.assertEqual(json_data['new_count'], 112)
+
+ def testHandleRequest_Truncate(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ self.cache_manager.invalidate_tbl.SelectValue(
+ mr.cnxn, 'COUNT(*)').AndReturn(4012)
+ self.cache_manager.invalidate_tbl.Select(
+ mr.cnxn, ['timestep'],
+ order_by=[('timestep DESC', [])],
+ limit=cachemanager_svc.MAX_INVALIDATE_ROWS_TO_CONSIDER
+ ).AndReturn([[3012]]) # Actual would be 1000 rows ending with 3012.
+ self.cache_manager.invalidate_tbl.Delete(
+ mr.cnxn, where=[('timestep < %s', [3012])])
+ self.cache_manager.invalidate_tbl.SelectValue(
+ mr.cnxn, 'COUNT(*)').AndReturn(1000)
+ self.mox.ReplayAll()
+
+ json_data = self.servlet.HandleRequest(mr)
+ self.mox.VerifyAll()
+ self.assertEqual(json_data['old_count'], 4012)
+ self.assertEqual(json_data['new_count'], 1000)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/caches_test.py b/appengine/monorail/services/test/caches_test.py
new file mode 100644
index 0000000..f99161c
--- /dev/null
+++ b/appengine/monorail/services/test/caches_test.py
@@ -0,0 +1,170 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the cache classes."""
+
+import unittest
+
+from google.appengine.ext import testbed
+
+from services import caches
+from testing import fake
+
+
+class RamCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.ram_cache = caches.RamCache(self.cache_manager, 'issue', max_size=3)
+
+ def testCacheItem(self):
+ self.ram_cache.CacheItem(123, 'foo')
+ self.assertEqual('foo', self.ram_cache.cache[123])
+
+ def testCacheItem_DropsOldItems(self):
+ self.ram_cache.CacheItem(123, 'foo')
+ self.ram_cache.CacheItem(234, 'foo')
+ self.ram_cache.CacheItem(345, 'foo')
+ self.ram_cache.CacheItem(456, 'foo')
+ # The cache does not get bigger than its limit.
+ self.assertEqual(3, len(self.ram_cache.cache))
+ # An old value is dropped, not the newly added one.
+ self.assertIn(456, self.ram_cache.cache)
+
+ def testCacheAll(self):
+ self.ram_cache.CacheAll({123: 'foo'})
+ self.assertEqual('foo', self.ram_cache.cache[123])
+
+ def testCacheAll_DropsOldItems(self):
+ self.ram_cache.CacheAll({1: 'a', 2: 'b', 3: 'c'})
+ self.ram_cache.CacheAll({4: 'x', 5: 'y'})
+ # The cache does not get bigger than its limit.
+ self.assertEqual(3, len(self.ram_cache.cache))
+ # An old value is dropped, not the newly added one.
+ self.assertIn(4, self.ram_cache.cache)
+ self.assertIn(5, self.ram_cache.cache)
+ self.assertEqual('y', self.ram_cache.cache[5])
+
+ def testHasItem(self):
+ self.ram_cache.CacheItem(123, 'foo')
+ self.assertTrue(self.ram_cache.HasItem(123))
+ self.assertFalse(self.ram_cache.HasItem(999))
+
+ def testGetAll(self):
+ self.ram_cache.CacheItem(123, 'foo')
+ self.ram_cache.CacheItem(124, 'bar')
+ hits, misses = self.ram_cache.GetAll([123, 124, 999])
+ self.assertEqual({123: 'foo', 124: 'bar'}, hits)
+ self.assertEqual([999], misses)
+
+ def testLocalInvalidate(self):
+ self.ram_cache.CacheAll({123: 'a', 124: 'b', 125: 'c'})
+ self.ram_cache.LocalInvalidate(124)
+ self.assertEqual(2, len(self.ram_cache.cache))
+ self.assertNotIn(124, self.ram_cache.cache)
+
+ self.ram_cache.LocalInvalidate(999)
+ self.assertEqual(2, len(self.ram_cache.cache))
+
+ def testInvalidateKeys(self):
+ self.ram_cache.CacheAll({123: 'a', 124: 'b', 125: 'c'})
+ self.ram_cache.InvalidateKeys(self.cnxn, [124])
+ self.assertEqual(2, len(self.ram_cache.cache))
+ self.assertNotIn(124, self.ram_cache.cache)
+ self.assertEqual(self.cache_manager.last_call,
+ ('StoreInvalidateRows', self.cnxn, 'issue', [124]))
+
+ def testLocalInvalidateAll(self):
+ self.ram_cache.CacheAll({123: 'a', 124: 'b', 125: 'c'})
+ self.ram_cache.LocalInvalidateAll()
+ self.assertEqual(0, len(self.ram_cache.cache))
+
+ def testInvalidateAll(self):
+ self.ram_cache.CacheAll({123: 'a', 124: 'b', 125: 'c'})
+ self.ram_cache.InvalidateAll(self.cnxn)
+ self.assertEqual(0, len(self.ram_cache.cache))
+ self.assertEqual(self.cache_manager.last_call,
+ ('StoreInvalidateAll', self.cnxn, 'issue'))
+
+
+class TestableTwoLevelCache(caches.AbstractTwoLevelCache):
+
+ def __init__(self, cache_manager, kind, max_size=None):
+ super(TestableTwoLevelCache, self).__init__(
+ cache_manager, kind, 'testable:', None, max_size=max_size)
+
+ # pylint: disable=unused-argument
+ def FetchItems(self, cnxn, keys, **kwargs):
+ """On RAM and memcache miss, hit the database."""
+ return {key: key for key in keys if key < 900}
+
+
+class AbstractTwoLevelCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.testable_cache = TestableTwoLevelCache(
+ self.cache_manager, 'issue', max_size=3)
+
+ def testCacheItem(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.assertEqual('foo', self.testable_cache.cache.cache[123])
+
+ def testHasItem(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.assertTrue(self.testable_cache.HasItem(123))
+ self.assertFalse(self.testable_cache.HasItem(444))
+ self.assertFalse(self.testable_cache.HasItem(999))
+
+ def testGetAll_FetchGetsItFromDB(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.testable_cache.CacheItem(124, 'bar')
+ hits, misses = self.testable_cache.GetAll(
+ self.cnxn, [123, 124, 333, 444])
+ self.assertEqual({123: 'foo', 124: 'bar', 333: 333, 444: 444}, hits)
+ self.assertEqual([], misses)
+
+ def testGetAll_FetchDoesNotFindIt(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.testable_cache.CacheItem(124, 'bar')
+ hits, misses = self.testable_cache.GetAll(
+ self.cnxn, [123, 124, 999])
+ self.assertEqual({123: 'foo', 124: 'bar'}, hits)
+ self.assertEqual([999], misses)
+
+ def testInvalidateKeys(self):
+ self.testable_cache.CacheItem(123, 'a')
+ self.testable_cache.CacheItem(124, 'b')
+ self.testable_cache.CacheItem(125, 'c')
+ self.testable_cache.InvalidateKeys(self.cnxn, [124])
+ self.assertEqual(2, len(self.testable_cache.cache.cache))
+ self.assertNotIn(124, self.testable_cache.cache.cache)
+ self.assertEqual(self.cache_manager.last_call,
+ ('StoreInvalidateRows', self.cnxn, 'issue', [124]))
+
+ def testGetAllAlreadyInRam(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.testable_cache.CacheItem(124, 'bar')
+ hits, misses = self.testable_cache.GetAllAlreadyInRam(
+ [123, 124, 333, 444, 999])
+ self.assertEqual({123: 'foo', 124: 'bar'}, hits)
+ self.assertEqual([333, 444, 999], misses)
+
+ def testInvalidateAllRamEntries(self):
+ self.testable_cache.CacheItem(123, 'foo')
+ self.testable_cache.CacheItem(124, 'bar')
+ self.testable_cache.InvalidateAllRamEntries(self.cnxn)
+ self.assertFalse(self.testable_cache.HasItem(123))
+ self.assertFalse(self.testable_cache.HasItem(124))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/client_config_svc_test.py b/appengine/monorail/services/test/client_config_svc_test.py
new file mode 100644
index 0000000..36f7bf0
--- /dev/null
+++ b/appengine/monorail/services/test/client_config_svc_test.py
@@ -0,0 +1,50 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the client config service."""
+
+import unittest
+
+from services import client_config_svc
+
+
+class ClientConfigServiceTest(unittest.TestCase):
+
+  def setUp(self):
+    self.client_config_svc = client_config_svc.GetClientConfigSvc()
+    self.client_email = '123456789@developer.gserviceaccount.com'
+    self.client_id = '123456789.apps.googleusercontent.com'
+
+  def testGetDisplayNames(self):
+    display_names_map = self.client_config_svc.GetDisplayNames()
+    self.assertIn(self.client_email, display_names_map)
+    self.assertEquals('johndoe@example.com',
+                      display_names_map[self.client_email])
+
+  def testGetClientIDEmails(self):
+    auth_client_ids, auth_emails = self.client_config_svc.GetClientIDEmails()
+    self.assertIn(self.client_id, auth_client_ids)
+    self.assertIn(self.client_email, auth_emails)
+
+  def testForceLoad(self):
+    # First call after resetting load_time: the config is always re-read.
+    self.client_config_svc.load_time = 10000
+    self.client_config_svc.GetConfigs(use_cache=True)
+    self.assertNotEquals(10000, self.client_config_svc.load_time)
+
+    # use_cache=False forces a re-read even within the cache window.
+    self.client_config_svc.load_time = 10000
+    self.client_config_svc.GetConfigs(use_cache=False, cur_time=11000)
+    self.assertNotEquals(10000, self.client_config_svc.load_time)
+
+    # Cache expires after 3600 sec, so cur_time=20000 triggers a re-read.
+    self.client_config_svc.load_time = 10000
+    self.client_config_svc.GetConfigs(use_cache=True, cur_time=20000)
+    self.assertNotEquals(10000, self.client_config_svc.load_time)
+
+    # Inside the 3600-sec window with use_cache=True: cached value is kept.
+    self.client_config_svc.load_time = 10000
+    self.client_config_svc.GetConfigs(use_cache=True, cur_time=11000)
+    self.assertEquals(10000, self.client_config_svc.load_time)
diff --git a/appengine/monorail/services/test/config_svc_test.py b/appengine/monorail/services/test/config_svc_test.py
new file mode 100644
index 0000000..cf7dadc
--- /dev/null
+++ b/appengine/monorail/services/test/config_svc_test.py
@@ -0,0 +1,982 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for config_svc module."""
+
+import re
+import unittest
+
+import mox
+
+from google.appengine.api import memcache
+from google.appengine.ext import testbed
+
+from framework import sql
+from services import config_svc
+from testing import fake
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+def MakeConfigService(cache_manager, my_mox):
+ config_service = config_svc.ConfigService(cache_manager)
+ for table_var in [
+ 'template_tbl', 'template2label_tbl', 'template2admin_tbl',
+ 'template2component_tbl', 'template2fieldvalue_tbl',
+ 'projectissueconfig_tbl', 'statusdef_tbl', 'labeldef_tbl', 'fielddef_tbl',
+ 'fielddef2admin_tbl', 'componentdef_tbl', 'component2admin_tbl',
+ 'component2cc_tbl']:
+ setattr(config_service, table_var, my_mox.CreateMock(sql.SQLTableManager))
+
+ return config_service
+
+
+class LabelRowTwoLevelCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.config_service = MakeConfigService(self.cache_manager, self.mox)
+ self.label_row_2lc = self.config_service.label_row_2lc
+
+ self.rows = [(1, 789, 1, 'A', 'doc', False),
+ (2, 789, 2, 'B', 'doc', False),
+ (3, 678, 1, 'C', 'doc', True),
+ (4, 678, None, 'D', 'doc', False)]
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeserializeLabelRows_Empty(self):
+ label_row_dict = self.label_row_2lc._DeserializeLabelRows([])
+ self.assertEqual({}, label_row_dict)
+
+ def testDeserializeLabelRows_Normal(self):
+ label_rows_dict = self.label_row_2lc._DeserializeLabelRows(self.rows)
+ expected = {
+ 678: [(3, 678, 1, 'C', 'doc', True), (4, 678, None, 'D', 'doc', False)],
+ 789: [(1, 789, 1, 'A', 'doc', False), (2, 789, 2, 'B', 'doc', False)],
+ }
+ self.assertEqual(expected, label_rows_dict)
+
+ def SetUpFetchItems(self, keys, rows):
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=config_svc.LABELDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('label DESC', [])]).AndReturn(
+ rows)
+
+ def testFetchItems(self):
+ keys = [567, 678, 789]
+ self.SetUpFetchItems(keys, self.rows)
+ self.mox.ReplayAll()
+ label_rows_dict = self.label_row_2lc.FetchItems(self.cnxn, keys)
+ self.mox.VerifyAll()
+ expected = {
+ 567: [],
+ 678: [(3, 678, 1, 'C', 'doc', True), (4, 678, None, 'D', 'doc', False)],
+ 789: [(1, 789, 1, 'A', 'doc', False), (2, 789, 2, 'B', 'doc', False)],
+ }
+ self.assertEqual(expected, label_rows_dict)
+
+
+class StatusRowTwoLevelCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.config_service = MakeConfigService(self.cache_manager, self.mox)
+ self.status_row_2lc = self.config_service.status_row_2lc
+
+ self.rows = [(1, 789, 1, 'A', True, 'doc', False),
+ (2, 789, 2, 'B', False, 'doc', False),
+ (3, 678, 1, 'C', True, 'doc', True),
+ (4, 678, None, 'D', True, 'doc', False)]
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeserializeStatusRows_Empty(self):
+ status_row_dict = self.status_row_2lc._DeserializeStatusRows([])
+ self.assertEqual({}, status_row_dict)
+
+ def testDeserializeStatusRows_Normal(self):
+ status_rows_dict = self.status_row_2lc._DeserializeStatusRows(self.rows)
+ expected = {
+ 678: [(3, 678, 1, 'C', True, 'doc', True),
+ (4, 678, None, 'D', True, 'doc', False)],
+ 789: [(1, 789, 1, 'A', True, 'doc', False),
+ (2, 789, 2, 'B', False, 'doc', False)],
+ }
+ self.assertEqual(expected, status_rows_dict)
+
+ def SetUpFetchItems(self, keys, rows):
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=config_svc.STATUSDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('status DESC', [])]).AndReturn(
+ rows)
+
+ def testFetchItems(self):
+ keys = [567, 678, 789]
+ self.SetUpFetchItems(keys, self.rows)
+ self.mox.ReplayAll()
+ status_rows_dict = self.status_row_2lc.FetchItems(self.cnxn, keys)
+ self.mox.VerifyAll()
+ expected = {
+ 567: [],
+ 678: [(3, 678, 1, 'C', True, 'doc', True),
+ (4, 678, None, 'D', True, 'doc', False)],
+ 789: [(1, 789, 1, 'A', True, 'doc', False),
+ (2, 789, 2, 'B', False, 'doc', False)],
+ }
+ self.assertEqual(expected, status_rows_dict)
+
+
+class ConfigRowTwoLevelCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.config_service = MakeConfigService(self.cache_manager, self.mox)
+ self.config_2lc = self.config_service.config_2lc
+
+ self.config_rows = [(789, 'Duplicate', 'Pri Type', 1, 2, 'Type Pri Summary',
+ '-Pri', 'Mstone', 'Owner', None)]
+ self.template_rows = []
+ self.template2label_rows = []
+ self.template2component_rows = []
+ self.template2admin_rows = []
+ self.template2fieldvalue_rows = []
+ self.statusdef_rows = [(1, 789, 1, 'New', True, 'doc', False),
+ (2, 789, 2, 'Fixed', False, 'doc', False)]
+ self.labeldef_rows = [(1, 789, 1, 'Security', 'doc', False),
+ (2, 789, 2, 'UX', 'doc', False)]
+ self.fielddef_rows = [(1, 789, None, 'Field', 'INT_TYPE',
+ 'Defect', '', False, False,
+ 1, 99, None, '', '',
+ None, 'NEVER', 'doc', False)]
+ self.fielddef2admin_rows = []
+ self.componentdef_rows = []
+ self.component2admin_rows = []
+ self.component2cc_rows = []
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeserializeIssueConfigs_Empty(self):
+ config_dict = self.config_2lc._DeserializeIssueConfigs(
+ [], [], [], [], [], [], [], [], [], [], [], [], [])
+ self.assertEqual({}, config_dict)
+
+ def testDeserializeIssueConfigs_Normal(self):
+ config_dict = self.config_2lc._DeserializeIssueConfigs(
+ self.config_rows, self.template_rows, self.template2label_rows,
+ self.template2component_rows, self.template2admin_rows,
+ self.template2fieldvalue_rows, self.statusdef_rows, self.labeldef_rows,
+ self.fielddef_rows, self.fielddef2admin_rows, self.componentdef_rows,
+ self.component2admin_rows, self.component2cc_rows)
+ self.assertItemsEqual([789], config_dict.keys())
+ config = config_dict[789]
+ self.assertEqual(789, config.project_id)
+ self.assertEqual(['Duplicate'], config.statuses_offer_merge)
+ self.assertEqual([], config.templates)
+ self.assertEqual(len(self.labeldef_rows), len(config.well_known_labels))
+ self.assertEqual(len(self.statusdef_rows), len(config.well_known_statuses))
+ self.assertEqual(len(self.fielddef_rows), len(config.field_defs))
+ self.assertEqual(len(self.componentdef_rows), len(config.component_defs))
+
+ def SetUpFetchConfigs(self, project_ids):
+ self.config_service.projectissueconfig_tbl.Select(
+ self.cnxn, cols=config_svc.PROJECTISSUECONFIG_COLS,
+ project_id=project_ids).AndReturn(self.config_rows)
+ self.config_service.template_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE_COLS, project_id=project_ids,
+ order_by=[('name', [])]).AndReturn(self.template_rows)
+ template_ids = [row[0] for row in self.template_rows]
+ self.config_service.template2label_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2LABEL_COLS,
+ template_id=template_ids).AndReturn(self.template2label_rows)
+ self.config_service.template2component_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2COMPONENT_COLS,
+ template_id=template_ids).AndReturn(self.template2component_rows)
+ self.config_service.template2admin_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2ADMIN_COLS,
+ template_id=template_ids).AndReturn(self.template2admin_rows)
+ self.config_service.template2fieldvalue_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2FIELDVALUE_COLS,
+ template_id=template_ids).AndReturn(self.template2fieldvalue_rows)
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=config_svc.STATUSDEF_COLS, project_id=project_ids,
+ where=[('rank IS NOT NULL', [])], order_by=[('rank', [])]).AndReturn(
+ self.statusdef_rows)
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=config_svc.LABELDEF_COLS, project_id=project_ids,
+ where=[('rank IS NOT NULL', [])], order_by=[('rank', [])]).AndReturn(
+ self.labeldef_rows)
+ self.config_service.fielddef_tbl.Select(
+ self.cnxn, cols=config_svc.FIELDDEF_COLS, project_id=project_ids,
+ order_by=[('field_name', [])]).AndReturn(self.fielddef_rows)
+ field_ids = [row[0] for row in self.fielddef_rows]
+ self.config_service.fielddef2admin_tbl.Select(
+ self.cnxn, cols=config_svc.FIELDDEF2ADMIN_COLS,
+ field_id=field_ids).AndReturn(self.fielddef2admin_rows)
+ self.config_service.componentdef_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENTDEF_COLS, project_id=project_ids,
+ order_by=[('LOWER(path)', [])]).AndReturn(self.componentdef_rows)
+ component_ids = [cd_row[0] for cd_row in self.componentdef_rows]
+ self.config_service.component2admin_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENT2ADMIN_COLS,
+ component_id=component_ids).AndReturn(self.component2admin_rows)
+ self.config_service.component2cc_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENT2CC_COLS,
+ component_id=component_ids).AndReturn(self.component2cc_rows)
+
+ def testFetchConfigs(self):
+ keys = [789]
+ self.SetUpFetchConfigs(keys)
+ self.mox.ReplayAll()
+ config_dict = self.config_2lc._FetchConfigs(self.cnxn, keys)
+ self.mox.VerifyAll()
+ self.assertItemsEqual(keys, config_dict.keys())
+
+ def testFetchItems(self):
+ keys = [678, 789]
+ self.SetUpFetchConfigs(keys)
+ self.mox.ReplayAll()
+ config_dict = self.config_2lc.FetchItems(self.cnxn, keys)
+ self.mox.VerifyAll()
+ self.assertItemsEqual(keys, config_dict.keys())
+
+
+class ConfigServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.cache_manager = fake.CacheManager()
+ self.config_service = MakeConfigService(self.cache_manager, self.mox)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ ### Label lookups
+
+ def testGetLabelDefRows(self):
+ rows = 'foo'
+ self.config_service.label_row_2lc.CacheItem(
+ 789, rows)
+ actual = self.config_service.GetLabelDefRows(self.cnxn, 789)
+ self.assertEqual(rows, actual)
+
+ def SetUpGetLabelDefRowsAnyProject(self, rows):
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=config_svc.LABELDEF_COLS, where=None,
+ order_by=[('rank DESC', []), ('label DESC', [])]).AndReturn(
+ rows)
+
+ def testGetLabelDefRowsAnyProject(self):
+ rows = 'foo'
+ self.SetUpGetLabelDefRowsAnyProject(rows)
+ self.mox.ReplayAll()
+ actual = self.config_service.GetLabelDefRowsAnyProject(self.cnxn)
+ self.mox.VerifyAll()
+ self.assertEqual(rows, actual)
+
+ def testDeserializeLabels(self):
+ labeldef_rows = [(1, 789, 1, 'Security', 'doc', False),
+ (2, 789, 2, 'UX', 'doc', True)]
+ id_to_name, name_to_id = self.config_service._DeserializeLabels(
+ labeldef_rows)
+ self.assertEqual({1: 'Security', 2: 'UX'}, id_to_name)
+ self.assertEqual({'security': 1, 'ux': 2}, name_to_id)
+
+ def testEnsureLabelCacheEntry_Hit(self):
+ label_dicts = 'foo'
+ self.config_service.label_cache.CacheItem(789, label_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.config_service._EnsureLabelCacheEntry(self.cnxn, 789)
+ self.mox.VerifyAll()
+
+ def SetUpEnsureLabelCacheEntry_Miss(self, keys, rows):
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=config_svc.LABELDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('label DESC', [])]).AndReturn(
+ rows)
+
+ def testEnsureLabelCacheEntry_Miss(self):
+ labeldef_rows = [(1, 789, 1, 'Security', 'doc', False),
+ (2, 789, 2, 'UX', 'doc', True)]
+ self.SetUpEnsureLabelCacheEntry_Miss([789], labeldef_rows)
+ self.mox.ReplayAll()
+ self.config_service._EnsureLabelCacheEntry(self.cnxn, 789)
+ self.mox.VerifyAll()
+ label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
+ self.assertEqual(label_dicts, self.config_service.label_cache.GetItem(789))
+
+ def testLookupLabel_Hit(self):
+ label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
+ self.config_service.label_cache.CacheItem(789, label_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ 'Security', self.config_service.LookupLabel(self.cnxn, 789, 1))
+ self.assertEqual(
+ 'UX', self.config_service.LookupLabel(self.cnxn, 789, 2))
+ self.mox.VerifyAll()
+
+ def testLookupLabelID_Hit(self):
+ label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
+ self.config_service.label_cache.CacheItem(789, label_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ 1, self.config_service.LookupLabelID(self.cnxn, 789, 'Security'))
+ self.assertEqual(
+ 2, self.config_service.LookupLabelID(self.cnxn, 789, 'UX'))
+ self.mox.VerifyAll()
+
+ def testLookupLabelIDs_Hit(self):
+ label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
+ self.config_service.label_cache.CacheItem(789, label_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ [1, 2],
+ self.config_service.LookupLabelIDs(self.cnxn, 789, ['Security', 'UX']))
+ self.mox.VerifyAll()
+
+ def testLookupIDsOfLabelsMatching_Hit(self):
+ label_dicts = {1: 'Security', 2: 'UX'}, {'security': 1, 'ux': 2}
+ self.config_service.label_cache.CacheItem(789, label_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertItemsEqual(
+ [1],
+ self.config_service.LookupIDsOfLabelsMatching(
+ self.cnxn, 789, re.compile('Sec.*')))
+ self.assertItemsEqual(
+ [1, 2],
+ self.config_service.LookupIDsOfLabelsMatching(
+ self.cnxn, 789, re.compile('.*')))
+ self.assertItemsEqual(
+ [],
+ self.config_service.LookupIDsOfLabelsMatching(
+ self.cnxn, 789, re.compile('Zzzzz.*')))
+ self.mox.VerifyAll()
+
+ def SetUpLookupLabelIDsAnyProject(self, label, id_rows):
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=['id'], label=label).AndReturn(id_rows)
+
+ def testLookupLabelIDsAnyProject(self):
+ self.SetUpLookupLabelIDsAnyProject('Security', [(1,)])
+ self.mox.ReplayAll()
+ actual = self.config_service.LookupLabelIDsAnyProject(
+ self.cnxn, 'Security')
+ self.mox.VerifyAll()
+ self.assertEqual([1], actual)
+
+ def SetUpLookupIDsOfLabelsMatchingAnyProject(self, id_label_rows):
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=['id', 'label']).AndReturn(id_label_rows)
+
+ def testLookupIDsOfLabelsMatchingAnyProject(self):
+ id_label_rows = [(1, 'Security'), (2, 'UX')]
+ self.SetUpLookupIDsOfLabelsMatchingAnyProject(id_label_rows)
+ self.mox.ReplayAll()
+ actual = self.config_service.LookupIDsOfLabelsMatchingAnyProject(
+ self.cnxn, re.compile('(Sec|Zzz).*'))
+ self.mox.VerifyAll()
+ self.assertEqual([1], actual)
+
+ ### Status lookups
+
+ def testGetStatusDefRows(self):
+ rows = 'foo'
+ self.config_service.status_row_2lc.CacheItem(789, rows)
+ actual = self.config_service.GetStatusDefRows(self.cnxn, 789)
+ self.assertEqual(rows, actual)
+
+ def SetUpGetStatusDefRowsAnyProject(self, rows):
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=config_svc.STATUSDEF_COLS,
+ order_by=[('rank DESC', []), ('status DESC', [])]).AndReturn(
+ rows)
+
+ def testGetStatusDefRowsAnyProject(self):
+ rows = 'foo'
+ self.SetUpGetStatusDefRowsAnyProject(rows)
+ self.mox.ReplayAll()
+ actual = self.config_service.GetStatusDefRowsAnyProject(self.cnxn)
+ self.mox.VerifyAll()
+ self.assertEqual(rows, actual)
+
+ def testDeserializeStatuses(self):
+ statusdef_rows = [(1, 789, 1, 'New', True, 'doc', False),
+ (2, 789, 2, 'Fixed', False, 'doc', True)]
+ actual = self.config_service._DeserializeStatuses(statusdef_rows)
+ id_to_name, name_to_id, closed_ids = actual
+ self.assertEqual({1: 'New', 2: 'Fixed'}, id_to_name)
+ self.assertEqual({'new': 1, 'fixed': 2}, name_to_id)
+ self.assertEqual([2], closed_ids)
+
+ def testEnsureStatusCacheEntry_Hit(self):
+ status_dicts = 'foo'
+ self.config_service.status_cache.CacheItem(789, status_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.config_service._EnsureStatusCacheEntry(self.cnxn, 789)
+ self.mox.VerifyAll()
+
+ def SetUpEnsureStatusCacheEntry_Miss(self, keys, rows):
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=config_svc.STATUSDEF_COLS, project_id=keys,
+ order_by=[('rank DESC', []), ('status DESC', [])]).AndReturn(
+ rows)
+
+ def testEnsureStatusCacheEntry_Miss(self):
+ statusdef_rows = [(1, 789, 1, 'New', True, 'doc', False),
+ (2, 789, 2, 'Fixed', False, 'doc', True)]
+ self.SetUpEnsureStatusCacheEntry_Miss([789], statusdef_rows)
+ self.mox.ReplayAll()
+ self.config_service._EnsureStatusCacheEntry(self.cnxn, 789)
+ self.mox.VerifyAll()
+ status_dicts = {1: 'New', 2: 'Fixed'}, {'new': 1, 'fixed': 2}, [2]
+ self.assertEqual(
+ status_dicts, self.config_service.status_cache.GetItem(789))
+
+ def testLookupStatus_Hit(self):
+ status_dicts = {1: 'New', 2: 'Fixed'}, {'new': 1, 'fixed': 2}, [2]
+ self.config_service.status_cache.CacheItem(789, status_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ 'New', self.config_service.LookupStatus(self.cnxn, 789, 1))
+ self.assertEqual(
+ 'Fixed', self.config_service.LookupStatus(self.cnxn, 789, 2))
+ self.mox.VerifyAll()
+
+ def testLookupStatusID_Hit(self):
+ status_dicts = {1: 'New', 2: 'Fixed'}, {'new': 1, 'fixed': 2}, [2]
+ self.config_service.status_cache.CacheItem(789, status_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ 1, self.config_service.LookupStatusID(self.cnxn, 789, 'New'))
+ self.assertEqual(
+ 2, self.config_service.LookupStatusID(self.cnxn, 789, 'Fixed'))
+ self.mox.VerifyAll()
+
+ def testLookupStatusIDs_Hit(self):
+ status_dicts = {1: 'New', 2: 'Fixed'}, {'new': 1, 'fixed': 2}, [2]
+ self.config_service.status_cache.CacheItem(789, status_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ [1, 2],
+ self.config_service.LookupStatusIDs(self.cnxn, 789, ['New', 'Fixed']))
+ self.mox.VerifyAll()
+
+ def testLookupClosedStatusIDs_Hit(self):
+ status_dicts = {1: 'New', 2: 'Fixed'}, {'new': 1, 'fixed': 2}, [2]
+ self.config_service.status_cache.CacheItem(789, status_dicts)
+ # No mock calls set up because none are needed.
+ self.mox.ReplayAll()
+ self.assertEqual(
+ [2],
+ self.config_service.LookupClosedStatusIDs(self.cnxn, 789))
+ self.mox.VerifyAll()
+
+ def SetUpLookupClosedStatusIDsAnyProject(self, id_rows):
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=['id'], means_open=False).AndReturn(
+ id_rows)
+
+ def testLookupClosedStatusIDsAnyProject(self):
+ self.SetUpLookupClosedStatusIDsAnyProject([(2,)])
+ self.mox.ReplayAll()
+ actual = self.config_service.LookupClosedStatusIDsAnyProject(self.cnxn)
+ self.mox.VerifyAll()
+ self.assertEqual([2], actual)
+
+ def SetUpLookupStatusIDsAnyProject(self, status, id_rows):
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=['id'], status=status).AndReturn(id_rows)
+
+ def testLookupStatusIDsAnyProject(self):
+ self.SetUpLookupStatusIDsAnyProject('New', [(1,)])
+ self.mox.ReplayAll()
+ actual = self.config_service.LookupStatusIDsAnyProject(self.cnxn, 'New')
+ self.mox.VerifyAll()
+ self.assertEqual([1], actual)
+
+ ### Issue tracker configuration objects
+
+ def SetUpGetProjectConfigs(self, project_ids):
+ self.config_service.projectissueconfig_tbl.Select(
+ self.cnxn, cols=config_svc.PROJECTISSUECONFIG_COLS,
+ project_id=project_ids).AndReturn([])
+ self.config_service.template_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE_COLS,
+ project_id=project_ids, order_by=[('name', [])]).AndReturn([])
+ self.config_service.template2label_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2LABEL_COLS,
+ template_id=[]).AndReturn([])
+ self.config_service.template2component_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2COMPONENT_COLS,
+ template_id=[]).AndReturn([])
+ self.config_service.template2admin_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2ADMIN_COLS,
+ template_id=[]).AndReturn([])
+ self.config_service.template2fieldvalue_tbl.Select(
+ self.cnxn, cols=config_svc.TEMPLATE2FIELDVALUE_COLS,
+ template_id=[]).AndReturn([])
+ self.config_service.statusdef_tbl.Select(
+ self.cnxn, cols=config_svc.STATUSDEF_COLS,
+ project_id=project_ids, where=[('rank IS NOT NULL', [])],
+ order_by=[('rank', [])]).AndReturn([])
+ self.config_service.labeldef_tbl.Select(
+ self.cnxn, cols=config_svc.LABELDEF_COLS,
+ project_id=project_ids, where=[('rank IS NOT NULL', [])],
+ order_by=[('rank', [])]).AndReturn([])
+ self.config_service.fielddef_tbl.Select(
+ self.cnxn, cols=config_svc.FIELDDEF_COLS,
+ project_id=project_ids, order_by=[('field_name', [])]).AndReturn([])
+ self.config_service.fielddef2admin_tbl.Select(
+ self.cnxn, cols=config_svc.FIELDDEF2ADMIN_COLS,
+ field_id=[]).AndReturn([])
+ self.config_service.componentdef_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENTDEF_COLS,
+ project_id=project_ids, order_by=[('LOWER(path)', [])]).AndReturn([])
+ self.config_service.component2admin_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENT2ADMIN_COLS,
+ component_id=[]).AndReturn([])
+ self.config_service.component2cc_tbl.Select(
+ self.cnxn, cols=config_svc.COMPONENT2CC_COLS,
+ component_id=[]).AndReturn([])
+
+ def testGetProjectConfigs(self):
+ project_ids = [789, 679]
+ self.SetUpGetProjectConfigs(project_ids)
+
+ self.mox.ReplayAll()
+ config_dict = self.config_service.GetProjectConfigs(
+ self.cnxn, [789, 679], use_cache=False)
+ self.assertEqual(2, len(config_dict))
+ for pid in project_ids:
+ self.assertEqual(pid, config_dict[pid].project_id)
+ self.mox.VerifyAll()
+
+ def testGetProjectConfig_Hit(self):
+ project_id = 789
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
+ self.config_service.config_2lc.CacheItem(project_id, config)
+
+ self.mox.ReplayAll()
+ actual = self.config_service.GetProjectConfig(self.cnxn, project_id)
+ self.assertEqual(config, actual)
+ self.mox.VerifyAll()
+
+ def testGetProjectConfig_Miss(self):
+ project_id = 789
+ self.SetUpGetProjectConfigs([project_id])
+
+ self.mox.ReplayAll()
+ config = self.config_service.GetProjectConfig(self.cnxn, project_id)
+ self.assertEqual(project_id, config.project_id)
+ self.mox.VerifyAll()
+
+ def SetUpStoreConfig_Default(self, project_id):
+ self.config_service.projectissueconfig_tbl.InsertRow(
+ self.cnxn, replace=True,
+ project_id=project_id,
+ statuses_offer_merge='Duplicate',
+ exclusive_label_prefixes='Type Priority Milestone',
+ default_template_for_developers=0,
+ default_template_for_users=0,
+ default_col_spec=tracker_constants.DEFAULT_COL_SPEC,
+ default_sort_spec='',
+ default_x_attr='',
+ default_y_attr='',
+ custom_issue_entry_url=None,
+ commit=False)
+
+ self.SetUpUpdateTemplates_Default(project_id)
+ self.SetUpUpdateWellKnownLabels_Default(project_id)
+ self.SetUpUpdateWellKnownStatuses_Default(project_id)
+ self.cnxn.Commit()
+
+ def SetUpUpdateTemplates_Default(self, project_id):
+ self.config_service.template_tbl.Select(
+ self.cnxn, cols=['id'], project_id=project_id).AndReturn([])
+ self.config_service.template2label_tbl.Delete(
+ self.cnxn, template_id=[], commit=False)
+ self.config_service.template2component_tbl.Delete(
+ self.cnxn, template_id=[], commit=False)
+ self.config_service.template2admin_tbl.Delete(
+ self.cnxn, template_id=[], commit=False)
+ self.config_service.template2fieldvalue_tbl.Delete(
+ self.cnxn, template_id=[], commit=False)
+ self.config_service.template_tbl.Delete(
+ self.cnxn, project_id=project_id, commit=False)
+
+ template_rows = []
+ for template_dict in tracker_constants.DEFAULT_TEMPLATES:
+ row = (None,
+ project_id,
+ template_dict['name'],
+ template_dict['content'],
+ template_dict['summary'],
+ template_dict.get('summary_must_be_edited'),
+ None,
+ template_dict['status'],
+ template_dict.get('members_only', False),
+ template_dict.get('owner_defaults_to_member', True),
+ template_dict.get('component_required', False))
+ template_rows.append(row)
+
+ self.config_service.template_tbl.InsertRows(
+ self.cnxn, config_svc.TEMPLATE_COLS, template_rows,
+ replace=True, commit=False, return_generated_ids=True).AndReturn(
+ range(1, len(template_rows) + 1))
+
+ template2label_rows = [
+ (2, 'Type-Defect'),
+ (2, 'Priority-Medium'),
+ (1, 'Type-Defect'),
+ (1, 'Priority-Medium'),
+ ]
+ template2component_rows = []
+ template2admin_rows = []
+ template2fieldvalue_rows = []
+
+ self.config_service.template2label_tbl.InsertRows(
+ self.cnxn, config_svc.TEMPLATE2LABEL_COLS, template2label_rows,
+ ignore=True, commit=False)
+ self.config_service.template2component_tbl.InsertRows(
+ self.cnxn, config_svc.TEMPLATE2COMPONENT_COLS, template2component_rows,
+ commit=False)
+ self.config_service.template2admin_tbl.InsertRows(
+ self.cnxn, config_svc.TEMPLATE2ADMIN_COLS, template2admin_rows,
+ commit=False)
+ self.config_service.template2fieldvalue_tbl.InsertRows(
+ self.cnxn, config_svc.TEMPLATE2FIELDVALUE_COLS,
+ template2fieldvalue_rows, commit=False)
+
+ def SetUpUpdateWellKnownLabels_Default(self, project_id):
+ by_id = {
+ idx + 1: label for idx, (label, _, _) in enumerate(
+ tracker_constants.DEFAULT_WELL_KNOWN_LABELS)}
+ by_name = {name.lower(): label_id
+ for label_id, name in by_id.iteritems()}
+ label_dicts = by_id, by_name
+ self.config_service.label_cache.CacheAll({789: label_dicts})
+
+ update_labeldef_rows = [
+ (idx + 1, project_id, idx, label, doc, deprecated)
+ for idx, (label, doc, deprecated) in enumerate(
+ tracker_constants.DEFAULT_WELL_KNOWN_LABELS)]
+ self.config_service.labeldef_tbl.Update(
+ self.cnxn, {'rank': None}, project_id=project_id, commit=False)
+ self.config_service.labeldef_tbl.InsertRows(
+ self.cnxn, config_svc.LABELDEF_COLS, update_labeldef_rows,
+ replace=True, commit=False)
+ self.config_service.labeldef_tbl.InsertRows(
+ self.cnxn, config_svc.LABELDEF_COLS[1:], [], commit=False)
+
+ def SetUpUpdateWellKnownStatuses_Default(self, project_id):
+ by_id = {
+ idx + 1: status for idx, (status, _, _, _) in enumerate(
+ tracker_constants.DEFAULT_WELL_KNOWN_STATUSES)}
+ by_name = {name.lower(): label_id
+ for label_id, name in by_id.iteritems()}
+ closed_ids = [
+ idx + 1 for idx, (_, _, means_open, _) in enumerate(
+ tracker_constants.DEFAULT_WELL_KNOWN_STATUSES)
+ if not means_open]
+ status_dicts = by_id, by_name, closed_ids
+ self.config_service.status_cache.CacheAll({789: status_dicts})
+
+ update_statusdef_rows = [
+ (idx + 1, project_id, idx, status, means_open, doc, deprecated)
+ for idx, (status, doc, means_open, deprecated) in enumerate(
+ tracker_constants.DEFAULT_WELL_KNOWN_STATUSES)]
+ self.config_service.statusdef_tbl.Update(
+ self.cnxn, {'rank': None}, project_id=project_id, commit=False)
+ self.config_service.statusdef_tbl.InsertRows(
+ self.cnxn, config_svc.STATUSDEF_COLS, update_statusdef_rows,
+ replace=True, commit=False)
+ self.config_service.statusdef_tbl.InsertRows(
+ self.cnxn, config_svc.STATUSDEF_COLS[1:], [], commit=False)
+
+ def testStoreConfig(self):
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.SetUpStoreConfig_Default(789)
+
+ self.mox.ReplayAll()
+ self.config_service.StoreConfig(self.cnxn, config)
+ self.mox.VerifyAll()
+
+ def testUpdateTemplates(self):
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.SetUpUpdateTemplates_Default(789)
+
+ self.mox.ReplayAll()
+ self.config_service._UpdateTemplates(self.cnxn, config)
+ self.mox.VerifyAll()
+
+ def testUpdateWellKnownLabels(self):
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.SetUpUpdateWellKnownLabels_Default(789)
+
+ self.mox.ReplayAll()
+ self.config_service._UpdateWellKnownLabels(self.cnxn, config)
+ self.mox.VerifyAll()
+
+ def testUpdateWellKnownStatuses(self):
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.SetUpUpdateWellKnownStatuses_Default(789)
+
+ self.mox.ReplayAll()
+ self.config_service._UpdateWellKnownStatuses(self.cnxn, config)
+ self.mox.VerifyAll()
+
+ def testUpdateConfig(self):
+ pass # TODO(jrobbins): add a test for this
+
+ def SetUpExpungeConfig(self, project_id):
+ self.config_service.template_tbl.Select(
+ self.cnxn, cols=['id'], project_id=project_id).AndReturn([])
+ self.config_service.template2label_tbl.Delete(self.cnxn, template_id=[])
+ self.config_service.template2component_tbl.Delete(self.cnxn, template_id=[])
+ self.config_service.template_tbl.Delete(self.cnxn, project_id=project_id)
+ self.config_service.statusdef_tbl.Delete(self.cnxn, project_id=project_id)
+ self.config_service.labeldef_tbl.Delete(self.cnxn, project_id=project_id)
+ self.config_service.projectissueconfig_tbl.Delete(
+ self.cnxn, project_id=project_id)
+
+ self.config_service.config_2lc.InvalidateKeys(self.cnxn, [project_id])
+
+ def testExpungeConfig(self):
+ self.SetUpExpungeConfig(789)
+
+ self.mox.ReplayAll()
+ self.config_service.ExpungeConfig(self.cnxn, 789)
+ self.mox.VerifyAll()
+
+ ### Custom field definitions
+
+ def SetUpCreateFieldDef(self, project_id):
+ self.config_service.fielddef_tbl.InsertRow(
+ self.cnxn, project_id=project_id,
+ field_name='PercentDone', field_type='int_type',
+ applicable_type='Defect', applicable_predicate='',
+ is_required=False, is_multivalued=False,
+ min_value=1, max_value=100, regex=None,
+ needs_member=None, needs_perm=None,
+ grants_perm=None, notify_on='never',
+ docstring='doc', commit=False).AndReturn(1)
+ self.config_service.fielddef2admin_tbl.InsertRows(
+ self.cnxn, config_svc.FIELDDEF2ADMIN_COLS, [], commit=False)
+ self.cnxn.Commit()
+
+ def testCreateFieldDef(self):
+ self.SetUpCreateFieldDef(789)
+
+ self.mox.ReplayAll()
+ field_id = self.config_service.CreateFieldDef(
+ self.cnxn, 789, 'PercentDone', 'int_type', 'Defect', '', False, False,
+ 1, 100, None, None, None, None, 0, 'doc', [])
+ self.mox.VerifyAll()
+ self.assertEqual(1, field_id)
+
+ def SetUpSoftDeleteFieldDef(self, field_id):
+ self.config_service.fielddef_tbl.Update(
+ self.cnxn, {'is_deleted': True}, id=field_id)
+
+ def testSoftDeleteFieldDef(self):
+ self.SetUpSoftDeleteFieldDef(1)
+
+ self.mox.ReplayAll()
+ self.config_service.SoftDeleteFieldDef(self.cnxn, 789, 1)
+ self.mox.VerifyAll()
+
+ def SetUpUpdateFieldDef(self, field_id, new_values):
+ self.config_service.fielddef_tbl.Update(
+ self.cnxn, new_values, id=field_id, commit=False)
+ self.config_service.fielddef2admin_tbl.Delete(
+ self.cnxn, field_id=field_id, commit=False)
+ self.config_service.fielddef2admin_tbl.InsertRows(
+ self.cnxn, config_svc.FIELDDEF2ADMIN_COLS, [], commit=False)
+ self.cnxn.Commit()
+
+ def testUpdateFieldDef_NoOp(self):
+ new_values = {}
+ self.SetUpUpdateFieldDef(1, new_values)
+
+ self.mox.ReplayAll()
+ self.config_service.UpdateFieldDef(self.cnxn, 789, 1, admin_ids=[])
+ self.mox.VerifyAll()
+
+ def testUpdateFieldDef_Normal(self):
+ new_values = dict(
+ field_name='newname', applicable_type='defect',
+ applicable_predicate='pri:1', is_required=True,
+ is_multivalued=True, min_value=32, max_value=212, regex='a.*b',
+ needs_member=True, needs_perm='EditIssue', grants_perm='DeleteIssue',
+ notify_on='any_comment', docstring='new doc')
+ self.SetUpUpdateFieldDef(1, new_values)
+
+ self.mox.ReplayAll()
+ new_values = new_values.copy()
+ new_values['notify_on'] = 1
+ self.config_service.UpdateFieldDef(
+ self.cnxn, 789, 1, admin_ids=[], **new_values)
+ self.mox.VerifyAll()
+
+ ### Component definitions
+
+ def SetUpFindMatchingComponentIDsAnyProject(self, _exact, rows):
+ # TODO(jrobbins): more details here.
+ self.config_service.componentdef_tbl.Select(
+ self.cnxn, cols=['id'], where=mox.IsA(list)).AndReturn(rows)
+
+ def testFindMatchingComponentIDsAnyProject_Rooted(self):
+ self.SetUpFindMatchingComponentIDsAnyProject(True, [(1,), (2,), (3,)])
+
+ self.mox.ReplayAll()
+ comp_ids = self.config_service.FindMatchingComponentIDsAnyProject(
+ self.cnxn, ['WindowManager', 'NetworkLayer'])
+ self.mox.VerifyAll()
+ self.assertItemsEqual([1, 2, 3], comp_ids)
+
+ def testFindMatchingComponentIDsAnyProject_NonRooted(self):
+ self.SetUpFindMatchingComponentIDsAnyProject(False, [(1,), (2,), (3,)])
+
+ self.mox.ReplayAll()
+ comp_ids = self.config_service.FindMatchingComponentIDsAnyProject(
+ self.cnxn, ['WindowManager', 'NetworkLayer'], exact=False)
+ self.mox.VerifyAll()
+ self.assertItemsEqual([1, 2, 3], comp_ids)
+
+ def SetUpCreateComponentDef(self, comp_id):
+ self.config_service.componentdef_tbl.InsertRow(
+ self.cnxn, project_id=789, path='WindowManager',
+ docstring='doc', deprecated=False, commit=False,
+ created=0, creator_id=0).AndReturn(comp_id)
+ self.config_service.component2admin_tbl.InsertRows(
+ self.cnxn, config_svc.COMPONENT2ADMIN_COLS, [], commit=False)
+ self.config_service.component2cc_tbl.InsertRows(
+ self.cnxn, config_svc.COMPONENT2CC_COLS, [], commit=False)
+ self.cnxn.Commit()
+
+ def testCreateComponentDef(self):
+ self.SetUpCreateComponentDef(1)
+
+ self.mox.ReplayAll()
+ comp_id = self.config_service.CreateComponentDef(
+ self.cnxn, 789, 'WindowManager', 'doc', False, [], [], 0, 0)
+ self.mox.VerifyAll()
+ self.assertEqual(1, comp_id)
+
+ def SetUpUpdateComponentDef(self, component_id):
+ self.config_service.component2admin_tbl.Delete(
+ self.cnxn, component_id=component_id, commit=False)
+ self.config_service.component2admin_tbl.InsertRows(
+ self.cnxn, config_svc.COMPONENT2ADMIN_COLS, [], commit=False)
+ self.config_service.component2cc_tbl.Delete(
+ self.cnxn, component_id=component_id, commit=False)
+ self.config_service.component2cc_tbl.InsertRows(
+ self.cnxn, config_svc.COMPONENT2CC_COLS, [], commit=False)
+
+ self.config_service.componentdef_tbl.Update(
+ self.cnxn,
+ {'path': 'DisplayManager', 'docstring': 'doc', 'deprecated': True},
+ id=component_id, commit=False)
+ self.cnxn.Commit()
+
+ def testUpdateComponentDef(self):
+ self.SetUpUpdateComponentDef(1)
+
+ self.mox.ReplayAll()
+ self.config_service.UpdateComponentDef(
+ self.cnxn, 789, 1, path='DisplayManager', docstring='doc',
+ deprecated=True, admin_ids=[], cc_ids=[])
+ self.mox.VerifyAll()
+
+ def SetUpDeleteComponentDef(self, component_id):
+ self.config_service.component2cc_tbl.Delete(
+ self.cnxn, component_id=component_id, commit=False)
+ self.config_service.component2admin_tbl.Delete(
+ self.cnxn, component_id=component_id, commit=False)
+ self.config_service.componentdef_tbl.Delete(
+ self.cnxn, id=component_id, commit=False)
+ self.cnxn.Commit()
+
+ def testDeleteComponentDef(self):
+ self.SetUpDeleteComponentDef(1)
+
+ self.mox.ReplayAll()
+ self.config_service.DeleteComponentDef(self.cnxn, 789, 1)
+ self.mox.VerifyAll()
+
+ ### Memcache management
+
+ def testInvalidateMemcache(self):
+ pass # TODO(jrobbins): write this
+
+ def testInvalidateMemcacheShards(self):
+ NOW = 1234567
+ memcache.set('789;1', NOW)
+ memcache.set('789;2', NOW - 1000)
+ memcache.set('789;3', NOW - 2000)
+ memcache.set('all;1', NOW)
+ memcache.set('all;2', NOW - 1000)
+ memcache.set('all;3', NOW - 2000)
+
+ # Delete some of them.
+ self.config_service._InvalidateMemcacheShards(
+        [(789, 1), (789, 2), (789, 9)])
+
+ self.assertIsNone(memcache.get('789;1'))
+ self.assertIsNone(memcache.get('789;2'))
+ self.assertEqual(NOW - 2000, memcache.get('789;3'))
+ self.assertIsNone(memcache.get('all;1'))
+ self.assertIsNone(memcache.get('all;2'))
+ self.assertEqual(NOW - 2000, memcache.get('all;3'))
+
+ def testInvalidateMemcacheForEntireProject(self):
+ NOW = 1234567
+ memcache.set('789;1', NOW)
+ memcache.set('config:789', 'serialized config')
+ memcache.set('label_rows:789', 'serialized label rows')
+ memcache.set('status_rows:789', 'serialized status rows')
+ memcache.set('field_rows:789', 'serialized field rows')
+ memcache.set('890;1', NOW) # Other projects will not be affected.
+
+ self.config_service.InvalidateMemcacheForEntireProject(789)
+
+ self.assertIsNone(memcache.get('789;1'))
+ self.assertIsNone(memcache.get('config:789'))
+ self.assertIsNone(memcache.get('status_rows:789'))
+ self.assertIsNone(memcache.get('label_rows:789'))
+ self.assertIsNone(memcache.get('field_rows:789'))
+ self.assertEqual(NOW, memcache.get('890;1'))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/features_svc_test.py b/appengine/monorail/services/test/features_svc_test.py
new file mode 100644
index 0000000..0390969
--- /dev/null
+++ b/appengine/monorail/services/test/features_svc_test.py
@@ -0,0 +1,340 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for features_svc module."""
+
+import unittest
+
+import mox
+
+from features import filterrules_helpers
+from framework import sql
+from services import features_svc
+from testing import fake
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+class FeaturesServiceTest(unittest.TestCase):
+
+ def MakeMockTable(self):
+ return self.mox.CreateMock(sql.SQLTableManager)
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.cache_manager = fake.CacheManager()
+
+ self.features_service = features_svc.FeaturesService(self.cache_manager)
+
+ for table_var in [
+ 'user2savedquery_tbl', 'quickedithistory_tbl',
+ 'quickeditmostrecent_tbl', 'savedquery_tbl',
+ 'savedqueryexecutesinproject_tbl', 'project2savedquery_tbl',
+ 'filterrule_tbl']:
+ setattr(self.features_service, table_var, self.MakeMockTable())
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ ### quickedit command history
+
+ def testGetRecentCommands(self):
+ self.features_service.quickedithistory_tbl.Select(
+ self.cnxn, cols=['slot_num', 'command', 'comment'],
+ user_id=1, project_id=12345).AndReturn(
+ [(1, 'status=New', 'Brand new issue')])
+ self.features_service.quickeditmostrecent_tbl.SelectValue(
+ self.cnxn, 'slot_num', default=1, user_id=1, project_id=12345
+ ).AndReturn(1)
+ self.mox.ReplayAll()
+ slots, recent_slot_num = self.features_service.GetRecentCommands(
+ self.cnxn, 1, 12345)
+ self.mox.VerifyAll()
+
+ self.assertEqual(1, recent_slot_num)
+ self.assertEqual(
+ len(tracker_constants.DEFAULT_RECENT_COMMANDS), len(slots))
+ self.assertEqual('status=New', slots[0][1])
+
+ def testStoreRecentCommand(self):
+ self.features_service.quickedithistory_tbl.InsertRow(
+ self.cnxn, replace=True, user_id=1, project_id=12345,
+ slot_num=1, command='status=New', comment='Brand new issue')
+ self.features_service.quickeditmostrecent_tbl.InsertRow(
+ self.cnxn, replace=True, user_id=1, project_id=12345,
+ slot_num=1)
+ self.mox.ReplayAll()
+ self.features_service.StoreRecentCommand(
+ self.cnxn, 1, 12345, 1, 'status=New', 'Brand new issue')
+ self.mox.VerifyAll()
+
+ def testExpungeQuickEditHistory(self):
+ self.features_service.quickeditmostrecent_tbl.Delete(
+ self.cnxn, project_id=12345)
+ self.features_service.quickedithistory_tbl.Delete(
+ self.cnxn, project_id=12345)
+ self.mox.ReplayAll()
+ self.features_service.ExpungeQuickEditHistory(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+
+ ### Saved User and Project Queries
+
+ def testGetSavedQuery(self):
+ self.features_service.savedquery_tbl.Select(
+ self.cnxn, cols=features_svc.SAVEDQUERY_COLS, id=[1]).AndReturn(
+ [(1, 'query1', 100, 'owner:me')])
+ self.features_service.savedqueryexecutesinproject_tbl.Select(
+ self.cnxn, cols=features_svc.SAVEDQUERYEXECUTESINPROJECT_COLS,
+ query_id=[1]).AndReturn([(1, 12345)])
+ self.mox.ReplayAll()
+ saved_query = self.features_service.GetSavedQuery(
+ self.cnxn, 1)
+ self.mox.VerifyAll()
+ self.assertEqual(1, saved_query.query_id)
+ self.assertEqual('query1', saved_query.name)
+ self.assertEqual(100, saved_query.base_query_id)
+ self.assertEqual('owner:me', saved_query.query)
+ self.assertEqual([12345], saved_query.executes_in_project_ids)
+
+ def SetUpUsersSavedQueries(self):
+ query = tracker_bizobj.MakeSavedQuery(1, 'query1', 100, 'owner:me')
+ self.features_service.saved_query_cache.CacheItem(1, [query])
+ self.features_service.user2savedquery_tbl.Select(
+ self.cnxn,
+ cols=features_svc.SAVEDQUERY_COLS + ['user_id', 'subscription_mode'],
+ left_joins=[('SavedQuery ON query_id = id', [])],
+ order_by=[('rank', [])], user_id=[2]).AndReturn(
+ [(2, 'query2', 100, 'status:New', 2, 'Sub_Mode')])
+ self.features_service.savedqueryexecutesinproject_tbl.Select(
+ self.cnxn, cols=features_svc.SAVEDQUERYEXECUTESINPROJECT_COLS,
+ query_id=set([2])).AndReturn([(2, 12345)])
+
+ def testGetUsersSavedQueriesDict(self):
+ self.SetUpUsersSavedQueries()
+ self.mox.ReplayAll()
+ results_dict = self.features_service._GetUsersSavedQueriesDict(
+ self.cnxn, [1, 2])
+ self.mox.VerifyAll()
+ self.assertIn(1, results_dict)
+ self.assertIn(2, results_dict)
+
+ def testGetSavedQueriesByUserID(self):
+ self.SetUpUsersSavedQueries()
+ self.mox.ReplayAll()
+ saved_queries = self.features_service.GetSavedQueriesByUserID(
+ self.cnxn, 2)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(saved_queries))
+ self.assertEqual(2, saved_queries[0].query_id)
+
+ def SetUpCannedQueriesForProjects(self):
+ self.features_service.project2savedquery_tbl.Select(
+ self.cnxn, cols=['project_id'] + features_svc.SAVEDQUERY_COLS,
+ left_joins=[('SavedQuery ON query_id = id', [])],
+ order_by=[('rank', [])], project_id=[12345]).AndReturn(
+ [(12345, 1, 'query1', 100, 'owner:me')])
+
+ def testGetCannedQueriesForProjects(self):
+ self.SetUpCannedQueriesForProjects()
+ self.mox.ReplayAll()
+ results_dict = self.features_service.GetCannedQueriesForProjects(
+ self.cnxn, [12345])
+ self.mox.VerifyAll()
+ self.assertIn(12345, results_dict)
+
+ def testGetCannedQueriesByProjectID(self):
+ self.SetUpCannedQueriesForProjects()
+ self.mox.ReplayAll()
+ result = self.features_service.GetCannedQueriesByProjectID(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(result))
+ self.assertEqual(1, result[0].query_id)
+
+ def SetUpUpdateSavedQueries(self, commit=True):
+ query1 = tracker_bizobj.MakeSavedQuery(1, 'query1', 100, 'owner:me')
+ query2 = tracker_bizobj.MakeSavedQuery(None, 'query2', 100, 'status:New')
+ saved_queries = [query1, query2]
+ savedquery_rows = [
+ (sq.query_id or None, sq.name, sq.base_query_id, sq.query)
+ for sq in saved_queries]
+ self.features_service.savedquery_tbl.Delete(
+ self.cnxn, id=[1], commit=commit)
+ self.features_service.savedquery_tbl.InsertRows(
+ self.cnxn, features_svc.SAVEDQUERY_COLS, savedquery_rows, commit=commit,
+ return_generated_ids=True).AndReturn([11, 12])
+ return saved_queries
+
+ def testUpdateSavedQueries(self):
+ saved_queries = self.SetUpUpdateSavedQueries()
+ self.mox.ReplayAll()
+ self.features_service._UpdateSavedQueries(
+ self.cnxn, saved_queries, True)
+ self.mox.VerifyAll()
+
+ def testUpdateCannedQueries(self):
+ self.features_service.project2savedquery_tbl.Delete(
+ self.cnxn, project_id=12345, commit=False)
+ canned_queries = self.SetUpUpdateSavedQueries(False)
+ project2savedquery_rows = [(12345, 0, 1), (12345, 1, 12)]
+ self.features_service.project2savedquery_tbl.InsertRows(
+ self.cnxn, features_svc.PROJECT2SAVEDQUERY_COLS,
+ project2savedquery_rows, commit=False)
+ self.cnxn.Commit()
+ self.mox.ReplayAll()
+ self.features_service.UpdateCannedQueries(
+ self.cnxn, 12345, canned_queries)
+ self.mox.VerifyAll()
+
+ def testUpdateUserSavedQueries(self):
+ saved_queries = self.SetUpUpdateSavedQueries(False)
+ self.features_service.savedqueryexecutesinproject_tbl.Delete(
+ self.cnxn, query_id=[1], commit=False)
+ self.features_service.user2savedquery_tbl.Delete(
+ self.cnxn, user_id=1, commit=False)
+ user2savedquery_rows = [
+ (1, 0, 1, 'noemail'), (1, 1, 12, 'noemail')]
+ self.features_service.user2savedquery_tbl.InsertRows(
+ self.cnxn, features_svc.USER2SAVEDQUERY_COLS,
+ user2savedquery_rows, commit=False)
+ self.features_service.savedqueryexecutesinproject_tbl.InsertRows(
+ self.cnxn, features_svc.SAVEDQUERYEXECUTESINPROJECT_COLS, [],
+ commit=False)
+ self.cnxn.Commit()
+ self.mox.ReplayAll()
+ self.features_service.UpdateUserSavedQueries(
+ self.cnxn, 1, saved_queries)
+ self.mox.VerifyAll()
+
+ ### Subscriptions
+
+ def testGetSubscriptionsInProjects(self):
+ join_str = (
+ 'SavedQueryExecutesInProject ON '
+ 'SavedQueryExecutesInProject.query_id = User2SavedQuery.query_id')
+ self.features_service.user2savedquery_tbl.Select(
+ self.cnxn, cols=['user_id'], distinct=True,
+ joins=[(join_str, [])],
+ subscription_mode='immediate', project_id=12345).AndReturn(
+ [(1, 'asd'), (2, 'efg')])
+ self.SetUpUsersSavedQueries()
+ self.mox.ReplayAll()
+ result = self.features_service.GetSubscriptionsInProjects(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+ self.assertIn(1, result)
+ self.assertIn(2, result)
+
+ def testExpungeSavedQueriesExecuteInProject(self):
+ self.features_service.savedqueryexecutesinproject_tbl.Delete(
+ self.cnxn, project_id=12345)
+ self.features_service.project2savedquery_tbl.Select(
+ self.cnxn, cols=['query_id'], project_id=12345).AndReturn(
+ [(1, 'asd'), (2, 'efg')])
+ self.features_service.project2savedquery_tbl.Delete(
+ self.cnxn, project_id=12345)
+ self.features_service.savedquery_tbl.Delete(
+ self.cnxn, id=[1, 2])
+ self.mox.ReplayAll()
+ self.features_service.ExpungeSavedQueriesExecuteInProject(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+
+ ### Filter Rules
+
+ def testDeserializeFilterRules(self):
+ filterrule_rows = [
+ (12345, 0, 'predicate1', 'default_status:New'),
+ (12345, 1, 'predicate2', 'default_owner_id:1 add_cc_id:2'),
+ ]
+ result_dict = self.features_service._DeserializeFilterRules(
+ filterrule_rows)
+ self.assertIn(12345, result_dict)
+ self.assertEqual(2, len(result_dict[12345]))
+ self.assertEqual('New', result_dict[12345][0].default_status)
+ self.assertEqual(1, result_dict[12345][1].default_owner_id)
+ self.assertEqual([2], result_dict[12345][1].add_cc_ids)
+
+ def testDeserializeRuleConsequence(self):
+ consequence = ('default_status:New default_owner_id:1 add_cc_id:2'
+ ' add_label:label1 add_label:label2 add_notify:admin')
+ (default_status, default_owner_id, add_cc_ids, add_labels,
+ add_notify) = self.features_service._DeserializeRuleConsequence(
+ consequence)
+ self.assertEqual('New', default_status)
+ self.assertEqual(1, default_owner_id)
+ self.assertEqual([2], add_cc_ids)
+ self.assertEqual(['label1', 'label2'], add_labels)
+ self.assertEqual(['admin'], add_notify)
+
+ def SetUpGetFilterRulesByProjectIDs(self):
+ filterrule_rows = [
+ (12345, 0, 'predicate1', 'default_status:New'),
+ (12345, 1, 'predicate2', 'default_owner_id:1 add_cc_id:2'),
+ ]
+
+ self.features_service.filterrule_tbl.Select(
+ self.cnxn, cols=features_svc.FILTERRULE_COLS,
+ project_id=[12345]).AndReturn(filterrule_rows)
+
+ def testGetFilterRulesByProjectIDs(self):
+ self.SetUpGetFilterRulesByProjectIDs()
+ self.mox.ReplayAll()
+ result = self.features_service._GetFilterRulesByProjectIDs(
+ self.cnxn, [12345])
+ self.mox.VerifyAll()
+ self.assertIn(12345, result)
+ self.assertEqual(2, len(result[12345]))
+
+ def testGetFilterRules(self):
+ self.SetUpGetFilterRulesByProjectIDs()
+ self.mox.ReplayAll()
+ result = self.features_service.GetFilterRules(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+ self.assertEqual(2, len(result))
+
+ def testSerializeRuleConsequence(self):
+ rule = filterrules_helpers.MakeRule(
+ 'predicate', 'New', 1, [1, 2], ['label1', 'label2'], ['admin'])
+ result = self.features_service._SerializeRuleConsequence(rule)
+ self.assertEqual('add_label:label1 add_label:label2 default_status:New'
+ ' default_owner_id:1 add_cc_id:1 add_cc_id:2'
+ ' add_notify:admin', result)
+
+ def testUpdateFilterRules(self):
+ self.features_service.filterrule_tbl.Delete(self.cnxn, project_id=12345)
+ rows = [
+ (12345, 0, 'predicate1', 'add_label:label1 add_label:label2'
+ ' default_status:New default_owner_id:1'
+ ' add_cc_id:1 add_cc_id:2 add_notify:admin'),
+ (12345, 1, 'predicate2', 'add_label:label2 add_label:label3'
+ ' default_status:Fixed default_owner_id:2'
+ ' add_cc_id:1 add_cc_id:2 add_notify:admin2')
+ ]
+ self.features_service.filterrule_tbl.InsertRows(
+ self.cnxn, features_svc.FILTERRULE_COLS, rows)
+ rule1 = filterrules_helpers.MakeRule(
+ 'predicate1', 'New', 1, [1, 2], ['label1', 'label2'], ['admin'])
+ rule2 = filterrules_helpers.MakeRule(
+ 'predicate2', 'Fixed', 2, [1, 2], ['label2', 'label3'], ['admin2'])
+ self.mox.ReplayAll()
+ self.features_service.UpdateFilterRules(
+ self.cnxn, 12345, [rule1, rule2])
+ self.mox.VerifyAll()
+
+ def testExpungeFilterRules(self):
+ self.features_service.filterrule_tbl.Delete(self.cnxn, project_id=12345)
+ self.mox.ReplayAll()
+ self.features_service.ExpungeFilterRules(
+ self.cnxn, 12345)
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/fulltext_helpers_test.py b/appengine/monorail/services/test/fulltext_helpers_test.py
new file mode 100644
index 0000000..8eb6ab5
--- /dev/null
+++ b/appengine/monorail/services/test/fulltext_helpers_test.py
@@ -0,0 +1,231 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the fulltext_helpers module."""
+
+import unittest
+
+import mox
+
+from google.appengine.api import search
+
+from proto import ast_pb2
+from proto import tracker_pb2
+from services import fulltext_helpers
+
+
+TEXT_HAS = ast_pb2.QueryOp.TEXT_HAS
+NOT_TEXT_HAS = ast_pb2.QueryOp.NOT_TEXT_HAS
+
+
+class MockResult(object):
+
+ def __init__(self, doc_id):
+ self.doc_id = doc_id
+
+
+class MockSearchResponse(object):
+ """Mock object that can be iterated over in batches."""
+
+ def __init__(self, results, cursor):
+ """Constructor.
+
+ Args:
+ results: list of strings for document IDs.
+ cursor: search.Cursor object, if there are more results to
+ retrieve in another round-trip. Or, None if there are not.
+ """
+ self.results = [MockResult(r) for r in results]
+ self.cursor = cursor
+
+ def __iter__(self):
+ """The response itself is an iterator over the results."""
+ return self.results.__iter__()
+
+
+class FulltextHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.any_field_fd = tracker_pb2.FieldDef(
+ field_name='any_field', field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ self.summary_fd = tracker_pb2.FieldDef(
+ field_name='summary', field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ self.milestone_fd = tracker_pb2.FieldDef(
+ field_name='milestone', field_type=tracker_pb2.FieldTypes.STR_TYPE,
+ field_id=123)
+ self.fulltext_fields = ['summary']
+
+ self.mock_index = self.mox.CreateMockAnything()
+ self.mox.StubOutWithMock(search, 'Index')
+ self.query = None
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def RecordQuery(self, query):
+ self.query = query
+
+ def testBuildFTSQuery_EmptyQueryConjunction(self):
+ query_ast_conj = ast_pb2.Conjunction()
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual(None, fulltext_query)
+
+ def testBuildFTSQuery_NoFullTextConditions(self):
+ estimated_hours_fd = tracker_pb2.FieldDef(
+ field_name='estimate', field_type=tracker_pb2.FieldTypes.INT_TYPE,
+ field_id=124)
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [estimated_hours_fd], [], [40])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual(None, fulltext_query)
+
+ def testBuildFTSQuery_Normal(self):
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [self.summary_fd], ['needle'], []),
+ ast_pb2.MakeCond(TEXT_HAS, [self.milestone_fd], ['Q3', 'Q4'], [])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual(
+ '(summary:"needle") (custom_123:"Q3" OR custom_123:"Q4")',
+ fulltext_query)
+
+ def testBuildFTSQuery_WithQuotes(self):
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [self.summary_fd], ['"needle haystack"'],
+ [])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual('(summary:"needle haystack")', fulltext_query)
+
+  def testBuildFTSQuery_IgnoreColonInText(self):
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [self.summary_fd], ['"needle:haystack"'],
+ [])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual('(summary:"needle haystack")', fulltext_query)
+
+ def testBuildFTSQuery_InvalidQuery(self):
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [self.summary_fd], ['haystack"needle'], []),
+ ast_pb2.MakeCond(TEXT_HAS, [self.milestone_fd], ['Q3', 'Q4'], [])])
+ try:
+ fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ raise Exception('Expected AssertionError')
+ except AssertionError:
+ pass
+
+ def testBuildFTSQuery_SpecialPrefixQuery(self):
+ special_prefix = fulltext_helpers.NON_OP_PREFIXES[0]
+
+ # Test with summary field.
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(TEXT_HAS, [self.summary_fd],
+ ['%s//google.com' % special_prefix], []),
+ ast_pb2.MakeCond(TEXT_HAS, [self.milestone_fd], ['Q3', 'Q4'], [])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual(
+ '(summary:"%s//google.com") (custom_123:"Q3" OR custom_123:"Q4")' % (
+ special_prefix),
+ fulltext_query)
+
+ # Test with any field.
+ any_fd = tracker_pb2.FieldDef(
+ field_name=ast_pb2.ANY_FIELD,
+ field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.MakeCond(
+ TEXT_HAS, [any_fd], ['%s//google.com' % special_prefix], []),
+ ast_pb2.MakeCond(TEXT_HAS, [self.milestone_fd], ['Q3', 'Q4'], [])])
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, self.fulltext_fields)
+ self.assertEqual(
+ '("%s//google.com") (custom_123:"Q3" OR custom_123:"Q4")' % (
+ special_prefix),
+ fulltext_query)
+
+ def testBuildFTSCondition_BuiltinField(self):
+ query_cond = ast_pb2.MakeCond(
+ TEXT_HAS, [self.summary_fd], ['needle'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual('(summary:"needle")', fulltext_query_clause)
+
+  def testBuildFTSCondition_Negation(self):
+ query_cond = ast_pb2.MakeCond(
+ NOT_TEXT_HAS, [self.summary_fd], ['needle'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual('NOT (summary:"needle")', fulltext_query_clause)
+
+ def testBuildFTSCondition_QuickOR(self):
+ query_cond = ast_pb2.MakeCond(
+ TEXT_HAS, [self.summary_fd], ['needle', 'pin'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual(
+ '(summary:"needle" OR summary:"pin")',
+ fulltext_query_clause)
+
+ def testBuildFTSCondition_NegatedQuickOR(self):
+ query_cond = ast_pb2.MakeCond(
+ NOT_TEXT_HAS, [self.summary_fd], ['needle', 'pin'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual(
+ 'NOT (summary:"needle" OR summary:"pin")',
+ fulltext_query_clause)
+
+ def testBuildFTSCondition_AnyField(self):
+ query_cond = ast_pb2.MakeCond(
+ TEXT_HAS, [self.any_field_fd], ['needle'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual('("needle")', fulltext_query_clause)
+
+ def testBuildFTSCondition_NegatedAnyField(self):
+ query_cond = ast_pb2.MakeCond(
+ NOT_TEXT_HAS, [self.any_field_fd], ['needle'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual('NOT ("needle")', fulltext_query_clause)
+
+ def testBuildFTSCondition_CrossProjectWithMultipleFieldDescriptors(self):
+ other_milestone_fd = tracker_pb2.FieldDef(
+ field_name='milestone', field_type=tracker_pb2.FieldTypes.STR_TYPE,
+ field_id=456)
+ query_cond = ast_pb2.MakeCond(
+ TEXT_HAS, [self.milestone_fd, other_milestone_fd], ['needle'], [])
+ fulltext_query_clause = fulltext_helpers._BuildFTSCondition(
+ query_cond, self.fulltext_fields)
+ self.assertEqual(
+ '(custom_123:"needle" OR custom_456:"needle")', fulltext_query_clause)
+
+ def SetUpComprehensiveSearch(self):
+ search.Index(name='search index name').AndReturn(
+ self.mock_index)
+ self.mock_index.search(mox.IgnoreArg()).WithSideEffects(
+ self.RecordQuery).AndReturn(
+ MockSearchResponse(['123', '234'], search.Cursor()))
+ self.mock_index.search(mox.IgnoreArg()).WithSideEffects(
+ self.RecordQuery).AndReturn(MockSearchResponse(['345'], None))
+
+ def testComprehensiveSearch(self):
+ self.SetUpComprehensiveSearch()
+ self.mox.ReplayAll()
+ project_ids = fulltext_helpers.ComprehensiveSearch(
+ 'browser', 'search index name')
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234, 345], project_ids)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/issue_svc_test.py b/appengine/monorail/services/test/issue_svc_test.py
new file mode 100644
index 0000000..a43ffb1
--- /dev/null
+++ b/appengine/monorail/services/test/issue_svc_test.py
@@ -0,0 +1,1469 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for issue_svc module."""
+
+import time
+import unittest
+
+import mox
+
+from google.appengine.api import search
+from google.appengine.ext import testbed
+
+import settings
+from framework import sql
+from proto import tracker_pb2
+from services import issue_svc
+from services import service_manager
+from services import spam_svc
+from services import tracker_fulltext
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class MockIndex(object):
+ """Minimal stand-in for a GAE search index; delete() is a no-op."""
+
+ def delete(self, string_list):
+ pass
+
+
+def MakeIssueService(project_service, config_service, cache_manager, my_mox):
+ """Build an IssueService whose SQL table managers are mox mocks.
+
+ Every *_tbl attribute is replaced with a mock SQLTableManager so tests
+ can record exact DB-call expectations without a real database.
+ """
+ issue_service = issue_svc.IssueService(
+ project_service, config_service, cache_manager)
+ for table_var in [
+ 'issue_tbl', 'issuesummary_tbl', 'issue2label_tbl',
+ 'issue2component_tbl', 'issue2cc_tbl', 'issue2notify_tbl',
+ 'issue2fieldvalue_tbl', 'issuerelation_tbl', 'danglingrelation_tbl',
+ 'issueformerlocations_tbl', 'comment_tbl', 'issueupdate_tbl',
+ 'attachment_tbl', 'reindexqueue_tbl', 'localidcounter_tbl']:
+ setattr(issue_service, table_var, my_mox.CreateMock(sql.SQLTableManager))
+
+ return issue_service
+
+
+class IssueIDTwoLevelCacheTest(unittest.TestCase):
+ """Tests the (project_id, local_id) -> issue_id two-level cache."""
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.project_service = fake.ProjectService()
+ self.config_service = fake.ConfigService()
+ self.cache_manager = fake.CacheManager()
+ self.issue_service = MakeIssueService(
+ self.project_service, self.config_service, self.cache_manager,
+ self.mox)
+ self.issue_id_2lc = self.issue_service.issue_id_2lc
+ self.spam_service = fake.SpamService()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeserializeIssueIDs_Empty(self):
+ issue_id_dict = self.issue_id_2lc._DeserializeIssueIDs([])
+ self.assertEqual({}, issue_id_dict)
+
+ def testDeserializeIssueIDs_Normal(self):
+ # DB rows (project_id, local_id, issue_id) become a dict keyed by
+ # (project_id, local_id).
+ rows = [(789, 1, 78901), (789, 2, 78902), (789, 3, 78903)]
+ issue_id_dict = self.issue_id_2lc._DeserializeIssueIDs(rows)
+ expected = {
+ (789, 1): 78901,
+ (789, 2): 78902,
+ (789, 3): 78903,
+ }
+ self.assertEqual(expected, issue_id_dict)
+
+ def SetUpFetchItems(self):
+ # Expect one Select with OR'd per-project conditions.
+ where = [
+ ('(Issue.project_id = %s AND Issue.local_id IN (%s,%s,%s))',
+ [789, 1, 2, 3])]
+ rows = [(789, 1, 78901), (789, 2, 78902), (789, 3, 78903)]
+ self.issue_service.issue_tbl.Select(
+ self.cnxn, cols=['project_id', 'local_id', 'id'],
+ where=where, or_where_conds=True).AndReturn(rows)
+
+ def testFetchItems(self):
+ project_local_ids_list = [(789, 1), (789, 2), (789, 3)]
+ issue_ids = [78901, 78902, 78903]
+ self.SetUpFetchItems()
+ self.mox.ReplayAll()
+ issue_dict = self.issue_id_2lc.FetchItems(
+ self.cnxn, project_local_ids_list)
+ self.mox.VerifyAll()
+ self.assertItemsEqual(project_local_ids_list, issue_dict.keys())
+ self.assertItemsEqual(issue_ids, issue_dict.values())
+
+ def testKeyToStr(self):
+ # Cache keys serialize as 'project_id,local_id'.
+ self.assertEqual('789,1', self.issue_id_2lc._KeyToStr((789, 1)))
+
+ def testStrToKey(self):
+ self.assertEqual((789, 1), self.issue_id_2lc._StrToKey('789,1'))
+
+
+class IssueTwoLevelCacheTest(unittest.TestCase):
+ """Tests the issue_id -> Issue PB two-level cache."""
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.cnxn = 'fake connection'
+ self.project_service = fake.ProjectService()
+ self.config_service = fake.ConfigService()
+ self.cache_manager = fake.CacheManager()
+ self.issue_service = MakeIssueService(
+ self.project_service, self.config_service, self.cache_manager,
+ self.mox)
+ self.issue_2lc = self.issue_service.issue_2lc
+
+ # Canned child-table rows for issue 78901, shared across tests.
+ now = int(time.time())
+ self.project_service.TestAddProject('proj', project_id=789)
+ self.issue_rows = [
+ (78901, 789, 1, 1, 111L, 222L, now, now, now, 0, 0, 0, 1, 0, False)]
+ self.summary_rows = [(78901, 'sum')]
+ self.label_rows = [(78901, 1, 0)]
+ self.component_rows = []
+ self.cc_rows = [(78901, 333L, 0)]
+ self.notify_rows = []
+ self.fieldvalue_rows = []
+ self.relation_rows = [
+ (78901, 78902, 'blockedon'), (78903, 78901, 'blockedon')]
+ self.dangling_relation_rows = [
+ (78901, 'codesite', 5001, 'blocking'),
+ (78901, 'codesite', 5002, 'blockedon')]
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testDeserializeIssues_Empty(self):
+ issue_dict = self.issue_2lc._DeserializeIssues(
+ self.cnxn, [], [], [], [], [], [], [], [], [])
+ self.assertEqual({}, issue_dict)
+
+ def testDeserializeIssues_Normal(self):
+ issue_dict = self.issue_2lc._DeserializeIssues(
+ self.cnxn, self.issue_rows, self.summary_rows, self.label_rows,
+ self.component_rows, self.cc_rows, self.notify_rows,
+ self.fieldvalue_rows, self.relation_rows, self.dangling_relation_rows)
+ self.assertItemsEqual([78901], issue_dict.keys())
+
+ def testDeserializeIssues_UnexpectedLabel(self):
+ # A label row whose label_id (999) is unknown triggers an assertion.
+ unexpected_label_rows = [
+ (78901, 999, 0)
+ ]
+ self.assertRaises(
+ AssertionError,
+ self.issue_2lc._DeserializeIssues,
+ self.cnxn, self.issue_rows, self.summary_rows, unexpected_label_rows,
+ self.component_rows, self.cc_rows, self.notify_rows,
+ self.fieldvalue_rows, self.relation_rows, self.dangling_relation_rows)
+
+ def testDeserializeIssues_UnexpectedIssueRelation(self):
+ # A relation row that references no fetched issue triggers an assertion.
+ unexpected_relation_rows = [
+ (78990, 78999, 'blockedon')
+ ]
+ self.assertRaises(
+ AssertionError,
+ self.issue_2lc._DeserializeIssues,
+ self.cnxn, self.issue_rows, self.summary_rows, self.label_rows,
+ self.component_rows, self.cc_rows, self.notify_rows,
+ self.fieldvalue_rows, unexpected_relation_rows,
+ self.dangling_relation_rows)
+
+ def SetUpFetchItems(self, issue_ids):
+ # Expect one Select per child table, in this exact order.
+ shard_id = None
+ self.issue_service.issue_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE_COLS, id=issue_ids,
+ shard_id=shard_id).AndReturn(self.issue_rows)
+ self.issue_service.issuesummary_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUESUMMARY_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.summary_rows)
+ self.issue_service.issue2label_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE2LABEL_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.label_rows)
+ self.issue_service.issue2component_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE2COMPONENT_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.component_rows)
+ self.issue_service.issue2cc_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE2CC_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.cc_rows)
+ self.issue_service.issue2notify_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE2NOTIFY_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.notify_rows)
+ self.issue_service.issue2fieldvalue_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUE2FIELDVALUE_COLS, shard_id=shard_id,
+ issue_id=issue_ids).AndReturn(self.fieldvalue_rows)
+ # Relations are matched in both directions (issue_id OR dst_issue_id).
+ self.issue_service.issuerelation_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUERELATION_COLS, # Note: no shard
+ where=[('(issue_id IN (%s) OR dst_issue_id IN (%s))',
+ issue_ids + issue_ids)]).AndReturn(self.relation_rows)
+ self.issue_service.danglingrelation_tbl.Select(
+ self.cnxn, cols=issue_svc.DANGLINGRELATION_COLS, # Note: no shard
+ issue_id=issue_ids).AndReturn(self.dangling_relation_rows)
+
+ def testFetchItems(self):
+ issue_ids = [78901]
+ self.SetUpFetchItems(issue_ids)
+ self.mox.ReplayAll()
+ issue_dict = self.issue_2lc.FetchItems(self.cnxn, issue_ids)
+ self.mox.VerifyAll()
+ self.assertItemsEqual(issue_ids, issue_dict.keys())
+
+
+class IssueServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ # GAE testbed provides an in-process memcache stub.
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.services = service_manager.Services()
+ self.services.user = fake.UserService()
+ self.services.project = fake.ProjectService()
+ self.services.config = fake.ConfigService()
+ self.services.features = fake.FeaturesService()
+ self.cache_manager = fake.CacheManager()
+ self.services.issue = MakeIssueService(
+ self.services.project, self.services.config, self.cache_manager,
+ self.mox)
+ self.services.spam = self.mox.CreateMock(spam_svc.SpamService)
+ self.now = int(time.time())
+ # Stub out full-text indexing so tests never touch the search API;
+ # restored in tearDown.
+ self.orig_index_issues = tracker_fulltext.IndexIssues
+ tracker_fulltext.IndexIssues = lambda *args: None
+
+ def classifierResult(self, label, score):
+ # Mimics the dict shape returned by the spam classifier service.
+ return {'outputLabel': label,
+ 'outputMulti': [{'label': label, 'score': score}]}
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+ # Undo the module-level stub installed in setUp.
+ tracker_fulltext.IndexIssues = self.orig_index_issues
+
+ ### Issue ID lookups
+
+ def testLookupIssueIDs_Hit(self):
+ # Both keys are pre-cached, so no DB expectations are needed.
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.services.issue.issue_id_2lc.CacheItem((789, 2), 78902)
+ actual = self.services.issue.LookupIssueIDs(
+ self.cnxn, [(789, 1), (789, 2)])
+ self.assertEqual([78901, 78902], actual)
+
+ def testLookupIssueID(self):
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ actual = self.services.issue.LookupIssueID(self.cnxn, 789, 1)
+ self.assertEqual(78901, actual)
+
+ def testResolveIssueRefs(self):
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.services.issue.issue_id_2lc.CacheItem((789, 2), 78902)
+ prefetched_projects = {'proj': fake.Project('proj', project_id=789)}
+ # A ref with project None falls back to the default project 'proj'.
+ refs = [('proj', 1), (None, 2)]
+ actual = self.services.issue.ResolveIssueRefs(
+ self.cnxn, prefetched_projects, 'proj', refs)
+ self.assertEqual([78901, 78902], actual)
+
+ ### Issue objects
+
+ def testCreateIssue(self):
+ # Ham verdict (score below spam handling path) -> normal local_id 1.
+ settings.classifier_spam_thresh = 0.9
+ self.SetUpAllocateNextLocalID(789, None, None)
+ self.SetUpInsertIssue()
+ self.SetUpInsertComment(7890101, True)
+ self.services.spam.ClassifyIssue(mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(
+ self.classifierResult('ham', 1.0))
+ self.services.spam.RecordClassifierIssueVerdict(self.cnxn,
+ mox.IsA(tracker_pb2.Issue), False, 1.0)
+ self.SetUpUpdateIssuesModified(set())
+
+ self.mox.ReplayAll()
+ actual_local_id = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'sum',
+ 'New', 111L, [], ['Type-Defect'], [], [], 111L, 'content',
+ index_now=False, timestamp=self.now)
+ self.mox.VerifyAll()
+ self.assertEqual(1, actual_local_id)
+
+ def testCreateIssue_EmptyStringLabels(self):
+ # Labels that are empty/whitespace/comma-only are dropped, so the
+ # label insert is expected with label_rows=[].
+ settings.classifier_spam_thresh = 0.9
+ self.SetUpAllocateNextLocalID(789, None, None)
+ self.SetUpInsertIssue(label_rows=[])
+ self.SetUpInsertComment(7890101, True)
+ self.services.spam.ClassifyIssue(mox.IgnoreArg(),
+ mox.IgnoreArg()).AndReturn(
+ self.classifierResult('ham', 1.0))
+ self.services.spam.RecordClassifierIssueVerdict(self.cnxn,
+ mox.IsA(tracker_pb2.Issue), False, 1.0)
+ self.SetUpUpdateIssuesModified(set(), modified_timestamp=self.now)
+
+ self.mox.ReplayAll()
+ actual_local_id = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'sum',
+ 'New', 111L, [], [',', '', ' ', ', '], [], [], 111L, 'content',
+ index_now=False, timestamp=self.now)
+ self.mox.VerifyAll()
+ self.assertEqual(1, actual_local_id)
+
+ def SetUpUpdateIssuesModified(self, iids, modified_timestamp=None):
+ # Expect the bulk modified-timestamp update for the given issue ids.
+ self.services.issue.issue_tbl.Update(
+ self.cnxn, {'modified': modified_timestamp or self.now},
+ id=iids, commit=False)
+
+ def testCreateIssue_spam(self):
+ # Spam verdict -> issue is parked with local_id -1.
+ settings.classifier_spam_thresh = 0.9
+ self.SetUpAllocateNextSpamID(789, None, None)
+ self.SetUpInsertSpamIssue()
+ self.SetUpInsertComment(7890101, True)
+
+ self.services.spam.ClassifyIssue(mox.IsA(tracker_pb2.Issue),
+ mox.IsA(tracker_pb2.IssueComment)).AndReturn(
+ self.classifierResult('spam', 1.0))
+ self.services.spam.RecordClassifierIssueVerdict(self.cnxn,
+ mox.IsA(tracker_pb2.Issue), True, 1.0)
+ self.SetUpUpdateIssuesModified(set())
+
+ self.mox.ReplayAll()
+ actual_local_id = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'sum',
+ 'New', 111L, [], ['Type-Defect'], [], [], 111L, 'content',
+ index_now=False, timestamp=self.now)
+ self.mox.VerifyAll()
+ self.assertEqual(-1, actual_local_id)
+
+ def testGetAllIssuesInProject_NoIssues(self):
+ self.SetUpGetHighestLocalID(789, None, None)
+ self.mox.ReplayAll()
+ issues = self.services.issue.GetAllIssuesInProject(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual([], issues)
+
+ def testGetAnyOnHandIssue(self):
+ issue_ids = [78901, 78902, 78903]
+ self.SetUpGetIssues()
+ issue = self.services.issue.GetAnyOnHandIssue(issue_ids)
+ self.assertEqual(78901, issue.issue_id)
+
+ def SetUpGetIssues(self):
+ # Pre-populate the issue cache with one open (Live) and one closed
+ # (Fixed) issue; returns both for assertions.
+ issue_1 = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ issue_1.project_name = 'proj'
+ issue_2 = fake.MakeTestIssue(
+ project_id=789, local_id=2, owner_id=111L, summary='sum',
+ status='Fixed', issue_id=78902)
+ issue_2.project_name = 'proj'
+ self.services.issue.issue_2lc.CacheItem(78901, issue_1)
+ self.services.issue.issue_2lc.CacheItem(78902, issue_2)
+ return issue_1, issue_2
+
+ def testGetIssuesDict(self):
+ issue_ids = [78901, 78902]
+ issue_1, issue_2 = self.SetUpGetIssues()
+ issues_dict = self.services.issue.GetIssuesDict(self.cnxn, issue_ids)
+ self.assertEqual(
+ {78901: issue_1, 78902: issue_2},
+ issues_dict)
+
+ def testGetIssues(self):
+ issue_ids = [78901, 78902]
+ issue_1, issue_2 = self.SetUpGetIssues()
+ issues = self.services.issue.GetIssues(self.cnxn, issue_ids)
+ self.assertEqual([issue_1, issue_2], issues)
+
+ def testGetIssue(self):
+ issue_1, _issue_2 = self.SetUpGetIssues()
+ actual_issue = self.services.issue.GetIssue(self.cnxn, 78901)
+ self.assertEqual(issue_1, actual_issue)
+
+ def testGetIssuesByLocalIDs(self):
+ issue_1, issue_2 = self.SetUpGetIssues()
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.services.issue.issue_id_2lc.CacheItem((789, 2), 78902)
+ actual_issues = self.services.issue.GetIssuesByLocalIDs(
+ self.cnxn, 789, [1, 2])
+ self.assertEqual([issue_1, issue_2], actual_issues)
+
+ def testGetIssueByLocalID(self):
+ issue_1, _issue_2 = self.SetUpGetIssues()
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ actual_issues = self.services.issue.GetIssueByLocalID(self.cnxn, 789, 1)
+ self.assertEqual(issue_1, actual_issues)
+
+ def testGetOpenAndClosedIssues(self):
+ # issue_1 is 'Live' (open) and issue_2 is 'Fixed' (closed).
+ issue_1, issue_2 = self.SetUpGetIssues()
+ open_issues, closed_issues = self.services.issue.GetOpenAndClosedIssues(
+ self.cnxn, [78901, 78902])
+ self.assertEqual([issue_1], open_issues)
+ self.assertEqual([issue_2], closed_issues)
+
+ def SetUpGetCurrentLocationOfMovedIssue(self, project_id, local_id):
+ # Former location resolves to an issue_id; the current row answers
+ # with (project_id + 1, local_id + 1) so the test can detect the move.
+ issue_id = project_id * 100 + local_id
+ self.services.issue.issueformerlocations_tbl.SelectValue(
+ self.cnxn, 'issue_id', default=0, project_id=project_id,
+ local_id=local_id).AndReturn(issue_id)
+ self.services.issue.issue_tbl.SelectRow(
+ self.cnxn, cols=['project_id', 'local_id'], id=issue_id).AndReturn(
+ (project_id + 1, local_id + 1))
+
+ def testGetCurrentLocationOfMovedIssue(self):
+ self.SetUpGetCurrentLocationOfMovedIssue(789, 1)
+ self.mox.ReplayAll()
+ new_project_id, new_local_id = (
+ self.services.issue.GetCurrentLocationOfMovedIssue(self.cnxn, 789, 1))
+ self.mox.VerifyAll()
+ self.assertEqual(789 + 1, new_project_id)
+ self.assertEqual(1 + 1, new_local_id)
+
+ def SetUpGetPreviousLocations(self, issue_id, location_rows):
+ self.services.issue.issueformerlocations_tbl.Select(
+ self.cnxn, cols=['project_id', 'local_id'],
+ issue_id=issue_id).AndReturn(location_rows)
+
+ def testGetPreviousLocations(self):
+ # The issue's current location (789, 1) is filtered out of the result.
+ self.SetUpGetPreviousLocations(78901, [(781, 1), (782, 11), (789, 1)])
+ self.mox.ReplayAll()
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ locations = self.services.issue.GetPreviousLocations(self.cnxn, issue)
+ self.mox.VerifyAll()
+ self.assertEqual(locations, [(781, 1), (782, 11)])
+
+ def SetUpInsertIssue(self, label_rows=None):
+ # Expect the main issue-row insert (generated id 78901), the shard
+ # assignment update, then rewrites of every child table.
+ row = (789, 1, 1, 111L, 111L, self.now, 0, self.now, None, 0,
+ False, 0, 0, False)
+ self.services.issue.issue_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUE_COLS[1:], [row],
+ commit=False, return_generated_ids=True).AndReturn([78901])
+ self.cnxn.Commit()
+ self.services.issue.issue_tbl.Update(
+ self.cnxn, {'shard': 78901 % settings.num_logical_shards},
+ id=78901, commit=False)
+ self.SetUpUpdateIssuesSummary()
+ self.SetUpUpdateIssuesLabels(label_rows=label_rows)
+ self.SetUpUpdateIssuesFields()
+ self.SetUpUpdateIssuesComponents()
+ self.SetUpUpdateIssuesCc()
+ self.SetUpUpdateIssuesNotify()
+ self.SetUpUpdateIssuesRelation()
+
+ def SetUpInsertSpamIssue(self):
+ # Same as SetUpInsertIssue, but local_id is -1 and is_spam is True.
+ row = (789, -1, 1, 111L, 111L, self.now, 0, self.now, None, 0,
+ False, 0, 0, True)
+ self.services.issue.issue_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUE_COLS[1:], [row],
+ commit=False, return_generated_ids=True).AndReturn([78901])
+ self.cnxn.Commit()
+ self.services.issue.issue_tbl.Update(
+ self.cnxn, {'shard': 78901 % settings.num_logical_shards},
+ id=78901, commit=False)
+ self.SetUpUpdateIssuesSummary()
+ self.SetUpUpdateIssuesLabels()
+ self.SetUpUpdateIssuesFields()
+ self.SetUpUpdateIssuesComponents()
+ self.SetUpUpdateIssuesCc()
+ self.SetUpUpdateIssuesNotify()
+ self.SetUpUpdateIssuesRelation()
+
+ def SetUpUpdateIssuesSummary(self):
+ self.services.issue.issuesummary_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'summary'],
+ [(78901, 'sum')], replace=True, commit=False)
+
+ def SetUpUpdateIssuesLabels(self, label_rows=None):
+ # Child tables are rewritten delete-then-insert for issue 78901.
+ if label_rows is None:
+ label_rows = [(78901, 1, False, 1)]
+ self.services.issue.issue2label_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.issue2label_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'label_id', 'derived', 'issue_shard'],
+ label_rows, ignore=True, commit=False)
+
+ def SetUpUpdateIssuesFields(self, issue2fieldvalue_rows=None):
+ issue2fieldvalue_rows = issue2fieldvalue_rows or []
+ self.services.issue.issue2fieldvalue_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.issue2fieldvalue_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUE2FIELDVALUE_COLS + ['issue_shard'],
+ issue2fieldvalue_rows, commit=False)
+
+ def SetUpUpdateIssuesComponents(self, issue2component_rows=None):
+ issue2component_rows = issue2component_rows or []
+ self.services.issue.issue2component_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.issue2component_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'component_id', 'derived', 'issue_shard'],
+ issue2component_rows, ignore=True, commit=False)
+
+ def SetUpUpdateIssuesCc(self, issue2cc_rows=None):
+ issue2cc_rows = issue2cc_rows or []
+ self.services.issue.issue2cc_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.issue2cc_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'cc_id', 'derived', 'issue_shard'],
+ issue2cc_rows, ignore=True, commit=False)
+
+ def SetUpUpdateIssuesNotify(self, notify_rows=None):
+ notify_rows = notify_rows or []
+ self.services.issue.issue2notify_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.issue2notify_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUE2NOTIFY_COLS,
+ notify_rows, ignore=True, commit=False)
+
+ def SetUpUpdateIssuesRelation(
+ self, relation_rows=None, dangling_relation_rows=None):
+ relation_rows = relation_rows or []
+ dangling_relation_rows = dangling_relation_rows or []
+ self.services.issue.issuerelation_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ # Blockedon rows pointing at this issue are deleted too.
+ self.services.issue.issuerelation_tbl.Delete(
+ self.cnxn, dst_issue_id=[78901], kind='blockedon',
+ commit=False)
+ self.services.issue.issuerelation_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUERELATION_COLS, relation_rows,
+ ignore=True, commit=False)
+ self.services.issue.danglingrelation_tbl.Delete(
+ self.cnxn, issue_id=[78901], commit=False)
+ self.services.issue.danglingrelation_tbl.InsertRows(
+ self.cnxn, issue_svc.DANGLINGRELATION_COLS, dangling_relation_rows,
+ ignore=True, commit=False)
+
+ def testInsertIssue(self):
+ self.SetUpInsertIssue()
+ self.mox.ReplayAll()
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, reporter_id=111L,
+ summary='sum', status='New', labels=['Type-Defect'], issue_id=78901,
+ opened_timestamp=self.now, modified_timestamp=self.now)
+ actual_issue_id = self.services.issue.InsertIssue(self.cnxn, issue)
+ self.mox.VerifyAll()
+ self.assertEqual(78901, actual_issue_id)
+
+ def SetUpUpdateIssues(self, given_delta=None):
+ # Expect the issue-row Update; with the default delta, also expect
+ # the full child-table rewrite before the final Commit.
+ delta = given_delta or {
+ 'project_id': 789,
+ 'local_id': 1,
+ 'owner_id': 111L,
+ 'status_id': 1,
+ 'opened': 123456789,
+ 'closed': 0,
+ 'modified': 123456789,
+ 'derived_owner_id': None,
+ 'derived_status_id': None,
+ 'deleted': False,
+ 'star_count': 12,
+ 'attachment_count': 0,
+ 'is_spam': False,
+ }
+ self.services.issue.issue_tbl.Update(
+ self.cnxn, delta, id=78901, commit=False)
+ if not given_delta:
+ self.SetUpUpdateIssuesLabels()
+ self.SetUpUpdateIssuesCc()
+ self.SetUpUpdateIssuesFields()
+ self.SetUpUpdateIssuesComponents()
+ self.SetUpUpdateIssuesNotify()
+ self.SetUpUpdateIssuesSummary()
+ self.SetUpUpdateIssuesRelation()
+
+ self.cnxn.Commit()
+
+ def testUpdateIssues_Empty(self):
+ # Note: no setup because DB should not be called.
+ self.mox.ReplayAll()
+ self.services.issue.UpdateIssues(self.cnxn, [])
+ self.mox.VerifyAll()
+
+ def testUpdateIssues_Normal(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', labels=['Type-Defect'], issue_id=78901,
+ opened_timestamp=123456789, modified_timestamp=123456789,
+ star_count=12)
+ self.SetUpUpdateIssues()
+ self.mox.ReplayAll()
+ self.services.issue.UpdateIssues(self.cnxn, [issue])
+ self.mox.VerifyAll()
+
+ def testUpdateIssue(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', labels=['Type-Defect'], issue_id=78901,
+ opened_timestamp=123456789, modified_timestamp=123456789,
+ star_count=12)
+ self.SetUpUpdateIssues()
+ self.mox.ReplayAll()
+ self.services.issue.UpdateIssue(self.cnxn, issue)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesSummary(self):
+ issue = fake.MakeTestIssue(
+ local_id=1, issue_id=78901, owner_id=111L, summary='sum', status='New',
+ project_id=789)
+ self.SetUpUpdateIssuesSummary()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesSummary(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesLabels(self):
+ issue = fake.MakeTestIssue(
+ local_id=1, issue_id=78901, owner_id=111L, summary='sum', status='New',
+ labels=['Type-Defect'], project_id=789)
+ self.SetUpUpdateIssuesLabels()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesLabels(
+ self.cnxn, [issue], 789, commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesFields_Empty(self):
+ issue = fake.MakeTestIssue(
+ local_id=1, issue_id=78901, owner_id=111L, summary='sum', status='New',
+ project_id=789)
+ self.SetUpUpdateIssuesFields()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesFields(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesFields_Some(self):
+ # One non-derived int field value and one derived string field value
+ # each become a row carrying the issue's shard.
+ issue = fake.MakeTestIssue(
+ local_id=1, issue_id=78901, owner_id=111L, summary='sum', status='New',
+ project_id=789)
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ fv1 = tracker_bizobj.MakeFieldValue(345, 679, '', 0L, False)
+ issue.field_values.append(fv1)
+ fv2 = tracker_bizobj.MakeFieldValue(346, 0, 'Blue', 0L, True)
+ issue.field_values.append(fv2)
+ self.SetUpUpdateIssuesFields(issue2fieldvalue_rows=[
+ (issue.issue_id, fv1.field_id, fv1.int_value, fv1.str_value,
+ None, fv1.derived, issue_shard),
+ (issue.issue_id, fv2.field_id, fv2.int_value, fv2.str_value,
+ None, fv2.derived, issue_shard),
+ ])
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesFields(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesComponents_Empty(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ self.SetUpUpdateIssuesComponents()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesComponents(
+ self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesCc_Empty(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ self.SetUpUpdateIssuesCc()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesCc(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesCc_Some(self):
+ # Direct CCs get derived=False rows; derived CCs get derived=True.
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ issue.cc_ids = [222L, 333L]
+ issue.derived_cc_ids = [888L]
+ issue_shard = issue.issue_id % settings.num_logical_shards
+ self.SetUpUpdateIssuesCc(issue2cc_rows=[
+ (issue.issue_id, 222L, False, issue_shard),
+ (issue.issue_id, 333L, False, issue_shard),
+ (issue.issue_id, 888L, True, issue_shard),
+ ])
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesCc(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesNotify_Empty(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ self.SetUpUpdateIssuesNotify()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesNotify(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testUpdateIssuesRelation_Empty(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ self.SetUpUpdateIssuesRelation()
+ self.mox.ReplayAll()
+ self.services.issue._UpdateIssuesRelation(self.cnxn, [issue], commit=False)
+ self.mox.VerifyAll()
+
+ def testDeltaUpdateIssue(self):
+ pass # TODO(jrobbins): write more tests
+
+ def testDeltaUpdateIssue_MergedInto(self):
+ # Merging issue 1 into issue 2 should add a merged-into amendment
+ # comment and touch the modified timestamps of both issues.
+ commenter_id = 222L
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901, project_name='proj')
+ target_issue = fake.MakeTestIssue(
+ project_id=789, local_id=2, owner_id=111L, summary='sum sum',
+ status='Live', issue_id=78902, project_name='proj')
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ self.mox.StubOutWithMock(self.services.issue, 'GetIssue')
+ self.mox.StubOutWithMock(self.services.issue, 'UpdateIssue')
+ self.mox.StubOutWithMock(self.services.issue, 'CreateIssueComment')
+ self.mox.StubOutWithMock(self.services.issue, '_UpdateIssuesModified')
+
+ # The old merged_into value (0) is looked up and is not a real issue.
+ self.services.issue.GetIssue(
+ self.cnxn, 0).AndRaise(issue_svc.NoSuchIssueException)
+ self.services.issue.GetIssue(
+ self.cnxn, target_issue.issue_id).AndReturn(target_issue)
+ self.services.issue.UpdateIssue(
+ self.cnxn, issue, commit=False, invalidate=False)
+ amendments = [
+ tracker_bizobj.MakeMergedIntoAmendment(
+ ('proj', 2), None, default_project_name='proj')]
+ self.services.issue.CreateIssueComment(
+ self.cnxn, 789, 1, commenter_id, 'comment text',
+ amendments=amendments, commit=False)
+ self.services.issue._UpdateIssuesModified(
+ self.cnxn, {issue.issue_id, target_issue.issue_id},
+ modified_timestamp=self.now, invalidate=True)
+
+ self.mox.ReplayAll()
+ self.services.issue.DeltaUpdateIssue(
+ self.cnxn, self.services, commenter_id, issue.project_id, config,
+ issue, issue.status, issue.owner_id,
+ [], [], [], [], [], [], [], [], [],
+ merged_into=target_issue.issue_id, comment='comment text',
+ index_now=False, timestamp=self.now)
+ self.mox.VerifyAll()
+
+ def testApplyIssueComment(self):
+ # Ham comment: the comment is stored and no spam handling happens.
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+
+ self.mox.StubOutWithMock(self.services.issue, 'GetIssueByLocalID')
+ self.mox.StubOutWithMock(self.services.issue, 'UpdateIssues')
+ self.mox.StubOutWithMock(self.services.issue, 'GetCommentsForIssue')
+ self.mox.StubOutWithMock(self.services.issue, 'SoftDeleteComment')
+ self.mox.StubOutWithMock(self.services.issue, "CreateIssueComment")
+ self.mox.StubOutWithMock(self.services.issue, "_UpdateIssuesModified")
+
+ self.services.issue.GetIssueByLocalID(self.cnxn, issue.project_id,
+ issue.local_id).AndReturn(issue)
+ self.services.issue.CreateIssueComment(self.cnxn, issue.project_id,
+ issue.local_id, issue.reporter_id, 'comment text',
+ amendments=[], attachments=None, inbound_message=None,
+ is_spam=False)
+ self.services.issue.UpdateIssues(self.cnxn, [issue],
+ just_derived=False, update_cols=None, commit=True, invalidate=True)
+ self.services.spam.ClassifyComment('comment text').AndReturn(
+ self.classifierResult('ham', 1.0))
+ self.services.spam.RecordClassifierCommentVerdict(self.cnxn,
+ None, False, 1.0)
+ self.services.issue._UpdateIssuesModified(
+ self.cnxn, set(), modified_timestamp=self.now)
+
+ self.mox.ReplayAll()
+ self.services.issue.ApplyIssueComment(self.cnxn, self.services,
+ issue.reporter_id, issue.project_id, issue.local_id, issue.summary,
+ issue.status, issue.owner_id, issue.cc_ids, issue.labels,
+ issue.field_values, issue.component_ids, [],
+ [], [], [], issue.merged_into, comment='comment text',
+ timestamp=self.now)
+ self.mox.VerifyAll()
+
+ def testApplyIssueComment_spam(self):
+ # Spam comment above threshold: the comment is created with
+ # is_spam=True and soft-deleted.
+ settings.classifier_spam_thresh = 0.5
+
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+
+ self.mox.StubOutWithMock(self.services.issue, "GetIssueByLocalID")
+ self.mox.StubOutWithMock(self.services.issue, "UpdateIssues")
+ self.mox.StubOutWithMock(self.services.issue, "GetCommentsForIssue")
+ self.mox.StubOutWithMock(self.services.issue, "CreateIssueComment")
+ self.mox.StubOutWithMock(self.services.issue, "SoftDeleteComment")
+ self.mox.StubOutWithMock(self.services.issue, "_UpdateIssuesModified")
+
+ self.services.issue.GetIssueByLocalID(self.cnxn, 789, 1).AndReturn(issue)
+ self.services.issue.UpdateIssues(self.cnxn, [issue],
+ just_derived=False, update_cols=None, commit=True, invalidate=True)
+ self.services.issue.GetCommentsForIssue(self.cnxn,
+ issue.issue_id).AndReturn([""])
+ self.services.issue.SoftDeleteComment(self.cnxn,
+ issue.project_id, issue.local_id, 0, issue.reporter_id,
+ self.services.user, is_spam=True)
+ self.services.spam.ClassifyComment('comment text').AndReturn(
+ self.classifierResult('spam', 1.0))
+ self.services.spam.RecordClassifierCommentVerdict(self.cnxn,
+ mox.IsA(tracker_pb2.IssueComment), True, 1.0)
+ self.services.issue.CreateIssueComment(self.cnxn, issue.project_id,
+ issue.local_id, issue.reporter_id, 'comment text',
+ amendments=[], attachments=None, inbound_message=None,
+ is_spam=True).AndReturn(tracker_pb2.IssueComment())
+ self.services.issue._UpdateIssuesModified(
+ self.cnxn, set(), modified_timestamp=self.now)
+
+ self.mox.ReplayAll()
+ self.services.issue.ApplyIssueComment(self.cnxn, self.services,
+ issue.reporter_id, issue.project_id, issue.local_id, issue.summary,
+ issue.status, issue.owner_id, issue.cc_ids, issue.labels,
+ issue.field_values, issue.component_ids, [],
+ [], [], [], issue.merged_into, comment='comment text',
+ timestamp=self.now)
+ self.mox.VerifyAll()
+
+ def testApplyIssueComment_blockedon(self):
+ # Adding a blocked-on issue amends both issues and touches the
+ # blocked-on issue's modified timestamp.
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, owner_id=111L, summary='sum',
+ status='Live', issue_id=78901)
+ blockedon_issue = fake.MakeTestIssue(
+ project_id=789, local_id=2, owner_id=111L, summary='sum',
+ status='Live', issue_id=78902)
+
+ self.mox.StubOutWithMock(self.services.issue, "GetIssueByLocalID")
+ self.mox.StubOutWithMock(self.services.issue, "UpdateIssues")
+ self.mox.StubOutWithMock(self.services.issue, "CreateIssueComment")
+ self.mox.StubOutWithMock(self.services.issue, "GetIssues")
+ self.mox.StubOutWithMock(self.services.issue, "_UpdateIssuesModified")
+ # Call to find added blockedon issues.
+ self.services.issue.GetIssues(
+ self.cnxn, [blockedon_issue.issue_id]).AndReturn([blockedon_issue])
+ # Call to find removed blockedon issues.
+ self.services.issue.GetIssues(self.cnxn, []).AndReturn([])
+
+ self.services.issue.GetIssueByLocalID(self.cnxn, 789, 1).AndReturn(issue)
+ self.services.issue.UpdateIssues(self.cnxn, [issue],
+ just_derived=False, update_cols=None, commit=True, invalidate=True)
+ self.services.spam.ClassifyComment('comment text').AndReturn(
+ self.classifierResult('ham', 1.0))
+ self.services.spam.RecordClassifierCommentVerdict(self.cnxn,
+ mox.IsA(tracker_pb2.IssueComment), False, 1.0)
+ self.services.issue.CreateIssueComment(self.cnxn, issue.project_id,
+ issue.local_id, issue.reporter_id, 'comment text',
+ amendments=[
+ tracker_bizobj.MakeBlockedOnAmendment(
+ [(blockedon_issue.project_name, blockedon_issue.local_id)], [],
+ default_project_name=blockedon_issue.project_name)],
+ attachments=None, inbound_message=None,
+ is_spam=False).AndReturn(tracker_pb2.IssueComment())
+ # Add a comment on the blockedon issue.
+ self.services.issue.CreateIssueComment(
+ self.cnxn, blockedon_issue.project_id, blockedon_issue.local_id,
+ blockedon_issue.reporter_id, content='',
+ amendments=[tracker_bizobj.MakeBlockingAmendment(
+ [(issue.project_name, issue.local_id)], [],
+ default_project_name=issue.project_name)])
+ self.services.issue._UpdateIssuesModified(
+ self.cnxn, {blockedon_issue.issue_id}, modified_timestamp=self.now)
+
+ self.mox.ReplayAll()
+ self.services.issue.ApplyIssueComment(self.cnxn, self.services,
+ issue.reporter_id, issue.project_id, issue.local_id, issue.summary,
+ issue.status, issue.owner_id, issue.cc_ids, issue.labels,
+ issue.field_values, issue.component_ids, [blockedon_issue.issue_id],
+ [], [], [], issue.merged_into, comment='comment text',
+ timestamp=self.now)
+ self.mox.VerifyAll()
+
+ def SetUpMoveIssues_NewProject(self):
+ self.services.issue.issueformerlocations_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUEFORMERLOCATIONS_COLS, project_id=789,
+ issue_id=[78901]).AndReturn([])
+ self.SetUpAllocateNextLocalID(789, None, None)
+ self.SetUpUpdateIssues()
+ self.services.issue.comment_tbl.Update(
+ self.cnxn, {'project_id': 789}, issue_id=[78901], commit=False)
+
+ old_location_rows = [(78901, 711, 2)]
+ self.services.issue.issueformerlocations_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUEFORMERLOCATIONS_COLS, old_location_rows,
+ ignore=True, commit=False)
+ self.cnxn.Commit()
+
+ def testMoveIssues_NewProject(self):
+ """Move project 711 issue 2 to become project 789 issue 1."""
+ dest_project = fake.Project(project_id=789)
+ issue = fake.MakeTestIssue(
+ project_id=711, local_id=2, owner_id=111L, summary='sum',
+ status='Live', labels=['Type-Defect'], issue_id=78901,
+ opened_timestamp=123456789, modified_timestamp=123456789,
+ star_count=12)
+ self.SetUpMoveIssues_NewProject()
+ self.mox.ReplayAll()
+ self.services.issue.MoveIssues(
+ self.cnxn, dest_project, [issue], self.services.user)
+ self.mox.VerifyAll()
+
+ # TODO(jrobbins): case where issue is moved back into former project
+
+ def testExpungeFormerLocations(self):
+ self.services.issue.issueformerlocations_tbl.Delete(
+ self.cnxn, project_id=789)
+
+ self.mox.ReplayAll()
+ self.services.issue.ExpungeFormerLocations(self.cnxn, 789)
+ self.mox.VerifyAll()
+
+ def testExpungeIssues(self):
+ issue_ids = [1, 2]
+
+ self.mox.StubOutWithMock(search, 'Index')
+ search.Index(name=settings.search_index_name_format % 1).AndReturn(
+ MockIndex())
+ search.Index(name=settings.search_index_name_format % 2).AndReturn(
+ MockIndex())
+
+ self.services.issue.issuesummary_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issue2label_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issue2component_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issue2cc_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issue2notify_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issueupdate_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.attachment_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.comment_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issuerelation_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issuerelation_tbl.Delete(self.cnxn, dst_issue_id=[1, 2])
+ self.services.issue.danglingrelation_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issueformerlocations_tbl.Delete(
+ self.cnxn, issue_id=[1, 2])
+ self.services.issue.reindexqueue_tbl.Delete(self.cnxn, issue_id=[1, 2])
+ self.services.issue.issue_tbl.Delete(self.cnxn, id=[1, 2])
+
+ self.mox.ReplayAll()
+ self.services.issue.ExpungeIssues(self.cnxn, issue_ids)
+ self.mox.VerifyAll()
+
+ def testSoftDeleteIssue(self):
+ project = fake.Project(project_id=789)
+ issue_1, _issue_2 = self.SetUpGetIssues()
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ delta = {'deleted': True}
+ self.services.issue.issue_tbl.Update(
+ self.cnxn, delta, id=78901, commit=False)
+ self.cnxn.Commit()
+ self.mox.ReplayAll()
+ self.services.issue.SoftDeleteIssue(
+ self.cnxn, project.project_id, 1, True, self.services.user)
+ self.mox.VerifyAll()
+ self.assertTrue(issue_1.deleted)
+
+ def SetUpDeleteComponentReferences(self, component_id):
+ self.services.issue.issue2component_tbl.Delete(
+ self.cnxn, component_id=component_id)
+
+ def testDeleteComponentReferences(self):
+ self.SetUpDeleteComponentReferences(123)
+ self.mox.ReplayAll()
+ self.services.issue.DeleteComponentReferences(self.cnxn, 123)
+ self.mox.VerifyAll()
+
+ ### Local ID generation
+
+ def SetUpInitializeLocalID(self, project_id):
+ self.services.issue.localidcounter_tbl.InsertRow(
+ self.cnxn, project_id=project_id, used_local_id=0, used_spam_id=0)
+
+ def testInitializeLocalID(self):
+ self.SetUpInitializeLocalID(789)
+ self.mox.ReplayAll()
+ self.services.issue.InitializeLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+
+ def SetUpAllocateNextLocalID(
+ self, project_id, highest_in_use, highest_former):
+ highest_either = max(highest_in_use or 0, highest_former or 0)
+ self.services.issue.localidcounter_tbl.IncrementCounterValue(
+ self.cnxn, 'used_local_id', project_id=project_id).AndReturn(
+ highest_either + 1)
+
+ def SetUpAllocateNextSpamID(
+ self, project_id, highest_in_use, highest_former):
+ highest_either = max(highest_in_use or 0, highest_former or 0)
+ self.services.issue.localidcounter_tbl.IncrementCounterValue(
+ self.cnxn, 'used_spam_id', project_id=project_id).AndReturn(
+ highest_either + 1)
+
+ def testAllocateNextLocalID_NewProject(self):
+ self.SetUpAllocateNextLocalID(789, None, None)
+ self.mox.ReplayAll()
+ next_local_id = self.services.issue.AllocateNextLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(1, next_local_id)
+
+ def testAllocateNextLocalID_HighestInUse(self):
+ self.SetUpAllocateNextLocalID(789, 14, None)
+ self.mox.ReplayAll()
+ next_local_id = self.services.issue.AllocateNextLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(15, next_local_id)
+
+ def testAllocateNextLocalID_HighestWasMoved(self):
+ self.SetUpAllocateNextLocalID(789, 23, 66)
+ self.mox.ReplayAll()
+ next_local_id = self.services.issue.AllocateNextLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(67, next_local_id)
+
+ def SetUpGetHighestLocalID(self, project_id, highest_in_use, highest_former):
+ self.services.issue.issue_tbl.SelectValue(
+ self.cnxn, 'MAX(local_id)', project_id=project_id).AndReturn(
+ highest_in_use)
+ self.services.issue.issueformerlocations_tbl.SelectValue(
+ self.cnxn, 'MAX(local_id)', project_id=project_id).AndReturn(
+ highest_former)
+
+ def testGetHighestLocalID_OnlyActiveLocalIDs(self):
+ self.SetUpGetHighestLocalID(789, 14, None)
+ self.mox.ReplayAll()
+ highest_id = self.services.issue.GetHighestLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(14, highest_id)
+
+ def testGetHighestLocalID_OnlyFormerIDs(self):
+ self.SetUpGetHighestLocalID(789, None, 97)
+ self.mox.ReplayAll()
+ highest_id = self.services.issue.GetHighestLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(97, highest_id)
+
+ def testGetHighestLocalID_BothActiveAndFormer(self):
+ self.SetUpGetHighestLocalID(789, 345, 97)
+ self.mox.ReplayAll()
+ highest_id = self.services.issue.GetHighestLocalID(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(345, highest_id)
+
+ def testGetAllLocalIDsInProject(self):
+ self.SetUpGetHighestLocalID(789, 14, None)
+ self.mox.ReplayAll()
+ local_id_range = self.services.issue.GetAllLocalIDsInProject(self.cnxn, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(range(1, 15), local_id_range)
+
+ ### Comments
+
+ def testDeserializeComments_Empty(self):
+ comments = self.services.issue._DeserializeComments([], [], [])
+ self.assertEqual([], comments)
+
+ def SetUpCommentRows(self):
+ comment_rows = [
+ (7890101, 78901, self.now, 789, 111L,
+ 'content', None, True, None, False)]
+ amendment_rows = [
+ (1, 78901, 7890101, 'cc', 'old', 'new val', 222, None, None)]
+ attachment_rows = []
+ return comment_rows, amendment_rows, attachment_rows
+
+ def testDeserializeComments(self):
+ comment_rows, amendment_rows, attachment_rows = self.SetUpCommentRows()
+ comments = self.services.issue._DeserializeComments(
+ comment_rows, amendment_rows, attachment_rows)
+ self.assertEqual(1, len(comments))
+
+ def SetUpGetComments(self, issue_ids):
+ # Assumes one comment per issue.
+ cids = [issue_id + 1000 for issue_id in issue_ids]
+ self.services.issue.comment_tbl.Select(
+ self.cnxn, cols=['Comment.id'] + issue_svc.COMMENT_COLS[1:],
+ where=None, issue_id=issue_ids, order_by=[('created', [])]).AndReturn([
+ (issue_id + 1000, issue_id, self.now, 789, 111L, 'content',
+ None, True, None, False) for issue_id in issue_ids])
+ # Assume no amendments or attachment for now.
+ self.services.issue.issueupdate_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUEUPDATE_COLS,
+ comment_id=cids).AndReturn([])
+ if issue_ids:
+ attachment_rows = [
+ (1234, issue_ids[0], cids[0], 'a_filename', 1024, 'text/plain',
+ False, None)]
+ else:
+ attachment_rows = []
+
+ self.services.issue.attachment_tbl.Select(
+ self.cnxn, cols=issue_svc.ATTACHMENT_COLS,
+ comment_id=cids).AndReturn(attachment_rows)
+
+ def testGetComments(self):
+ self.SetUpGetComments([100001, 100002])
+ self.mox.ReplayAll()
+ comments = self.services.issue.GetComments(
+ self.cnxn, issue_id=[100001, 100002])
+ self.mox.VerifyAll()
+ self.assertEqual(2, len(comments))
+ self.assertEqual('content', comments[0].content)
+ self.assertEqual('content', comments[1].content)
+
+ def SetUpGetComment_Found(self, comment_id):
+ # Assumes one comment per issue.
+ self.services.issue.comment_tbl.Select(
+ self.cnxn, cols=['Comment.id'] + issue_svc.COMMENT_COLS[1:],
+ where=None, id=comment_id, order_by=[('created', [])]).AndReturn([
+ (comment_id, int(comment_id / 100), self.now, 789, 111L, 'content',
+ None, True, None, False)])
+ # Assume no amendments or attachment for now.
+ self.services.issue.issueupdate_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUEUPDATE_COLS,
+ comment_id=[comment_id]).AndReturn([])
+ self.services.issue.attachment_tbl.Select(
+ self.cnxn, cols=issue_svc.ATTACHMENT_COLS,
+ comment_id=[comment_id]).AndReturn([])
+
+ def testGetComment_Found(self):
+ self.SetUpGetComment_Found(7890101)
+ self.mox.ReplayAll()
+ comment = self.services.issue.GetComment(self.cnxn, 7890101)
+ self.mox.VerifyAll()
+ self.assertEqual('content', comment.content)
+
+ def SetUpGetComment_Missing(self, comment_id):
+ # Assumes one comment per issue.
+ self.services.issue.comment_tbl.Select(
+ self.cnxn, cols=['Comment.id'] + issue_svc.COMMENT_COLS[1:],
+ where=None, id=comment_id, order_by=[('created', [])]).AndReturn([])
+ # Assume no amendments or attachment for now.
+ self.services.issue.issueupdate_tbl.Select(
+ self.cnxn, cols=issue_svc.ISSUEUPDATE_COLS,
+ comment_id=[]).AndReturn([])
+ self.services.issue.attachment_tbl.Select(
+ self.cnxn, cols=issue_svc.ATTACHMENT_COLS, comment_id=[]).AndReturn([])
+
+ def testGetComment_Missing(self):
+ self.SetUpGetComment_Missing(7890101)
+ self.mox.ReplayAll()
+ self.assertRaises(
+ issue_svc.NoSuchCommentException,
+ self.services.issue.GetComment, self.cnxn, 7890101)
+ self.mox.VerifyAll()
+
+ def testGetCommentsForIssue(self):
+ issue = fake.MakeTestIssue(789, 1, 'Summary', 'New', 111L)
+ self.SetUpGetComments([issue.issue_id])
+ self.mox.ReplayAll()
+ self.services.issue.GetCommentsForIssue(self.cnxn, issue.issue_id)
+ self.mox.VerifyAll()
+
+ def testGetCommentsForIssues(self):
+ self.SetUpGetComments([100001, 100002])
+ self.mox.ReplayAll()
+ self.services.issue.GetCommentsForIssues(
+ self.cnxn, issue_ids=[100001, 100002])
+ self.mox.VerifyAll()
+
+ def SetUpInsertComment(self, comment_id, was_escaped, is_spam=False):
+ self.services.issue.comment_tbl.InsertRow(
+ self.cnxn, issue_id=78901, created=self.now, project_id=789,
+ commenter_id=111L, content='content', inbound_message=None,
+ was_escaped=was_escaped, deleted_by=None, is_spam=is_spam,
+ commit=True).AndReturn(comment_id)
+
+ amendment_rows = []
+ self.services.issue.issueupdate_tbl.InsertRows(
+ self.cnxn, issue_svc.ISSUEUPDATE_COLS[1:], amendment_rows,
+ commit=True)
+
+ attachment_rows = []
+ self.services.issue.attachment_tbl.InsertRows(
+ self.cnxn, issue_svc.ATTACHMENT_COLS[1:], attachment_rows,
+ commit=True)
+
+ def testInsertComment(self):
+ self.SetUpInsertComment(7890101, False)
+ self.mox.ReplayAll()
+ comment = tracker_pb2.IssueComment(
+ issue_id=78901, timestamp=self.now, project_id=789, user_id=111L,
+ content='content', was_escaped=False)
+ self.services.issue.InsertComment(self.cnxn, comment, commit=True)
+ self.mox.VerifyAll()
+ self.assertEqual(7890101, comment.id)
+
+ def SetUpUpdateComment(self, comment_id, delta=None):
+ delta = delta or {
+ 'commenter_id': 111L,
+ 'content': 'new content',
+ 'deleted_by': 222L,
+ 'is_spam': False,
+ }
+ self.services.issue.comment_tbl.Update(
+ self.cnxn, delta, id=comment_id)
+
+ def testUpdateComment(self):
+ self.SetUpUpdateComment(7890101)
+ self.mox.ReplayAll()
+ comment = tracker_pb2.IssueComment(
+ id=7890101, issue_id=78901, timestamp=self.now, project_id=789,
+ user_id=111L, content='new content', was_escaped=True, deleted_by=222L,
+ is_spam=False)
+ self.services.issue._UpdateComment(self.cnxn, comment)
+ self.mox.VerifyAll()
+
+ def testMakeIssueComment(self):
+ comment = self.services.issue._MakeIssueComment(
+ 789, 111L, 'content', timestamp=self.now)
+ self.assertEqual('content', comment.content)
+ self.assertEqual([], comment.amendments)
+ self.assertEqual([], comment.attachments)
+
+ def testCreateIssueComment(self):
+ _issue_1, _issue_2 = self.SetUpGetIssues()
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.SetUpInsertComment(7890101, False)
+ self.mox.ReplayAll()
+ comment = self.services.issue.CreateIssueComment(
+ self.cnxn, 789, 1, 111L, 'content', timestamp=self.now)
+ self.mox.VerifyAll()
+ self.assertEqual('content', comment.content)
+
+ def testCreateIssueComment_spam(self):
+ _issue_1, _issue_2 = self.SetUpGetIssues()
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.SetUpInsertComment(7890101, False, is_spam=True)
+ self.mox.ReplayAll()
+ comment = self.services.issue.CreateIssueComment(
+ self.cnxn, 789, 1, 111L, 'content', timestamp=self.now, is_spam=True)
+ self.mox.VerifyAll()
+ self.assertEqual('content', comment.content)
+ self.assertTrue(comment.is_spam)
+
+ def testSoftDeleteComment(self):
+ issue_1, _issue_2 = self.SetUpGetIssues()
+ issue_1.attachment_count = 1
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.SetUpGetComments([78901])
+ self.SetUpUpdateComment(79901, delta={'deleted_by': 222L, 'is_spam': False})
+ self.SetUpUpdateIssues(given_delta={'attachment_count': 0})
+ self.mox.ReplayAll()
+ self.services.issue.SoftDeleteComment(
+ self.cnxn, 789, 1, 0, 222L, self.services.user)
+ self.mox.VerifyAll()
+
+ ### Attachments
+
+ def testGetAttachmentAndContext(self):
+    # TODO(jrobbins): re-implement to use Google Cloud Storage.
+ pass
+
+ def SetUpUpdateAttachment(self, attachment_id, delta):
+ self.services.issue.attachment_tbl.Update(
+ self.cnxn, delta, id=attachment_id)
+
+ def testUpdateAttachment(self):
+ delta = {
+ 'filename': 'a_filename',
+ 'filesize': 1024,
+ 'mimetype': 'text/plain',
+ 'deleted': False,
+ }
+ self.SetUpUpdateAttachment(1234, delta)
+ self.mox.ReplayAll()
+ attach = tracker_pb2.Attachment(
+ attachment_id=1234, filename='a_filename', filesize=1024,
+ mimetype='text/plain')
+ self.services.issue._UpdateAttachment(self.cnxn, attach)
+ self.mox.VerifyAll()
+
+ def testStoreAttachmentBlob(self):
+    # TODO(jrobbins): re-implement to use Google Cloud Storage.
+ pass
+
+ def testSoftDeleteAttachment(self):
+ issue_1, _issue_2 = self.SetUpGetIssues()
+ issue_1.attachment_count = 1
+ self.services.issue.issue_id_2lc.CacheItem((789, 1), 78901)
+ self.SetUpGetComments([78901])
+ self.SetUpUpdateAttachment(1234, {'deleted': True})
+ self.SetUpUpdateIssues(given_delta={'attachment_count': 0})
+
+ self.mox.ReplayAll()
+ self.services.issue.SoftDeleteAttachment(
+ self.cnxn, 789, 1, 0, 1234, self.services.user)
+ self.mox.VerifyAll()
+
+ ### Reindex queue
+
+ def SetUpEnqueueIssuesForIndexing(self, issue_ids):
+ reindex_rows = [(issue_id,) for issue_id in issue_ids]
+ self.services.issue.reindexqueue_tbl.InsertRows(
+ self.cnxn, ['issue_id'], reindex_rows, ignore=True)
+
+ def testEnqueueIssuesForIndexing(self):
+ self.SetUpEnqueueIssuesForIndexing([78901])
+ self.mox.ReplayAll()
+ self.services.issue.EnqueueIssuesForIndexing(self.cnxn, [78901])
+ self.mox.VerifyAll()
+
+ def SetUpReindexIssues(self, issue_ids):
+ self.services.issue.reindexqueue_tbl.Select(
+ self.cnxn, order_by=[('created', [])],
+ limit=50).AndReturn([(issue_id,) for issue_id in issue_ids])
+
+ if issue_ids:
+ _issue_1, _issue_2 = self.SetUpGetIssues()
+ self.services.issue.reindexqueue_tbl.Delete(
+ self.cnxn, issue_id=issue_ids)
+
+ def testReindexIssues_QueueEmpty(self):
+ self.SetUpReindexIssues([])
+ self.mox.ReplayAll()
+ self.services.issue.ReindexIssues(self.cnxn, 50, self.services.user)
+ self.mox.VerifyAll()
+
+ def testReindexIssues_QueueHasTwoIssues(self):
+ self.SetUpReindexIssues([78901, 78902])
+ self.mox.ReplayAll()
+ self.services.issue.ReindexIssues(self.cnxn, 50, self.services.user)
+ self.mox.VerifyAll()
+
+ ### Search functions
+
+ def SetUpRunIssueQuery(
+ self, rows, limit=settings.search_limit_per_shard):
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, distinct=True, cols=['Issue.id'],
+ left_joins=[], where=[('Issue.deleted = %s', [False])], order_by=[],
+ limit=limit).AndReturn(rows)
+
+ def testRunIssueQuery_NoResults(self):
+ self.SetUpRunIssueQuery([])
+ self.mox.ReplayAll()
+ result_iids, capped = self.services.issue.RunIssueQuery(
+ self.cnxn, [], [], [], shard_id=1)
+ self.mox.VerifyAll()
+ self.assertEqual([], result_iids)
+ self.assertFalse(capped)
+
+ def testRunIssueQuery_Normal(self):
+ self.SetUpRunIssueQuery([(1,), (11,), (21,)])
+ self.mox.ReplayAll()
+ result_iids, capped = self.services.issue.RunIssueQuery(
+ self.cnxn, [], [], [], shard_id=1)
+ self.mox.VerifyAll()
+ self.assertEqual([1, 11, 21], result_iids)
+ self.assertFalse(capped)
+
+ def testRunIssueQuery_Capped(self):
+ try:
+ orig = settings.search_limit_per_shard
+ settings.search_limit_per_shard = 3
+ self.SetUpRunIssueQuery([(1,), (11,), (21,)], limit=3)
+ self.mox.ReplayAll()
+ result_iids, capped = self.services.issue.RunIssueQuery(
+ self.cnxn, [], [], [], shard_id=1)
+ self.mox.VerifyAll()
+ self.assertEqual([1, 11, 21], result_iids)
+ self.assertTrue(capped)
+ finally:
+ settings.search_limit_per_shard = orig
+
+ def SetUpGetIIDsByLabelIDs(self):
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['id'],
+ left_joins=[('Issue2Label ON Issue.id = Issue2Label.issue_id', [])],
+ label_id=[123, 456], project_id=789,
+ where=[('shard = %s', [1])]
+ ).AndReturn([(1,), (2,), (3,)])
+
+ def testGetIIDsByLabelIDs(self):
+ self.SetUpGetIIDsByLabelIDs()
+ self.mox.ReplayAll()
+ iids = self.services.issue.GetIIDsByLabelIDs(self.cnxn, [123, 456], 789, 1)
+ self.mox.VerifyAll()
+ self.assertEqual([1, 2, 3], iids)
+
+ def SetUpGetIIDsByParticipant(self):
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['id'],
+ reporter_id=[111L, 888L],
+ where=[('shard = %s', [1]), ('Issue.project_id IN (%s)', [789])]
+ ).AndReturn([(1,)])
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['id'],
+ owner_id=[111L, 888L],
+ where=[('shard = %s', [1]), ('Issue.project_id IN (%s)', [789])]
+ ).AndReturn([(2,)])
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['id'],
+ derived_owner_id=[111L, 888L],
+ where=[('shard = %s', [1]), ('Issue.project_id IN (%s)', [789])]
+ ).AndReturn([(3,)])
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['id'],
+ left_joins=[('Issue2Cc ON Issue2Cc.issue_id = Issue.id', [])],
+ cc_id=[111L, 888L],
+ where=[('shard = %s', [1]), ('Issue.project_id IN (%s)', [789]),
+ ('cc_id IS NOT NULL', [])]
+ ).AndReturn([(4,)])
+ self.services.issue.issue_tbl.Select(
+ self.cnxn, shard_id=1, cols=['Issue.id'],
+ left_joins=[
+ ('Issue2FieldValue ON Issue.id = Issue2FieldValue.issue_id', []),
+ ('FieldDef ON Issue2FieldValue.field_id = FieldDef.id', [])],
+ user_id=[111L, 888L], grants_perm='View',
+ where=[('shard = %s', [1]), ('Issue.project_id IN (%s)', [789]),
+ ('user_id IS NOT NULL', [])]
+ ).AndReturn([(5,)])
+
+ def testGetIIDsByParticipant(self):
+ self.SetUpGetIIDsByParticipant()
+ self.mox.ReplayAll()
+ iids = self.services.issue.GetIIDsByParticipant(
+ self.cnxn, [111L, 888L], [789], 1)
+ self.mox.VerifyAll()
+ self.assertEqual([1, 2, 3, 4, 5], iids)
+
+
+class IssueServiceFunctionsTest(unittest.TestCase):
+
+ def testUpdateClosedTimestamp(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='New', means_open=True))
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='Accepted', means_open=True))
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='Old', means_open=False))
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='Closed', means_open=False))
+
+ issue = tracker_pb2.Issue()
+ issue.local_id = 1234
+ issue.status = 'New'
+
+    # ensure the closed_timestamp field defaults to unset
+ self.assert_(not issue.closed_timestamp)
+
+ # ensure transitioning to the same and other open states
+ # doesn't set the timestamp
+ issue.status = 'New'
+ issue_svc._UpdateClosedTimestamp(config, issue, 'New')
+ self.assert_(not issue.closed_timestamp)
+
+ issue.status = 'Accepted'
+ issue_svc._UpdateClosedTimestamp(config, issue, 'New')
+ self.assert_(not issue.closed_timestamp)
+
+ # ensure transitioning from open to closed sets the timestamp
+ issue.status = 'Closed'
+ issue_svc._UpdateClosedTimestamp(config, issue, 'Accepted')
+ self.assert_(issue.closed_timestamp)
+
+ # ensure that the timestamp is cleared when transitioning from
+ # closed to open
+ issue.status = 'New'
+ issue_svc._UpdateClosedTimestamp(config, issue, 'Closed')
+ self.assert_(not issue.closed_timestamp)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/project_svc_test.py b/appengine/monorail/services/test/project_svc_test.py
new file mode 100644
index 0000000..9b84a17
--- /dev/null
+++ b/appengine/monorail/services/test/project_svc_test.py
@@ -0,0 +1,447 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the project_svc module."""
+
+import time
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+from framework import sql
+from proto import project_pb2
+from proto import user_pb2
+from services import config_svc
+from services import project_svc
+from testing import fake
+
+NOW = 12345678
+
+
+def MakeProjectService(cache_manager, my_mox):
+ project_service = project_svc.ProjectService(cache_manager)
+ project_service.project_tbl = my_mox.CreateMock(sql.SQLTableManager)
+ project_service.user2project_tbl = my_mox.CreateMock(sql.SQLTableManager)
+ project_service.extraperm_tbl = my_mox.CreateMock(sql.SQLTableManager)
+ project_service.membernotes_tbl = my_mox.CreateMock(sql.SQLTableManager)
+ project_service.usergroupprojects_tbl = my_mox.CreateMock(
+ sql.SQLTableManager)
+ return project_service
+
+
+class ProjectTwoLevelCacheTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.cache_manager = fake.CacheManager()
+ self.project_service = MakeProjectService(self.cache_manager, self.mox)
+
+ def testDeserializeProjects(self):
+ project_rows = [
+ (123, 'proj1', 'test proj 1', 'test project', 'live', 'anyone', '', '',
+ None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, None, None, None, None, None),
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '', '',
+ None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, None, None, None, None, None)]
+ role_rows = [
+ (123, 111L, 'owner'), (123, 444L, 'owner'),
+ (123, 222L, 'committer'),
+ (123, 333L, 'contributor'),
+ (234, 111L, 'owner')]
+ extraperm_rows = []
+
+ project_dict = self.project_service.project_2lc._DeserializeProjects(
+ project_rows, role_rows, extraperm_rows)
+
+ self.assertItemsEqual([123, 234], project_dict.keys())
+ self.assertEqual(123, project_dict[123].project_id)
+ self.assertEqual('proj1', project_dict[123].project_name)
+ self.assertEqual(NOW, project_dict[123].recent_activity)
+ self.assertItemsEqual([111L, 444L], project_dict[123].owner_ids)
+ self.assertItemsEqual([222L], project_dict[123].committer_ids)
+ self.assertItemsEqual([333L], project_dict[123].contributor_ids)
+ self.assertEqual(234, project_dict[234].project_id)
+ self.assertItemsEqual([111L], project_dict[234].owner_ids)
+
+
+class ProjectServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.cache_manager = fake.CacheManager()
+ self.config_service = self.mox.CreateMock(config_svc.ConfigService)
+ self.project_service = MakeProjectService(self.cache_manager, self.mox)
+
+ self.proj1 = fake.Project(project_name='proj1', project_id=123)
+ self.proj2 = fake.Project(project_name='proj2', project_id=234)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def SetUpCreateProject(self):
+ # Check for existing project: there should be none.
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_name', 'project_id'],
+ project_name=['proj1']).AndReturn([])
+
+ # Inserting the project gives the project ID.
+ self.project_service.project_tbl.InsertRow(
+ self.cnxn, project_name='proj1',
+ summary='Test project summary', description='Test project description',
+ home_page=None, docs_url=None, logo_file_name=None, logo_gcs_id=None,
+ state='LIVE', access='ANYONE').AndReturn(123)
+
+ # Insert the users. There are none.
+ self.project_service.user2project_tbl.InsertRows(
+ self.cnxn, ['project_id', 'user_id', 'role_name'], [])
+
+ def testCreateProject(self):
+ self.SetUpCreateProject()
+ self.mox.ReplayAll()
+ self.project_service.CreateProject(
+ self.cnxn, 'proj1', owner_ids=[], committer_ids=[], contributor_ids=[],
+ summary='Test project summary', description='Test project description')
+ self.mox.VerifyAll()
+
+ def SetUpLookupProjectIDs(self):
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_name', 'project_id'],
+ project_name=['proj2']).AndReturn([('proj2', 234)])
+
+ def testLookupProjectIDs(self):
+ self.SetUpLookupProjectIDs()
+ self.project_service.project_names_to_ids.CacheItem('proj1', 123)
+ self.mox.ReplayAll()
+ id_dict = self.project_service.LookupProjectIDs(
+ self.cnxn, ['proj1', 'proj2'])
+ self.mox.VerifyAll()
+ self.assertEqual({'proj1': 123, 'proj2': 234}, id_dict)
+
+ def testLookupProjectNames(self):
+ self.SetUpGetProjects() # Same as testGetProjects()
+ self.project_service.project_2lc.CacheItem(123, self.proj1)
+ self.mox.ReplayAll()
+ name_dict = self.project_service.LookupProjectNames(
+ self.cnxn, [123, 234])
+ self.mox.VerifyAll()
+ self.assertEqual({123: 'proj1', 234: 'proj2'}, name_dict)
+
+ def SetUpGetProjects(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '', '',
+ None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, None, None, None, None, None)]
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=project_svc.PROJECT_COLS,
+ project_id=[234]).AndReturn(project_rows)
+ self.project_service.user2project_tbl.Select(
+ self.cnxn, cols=['project_id', 'user_id', 'role_name'],
+ project_id=[234]).AndReturn([])
+ self.project_service.extraperm_tbl.Select(
+ self.cnxn, cols=project_svc.EXTRAPERM_COLS,
+ project_id=[234]).AndReturn([])
+
+ def testGetProjects(self):
+ self.project_service.project_2lc.CacheItem(123, self.proj1)
+ self.SetUpGetProjects()
+ self.mox.ReplayAll()
+ project_dict = self.project_service.GetProjects(
+ self.cnxn, [123, 234])
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], project_dict.keys())
+ self.assertEqual('proj1', project_dict[123].project_name)
+ self.assertEqual('proj2', project_dict[234].project_name)
+
+ def testGetVisibleLiveProjects_AnyoneAccessWithUser(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '', '',
+ None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, None, None)]
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.SetUpGetProjects()
+ self.mox.ReplayAll()
+ user_a = user_pb2.User(email='a@example.com')
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, user_a, set([111]))
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([234], project_ids)
+
+ def testGetVisibleLiveProjects_AnyoneAccessWithAnon(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'anyone', '', '',
+ None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, None, None, None, None, None)]
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.SetUpGetProjects()
+ self.mox.ReplayAll()
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, None, None)
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([234], project_ids)
+
+ def testGetVisibleLiveProjects_RestrictedAccessWithMember(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
+ '', '', None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True, False,
+ False, False, None, None, None, None, None)]
+ self.proj2.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ self.proj2.contributor_ids.append(111)
+ self.project_service.project_2lc.CacheItem(234, self.proj2)
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.mox.ReplayAll()
+ user_a = user_pb2.User(email='a@example.com')
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, user_a, set([111]))
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([234], project_ids)
+
+ def testGetVisibleLiveProjects_RestrictedAccessWithNonMember(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
+ '', '', None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True,
+ False, False, None, None, None, None, None)]
+ self.proj2.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ self.project_service.project_2lc.CacheItem(234, self.proj2)
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.mox.ReplayAll()
+ user_a = user_pb2.User(email='a@example.com')
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, user_a, set([111]))
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([], project_ids)
+
+ def testGetVisibleLiveProjects_RestrictedAccessWithAnon(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
+ '', '', None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True,
+ False, False, None, None, None, None, None)]
+ self.proj2.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ self.project_service.project_2lc.CacheItem(234, self.proj2)
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.mox.ReplayAll()
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, None, None)
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([], project_ids)
+
+ def testGetVisibleLiveProjects_RestrictedAccessWithSiteAdmin(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'live', 'members_only',
+ '', '', None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True,
+ False, False, None, None, None, None, None)]
+ self.proj2.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ self.project_service.project_2lc.CacheItem(234, self.proj2)
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.mox.ReplayAll()
+ user_a = user_pb2.User(email='a@example.com')
+ user_a.is_site_admin = True
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, user_a, set([111]))
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([234], project_ids)
+
+ def testGetVisibleLiveProjects_ArchivedProject(self):
+ project_rows = [
+ (234, 'proj2', 'test proj 2', 'test project', 'archived', 'anyone',
+ '', '', None, '', 0, 50 * 1024 * 1024, NOW, NOW, None, True,
+ False, False, None, None, None, None, None)]
+ self.proj2.state = project_pb2.ProjectState.ARCHIVED
+ self.project_service.project_2lc.CacheItem(234, self.proj2)
+
+ self.project_service.project_tbl.Select(
+ self.cnxn, cols=['project_id'],
+ state=project_pb2.ProjectState.LIVE).AndReturn(project_rows)
+ self.mox.ReplayAll()
+ user_a = user_pb2.User(email='a@example.com')
+ project_ids = self.project_service.GetVisibleLiveProjects(
+ self.cnxn, user_a, set([111]))
+
+ self.mox.VerifyAll()
+ self.assertItemsEqual([], project_ids)
+
+ def testGetProjectsByName(self):
+ self.project_service.project_names_to_ids.CacheItem('proj1', 123)
+ self.project_service.project_2lc.CacheItem(123, self.proj1)
+ self.SetUpLookupProjectIDs()
+ self.SetUpGetProjects()
+ self.mox.ReplayAll()
+ project_dict = self.project_service.GetProjectsByName(
+ self.cnxn, ['proj1', 'proj2'])
+ self.mox.VerifyAll()
+ self.assertItemsEqual(['proj1', 'proj2'], project_dict.keys())
+ self.assertEqual(123, project_dict['proj1'].project_id)
+ self.assertEqual(234, project_dict['proj2'].project_id)
+
+ def SetUpExpungeProject(self):
+ self.project_service.user2project_tbl.Delete(
+ self.cnxn, project_id=234)
+ self.project_service.usergroupprojects_tbl.Delete(
+ self.cnxn, project_id=234)
+ self.project_service.extraperm_tbl.Delete(
+ self.cnxn, project_id=234)
+ self.project_service.membernotes_tbl.Delete(
+ self.cnxn, project_id=234)
+ self.project_service.project_tbl.Delete(
+ self.cnxn, project_id=234)
+
+ def testExpungeProject(self):
+ self.SetUpExpungeProject()
+ self.mox.ReplayAll()
+ self.project_service.ExpungeProject(self.cnxn, 234)
+ self.mox.VerifyAll()
+
+ def SetUpUpdateProject(self, project_id, delta):
+ self.project_service.project_tbl.Update(
+ self.cnxn, delta, project_id=project_id)
+
+ def testUpdateProject(self):
+ self.SetUpGetProjects()
+ delta = {'summary': 'An even better one-line summary'}
+ self.SetUpUpdateProject(234, delta)
+ self.mox.ReplayAll()
+ self.project_service.UpdateProject(
+ self.cnxn, 234, summary='An even better one-line summary')
+ self.mox.VerifyAll()
+
+ def SetUpUpdateProjectRoles(
+ self, project_id, owner_ids, committer_ids, contributor_ids):
+ self.project_service.project_tbl.Update(
+ self.cnxn, {'cached_content_timestamp': NOW}, project_id=project_id)
+
+ self.project_service.user2project_tbl.Delete(
+ self.cnxn, project_id=project_id, role_name='owner', commit=False)
+ self.project_service.user2project_tbl.Delete(
+ self.cnxn, project_id=project_id, role_name='committer', commit=False)
+ self.project_service.user2project_tbl.Delete(
+ self.cnxn, project_id=project_id, role_name='contributor',
+ commit=False)
+
+ self.project_service.user2project_tbl.InsertRows(
+ self.cnxn, ['project_id', 'user_id', 'role_name'],
+ [(project_id, user_id, 'owner') for user_id in owner_ids],
+ commit=False)
+ self.project_service.user2project_tbl.InsertRows(
+ self.cnxn, ['project_id', 'user_id', 'role_name'],
+ [(project_id, user_id, 'committer') for user_id in committer_ids],
+ commit=False)
+ self.project_service.user2project_tbl.InsertRows(
+ self.cnxn, ['project_id', 'user_id', 'role_name'],
+ [(project_id, user_id, 'contributor') for user_id in contributor_ids],
+ commit=False)
+
+ self.cnxn.Commit()
+
+ def testUpdateProjectRoles(self):
+ self.SetUpGetProjects()
+ self.SetUpUpdateProjectRoles(234, [111L, 222L], [333L], [])
+ self.mox.ReplayAll()
+ self.project_service.UpdateProjectRoles(
+ self.cnxn, 234, [111L, 222L], [333L], [], now=NOW)
+ self.mox.VerifyAll()
+
+ def SetUpMarkProjectDeletable(self):
+ delta = {
+ 'project_name': 'DELETABLE_123',
+ 'state': 'deletable',
+ }
+ self.project_service.project_tbl.Update(self.cnxn, delta, project_id=123)
+ self.config_service.InvalidateMemcacheForEntireProject(123)
+
+ def testMarkProjectDeletable(self):
+ self.SetUpMarkProjectDeletable()
+ self.mox.ReplayAll()
+ self.project_service.MarkProjectDeletable(
+ self.cnxn, 123, self.config_service)
+ self.mox.VerifyAll()
+
+ def testUpdateRecentActivity(self):
+ self.SetUpGetProjects()
+ delta = {'recent_activity_timestamp': NOW}
+ self.SetUpUpdateProject(234, delta)
+ self.mox.ReplayAll()
+ self.project_service.UpdateRecentActivity(self.cnxn, 234, now=NOW)
+ self.mox.VerifyAll()
+
+ def SetUpGetUserRolesInAllProjects(self):
+ rows = [
+ (123, 'committer'),
+ (234, 'owner'),
+ ]
+ self.project_service.user2project_tbl.Select(
+ self.cnxn, cols=['project_id', 'role_name'],
+ user_id={111L, 888L}).AndReturn(rows)
+
+ def testGetUserRolesInAllProjects(self):
+ self.SetUpGetUserRolesInAllProjects()
+ self.mox.ReplayAll()
+ actual = self.project_service.GetUserRolesInAllProjects(
+ self.cnxn, {111L, 888L})
+ owned_project_ids, membered_project_ids, contrib_project_ids = actual
+ self.mox.VerifyAll()
+ self.assertItemsEqual([234], owned_project_ids)
+ self.assertItemsEqual([123], membered_project_ids)
+ self.assertItemsEqual([], contrib_project_ids)
+
+ def SetUpUpdateExtraPerms(self):
+ self.project_service.extraperm_tbl.Delete(
+ self.cnxn, project_id=234, user_id=111L, commit=False)
+ self.project_service.extraperm_tbl.InsertRows(
+ self.cnxn, project_svc.EXTRAPERM_COLS,
+ [(234, 111L, 'SecurityTeam')], commit=False)
+ self.project_service.project_tbl.Update(
+ self.cnxn, {'cached_content_timestamp': NOW},
+ project_id=234, commit=False)
+ self.cnxn.Commit()
+
+ def testUpdateExtraPerms(self):
+ self.SetUpGetProjects()
+ self.SetUpUpdateExtraPerms()
+ self.mox.ReplayAll()
+ self.project_service.UpdateExtraPerms(
+ self.cnxn, 234, 111L, ['SecurityTeam'], now=NOW)
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/service_manager_test.py b/appengine/monorail/services/test/service_manager_test.py
new file mode 100644
index 0000000..6e74d00
--- /dev/null
+++ b/appengine/monorail/services/test/service_manager_test.py
@@ -0,0 +1,37 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the service_manager module."""
+
+import unittest
+
+from features import autolink
+from services import cachemanager_svc
+from services import config_svc
+from services import features_svc
+from services import issue_svc
+from services import service_manager
+from services import project_svc
+from services import star_svc
+from services import user_svc
+from services import usergroup_svc
+
+
+class ServiceManagerTest(unittest.TestCase):
+
+ def testSetUpServices(self):
+ svcs = service_manager.set_up_services()
+ self.assertIsInstance(svcs, service_manager.Services)
+ self.assertIsInstance(svcs.autolink, autolink.Autolink)
+ self.assertIsInstance(svcs.cache_manager, cachemanager_svc.CacheManager)
+ self.assertIsInstance(svcs.user, user_svc.UserService)
+ self.assertIsInstance(svcs.user_star, star_svc.UserStarService)
+ self.assertIsInstance(svcs.project_star, star_svc.ProjectStarService)
+ self.assertIsInstance(svcs.issue_star, star_svc.IssueStarService)
+ self.assertIsInstance(svcs.project, project_svc.ProjectService)
+ self.assertIsInstance(svcs.usergroup, usergroup_svc.UserGroupService)
+ self.assertIsInstance(svcs.config, config_svc.ConfigService)
+ self.assertIsInstance(svcs.issue, issue_svc.IssueService)
+ self.assertIsInstance(svcs.features, features_svc.FeaturesService)
diff --git a/appengine/monorail/services/test/spam_svc_test.py b/appengine/monorail/services/test/spam_svc_test.py
new file mode 100644
index 0000000..1c9fba4
--- /dev/null
+++ b/appengine/monorail/services/test/spam_svc_test.py
@@ -0,0 +1,254 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the spam service."""
+
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+import settings
+from framework import sql
+from proto import user_pb2
+from services import spam_svc
+from testing import fake
+
+class SpamServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+
+ self.mox = mox.Mox()
+ self.mock_report_tbl = self.mox.CreateMock(sql.SQLTableManager)
+ self.mock_verdict_tbl = self.mox.CreateMock(sql.SQLTableManager)
+ self.mock_issue_tbl = self.mox.CreateMock(sql.SQLTableManager)
+ self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
+ self.issue_service = fake.IssueService()
+ self.spam_service = spam_svc.SpamService()
+ self.spam_service.report_tbl = self.mock_report_tbl
+ self.spam_service.verdict_tbl = self.mock_verdict_tbl
+ self.spam_service.issue_tbl = self.mock_issue_tbl
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testLookupFlaggers(self):
+ self.mock_report_tbl.Select(
+ self.cnxn, cols=['user_id', 'comment_id'],
+ issue_id=234).AndReturn([[111L, None], [222L, 1]])
+ self.mox.ReplayAll()
+
+ issue_reporters, comment_reporters = (
+ self.spam_service.LookupFlaggers(self.cnxn, 234))
+ self.mox.VerifyAll()
+ self.assertItemsEqual([111L], issue_reporters)
+ self.assertEqual({1: [222L]}, comment_reporters)
+
+ def testFlagIssues_overThresh(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, reporter_id=111L, owner_id=456,
+ summary='sum', status='Live', issue_id=78901)
+
+ self.mock_report_tbl.InsertRows(self.cnxn,
+ ['issue_id', 'reported_user_id', 'user_id'],
+ [(78901, 111L, 111L)], ignore=True)
+
+ self.mock_report_tbl.Select(self.cnxn,
+ cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh)])
+ self.mock_verdict_tbl.Select(
+ self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
+ group_by=['issue_id'], issue_id=[78901]).AndReturn([])
+ self.mock_verdict_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'is_spam', 'reason', 'project_id'],
+ [(78901, True, 'threshold', 789)], ignore=True)
+
+ self.mox.ReplayAll()
+ self.spam_service.FlagIssues(
+ self.cnxn, self.issue_service, [issue], 111L, True)
+ self.mox.VerifyAll()
+ self.assertIn(issue, self.issue_service.updated_issues)
+
+ def testFlagIssues_underThresh(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, reporter_id=111L, owner_id=456,
+ summary='sum', status='Live', issue_id=78901)
+
+ self.mock_report_tbl.InsertRows(self.cnxn,
+ ['issue_id', 'reported_user_id', 'user_id'],
+ [(78901, 111L, 111L)], ignore=True)
+
+ self.mock_report_tbl.Select(self.cnxn,
+ cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
+
+ self.mock_verdict_tbl.Select(
+ self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
+ group_by=['issue_id'], issue_id=[78901]).AndReturn([])
+
+ self.mox.ReplayAll()
+ self.spam_service.FlagIssues(
+ self.cnxn, self.issue_service, [issue], 111L, True)
+ self.mox.VerifyAll()
+
+ self.assertNotIn(issue, self.issue_service.updated_issues)
+
+ def testUnflagIssue_overThresh(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, reporter_id=111L, owner_id=456,
+ summary='sum', status='Live', issue_id=78901, is_spam=True)
+ self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
+ comment_id=None, user_id=111L)
+ self.mock_report_tbl.Select(self.cnxn,
+ cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh)])
+
+ self.mock_verdict_tbl.Select(
+ self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
+ group_by=['issue_id'], issue_id=[78901]).AndReturn([])
+
+ self.mox.ReplayAll()
+ self.spam_service.FlagIssues(
+ self.cnxn, self.issue_service, [issue], 111L, False)
+ self.mox.VerifyAll()
+
+ self.assertNotIn(issue, self.issue_service.updated_issues)
+ self.assertEqual(True, issue.is_spam)
+
+ def testUnflagIssue_underThresh(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, reporter_id=111L, owner_id=456,
+ summary='sum', status='Live', issue_id=78901, is_spam=True)
+ self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
+ comment_id=None, user_id=111L)
+ self.mock_report_tbl.Select(self.cnxn,
+ cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
+
+ self.mock_verdict_tbl.Select(
+ self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
+ group_by=['issue_id'], issue_id=[78901]).AndReturn([])
+ self.mock_verdict_tbl.InsertRows(
+ self.cnxn, ['issue_id', 'is_spam', 'reason', 'project_id'],
+ [(78901, False, 'threshold', 789)], ignore=True)
+
+ self.mox.ReplayAll()
+ self.spam_service.FlagIssues(
+ self.cnxn, self.issue_service, [issue], 111L, False)
+ self.mox.VerifyAll()
+
+ self.assertIn(issue, self.issue_service.updated_issues)
+ self.assertEqual(False, issue.is_spam)
+
+ def testUnflagIssue_underThreshNoManualOverride(self):
+ issue = fake.MakeTestIssue(
+ project_id=789, local_id=1, reporter_id=111L, owner_id=456,
+ summary='sum', status='Live', issue_id=78901, is_spam=True)
+ self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
+ comment_id=None, user_id=111L)
+ self.mock_report_tbl.Select(self.cnxn,
+ cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
+
+ self.mock_verdict_tbl.Select(
+ self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
+ group_by=['issue_id'],
+ issue_id=[78901]).AndReturn([(78901, 'manual', '')])
+
+ self.mox.ReplayAll()
+ self.spam_service.FlagIssues(
+ self.cnxn, self.issue_service, [issue], 111L, False)
+ self.mox.VerifyAll()
+
+ self.assertNotIn(issue, self.issue_service.updated_issues)
+ self.assertEqual(True, issue.is_spam)
+
+ def testGetModerationQueue_noVerdicts(self):
+ self.mock_verdict_tbl.Select(self.cnxn,
+ cols=['issue_id', 'is_spam', 'reason', 'classifier_confidence',
+ 'created'],
+ where=[
+ ('project_id = %s', [789]),
+ ('classifier_confidence <= %s',
+ [settings.classifier_moderation_thresh]),
+ ('overruled = %s', [False]),
+ ('issue_id IS NOT NULL', []),
+ ],
+ order_by=[
+ ('classifier_confidence ASC', []),
+ ('created ASC', [])
+ ],
+ group_by=['issue_id'],
+ offset=0,
+ limit=10,
+ ).AndReturn([])
+
+ self.mock_verdict_tbl.SelectValue(self.cnxn,
+ col='COUNT(*)',
+ where=[
+ ('project_id = %s', [789]),
+ ('classifier_confidence <= %s',
+ [settings.classifier_moderation_thresh]),
+ ('overruled = %s', [False]),
+ ('issue_id IS NOT NULL', []),
+ ]).AndReturn(0)
+
+ self.mox.ReplayAll()
+ res, count = self.spam_service.GetModerationQueue(
+ self.cnxn, self.issue_service, 789)
+ self.mox.VerifyAll()
+
+ self.assertEqual([], res)
+ self.assertEqual(0, count)
+
+ def testGetModerationQueue_someVerdicts(self):
+ self.mock_verdict_tbl.Select(self.cnxn,
+ cols=['issue_id', 'is_spam', 'reason', 'classifier_confidence',
+ 'created'],
+ where=[
+ ('project_id = %s', [789]),
+ ('classifier_confidence <= %s',
+ [settings.classifier_moderation_thresh]),
+ ('overruled = %s', [False]),
+ ('issue_id IS NOT NULL', []),
+ ],
+ order_by=[
+ ('classifier_confidence ASC', []),
+ ('created ASC', [])
+ ],
+ group_by=['issue_id'],
+ offset=0,
+ limit=10,
+ ).AndReturn([[78901, 0, "classifier", 0.9, "2015-12-10 11:06:24"]])
+
+ self.mock_verdict_tbl.SelectValue(self.cnxn,
+ col='COUNT(*)',
+ where=[
+ ('project_id = %s', [789]),
+ ('classifier_confidence <= %s',
+ [settings.classifier_moderation_thresh]),
+ ('overruled = %s', [False]),
+ ('issue_id IS NOT NULL', []),
+ ]).AndReturn(10)
+
+ self.mox.ReplayAll()
+ res, count = self.spam_service.GetModerationQueue(
+ self.cnxn, self.issue_service, 789)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(res))
+ self.assertEqual(10, count)
+ self.assertEqual(78901, res[0].issue_id)
+ self.assertEqual(False, res[0].is_spam)
+ self.assertEqual("classifier", res[0].reason)
+ self.assertEqual(0.9, res[0].classifier_confidence)
+ self.assertEqual("2015-12-10 11:06:24", res[0].verdict_time)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/star_svc_test.py b/appengine/monorail/services/test/star_svc_test.py
new file mode 100644
index 0000000..ce1a80a
--- /dev/null
+++ b/appengine/monorail/services/test/star_svc_test.py
@@ -0,0 +1,137 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the star service."""
+
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+import settings
+from framework import sql
+from proto import user_pb2
+from services import star_svc
+from testing import fake
+
+
+class AbstractStarServiceTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+
+ self.mox = mox.Mox()
+ self.mock_tbl = self.mox.CreateMock(sql.SQLTableManager)
+ self.cnxn = 'fake connection'
+ self.cache_manager = fake.CacheManager()
+ self.star_service = star_svc.AbstractStarService(
+ self.cache_manager, self.mock_tbl, 'item_id', 'user_id', 'project')
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def SetUpExpungeStars(self):
+ self.mock_tbl.Delete(self.cnxn, item_id=123)
+
+ def testExpungeStars(self):
+ self.SetUpExpungeStars()
+ self.mox.ReplayAll()
+ self.star_service.ExpungeStars(self.cnxn, 123)
+ self.mox.VerifyAll()
+
+ def SetUpLookupItemsStarrers(self):
+ self.mock_tbl.Select(
+ self.cnxn, cols=['item_id', 'user_id'],
+ item_id=[234]).AndReturn([(234, 111L), (234, 222L)])
+
+ def testLookupItemsStarrers(self):
+ self.star_service.starrer_cache.CacheItem(123, [111L, 333L])
+ self.SetUpLookupItemsStarrers()
+ self.mox.ReplayAll()
+ starrer_list_dict = self.star_service.LookupItemsStarrers(
+ self.cnxn, [123, 234])
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], starrer_list_dict.keys())
+ self.assertItemsEqual([111L, 333L], starrer_list_dict[123])
+ self.assertItemsEqual([111L, 222L], starrer_list_dict[234])
+ self.assertItemsEqual([111L, 333L],
+ self.star_service.starrer_cache.GetItem(123))
+ self.assertItemsEqual([111L, 222L],
+ self.star_service.starrer_cache.GetItem(234))
+
+ def SetUpLookupStarredItemIDs(self):
+ self.mock_tbl.Select(
+ self.cnxn, cols=['item_id'], user_id=111L).AndReturn(
+ [(123,), (234,)])
+
+ def testLookupStarredItemIDs(self):
+ self.SetUpLookupStarredItemIDs()
+ self.mox.ReplayAll()
+ item_ids = self.star_service.LookupStarredItemIDs(self.cnxn, 111L)
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], item_ids)
+ self.assertItemsEqual([123, 234],
+ self.star_service.star_cache.GetItem(111L))
+
+ def testIsItemStarredBy(self):
+ self.SetUpLookupStarredItemIDs()
+ self.mox.ReplayAll()
+ self.assertTrue(self.star_service.IsItemStarredBy(self.cnxn, 123, 111L))
+ self.assertTrue(self.star_service.IsItemStarredBy(self.cnxn, 234, 111L))
+ self.assertFalse(
+ self.star_service.IsItemStarredBy(self.cnxn, 435, 111L))
+ self.mox.VerifyAll()
+
+ def SetUpCountItemStars(self):
+ self.mock_tbl.Select(
+ self.cnxn, cols=['item_id', 'COUNT(user_id)'], item_id=[234],
+ group_by=['item_id']).AndReturn([(234, 2)])
+
+ def testCountItemStars(self):
+ self.star_service.star_count_cache.CacheItem(123, 3)
+ self.SetUpCountItemStars()
+ self.mox.ReplayAll()
+ self.assertEqual(3, self.star_service.CountItemStars(self.cnxn, 123))
+ self.assertEqual(2, self.star_service.CountItemStars(self.cnxn, 234))
+ self.mox.VerifyAll()
+
+ def testCountItemsStars(self):
+ self.star_service.star_count_cache.CacheItem(123, 3)
+ self.SetUpCountItemStars()
+ self.mox.ReplayAll()
+ count_dict = self.star_service.CountItemsStars(
+ self.cnxn, [123, 234])
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], count_dict.keys())
+ self.assertEqual(3, count_dict[123])
+ self.assertEqual(2, count_dict[234])
+
+ def SetUpSetStar_Add(self):
+ self.mock_tbl.InsertRow(
+ self.cnxn, ignore=True, item_id=123, user_id=111L)
+
+ def testSetStar_Add(self):
+ self.SetUpSetStar_Add()
+ self.mox.ReplayAll()
+ self.star_service.SetStar(self.cnxn, 123, 111L, True)
+ self.mox.VerifyAll()
+
+ def SetUpSetStar_Remove(self):
+ self.mock_tbl.Delete(self.cnxn, item_id=123, user_id=111L)
+
+ def testSetStar_Remove(self):
+ self.SetUpSetStar_Remove()
+ self.mox.ReplayAll()
+ self.star_service.SetStar(self.cnxn, 123, 111L, False)
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/tracker_fulltext_test.py b/appengine/monorail/services/test/tracker_fulltext_test.py
new file mode 100644
index 0000000..776866a
--- /dev/null
+++ b/appengine/monorail/services/test/tracker_fulltext_test.py
@@ -0,0 +1,209 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for tracker_fulltext module."""
+
+import unittest
+
+import mox
+
+from google.appengine.api import search
+
+import settings
+from framework import framework_views
+from proto import ast_pb2
+from proto import tracker_pb2
+from services import fulltext_helpers
+from services import tracker_fulltext
+from testing import fake
+from tracker import tracker_bizobj
+
+
+class TrackerFulltextTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.mock_index = self.mox.CreateMockAnything()
+ self.mox.StubOutWithMock(search, 'Index')
+ self.docs = None
+ self.cnxn = 'fake connection'
+ self.user_service = fake.UserService()
+ self.user_service.TestAddUser('test@example.com', 111L)
+ self.issue_service = fake.IssueService()
+ self.config_service = fake.ConfigService()
+
+ self.issue = fake.MakeTestIssue(
+ 123, 1, 'test summary', 'New', 111L)
+ self.issue_service.TestAddIssue(self.issue)
+ self.comment = tracker_pb2.IssueComment(
+ project_id=789, issue_id=self.issue.issue_id, user_id=111L,
+ content='comment content',
+ attachments=[
+ tracker_pb2.Attachment(filename='hello.c'),
+ tracker_pb2.Attachment(filename='hello.h')])
+ self.issue_service.TestAddComment(self.comment, 1)
+ self.users_by_id = framework_views.MakeAllUserViews(
+ self.cnxn, self.user_service, [111L])
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def RecordDocs(self, docs):
+ self.docs = docs
+
+ def SetUpIndexIssues(self):
+ search.Index(name=settings.search_index_name_format % 1).AndReturn(
+ self.mock_index)
+ self.mock_index.put(mox.IgnoreArg()).WithSideEffects(self.RecordDocs)
+
+ def testIndexIssues(self):
+ self.SetUpIndexIssues()
+ self.mox.ReplayAll()
+ tracker_fulltext.IndexIssues(
+ self.cnxn, [self.issue], self.user_service, self.issue_service,
+ self.config_service)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(self.docs))
+ issue_doc = self.docs[0]
+ self.assertEqual(123, issue_doc.fields[0].value)
+ self.assertEqual('test summary', issue_doc.fields[1].value)
+
+ def SetUpCreateIssueSearchDocuments(self):
+ self.mox.StubOutWithMock(tracker_fulltext, '_IndexDocsInShard')
+ tracker_fulltext._IndexDocsInShard(1, mox.IgnoreArg()).WithSideEffects(
+ lambda shard_id, docs: self.RecordDocs(docs))
+
+ def testCreateIssueSearchDocuments_Normal(self):
+ self.SetUpCreateIssueSearchDocuments()
+ self.mox.ReplayAll()
+ config_dict = {123: tracker_bizobj.MakeDefaultProjectIssueConfig(123)}
+ tracker_fulltext._CreateIssueSearchDocuments(
+ [self.issue], {self.issue.issue_id: [self.comment]}, self.users_by_id,
+ config_dict)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(self.docs))
+ issue_doc = self.docs[0]
+ self.assertEqual(5, len(issue_doc.fields))
+ self.assertEqual(123, issue_doc.fields[0].value)
+ self.assertEqual('test summary', issue_doc.fields[1].value)
+ self.assertEqual('test@example.com comment content hello.c hello.h',
+ issue_doc.fields[3].value)
+ self.assertEqual('', issue_doc.fields[4].value)
+
+ def testCreateIssueSearchDocuments_CustomFields(self):
+ self.SetUpCreateIssueSearchDocuments()
+ self.mox.ReplayAll()
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(123)
+ config_dict = {123: tracker_bizobj.MakeDefaultProjectIssueConfig(123)}
+ int_field = tracker_bizobj.MakeFieldDef(
+ 1, 123, 'CustomInt', tracker_pb2.FieldTypes.INT_TYPE, None, False,
+ False, None, None, None, None, False, None, None, None,
+ 'A custom int field', False)
+ int_field_value = tracker_bizobj.MakeFieldValue(1, 42, None, None, False)
+ str_field = tracker_bizobj.MakeFieldDef(
+ 2, 123, 'CustomStr', tracker_pb2.FieldTypes.STR_TYPE, None, False,
+ False, None, None, None, None, False, None, None, None,
+ 'A custom string field', False)
+ str_field_value = tracker_bizobj.MakeFieldValue(
+ 2, None, 'Greetings', None, False)
+ config.field_defs.extend([int_field, str_field])
+ self.issue.field_values.extend([int_field_value, str_field_value])
+
+ tracker_fulltext._CreateIssueSearchDocuments(
+ [self.issue], {self.issue.issue_id: [self.comment]}, self.users_by_id,
+ config_dict)
+ self.mox.VerifyAll()
+ self.assertEqual(1, len(self.docs))
+ issue_doc = self.docs[0]
+ metadata = issue_doc.fields[2]
+ self.assertEqual(
+ u'New test@example.com [] 42 Greetings ',
+ metadata.value)
+
+ def testExtractCommentText(self):
+ extracted_text = tracker_fulltext._ExtractCommentText(
+ self.comment, self.users_by_id)
+ self.assertEqual(
+ 'test@example.com comment content hello.c hello.h',
+ extracted_text)
+
+ def testIndexableComments_Length(self):
+ comments = [self.comment]
+ indexable = tracker_fulltext._IndexableComments(comments, self.users_by_id)
+ self.assertEquals(1, len(indexable))
+
+ comments = [self.comment] * 100
+ indexable = tracker_fulltext._IndexableComments(comments, self.users_by_id)
+ self.assertEquals(100, len(indexable))
+
+ comments = [self.comment] * 101
+ indexable = tracker_fulltext._IndexableComments(comments, self.users_by_id)
+ self.assertEquals(101, len(indexable))
+
+ comments = [self.comment] * 600
+ indexable = tracker_fulltext._IndexableComments(comments, self.users_by_id)
+ self.assertEquals(600, len(indexable))
+
+ comments = [self.comment] * 601
+ indexable = tracker_fulltext._IndexableComments(comments, self.users_by_id)
+ self.assertEquals(600, len(indexable))
+ self.assertNotIn(100, indexable)
+
+ def SetUpUnindexIssues(self):
+ search.Index(name=settings.search_index_name_format % 1).AndReturn(
+ self.mock_index)
+ self.mock_index.delete(['1'])
+
+ def testUnindexIssues(self):
+ self.SetUpUnindexIssues()
+ self.mox.ReplayAll()
+ tracker_fulltext.UnindexIssues([1])
+ self.mox.VerifyAll()
+
+ def SetUpSearchIssueFullText(self):
+ self.mox.StubOutWithMock(fulltext_helpers, 'ComprehensiveSearch')
+ fulltext_helpers.ComprehensiveSearch(
+ '(project_id:789) (summary:"test")',
+ settings.search_index_name_format % 1).AndReturn([123, 234])
+
+ def testSearchIssueFullText_Normal(self):
+ self.SetUpSearchIssueFullText()
+ self.mox.ReplayAll()
+ summary_fd = tracker_pb2.FieldDef(
+ field_name='summary', field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.Condition(
+ op=ast_pb2.QueryOp.TEXT_HAS, field_defs=[summary_fd],
+ str_values=['test'])])
+ issue_ids, capped = tracker_fulltext.SearchIssueFullText(
+ [789], query_ast_conj, 1)
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], issue_ids)
+ self.assertFalse(capped)
+
+ def testSearchIssueFullText_Capped(self):
+ try:
+ orig = settings.fulltext_limit_per_shard
+ settings.fulltext_limit_per_shard = 1
+ self.SetUpSearchIssueFullText()
+ self.mox.ReplayAll()
+ summary_fd = tracker_pb2.FieldDef(
+ field_name='summary', field_type=tracker_pb2.FieldTypes.STR_TYPE)
+ query_ast_conj = ast_pb2.Conjunction(conds=[
+ ast_pb2.Condition(
+ op=ast_pb2.QueryOp.TEXT_HAS, field_defs=[summary_fd],
+ str_values=['test'])])
+ issue_ids, capped = tracker_fulltext.SearchIssueFullText(
+ [789], query_ast_conj, 1)
+ self.mox.VerifyAll()
+ self.assertItemsEqual([123, 234], issue_ids)
+ self.assertTrue(capped)
+ finally:
+ settings.fulltext_limit_per_shard = orig
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/services/test/user_svc_test.py b/appengine/monorail/services/test/user_svc_test.py
new file mode 100644
index 0000000..bf8b9e0
--- /dev/null
+++ b/appengine/monorail/services/test/user_svc_test.py
@@ -0,0 +1,252 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the user service."""
+
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+from framework import sql
+from proto import user_pb2
+from services import user_svc
+from testing import fake
+
+
+def SetUpGetUsers(user_service, cnxn):
+  """Set up expected calls to SQL tables.
+
+  Expects one Select per user table for user 333L.  The row's columns
+  presumably follow user_svc.USER_COLS -- the tests below read
+  'c@example.com' as the email and 'Spammer' as the banned reason from it.
+  """
+  user_service.user_tbl.Select(
+      cnxn, cols=user_svc.USER_COLS, user_id=[333L]).AndReturn(
+      [(333L, 'c@example.com', False, False, False, 'Spammer',
+       'stay_same_issue', False, False, False, True)])
+  user_service.actionlimit_tbl.Select(
+      cnxn, cols=user_svc.ACTIONLIMIT_COLS, user_id=[333L]).AndReturn([])
+  user_service.dismissedcues_tbl.Select(
+      cnxn, cols=user_svc.DISMISSEDCUES_COLS, user_id=[333L]).AndReturn([])
+
+
+def MakeUserService(cache_manager, my_mox):
+  """Return a UserService whose SQL table managers are mox mocks."""
+  user_service = user_svc.UserService(cache_manager)
+  user_service.user_tbl = my_mox.CreateMock(sql.SQLTableManager)
+  user_service.actionlimit_tbl = my_mox.CreateMock(sql.SQLTableManager)
+  user_service.dismissedcues_tbl = my_mox.CreateMock(sql.SQLTableManager)
+  return user_service
+
+
+class UserTwoLevelCacheTest(unittest.TestCase):
+  """Tests for the two-level cache of User PBs (user_2lc)."""
+  # NOTE(review): unlike UserServiceTest below, this class has no tearDown to
+  # deactivate the testbed or unset mox stubs -- consider adding one.
+
+  def setUp(self):
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_memcache_stub()
+
+    self.mox = mox.Mox()
+    self.cnxn = fake.MonorailConnection()
+    self.cache_manager = fake.CacheManager()
+    self.user_service = MakeUserService(self.cache_manager, self.mox)
+
+  def testDeserializeUsersByID(self):
+    """Two user rows with no detail rows become two User PBs keyed by ID."""
+    user_rows = [
+        (111L, 'a@example.com', False, False, False, '',
+         'stay_same_issue', False, False, False, True),
+        (222L, 'b@example.com', False, False, False, '',
+         'next_in_list', False, False, False, True),
+        ]
+    actionlimit_rows = []
+    dismissedcues_rows = []
+    user_dict = self.user_service.user_2lc._DeserializeUsersByID(
+        user_rows, actionlimit_rows, dismissedcues_rows)
+    self.assertEqual(2, len(user_dict))
+    self.assertEqual('a@example.com', user_dict[111L].email)
+    self.assertFalse(user_dict[111L].is_site_admin)
+    self.assertEqual('', user_dict[111L].banned)
+    self.assertFalse(user_dict[111L].notify_issue_change)
+    self.assertEqual('b@example.com', user_dict[222L].email)
+
+  def testFetchItems(self):
+    """FetchItems hits the mocked SQL tables and builds User PBs."""
+    SetUpGetUsers(self.user_service, self.cnxn)
+    self.mox.ReplayAll()
+    user_dict = self.user_service.user_2lc.FetchItems(self.cnxn, [333L])
+    self.mox.VerifyAll()
+    self.assertEqual([333L], user_dict.keys())
+    self.assertEqual('c@example.com', user_dict[333L].email)
+    self.assertFalse(user_dict[333L].is_site_admin)
+    self.assertEqual('Spammer', user_dict[333L].banned)
+
+
+class UserServiceTest(unittest.TestCase):
+  """Tests for UserService methods against mocked SQL table managers."""
+
+  def setUp(self):
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_memcache_stub()
+
+    self.mox = mox.Mox()
+    self.cnxn = fake.MonorailConnection()
+    self.cache_manager = fake.CacheManager()
+    self.user_service = MakeUserService(self.cache_manager, self.mox)
+
+  def tearDown(self):
+    self.testbed.deactivate()
+    self.mox.UnsetStubs()
+    self.mox.ResetAll()
+
+  def SetUpCreateUsers(self):
+    """Expect one InsertRows for two new user emails.
+
+    The numeric user IDs are presumably derived from the email addresses --
+    confirm against user_svc._CreateUsers.
+    """
+    self.user_service.user_tbl.InsertRows(
+        self.cnxn,
+        ['user_id', 'email', 'obscure_email'],
+        [(3035911623, 'a@example.com', True),
+         (2996997680, 'b@example.com', True)]
+    ).AndReturn(None)
+
+  def testCreateUsers(self):
+    self.SetUpCreateUsers()
+    self.mox.ReplayAll()
+    self.user_service._CreateUsers(
+        self.cnxn, ['a@example.com', 'b@example.com'])
+    self.mox.VerifyAll()
+
+  def SetUpLookupUserEmails(self):
+    """Expect a Select for the email of user 222L only."""
+    self.user_service.user_tbl.Select(
+        self.cnxn, cols=['user_id', 'email'], user_id=[222L]).AndReturn(
+        [(222L, 'b@example.com')])
+
+  def testLookupUserEmails(self):
+    self.SetUpLookupUserEmails()
+    # 111L is pre-cached, so only 222L should hit the DB.
+    self.user_service.email_cache.CacheItem(
+        111L, 'a@example.com')
+    self.mox.ReplayAll()
+    emails_dict = self.user_service.LookupUserEmails(
+        self.cnxn, [111L, 222L])
+    self.mox.VerifyAll()
+    self.assertEqual(
+        {111L: 'a@example.com', 222L: 'b@example.com'},
+        emails_dict)
+
+  def testLookupUserEmail(self):
+    self.SetUpLookupUserEmails() # Same as testLookupUserEmails()
+    self.mox.ReplayAll()
+    email_addr = self.user_service.LookupUserEmail(self.cnxn, 222L)
+    self.mox.VerifyAll()
+    self.assertEqual('b@example.com', email_addr)
+
+  def SetUpLookupUserIDs(self):
+    """Expect a Select for the user ID of b@example.com only."""
+    self.user_service.user_tbl.Select(
+        self.cnxn, cols=['email', 'user_id'],
+        email=['b@example.com']).AndReturn([('b@example.com', 222L)])
+
+  def testLookupUserIDs(self):
+    self.SetUpLookupUserIDs()
+    # a@example.com is pre-cached, so only b@example.com should hit the DB.
+    self.user_service.user_id_cache.CacheItem(
+        'a@example.com', 111L)
+    self.mox.ReplayAll()
+    user_id_dict = self.user_service.LookupUserIDs(
+        self.cnxn, ['a@example.com', 'b@example.com'])
+    self.mox.VerifyAll()
+    self.assertEqual(
+        {'a@example.com': 111L, 'b@example.com': 222L},
+        user_id_dict)
+
+  def testLookupUserIDs_InvalidEmail(self):
+    # Even with autocreate=True, an invalid address yields no user ID.
+    self.user_service.user_tbl.Select(
+        self.cnxn, cols=['email', 'user_id'], email=['abc']).AndReturn([])
+    self.mox.ReplayAll()
+    user_id_dict = self.user_service.LookupUserIDs(
+        self.cnxn, ['abc'], autocreate=True)
+    self.mox.VerifyAll()
+    self.assertEqual({}, user_id_dict)
+
+  def testLookupUserID(self):
+    self.SetUpLookupUserIDs() # Same as testLookupUserIDs()
+    self.user_service.user_id_cache.CacheItem('a@example.com', 111L)
+    self.mox.ReplayAll()
+    user_id = self.user_service.LookupUserID(self.cnxn, 'b@example.com')
+    self.mox.VerifyAll()
+    self.assertEqual(222, user_id)
+
+  def testGetUsersByIDs(self):
+    SetUpGetUsers(self.user_service, self.cnxn)
+    user_a = user_pb2.User(email='a@example.com')
+    # 111L comes from the cache; 333L is fetched via the mocked tables.
+    self.user_service.user_2lc.CacheItem(111L, user_a)
+    self.mox.ReplayAll()
+    user_dict = self.user_service.GetUsersByIDs(
+        self.cnxn, [111L, 333L])
+    self.mox.VerifyAll()
+    self.assertEqual(2, len(user_dict))
+    self.assertEqual('a@example.com', user_dict[111L].email)
+    self.assertFalse(user_dict[111L].is_site_admin)
+    self.assertFalse(user_dict[111L].banned)
+    self.assertTrue(user_dict[111L].notify_issue_change)
+    self.assertEqual('c@example.com', user_dict[333L].email)
+
+  def testGetUser(self):
+    SetUpGetUsers(self.user_service, self.cnxn)
+    user_a = user_pb2.User(email='a@example.com')
+    self.user_service.user_2lc.CacheItem(111L, user_a)
+    self.mox.ReplayAll()
+    user = self.user_service.GetUser(self.cnxn, 333L)
+    self.mox.VerifyAll()
+    self.assertEqual('c@example.com', user.email)
+
+  def SetUpUpdateUser(self):
+    """Expect an uncommitted Update of the user row plus delete-and-reinsert
+    of the actionlimit and dismissedcues detail rows."""
+    delta = {
+        'keep_people_perms_open': False,
+        'preview_on_hover': True,
+        'ignore_action_limits': False,
+        'notify_issue_change': True,
+        'after_issue_update': 'STAY_SAME_ISSUE',
+        'notify_starred_issue_change': True,
+        'is_site_admin': False,
+        'banned': 'Turned spammer',
+        'obscure_email': True,
+        }
+    self.user_service.user_tbl.Update(
+        self.cnxn, delta, user_id=111L, commit=False)
+
+    self.user_service.actionlimit_tbl.Delete(
+        self.cnxn, user_id=111L, commit=False)
+    self.user_service.actionlimit_tbl.InsertRows(
+        self.cnxn, user_svc.ACTIONLIMIT_COLS, [], commit=False)
+
+    self.user_service.dismissedcues_tbl.Delete(
+        self.cnxn, user_id=111L, commit=False)
+    self.user_service.dismissedcues_tbl.InsertRows(
+        self.cnxn, user_svc.DISMISSEDCUES_COLS, [], commit=False)
+
+  def testUpdateUser(self):
+    self.SetUpUpdateUser()
+    user_a = user_pb2.User(
+        email='a@example.com', banned='Turned spammer')
+    self.mox.ReplayAll()
+    self.user_service.UpdateUser(self.cnxn, 111L, user_a)
+    self.mox.VerifyAll()
+    # The update must invalidate any cached copy of user 111L.
+    self.assertFalse(self.user_service.user_2lc.HasItem(111L))
+
+  def testUpdateUserSettings(self):
+    self.SetUpUpdateUser()
+    user_a = user_pb2.User(email='a@example.com')
+    self.mox.ReplayAll()
+    self.user_service.UpdateUserSettings(
+        self.cnxn, 111L, user_a, is_banned=True,
+        banned_reason='Turned spammer')
+    self.mox.VerifyAll()
+
+
+class UserServiceFunctionsTest(unittest.TestCase):
+  """Tests for module-level helper functions in user_svc."""
+
+  def testActionLimitToRow(self):
+    """_ActionLimitToRow flattens an ActionLimit PB into a SQL row tuple."""
+    al = user_pb2.ActionLimit(
+        recent_count=1, reset_timestamp=123456, lifetime_count=9,
+        lifetime_limit=10, period_soft_limit=2, period_hard_limit=5)
+    action_kind = 3
+    row = user_svc._ActionLimitToRow(
+        111, action_kind, al)
+    self.assertEqual((111, action_kind, 1, 123456, 9, 10, 2, 5), row)
+
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/services/test/usergroup_svc_test.py b/appengine/monorail/services/test/usergroup_svc_test.py
new file mode 100644
index 0000000..fd3f5b6
--- /dev/null
+++ b/appengine/monorail/services/test/usergroup_svc_test.py
@@ -0,0 +1,510 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the usergroup service."""
+
+import collections
+import unittest
+
+import mox
+
+from google.appengine.ext import testbed
+
+from framework import permissions
+from framework import sql
+from proto import usergroup_pb2
+from services import service_manager
+from services import usergroup_svc
+from testing import fake
+
+
+def MakeUserGroupService(cache_manager, my_mox):
+  """Return a UserGroupService whose SQL table managers are mox mocks."""
+  usergroup_service = usergroup_svc.UserGroupService(cache_manager)
+  usergroup_service.usergroup_tbl = my_mox.CreateMock(sql.SQLTableManager)
+  usergroup_service.usergroupsettings_tbl = my_mox.CreateMock(
+      sql.SQLTableManager)
+  usergroup_service.usergroupprojects_tbl = my_mox.CreateMock(
+      sql.SQLTableManager)
+  return usergroup_service
+
+
+class MembershipTwoLevelCacheTest(unittest.TestCase):
+  """Tests for the two-level cache of user-group memberships."""
+
+  def setUp(self):
+    self.mox = mox.Mox()
+    self.cache_manager = fake.CacheManager()
+    self.usergroup_service = MakeUserGroupService(self.cache_manager, self.mox)
+
+  def testDeserializeMemberships(self):
+    """(user_id, group_id) rows deserialize to {user_id: group_id set}."""
+    memberships_rows = [(111L, 777L), (111L, 888L), (222L, 888L)]
+    actual = self.usergroup_service.memberships_2lc._DeserializeMemberships(
+        memberships_rows)
+    self.assertItemsEqual([111L, 222L], actual.keys())
+    self.assertItemsEqual([777L, 888L], actual[111L])
+    self.assertItemsEqual([888L], actual[222L])
+
+
+class UserGroupServiceTest(unittest.TestCase):
+  """Tests for UserGroupService methods against mocked SQL table managers."""
+
+  def setUp(self):
+    self.testbed = testbed.Testbed()
+    self.testbed.activate()
+    self.testbed.init_memcache_stub()
+
+    self.mox = mox.Mox()
+    self.cnxn = 'fake connection'
+    self.cache_manager = fake.CacheManager()
+    self.usergroup_service = MakeUserGroupService(self.cache_manager, self.mox)
+    self.services = service_manager.Services(
+        user=fake.UserService(),
+        usergroup=self.usergroup_service,
+        project=fake.ProjectService())
+
+  def tearDown(self):
+    self.testbed.deactivate()
+    self.mox.UnsetStubs()
+    self.mox.ResetAll()
+
+  def SetUpCreateGroup(
+      self, group_id, visiblity, external_group_type=None):
+    # NOTE(review): 'visiblity' is a misspelling of 'visibility' (here and in
+    # SetUpUpdateSettings); harmless since it is only a parameter name.
+    self.SetUpUpdateSettings(group_id, visiblity, external_group_type)
+
+  def testCreateGroup_Normal(self):
+    self.services.user.TestAddUser('group@example.com', 888L)
+    self.SetUpCreateGroup(888L, 'anyone')
+    self.mox.ReplayAll()
+    actual_group_id = self.usergroup_service.CreateGroup(
+        self.cnxn, self.services, 'group@example.com', 'anyone')
+    self.mox.VerifyAll()
+    self.assertEqual(888L, actual_group_id)
+
+  def testCreateGroup_Import(self):
+    # Imported groups may have a non-email name and an external group type.
+    self.services.user.TestAddUser('troopers', 888L)
+    self.SetUpCreateGroup(888L, 'owners', 'mdb')
+    self.mox.ReplayAll()
+    actual_group_id = self.usergroup_service.CreateGroup(
+        self.cnxn, self.services, 'troopers', 'owners', 'mdb')
+    self.mox.VerifyAll()
+    self.assertEqual(888L, actual_group_id)
+
+  def SetUpDetermineWhichUserIDsAreGroups(self, ids_to_query, mock_group_ids):
+    """Expect one settings-table Select; rows come back as 1-tuples."""
+    self.usergroup_service.usergroupsettings_tbl.Select(
+        self.cnxn, cols=['group_id'], group_id=ids_to_query).AndReturn(
+        (gid,) for gid in mock_group_ids)
+
+  def testDetermineWhichUserIDsAreGroups_NoGroups(self):
+    self.SetUpDetermineWhichUserIDsAreGroups([], [])
+    self.mox.ReplayAll()
+    actual_group_ids = self.usergroup_service.DetermineWhichUserIDsAreGroups(
+        self.cnxn, [])
+    self.mox.VerifyAll()
+    self.assertEqual([], actual_group_ids)
+
+  def testDetermineWhichUserIDsAreGroups_SomeGroups(self):
+    user_ids = [111, 222, 333]
+    group_ids = [888, 999]
+    self.SetUpDetermineWhichUserIDsAreGroups(user_ids + group_ids, group_ids)
+    self.mox.ReplayAll()
+    actual_group_ids = self.usergroup_service.DetermineWhichUserIDsAreGroups(
+        self.cnxn, user_ids + group_ids)
+    self.mox.VerifyAll()
+    self.assertEqual(group_ids, actual_group_ids)
+
+  def SetUpLookupAllMemberships(self, user_ids, mock_membership_rows):
+    """Expect a distinct (user_id, group_id) Select for the given users."""
+    self.usergroup_service.usergroup_tbl.Select(
+        self.cnxn, cols=['user_id', 'group_id'], distinct=True,
+        user_id=user_ids).AndReturn(mock_membership_rows)
+
+  def testLookupAllMemberships(self):
+    self.usergroup_service.group_dag.initialized = True
+    # 111L is pre-cached, so only 222L should hit the DB.
+    self.usergroup_service.memberships_2lc.CacheItem(111L, {888L, 999L})
+    self.SetUpLookupAllMemberships([222L], [(222L, 777L), (222L, 999L)])
+    self.mox.ReplayAll()
+    actual_membership_dict = self.usergroup_service.LookupAllMemberships(
+        self.cnxn, [111L, 222L])
+    self.mox.VerifyAll()
+    self.assertEqual(
+        {111L: {888L, 999}, 222L: {777L, 999L}},  # 999 == 999L in Python 2.
+        actual_membership_dict)
+
+  def SetUpRemoveMembers(self, group_id, member_ids):
+    """Expect one Delete of the membership rows being removed."""
+    self.usergroup_service.usergroup_tbl.Delete(
+        self.cnxn, group_id=group_id, user_id=member_ids)
+
+  def testRemoveMembers(self):
+    self.usergroup_service.group_dag.initialized = True
+    self.SetUpRemoveMembers(888L, [111L, 222L])
+    self.SetUpLookupAllMembers([111L, 222L], [], {}, {})
+    self.mox.ReplayAll()
+    self.usergroup_service.RemoveMembers(self.cnxn, 888L, [111L, 222L])
+    self.mox.VerifyAll()
+
+  def testUpdateMembers(self):
+    self.usergroup_service.group_dag.initialized = True
+    # Old membership rows are deleted, then the new roles are inserted.
+    self.usergroup_service.usergroup_tbl.Delete(
+        self.cnxn, group_id=888L, user_id=[111L, 222L])
+    self.usergroup_service.usergroup_tbl.InsertRows(
+        self.cnxn, ['user_id', 'group_id', 'role'],
+        [(111L, 888L, 'member'), (222L, 888L, 'member')])
+    self.SetUpLookupAllMembers([111L, 222L], [], {}, {})
+    self.mox.ReplayAll()
+    self.usergroup_service.UpdateMembers(
+        self.cnxn, 888L, [111L, 222L], 'member')
+    self.mox.VerifyAll()
+
+  def testUpdateMembers_CircleDetection(self):
+    # Two groups: 888 and 999 while 999 is a member of 888.
+    self.SetUpDAG([(888,), (999,)], [(999, 888)])
+    self.mox.ReplayAll()
+    self.assertRaises(
+        usergroup_svc.CircularGroupException,
+        self.usergroup_service.UpdateMembers, self.cnxn, 999, [888], 'member')
+    self.mox.VerifyAll()
+
+  def SetUpLookupAllMembers(
+      self, group_ids, direct_member_rows,
+      descedants_dict, indirect_member_rows_dict):
+    """Expect the direct-member query plus one descendant query per group.
+
+    NOTE(review): 'descedants_dict' is a misspelling of 'descendants_dict'.
+    """
+    self.usergroup_service.usergroup_tbl.Select(
+        self.cnxn, cols=['user_id', 'group_id', 'role'], distinct=True,
+        group_id=group_ids).AndReturn(direct_member_rows)
+    for gid in group_ids:
+      self.usergroup_service.usergroup_tbl.Select(
+          self.cnxn, cols=['user_id'], distinct=True,
+          group_id=descedants_dict.get(gid, [])).AndReturn(
+          indirect_member_rows_dict.get(gid, []))
+
+  def testLookupAllMembers(self):
+    self.usergroup_service.group_dag.initialized = True
+    # DAG: 777 contains 888, which contains 999.
+    self.usergroup_service.group_dag.user_group_children = (
+        collections.defaultdict(list))
+    self.usergroup_service.group_dag.user_group_children[777] = [888]
+    self.usergroup_service.group_dag.user_group_children[888] = [999]
+    self.SetUpLookupAllMembers(
+        [777],
+        [(888, 777, 'member'), (111, 888, 'member'), (999, 888, 'member'),
+         (222, 999, 'member')],
+        {777: [888, 999]},
+        {777: [(111,), (222,), (999,)]})
+
+    self.mox.ReplayAll()
+    members_dict, owners_dict = self.usergroup_service.LookupAllMembers(
+        self.cnxn, [777])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([111, 222, 888, 999], members_dict[777])
+    self.assertItemsEqual([], owners_dict[777])
+
+  def testExpandAnyUserGroups_NoneRequested(self):
+    self.SetUpDetermineWhichUserIDsAreGroups([], [])
+    self.SetUpLookupMembers({})
+    self.mox.ReplayAll()
+    direct_ids, indirect_ids = self.usergroup_service.ExpandAnyUserGroups(
+        self.cnxn, [])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([], direct_ids)
+    self.assertItemsEqual([], indirect_ids)
+
+  def testExpandAnyUserGroups_NoGroups(self):
+    self.SetUpDetermineWhichUserIDsAreGroups([111, 222], [])
+    self.SetUpLookupMembers({})
+    self.mox.ReplayAll()
+    direct_ids, indirect_ids = self.usergroup_service.ExpandAnyUserGroups(
+        self.cnxn, [111, 222])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([111, 222], direct_ids)
+    self.assertItemsEqual([], indirect_ids)
+
+  def testExpandAnyUserGroups_WithGroups(self):
+    self.usergroup_service.group_dag.initialized = True
+    self.SetUpDetermineWhichUserIDsAreGroups([111, 222, 888], [888])
+    self.SetUpLookupAllMembers(
+        [888], [(222, 888, 'member'), (333, 888, 'member')], {}, {})
+    self.mox.ReplayAll()
+    direct_ids, indirect_ids = self.usergroup_service.ExpandAnyUserGroups(
+        self.cnxn, [111, 222, 888])
+    self.mox.VerifyAll()
+    # 222 is both a direct member and a member of group 888.
+    self.assertItemsEqual([111, 222], direct_ids)
+    self.assertItemsEqual([333, 222], indirect_ids)
+
+  def SetUpLookupMembers(self, group_member_dict):
+    """Expect one membership Select covering all the given group IDs."""
+    mock_membership_rows = []
+    group_ids = []
+    for gid, members in group_member_dict.iteritems():
+      group_ids.append(gid)
+      mock_membership_rows.extend([(uid, gid, 'member') for uid in members])
+    group_ids.sort()
+    self.usergroup_service.usergroup_tbl.Select(
+        self.cnxn, cols=['user_id','group_id', 'role'], distinct=True,
+        group_id=group_ids).AndReturn(mock_membership_rows)
+
+  def testLookupMembers_NoneRequested(self):
+    self.SetUpLookupMembers({})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [])
+    self.mox.VerifyAll()
+    self.assertItemsEqual({}, member_ids)
+
+  def testLookupMembers_Nonexistent(self):
+    """If some requested groups don't exist, they are ignored."""
+    self.SetUpLookupMembers({777: []})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [777])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([], member_ids[777])
+
+  def testLookupMembers_AllEmpty(self):
+    """Requesting all empty groups results in no members."""
+    self.SetUpLookupMembers({888: [], 999: []})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888, 999])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([], member_ids[888])
+
+  def testLookupMembers_OneGroup(self):
+    self.SetUpLookupMembers({888: [111, 222]})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([111, 222], member_ids[888])
+
+  def testLookupMembers_GroupsAndNonGroups(self):
+    """We ignore any non-groups passed in."""
+    self.SetUpLookupMembers({111: [], 333: [], 888: [111, 222]})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(
+        self.cnxn, [111, 333, 888])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([111, 222], member_ids[888])
+
+  def testLookupMembers_OverlappingGroups(self):
+    """We get the union of IDs. Imagine 888 = {111} and 999 = {111, 222}."""
+    self.SetUpLookupMembers({888: [111], 999: [111, 222]})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupMembers(self.cnxn, [888, 999])
+    self.mox.VerifyAll()
+    self.assertItemsEqual([111, 222], member_ids[999])
+    self.assertItemsEqual([111], member_ids[888])
+
+  def testLookupVisibleMembers_LimitedVisiblity(self):
+    """We get only the member IDs in groups that the user is allowed to see."""
+    self.usergroup_service.group_dag.initialized = True
+    self.SetUpGetGroupSettings(
+        [888, 999],
+        [(888, 'anyone', None, 0), (999, 'members', None, 0)])
+    self.SetUpLookupMembers({888: [111], 999: [111]})
+    self.SetUpLookupAllMembers(
+        [888, 999], [(111, 888, 'member'), (111, 999, 'member')], {}, {})
+    self.mox.ReplayAll()
+    member_ids, _ = self.usergroup_service.LookupVisibleMembers(
+        self.cnxn, [888, 999], permissions.USER_PERMISSIONSET, set(),
+        self.services)
+    self.mox.VerifyAll()
+    # Group 999 is members-only and the requester is not a member of it.
+    self.assertItemsEqual([111], member_ids[888])
+    self.assertNotIn(999, member_ids)
+
+  def SetUpGetAllUserGroupsInfo(self, mock_settings_rows, mock_count_rows,
+                                mock_friends=None):
+    """Expect the settings/User join, member counts, and friend projects."""
+    mock_friends = mock_friends or []
+    self.usergroup_service.usergroupsettings_tbl.Select(
+        self.cnxn, cols=['group_id', 'email', 'who_can_view_members',
+                         'external_group_type', 'last_sync_time'],
+        left_joins=[('User ON UserGroupSettings.group_id = User.user_id', [])]
+        ).AndReturn(mock_settings_rows)
+    self.usergroup_service.usergroup_tbl.Select(
+        self.cnxn, cols=['group_id', 'COUNT(*)'],
+        group_by=['group_id']).AndReturn(mock_count_rows)
+
+    group_ids = [g[0] for g in mock_settings_rows]
+    self.usergroup_service.usergroupprojects_tbl.Select(
+        self.cnxn, cols=usergroup_svc.USERGROUPPROJECTS_COLS,
+        group_id=group_ids).AndReturn(mock_friends)
+
+  def testGetAllUserGroupsInfo(self):
+    self.SetUpGetAllUserGroupsInfo(
+        [(888L, 'group@example.com', 'anyone', None, 0)],
+        [(888L, 12)])
+    self.mox.ReplayAll()
+    actual_infos = self.usergroup_service.GetAllUserGroupsInfo(self.cnxn)
+    self.mox.VerifyAll()
+    self.assertEqual(1, len(actual_infos))
+    addr, count, group_settings, group_id = actual_infos[0]
+    self.assertEqual('group@example.com', addr)
+    self.assertEqual(12, count)
+    self.assertEqual(usergroup_pb2.MemberVisibility.ANYONE,
+                     group_settings.who_can_view_members)
+    self.assertEqual(888L, group_id)
+
+  def SetUpGetGroupSettings(self, group_ids, mock_result_rows,
+                            mock_friends=None):
+    """Expect the settings and friend-project queries for the given groups."""
+    mock_friends = mock_friends or []
+    self.usergroup_service.usergroupsettings_tbl.Select(
+        self.cnxn, cols=usergroup_svc.USERGROUPSETTINGS_COLS,
+        group_id=group_ids).AndReturn(mock_result_rows)
+    self.usergroup_service.usergroupprojects_tbl.Select(
+        self.cnxn, cols=usergroup_svc.USERGROUPPROJECTS_COLS,
+        group_id=group_ids).AndReturn(mock_friends)
+
+  def testGetGroupSettings_NoGroupsRequested(self):
+    self.SetUpGetGroupSettings([], [])
+    self.mox.ReplayAll()
+    actual_settings_dict = self.usergroup_service.GetAllGroupSettings(
+        self.cnxn, [])
+    self.mox.VerifyAll()
+    self.assertEqual({}, actual_settings_dict)
+
+  def testGetGroupSettings_NoGroupsFound(self):
+    self.SetUpGetGroupSettings([777L], [])
+    self.mox.ReplayAll()
+    actual_settings_dict = self.usergroup_service.GetAllGroupSettings(
+        self.cnxn, [777L])
+    self.mox.VerifyAll()
+    self.assertEqual({}, actual_settings_dict)
+
+  def testGetGroupSettings_SomeGroups(self):
+    self.SetUpGetGroupSettings(
+        [777L, 888L, 999L],
+        [(888L, 'anyone', None, 0), (999L, 'members', None, 0)])
+    self.mox.ReplayAll()
+    actual_settings_dict = self.usergroup_service.GetAllGroupSettings(
+        self.cnxn, [777L, 888L, 999L])
+    self.mox.VerifyAll()
+    self.assertEqual(
+        {888L: usergroup_pb2.MakeSettings('anyone'),
+         999L: usergroup_pb2.MakeSettings('members')},
+        actual_settings_dict)
+
+  def testGetGroupSettings_NoSuchGroup(self):
+    self.SetUpGetGroupSettings([777L], [])
+    self.mox.ReplayAll()
+    actual_settings = self.usergroup_service.GetGroupSettings(self.cnxn, 777L)
+    self.mox.VerifyAll()
+    self.assertEqual(None, actual_settings)
+
+  def testGetGroupSettings_Found(self):
+    self.SetUpGetGroupSettings([888L], [(888L, 'anyone', None, 0)])
+    self.mox.ReplayAll()
+    actual_settings = self.usergroup_service.GetGroupSettings(self.cnxn, 888)
+    self.mox.VerifyAll()
+    self.assertEqual(
+        usergroup_pb2.MemberVisibility.ANYONE,
+        actual_settings.who_can_view_members)
+
+  def testGetGroupSettings_Import(self):
+    self.SetUpGetGroupSettings([888L], [(888L, 'owners', 'mdb', 0)])
+    self.mox.ReplayAll()
+    actual_settings = self.usergroup_service.GetGroupSettings(self.cnxn, 888)
+    self.mox.VerifyAll()
+    self.assertEqual(
+        usergroup_pb2.MemberVisibility.OWNERS,
+        actual_settings.who_can_view_members)
+    self.assertEqual(
+        usergroup_pb2.GroupType.MDB,
+        actual_settings.ext_group_type)
+
+  def SetUpUpdateSettings(self, group_id, visiblity, external_group_type=None,
+                          last_sync_time=0, friend_projects=None):
+    """Expect a settings upsert plus delete-and-reinsert of friend projects."""
+    friend_projects = friend_projects or []
+    self.usergroup_service.usergroupsettings_tbl.InsertRow(
+        self.cnxn, group_id=group_id, who_can_view_members=visiblity,
+        external_group_type=external_group_type,
+        last_sync_time=last_sync_time,
+        replace=True)
+    self.usergroup_service.usergroupprojects_tbl.Delete(
+        self.cnxn, group_id=group_id)
+    if friend_projects:
+      rows = [(group_id, p_id) for p_id in friend_projects]
+      self.usergroup_service.usergroupprojects_tbl.InsertRows(
+          self.cnxn, ['group_id', 'project_id'], rows)
+
+  def testUpdateSettings_Normal(self):
+    self.SetUpUpdateSettings(888L, 'anyone')
+    self.mox.ReplayAll()
+    self.usergroup_service.UpdateSettings(
+        self.cnxn, 888L, usergroup_pb2.MakeSettings('anyone'))
+    self.mox.VerifyAll()
+
+  def testUpdateSettings_Import(self):
+    self.SetUpUpdateSettings(888L, 'owners', 'mdb')
+    self.mox.ReplayAll()
+    self.usergroup_service.UpdateSettings(
+        self.cnxn, 888L,
+        usergroup_pb2.MakeSettings('owners', 'mdb'))
+    self.mox.VerifyAll()
+
+  def SetUpDAG(self, group_id_rows, usergroup_rows):
+    """Expect the two queries that group_dag.Build issues to load the graph."""
+    self.usergroup_service.usergroupsettings_tbl.Select(
+        self.cnxn, cols=['group_id']).AndReturn(group_id_rows)
+    self.usergroup_service.usergroup_tbl.Select(
+        self.cnxn, cols=['user_id', 'group_id'], distinct=True,
+        user_id=[r[0] for r in group_id_rows]).AndReturn(usergroup_rows)
+
+  def testDAG_Build(self):
+    # Old entries should go away after rebuilding
+    self.usergroup_service.group_dag.user_group_parents = (
+        collections.defaultdict(list))
+    self.usergroup_service.group_dag.user_group_parents[111] = [222]
+    # Two groups: 888 and 999 while 999 is a member of 888.
+    self.SetUpDAG([(888,), (999,)], [(999, 888)])
+    self.mox.ReplayAll()
+    self.usergroup_service.group_dag.Build(self.cnxn)
+    self.mox.VerifyAll()
+    self.assertIn(888, self.usergroup_service.group_dag.user_group_children)
+    self.assertIn(999, self.usergroup_service.group_dag.user_group_parents)
+    self.assertNotIn(111, self.usergroup_service.group_dag.user_group_parents)
+
+  def testDAG_GetAllAncestors(self):
+    # Three groups: 777, 888 and 999.
+    # 999 is a direct member of 888, and 888 is a direct member of 777.
+    self.SetUpDAG([(777,), (888,), (999,)], [(999, 888), (888, 777)])
+    self.mox.ReplayAll()
+    ancestors = self.usergroup_service.group_dag.GetAllAncestors(
+        self.cnxn, 999)
+    self.mox.VerifyAll()
+    ancestors.sort()
+    self.assertEqual([777, 888], ancestors)
+
+  def testDAG_GetAllAncestorsDiamond(self):
+    # Four groups: 666, 777, 888 and 999.
+    # 999 is a direct member of both 888 and 777,
+    # 888 is a direct member of 666, and 777 is also a direct member of 666.
+    self.SetUpDAG([(666, ), (777,), (888,), (999,)],
+                  [(999, 888), (999, 777), (888, 666), (777, 666)])
+    self.mox.ReplayAll()
+    ancestors = self.usergroup_service.group_dag.GetAllAncestors(
+        self.cnxn, 999)
+    self.mox.VerifyAll()
+    ancestors.sort()
+    # Each ancestor appears once even though there are two paths to 666.
+    self.assertEqual([666, 777, 888], ancestors)
+
+  def testDAG_GetAllDescendants(self):
+    # Four groups: 666, 777, 888 and 999.
+    # 999 is a direct member of both 888 and 777,
+    # 888 is a direct member of 666, and 777 is also a direct member of 666.
+    self.SetUpDAG([(666, ), (777,), (888,), (999,)],
+                  [(999, 888), (999, 777), (888, 666), (777, 666)])
+    self.mox.ReplayAll()
+    descendants = self.usergroup_service.group_dag.GetAllDescendants(
+        self.cnxn, 666)
+    self.mox.VerifyAll()
+    descendants.sort()
+    self.assertEqual([777, 888, 999], descendants)
+
+  def testDAG_IsChild(self):
+    # Four groups: 666, 777, 888 and 999.
+    # 999 is a direct member of both 888 and 777,
+    # 888 is a direct member of 666, and 777 is also a direct member of 666.
+    self.SetUpDAG([(666, ), (777,), (888,), (999,)],
+                  [(999, 888), (999, 777), (888, 666), (777, 666)])
+    self.mox.ReplayAll()
+    result1 = self.usergroup_service.group_dag.IsChild(
+        self.cnxn, 777, 666)
+    result2 = self.usergroup_service.group_dag.IsChild(
+        self.cnxn, 777, 888)
+    self.mox.VerifyAll()
+    self.assertTrue(result1)
+    self.assertFalse(result2)
+
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/appengine/monorail/services/tracker_fulltext.py b/appengine/monorail/services/tracker_fulltext.py
new file mode 100644
index 0000000..ea453fd
--- /dev/null
+++ b/appengine/monorail/services/tracker_fulltext.py
@@ -0,0 +1,276 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide fulltext search for issues."""
+
+import collections
+import logging
+import time
+
+from google.appengine.api import search
+
+import settings
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from services import fulltext_helpers
+from tracker import tracker_bizobj
+
+
+# When updating and re-indexing all issues in a project, work in batches
+# of this size to manage memory usage and avoid rpc timeouts.
+_INDEX_BATCH_SIZE = 40
+
+
+# The user can search for text that occurs specifically in these
+# parts of an issue.
+ISSUE_FULLTEXT_FIELDS = ['summary', 'description', 'comment']
+# Note: issue documents also contain a "metadata" field, but we do not
+# expose that to users. Issue metadata can be searched in a structured way
+# by giving a specific field name such as "owner:" or "status:". The metadata
+# search field exists only for fulltext queries that do not specify any field.
+
+
+def IndexIssues(cnxn, issues, user_service, issue_service, config_service):
+ """(Re)index all the given issues.
+
+ Args:
+ cnxn: connection to SQL database.
+ issues: list of Issue PBs to index.
+ user_service: interface to user data storage.
+ issue_service: interface to issue data storage.
+ config_service: interface to configuration data storage.
+ """
+ issues = list(issues)
+ config_dict = config_service.GetProjectConfigs(
+ cnxn, {issue.project_id for issue in issues})
+ for start in xrange(0, len(issues), _INDEX_BATCH_SIZE):
+ logging.info('indexing issues: %d remaining', len(issues) - start)
+ _IndexIssueBatch(
+ cnxn, issues[start:start + _INDEX_BATCH_SIZE], user_service,
+ issue_service, config_dict)
+
+
+def _IndexIssueBatch(cnxn, issues, user_service, issue_service, config_dict):
+ """Internal method to (re)index the given batch of issues.
+
+ Args:
+ cnxn: connection to SQL database.
+ issues: list of Issue PBs to index.
+ user_service: interface to user data storage.
+ issue_service: interface to issue data storage.
+ config_dict: dict {project_id: config} for all the projects that
+ the given issues are in.
+ """
+ user_ids = tracker_bizobj.UsersInvolvedInIssues(issues)
+ comments_dict = issue_service.GetCommentsForIssues(
+ cnxn, [issue.issue_id for issue in issues])
+ for comments in comments_dict.itervalues():
+ user_ids.update([ic.user_id for ic in comments])
+
+ users_by_id = framework_views.MakeAllUserViews(
+ cnxn, user_service, user_ids)
+ _CreateIssueSearchDocuments(issues, comments_dict, users_by_id, config_dict)
+
+
+def _CreateIssueSearchDocuments(
+ issues, comments_dict, users_by_id, config_dict):
+ """Make the GAE search index documents for the given issue batch.
+
+ Args:
+ issues: list of issues to index.
+ comments_dict: prefetched dictionary of comments on those issues.
+ users_by_id: dictionary {user_id: UserView} so that the email
+ addresses of users who left comments can be found via search.
+ config_dict: dict {project_id: config} for all the projects that
+ the given issues are in.
+ """
+ documents_by_shard = collections.defaultdict(list)
+ for issue in issues:
+ comments = comments_dict.get(issue.issue_id, [])
+ comments = _IndexableComments(comments, users_by_id)
+ summary = issue.summary
+ # TODO(jrobbins): allow search specifically on explicit vs derived
+ # fields.
+ owner_id = tracker_bizobj.GetOwnerId(issue)
+ owner_email = users_by_id[owner_id].email
+ config = config_dict[issue.project_id]
+ component_paths = []
+ for component_id in issue.component_ids:
+ cd = tracker_bizobj.FindComponentDefByID(component_id, config)
+ if cd:
+ component_paths.append(cd.path)
+
+ field_values = [str(tracker_bizobj.GetFieldValue(fv, users_by_id))
+ for fv in issue.field_values]
+ metadata = '%s %s %s %s %s %s' % (
+ tracker_bizobj.GetStatus(issue),
+ owner_email,
+ [users_by_id[cc_id].email for cc_id in
+ tracker_bizobj.GetCcIds(issue)],
+ ' '.join(component_paths),
+ ' '.join(field_values),
+ ' '.join(tracker_bizobj.GetLabels(issue)))
+ assert comments, 'issues should always have at least the description'
+ description = _ExtractCommentText(comments[0], users_by_id)
+ description = description[:framework_constants.MAX_FTS_FIELD_SIZE]
+ all_comments = ' '. join(
+ _ExtractCommentText(c, users_by_id) for c in comments[1:])
+ all_comments = all_comments[:framework_constants.MAX_FTS_FIELD_SIZE]
+
+ custom_fields = _BuildCustomFTSFields(issue)
+ doc = search.Document(
+ doc_id=str(issue.issue_id),
+ fields=[
+ search.NumberField(name='project_id', value=issue.project_id),
+ search.TextField(name='summary', value=summary),
+ search.TextField(name='metadata', value=metadata),
+ search.TextField(name='description', value=description),
+ search.TextField(name='comment', value=all_comments),
+ ] + custom_fields)
+
+ shard_id = issue.issue_id % settings.num_logical_shards
+ documents_by_shard[shard_id].append(doc)
+
+ start_time = time.time()
+ promises = []
+ for shard_id, documents in documents_by_shard.iteritems():
+ if documents:
+ promises.append(framework_helpers.Promise(
+ _IndexDocsInShard, shard_id, documents))
+
+ for promise in promises:
+ promise.WaitAndGetValue()
+
+ logging.info('Finished %d indexing in shards in %d ms',
+ len(documents_by_shard), int((time.time() - start_time) * 1000))
+
+
+def _IndexableComments(comments, users_by_id):
+ """We only index the comments that are not deleted or banned.
+
+ Args:
+ comments: list of Comment PBs for one issue.
+ users_by_id: Dict of (user_id -> UserView) for all users.
+
+ Returns:
+ A list of comments filtered to not have any deleted comments or
+ comments from banned users. If the issue has a huge number of
+ comments, only a certain number of the first and last comments
+ are actually indexed.
+ """
+ allowed_comments = []
+ for comment in comments:
+ user_view = users_by_id.get(comment.user_id)
+ if not (comment.deleted_by or (user_view and user_view.banned)):
+ allowed_comments.append(comment)
+
+ reasonable_size = (framework_constants.INITIAL_COMMENTS_TO_INDEX +
+ framework_constants.FINAL_COMMENTS_TO_INDEX)
+ if len(allowed_comments) <= reasonable_size:
+ return allowed_comments
+
+ candidates = ( # Prioritize the description and recent comments.
+ allowed_comments[0:1] +
+ allowed_comments[-framework_constants.FINAL_COMMENTS_TO_INDEX:] +
+ allowed_comments[1:framework_constants.INITIAL_COMMENTS_TO_INDEX])
+ total_length = 0
+ result = []
+ for comment in candidates:
+ total_length += len(comment.content)
+ if total_length < framework_constants.MAX_FTS_FIELD_SIZE:
+ result.append(comment)
+
+ return result
+
+
+def _IndexDocsInShard(shard_id, documents):
+ search_index = search.Index(
+ name=settings.search_index_name_format % shard_id)
+ search_index.put(documents)
+ logging.info('FTS indexed %d docs in shard %d', len(documents), shard_id)
+ # TODO(jrobbins): catch OverQuotaError and add the issues to the
+ # ReindexQueue table instead.
+
+
+def _ExtractCommentText(comment, users_by_id):
+ """Return a string with all the searchable text of the given Comment PB."""
+ commenter_email = users_by_id[comment.user_id].email
+ return '%s %s %s' % (
+ commenter_email,
+ comment.content,
+ ' '.join(attach.filename
+ for attach in comment.attachments
+ if not attach.deleted))
+
+
+def _BuildCustomFTSFields(issue):
+ """Return a list of FTS Fields to index string-valued custom fields."""
+ fts_fields = []
+ for fv in issue.field_values:
+ if fv.str_value:
+ # TODO(jrobbins): also indicate which were derived vs. explicit.
+ # TODO(jrobbins): also toss in the email addresses of any users in
+ # user-valued custom fields, ints for int-valued fields, etc.
+ fts_field = search.TextField(
+ name='custom_%d' % fv.field_id, value=fv.str_value)
+ fts_fields.append(fts_field)
+
+ return fts_fields
+
+
+def UnindexIssues(issue_ids):
+ """Remove many issues from the sharded search indexes."""
+ iids_by_shard = {}
+ for issue_id in issue_ids:
+ shard_id = issue_id % settings.num_logical_shards
+ iids_by_shard.setdefault(shard_id, [])
+ iids_by_shard[shard_id].append(issue_id)
+
+ for shard_id, iids_in_shard in iids_by_shard.iteritems():
+ try:
+ logging.info(
+ 'unindexing %r issue_ids in %r', len(iids_in_shard), shard_id)
+ search_index = search.Index(
+ name=settings.search_index_name_format % shard_id)
+ search_index.delete([str(iid) for iid in iids_in_shard])
+ except search.Error:
+ logging.exception('FTS deletion failed')
+
+
+def SearchIssueFullText(project_ids, query_ast_conj, shard_id):
+ """Do full-text search in GAE FTS.
+
+ Args:
+ project_ids: list of project ID numbers to consider.
+    query_ast_conj: One conjunctive clause from the AST parsed
+ from the user's query.
+ shard_id: int shard ID for the shard to consider.
+
+ Returns:
+ (issue_ids, capped) where issue_ids is a list of issue issue_ids that match
+ the full-text query. And, capped is True if the results were capped due to
+ an implementation limitation. Or, return (None, False) if the given AST
+ conjunction contains no full-text conditions.
+ """
+ fulltext_query = fulltext_helpers.BuildFTSQuery(
+ query_ast_conj, ISSUE_FULLTEXT_FIELDS)
+ if fulltext_query is None:
+ return None, False
+
+ if project_ids:
+ project_clause = ' or '.join(
+ 'project_id:%d' % pid for pid in project_ids)
+ fulltext_query = '(%s) %s' % (project_clause, fulltext_query)
+
+ # TODO(jrobbins): it would be good to also include some other
+ # structured search terms to narrow down the set of index
+ # documents considered. E.g., most queries are only over the
+ # open issues.
+ logging.info('FTS query is %r', fulltext_query)
+ issue_ids = fulltext_helpers.ComprehensiveSearch(
+ fulltext_query, settings.search_index_name_format % shard_id)
+ capped = len(issue_ids) >= settings.fulltext_limit_per_shard
+ return issue_ids, capped
diff --git a/appengine/monorail/services/user_svc.py b/appengine/monorail/services/user_svc.py
new file mode 100644
index 0000000..8439268
--- /dev/null
+++ b/appengine/monorail/services/user_svc.py
@@ -0,0 +1,503 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A set of functions that provide persistence for users.
+
+Business objects are described in user_pb2.py.
+"""
+
+import logging
+
+import settings
+from framework import actionlimit
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import sql
+from framework import validate
+from proto import user_pb2
+from services import caches
+
+
+USER_TABLE_NAME = 'User'
+ACTIONLIMIT_TABLE_NAME = 'ActionLimit'
+DISMISSEDCUES_TABLE_NAME = 'DismissedCues'
+
+USER_COLS = [
+ 'user_id', 'email', 'is_site_admin', 'notify_issue_change',
+ 'notify_starred_issue_change', 'banned', 'after_issue_update',
+ 'keep_people_perms_open', 'preview_on_hover', 'ignore_action_limits',
+ 'obscure_email']
+ACTIONLIMIT_COLS = [
+ 'user_id', 'action_kind', 'recent_count', 'reset_timestamp',
+ 'lifetime_count', 'lifetime_limit', 'period_soft_limit',
+ 'period_hard_limit']
+DISMISSEDCUES_COLS = ['user_id', 'cue']
+
+
+class UserTwoLevelCache(caches.AbstractTwoLevelCache):
+ """Class to manage RAM and memcache for User PBs."""
+
+ def __init__(self, cache_manager, user_service):
+ super(UserTwoLevelCache, self).__init__(
+ cache_manager, 'user', 'user:', user_pb2.User,
+ max_size=settings.user_cache_max_size)
+ self.user_service = user_service
+
+ def _DeserializeUsersByID(
+ self, user_rows, actionlimit_rows, dismissedcue_rows):
+ """Convert database row tuples into User PBs.
+
+ Args:
+ user_rows: rows from the User DB table.
+ actionlimit_rows: rows from the ActionLimit DB table.
+ dismissedcue_rows: rows from the DismissedCues DB table.
+
+ Returns:
+ A dict {user_id: user_pb} for all the users referenced in user_rows.
+ """
+ result_dict = {}
+
+ # Make one User PB for each row in user_rows.
+ for row in user_rows:
+ (user_id, email, is_site_admin,
+ notify_issue_change, notify_starred_issue_change, banned,
+ after_issue_update, keep_people_perms_open, preview_on_hover,
+ ignore_action_limits, obscure_email) = row
+ user = user_pb2.MakeUser()
+ user.email = email
+ user.is_site_admin = bool(is_site_admin)
+ user.notify_issue_change = bool(notify_issue_change)
+ user.notify_starred_issue_change = bool(notify_starred_issue_change)
+ user.obscure_email = bool(obscure_email)
+ if banned:
+ user.banned = banned
+ if after_issue_update:
+ user.after_issue_update = user_pb2.IssueUpdateNav(
+ after_issue_update.upper())
+ user.keep_people_perms_open = bool(keep_people_perms_open)
+ user.preview_on_hover = bool(preview_on_hover)
+ user.ignore_action_limits = bool(ignore_action_limits)
+ result_dict[user_id] = user
+
+ # Make an ActionLimit for each actionlimit row and attach it to a User PB.
+ for row in actionlimit_rows:
+ (user_id, action_type_name, recent_count, reset_timestamp,
+ lifetime_count, lifetime_limit, period_soft_limit,
+ period_hard_limit) = row
+ if user_id not in result_dict:
+ logging.error('Found action limits for missing user %r', user_id)
+ continue
+ user = result_dict[user_id]
+ action_type = actionlimit.ACTION_TYPE_NAMES[action_type_name]
+ al = actionlimit.GetLimitPB(user, action_type)
+ al.recent_count = recent_count
+ al.reset_timestamp = reset_timestamp
+ al.lifetime_count = lifetime_count
+ al.lifetime_limit = lifetime_limit
+ al.period_soft_limit = period_soft_limit
+ al.period_hard_limit = period_hard_limit
+
+ # Build up a list of dismissed "cue card" help items for the users.
+ for user_id, cue in dismissedcue_rows:
+ if user_id not in result_dict:
+ logging.error('Found dismissed cues for missing user %r', user_id)
+ continue
+ result_dict[user_id].dismissed_cues.append(cue)
+
+ return result_dict
+
+ def FetchItems(self, cnxn, keys):
+ """On RAM and memcache miss, retrieve User objects from the database.
+
+ Args:
+ cnxn: connection to SQL database.
+ keys: list of user IDs to retrieve.
+
+ Returns:
+ A dict {user_id: user_pb} for each user that satisfies the conditions.
+ """
+ user_rows = self.user_service.user_tbl.Select(
+ cnxn, cols=USER_COLS, user_id=keys)
+ actionlimit_rows = self.user_service.actionlimit_tbl.Select(
+ cnxn, cols=ACTIONLIMIT_COLS, user_id=keys)
+ dismissedcues_rows = self.user_service.dismissedcues_tbl.Select(
+ cnxn, cols=DISMISSEDCUES_COLS, user_id=keys)
+ return self._DeserializeUsersByID(
+ user_rows, actionlimit_rows, dismissedcues_rows)
+
+
+class UserService(object):
+ """The persistence layer for all user data."""
+
+ def __init__(self, cache_manager):
+ """Constructor.
+
+ Args:
+ cache_manager: local cache with distributed invalidation.
+ """
+ self.user_tbl = sql.SQLTableManager(USER_TABLE_NAME)
+ self.actionlimit_tbl = sql.SQLTableManager(ACTIONLIMIT_TABLE_NAME)
+ self.dismissedcues_tbl = sql.SQLTableManager(DISMISSEDCUES_TABLE_NAME)
+
+ # Like a dictionary {user_id: email}
+ self.email_cache = cache_manager.MakeCache('user', max_size=50000)
+
+ # Like a dictionary {email: user_id}.
+    # This will never invalidate, and it doesn't need to.
+ self.user_id_cache = cache_manager.MakeCache('user', max_size=50000)
+
+ # Like a dictionary {user_id: user_pb}
+ self.user_2lc = UserTwoLevelCache(cache_manager, self)
+
+ ### Creating users
+
+ def _CreateUsers(self, cnxn, emails):
+ """Create many users in the database."""
+ emails = [email.lower() for email in emails]
+ ids = [framework_helpers.MurmurHash3_x86_32(email) for email in emails]
+ row_values = [
+ (user_id, email, not framework_bizobj.IsPriviledgedDomainUser(email))
+ for (user_id, email) in zip(ids, emails)]
+ self.user_tbl.InsertRows(
+ cnxn, ['user_id', 'email', 'obscure_email'], row_values)
+ self.user_2lc.InvalidateKeys(cnxn, ids)
+
+ ### Lookup of user ID and email address
+
+ def LookupUserEmails(self, cnxn, user_ids):
+ """Return a dict of email addresses for the given user IDs.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_ids: list of int user IDs to look up.
+
+ Returns:
+ A dict {user_id: email_addr} for all the requested IDs.
+
+ Raises:
+ NoSuchUserException: if any requested user cannot be found.
+ """
+ self.email_cache.CacheItem(framework_constants.NO_USER_SPECIFIED, '')
+ emails_dict, missed_ids = self.email_cache.GetAll(user_ids)
+ if missed_ids:
+ logging.info('got %d user emails from cache', len(emails_dict))
+ rows = self.user_tbl.Select(
+ cnxn, cols=['user_id', 'email'], user_id=missed_ids)
+ retrieved_dict = dict(rows)
+ logging.info('looked up users %r', retrieved_dict)
+ self.email_cache.CacheAll(retrieved_dict)
+ emails_dict.update(retrieved_dict)
+
+ # Check if there are any that we could not find. ID 0 means "no user".
+ nonexist_ids = [user_id for user_id in user_ids
+ if user_id and user_id not in emails_dict]
+ if nonexist_ids:
+ raise NoSuchUserException(
+ 'No email addresses found for users %r' % nonexist_ids)
+
+ return emails_dict
+
+ def LookupUserEmail(self, cnxn, user_id):
+ """Get the email address of the given user.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_id: int user ID of the user whose email address is needed.
+
+ Returns:
+ String email address of that user or None if user_id is invalid.
+
+ Raises:
+ NoSuchUserException: if no email address was found for that user.
+ """
+ if not user_id:
+ return None
+ emails_dict = self.LookupUserEmails(cnxn, [user_id])
+ return emails_dict[user_id]
+
+ def LookupExistingUserIDs(self, cnxn, emails):
+ """Return a dict of user IDs for the given emails for users that exist.
+
+ Args:
+ cnxn: connection to SQL database.
+ emails: list of string email addresses.
+
+ Returns:
+ A dict {email_addr: user_id} for the requested emails.
+ """
+ # Look up these users in the RAM cache
+ user_id_dict, missed_emails = self.user_id_cache.GetAll(emails)
+ logging.info('hit %d emails, missed %r', len(user_id_dict), missed_emails)
+
+ # Hit the DB to lookup any user IDs that were not cached.
+ if missed_emails:
+ rows = self.user_tbl.Select(
+ cnxn, cols=['email', 'user_id'], email=missed_emails)
+ retrieved_dict = dict(rows)
+ # Cache all the user IDs that we retrieved to make later requests faster.
+ self.user_id_cache.CacheAll(retrieved_dict)
+ user_id_dict.update(retrieved_dict)
+
+ logging.info('looked up User IDs %r', user_id_dict)
+ return user_id_dict
+
+ def LookupUserIDs(self, cnxn, emails, autocreate=False,
+ allowgroups=False):
+ """Return a dict of user IDs for the given emails.
+
+ Args:
+ cnxn: connection to SQL database.
+ emails: list of string email addresses.
+ autocreate: set to True to create users that were not found.
+ allowgroups: set to True to allow non-email user name for group
+ creation.
+
+ Returns:
+ A dict {email_addr: user_id} for the requested emails.
+
+ Raises:
+ NoSuchUserException: if some users were not found and autocreate is
+ False.
+ """
+ # Skip any addresses that look like "--", because that means "no user".
+ # Also, make sure all email addresses are lower case.
+ needed_emails = [email.lower() for email in emails
+ if not framework_constants.NO_VALUE_RE.match(email)]
+
+ # Look up these users in the RAM cache
+ user_id_dict = self.LookupExistingUserIDs(cnxn, needed_emails)
+ if len(needed_emails) == len(user_id_dict):
+ logging.info('found all %d emails', len(user_id_dict))
+ return user_id_dict
+
+ # If any were not found in the DB, create them or raise an exception.
+ nonexist_emails = [email for email in needed_emails
+ if email not in user_id_dict]
+ logging.info('nonexist_emails: %r, autocreate is %r',
+ nonexist_emails, autocreate)
+ if not autocreate:
+ raise NoSuchUserException('%r' % nonexist_emails)
+
+ if not allowgroups:
+ # Only create accounts for valid email addresses.
+ nonexist_emails = [email for email in nonexist_emails
+ if validate.IsValidEmail(email)]
+ if not nonexist_emails:
+ return user_id_dict
+
+ self._CreateUsers(cnxn, nonexist_emails)
+ created_rows = self.user_tbl.Select(
+ cnxn, cols=['email', 'user_id'], email=nonexist_emails)
+ created_dict = dict(created_rows)
+ # Cache all the user IDs that we retrieved to make later requests faster.
+ self.user_id_cache.CacheAll(created_dict)
+ user_id_dict.update(created_dict)
+
+ logging.info('looked up User IDs %r', user_id_dict)
+ return user_id_dict
+
+ def LookupUserID(self, cnxn, email, autocreate=False, allowgroups=False):
+ """Get one user ID for the given email address.
+
+ Args:
+ cnxn: connection to SQL database.
+ email: string email address of the user to look up.
+ autocreate: set to True to create users that were not found.
+ allowgroups: set to True to allow non-email user name for group
+ creation.
+
+ Returns:
+ The int user ID of the specified user.
+
+ Raises:
+ NoSuchUserException if the user was not found and autocreate is False.
+ """
+ email = email.lower()
+ email_dict = self.LookupUserIDs(
+ cnxn, [email], autocreate=autocreate, allowgroups=allowgroups)
+ if email not in email_dict:
+ raise NoSuchUserException('%r not found' % email)
+ return email_dict[email]
+
+ ### Retrieval of user objects: with preferences, action limits, and cues
+
+ def GetUsersByIDs(self, cnxn, user_ids, use_cache=True):
+ """Return a dictionary of retrieved User PBs.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_ids: list of user IDs to fetch.
+ use_cache: set to False to ignore cache and force DB lookup.
+
+ Returns:
+ A dict {user_id: user_pb} for each specified user ID. For any user ID
+      that is not found in the DB, a default User PB is created on-the-fly.
+ """
+ # Check the RAM cache and memcache, as appropriate.
+ result_dict, missed_ids = self.user_2lc.GetAll(
+ cnxn, user_ids, use_cache=use_cache)
+
+ # Provide default values for any user ID that was not found.
+ result_dict.update(
+ (user_id, user_pb2.MakeUser()) for user_id in missed_ids)
+
+ return result_dict
+
+ def GetUser(self, cnxn, user_id):
+ """Load the specified user from the user details table."""
+ return self.GetUsersByIDs(cnxn, [user_id])[user_id]
+
+ ### Updating user objects
+
+ def UpdateUser(self, cnxn, user_id, user):
+ """Store a user PB in the database.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_id: int user ID of the user to update.
+ user: User PB to store.
+
+ Returns:
+ Nothing.
+ """
+ delta = {
+ 'is_site_admin': user.is_site_admin,
+ 'notify_issue_change': user.notify_issue_change,
+ 'notify_starred_issue_change': user.notify_starred_issue_change,
+ 'banned': user.banned,
+ 'after_issue_update': str(user.after_issue_update or 'UP_TO_LIST'),
+ 'keep_people_perms_open': user.keep_people_perms_open,
+ 'preview_on_hover': user.preview_on_hover,
+ 'ignore_action_limits': user.ignore_action_limits,
+ 'obscure_email': user.obscure_email,
+ }
+ # Start sending UPDATE statements, but don't COMMIT until the end.
+ self.user_tbl.Update(cnxn, delta, user_id=user_id, commit=False)
+
+ # Add rows for any ActionLimits that are defined for this user.
+ al_rows = []
+ if user.get_assigned_value('project_creation_limit'):
+ al_rows.append(_ActionLimitToRow(
+ user_id, 'project_creation', user.project_creation_limit))
+ if user.get_assigned_value('issue_comment_limit'):
+ al_rows.append(_ActionLimitToRow(
+ user_id, 'issue_comment', user.issue_comment_limit))
+ if user.get_assigned_value('issue_attachment_limit'):
+ al_rows.append(_ActionLimitToRow(
+ user_id, 'issue_attachment', user.issue_attachment_limit))
+ if user.get_assigned_value('issue_bulk_edit_limit'):
+ al_rows.append(_ActionLimitToRow(
+ user_id, 'issue_bulk_edit', user.issue_bulk_edit_limit))
+ if user.get_assigned_value('api_request_limit'):
+ al_rows.append(_ActionLimitToRow(
+ user_id, 'api_request', user.api_request_limit))
+
+ self.actionlimit_tbl.Delete(cnxn, user_id=user_id, commit=False)
+ self.actionlimit_tbl.InsertRows(
+ cnxn, ACTIONLIMIT_COLS, al_rows, commit=False)
+
+ # Rewrite all the DismissedCues rows.
+ cues_rows = [(user_id, cue) for cue in user.dismissed_cues]
+ self.dismissedcues_tbl.Delete(cnxn, user_id=user_id, commit=False)
+ self.dismissedcues_tbl.InsertRows(
+ cnxn, DISMISSEDCUES_COLS, cues_rows, commit=False)
+
+ cnxn.Commit()
+ self.user_2lc.InvalidateKeys(cnxn, [user_id])
+
+ def UpdateUserSettings(
+ self, cnxn, user_id, user, notify=None, notify_starred=None,
+ obscure_email=None, after_issue_update=None,
+ is_site_admin=None, ignore_action_limits=None,
+ is_banned=None, banned_reason=None, action_limit_updates=None,
+ dismissed_cues=None, keep_people_perms_open=None, preview_on_hover=None):
+ """Update the preferences of the specified user.
+
+ Args:
+ cnxn: connection to SQL database.
+ user_id: int user ID of the user whose settings we are updating.
+ user: User PB of user before changes are applied.
+ keyword args: dictionary of setting names mapped to new values.
+
+ Returns:
+ The user's new User PB.
+ """
+ # notifications
+ if notify is not None:
+ user.notify_issue_change = notify
+ if notify_starred is not None:
+ user.notify_starred_issue_change = notify_starred
+
+ # display options
+ if after_issue_update is not None:
+ user.after_issue_update = user_pb2.IssueUpdateNav(after_issue_update)
+ if preview_on_hover is not None:
+ user.preview_on_hover = preview_on_hover
+ if dismissed_cues: # Note, we never set it back to [].
+ user.dismissed_cues = dismissed_cues
+ if keep_people_perms_open is not None:
+ user.keep_people_perms_open = keep_people_perms_open
+
+ # misc
+ if obscure_email is not None:
+ user.obscure_email = obscure_email
+
+ # admin
+ if is_site_admin is not None:
+ user.is_site_admin = is_site_admin
+ if ignore_action_limits is not None:
+ user.ignore_action_limits = ignore_action_limits
+ if is_banned is not None:
+ if is_banned:
+ user.banned = banned_reason or 'No reason given'
+ else:
+ user.reset('banned')
+
+ # action limits
+ if action_limit_updates:
+ self._UpdateActionLimits(user, action_limit_updates)
+
+ # Write the user settings to the database.
+ self.UpdateUser(cnxn, user_id, user)
+
+ def _UpdateActionLimits(self, user, action_limit_updates):
+ """Apply action limit updates to a user's account."""
+ for action, new_limit_tuple in action_limit_updates.iteritems():
+ if action in actionlimit.ACTION_TYPE_NAMES:
+ action_type = actionlimit.ACTION_TYPE_NAMES[action]
+ if new_limit_tuple is None:
+ actionlimit.ResetRecentActions(user, action_type)
+ else:
+ new_soft_limit, new_hard_limit, new_lifetime_limit = new_limit_tuple
+
+ pb_getter = action + '_limit'
+ old_lifetime_limit = getattr(user, pb_getter).lifetime_limit
+ old_soft_limit = getattr(user, pb_getter).period_soft_limit
+ old_hard_limit = getattr(user, pb_getter).period_hard_limit
+
+ if ((new_lifetime_limit >= 0 and
+ new_lifetime_limit != old_lifetime_limit) or
+ (new_soft_limit >= 0 and new_soft_limit != old_soft_limit) or
+ (new_hard_limit >= 0 and new_hard_limit != old_hard_limit)):
+ actionlimit.CustomizeLimit(user, action_type, new_soft_limit,
+ new_hard_limit, new_lifetime_limit)
+
+
+def _ActionLimitToRow(user_id, action_kind, al):
+ """Return a tuple for an SQL table row for an action limit."""
+ return (user_id, action_kind, al.recent_count, al.reset_timestamp,
+ al.lifetime_count, al.lifetime_limit, al.period_soft_limit,
+ al.period_hard_limit)
+
+
+class Error(Exception):
+ """Base class for errors from this module."""
+ pass
+
+
+class NoSuchUserException(Error):
+ """No user with the specified name exists."""
+ pass
diff --git a/appengine/monorail/services/usergroup_svc.py b/appengine/monorail/services/usergroup_svc.py
new file mode 100644
index 0000000..9700190
--- /dev/null
+++ b/appengine/monorail/services/usergroup_svc.py
@@ -0,0 +1,541 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Persistence class for user groups.
+
+User groups are represented in the database by:
+- A row in the Users table giving an email address and user ID.
+ (A "group ID" is the user_id of the group in the User table.)
+- A row in the UserGroupSettings table giving user group settings.
+
+Membership of a user X in user group Y is represented as:
+- A row in the UserGroup table with user_id=X and group_id=Y.
+"""
+
+import collections
+import logging
+import re
+
+from framework import permissions
+from framework import sql
+from proto import usergroup_pb2
+from services import caches
+
+
+USERGROUP_TABLE_NAME = 'UserGroup'
+USERGROUPSETTINGS_TABLE_NAME = 'UserGroupSettings'
+USERGROUPPROJECTS_TABLE_NAME = 'Group2Project'
+
+USERGROUP_COLS = ['user_id', 'group_id', 'role']
+USERGROUPSETTINGS_COLS = ['group_id', 'who_can_view_members',
+ 'external_group_type', 'last_sync_time']
+USERGROUPPROJECTS_COLS = ['group_id', 'project_id']
+
+
+class MembershipTwoLevelCache(caches.AbstractTwoLevelCache):
+  """Class to manage RAM and memcache for each user's memberships."""
+
+  def __init__(self, cache_manager, usergroup_service, group_dag):
+    super(MembershipTwoLevelCache, self).__init__(
+        cache_manager, 'user', 'memberships:', None)
+    self.usergroup_service = usergroup_service
+    self.group_dag = group_dag
+
+  def _DeserializeMemberships(self, memberships_rows):
+    """Convert DB result rows into a dict {user_id: set(group_ids)}."""
+    result_dict = collections.defaultdict(set)
+    for user_id, group_id in memberships_rows:
+      result_dict[user_id].add(group_id)
+
+    return result_dict
+
+  def FetchItems(self, cnxn, keys):
+    """On RAM and memcache miss, hit the database to get memberships.
+
+    Args:
+      cnxn: connection to SQL database.
+      keys: list of int user IDs whose memberships were not cached.
+
+    Returns:
+      Dict {user_id: set(group_ids)} with an entry for every requested key,
+      including transitive memberships via nested groups.
+    """
+    direct_memberships_rows = self.usergroup_service.usergroup_tbl.Select(
+        cnxn, cols=['user_id', 'group_id'], distinct=True,
+        user_id=keys)
+    memberships_set = set()
+    for c_id, p_id in direct_memberships_rows:
+      # Membership is transitive: belonging to a group also makes the user
+      # a member of every ancestor of that group.
+      all_parents = self.group_dag.GetAllAncestors(cnxn, p_id, True)
+      all_parents.append(p_id)
+      memberships_set.update([(c_id, g_id) for g_id in all_parents])
+    retrieved_dict = self._DeserializeMemberships(list(memberships_set))
+
+    # Make sure that every requested user is in the result, and gets cached.
+    retrieved_dict.update(
+        (user_id, set()) for user_id in keys
+        if user_id not in retrieved_dict)
+    return retrieved_dict
+
+
+class UserGroupService(object):
+  """The persistence layer for user group data."""
+
+  def __init__(self, cache_manager):
+    """Initialize this service so that it is ready to use.
+
+    Args:
+      cache_manager: local cache with distributed invalidation.
+    """
+    self.usergroup_tbl = sql.SQLTableManager(USERGROUP_TABLE_NAME)
+    self.usergroupsettings_tbl = sql.SQLTableManager(
+        USERGROUPSETTINGS_TABLE_NAME)
+    self.usergroupprojects_tbl = sql.SQLTableManager(
+        USERGROUPPROJECTS_TABLE_NAME)
+
+    self.group_dag = UserGroupDAG(self)
+
+    # Like a dictionary {user_id: {group_id}}
+    self.memberships_2lc = MembershipTwoLevelCache(
+        cache_manager, self, self.group_dag)
+
+  ### Group creation
+
+  def CreateGroup(self, cnxn, services, group_name, who_can_view_members,
+      ext_group_type=None, friend_projects=None):
+    """Create a new user group.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: connections to backend services.
+      group_name: string email address of the group to create.
+      who_can_view_members: 'owners', 'members', or 'anyone'.
+      ext_group_type: The type of external group to import.
+      friend_projects: The project ids declared as group friends to view its
+        members.
+
+    Returns:
+      int group_id of the new group.
+    """
+    friend_projects = friend_projects or []
+    assert who_can_view_members in ('owners', 'members', 'anyone')
+    if ext_group_type:
+      ext_group_type = str(ext_group_type).lower()
+      assert ext_group_type in (
+          'chrome_infra_auth', 'mdb', 'baggins'), (
+              ext_group_type)
+      # Externally synced groups only ever expose members to group owners.
+      assert who_can_view_members == 'owners'
+    # A group is represented by a row in the User table, so creating it is
+    # just a user-ID lookup with autocreate and allowgroups enabled.
+    group_id = services.user.LookupUserID(
+        cnxn, group_name.lower(), autocreate=True, allowgroups=True)
+    group_settings = usergroup_pb2.MakeSettings(
+        who_can_view_members, ext_group_type, 0, friend_projects)
+    self.UpdateSettings(cnxn, group_id, group_settings)
+    return group_id
+
+  def DeleteGroups(self, cnxn, group_ids):
+    """Delete groups' members and settings. It will NOT delete user entries.
+
+    Args:
+      cnxn: connection to SQL database.
+      group_ids: list of group ids to delete.
+    """
+    member_ids_dict, owner_ids_dict = self.LookupMembers(cnxn, group_ids)
+    citizens_id_dict = collections.defaultdict(list)
+    # LookupMembers returns a (possibly empty) entry for every requested
+    # group, and indexing the defaultdict creates a key even when the
+    # extended list is empty, so every group in group_ids is processed below.
+    for g_id, user_ids in member_ids_dict.iteritems():
+      citizens_id_dict[g_id].extend(user_ids)
+    for g_id, user_ids in owner_ids_dict.iteritems():
+      citizens_id_dict[g_id].extend(user_ids)
+    for g_id, citizen_ids in citizens_id_dict.iteritems():
+      logging.info('Deleting group %d', g_id)
+      # Remove group members, friend projects and settings
+      self.RemoveMembers(cnxn, g_id, citizen_ids)
+      self.usergroupprojects_tbl.Delete(cnxn, group_id=g_id)
+      self.usergroupsettings_tbl.Delete(cnxn, group_id=g_id)
+
+  def DetermineWhichUserIDsAreGroups(self, cnxn, user_ids):
+    """From a list of user IDs, identify potential user groups.
+
+    Args:
+      cnxn: connection to SQL database.
+      user_ids: list of user IDs to examine.
+
+    Returns:
+      A list with a subset of the given user IDs that are user groups
+      rather than individual users.
+    """
+    # It is a group if there is any entry in the UserGroupSettings table.
+    group_id_rows = self.usergroupsettings_tbl.Select(
+        cnxn, cols=['group_id'], group_id=user_ids)
+    group_ids = [row[0] for row in group_id_rows]
+    return group_ids
+
+  ### User memberships in groups
+
+  def LookupAllMemberships(self, cnxn, user_ids, use_cache=True):
+    """Lookup all the group memberships of a list of users.
+
+    Args:
+      cnxn: connection to SQL database.
+      user_ids: list of int user IDs to get memberships for.
+      use_cache: set to False to ignore cached values.
+
+    Returns:
+      A dict {user_id: {group_id}} for the given user_ids.
+    """
+    result_dict, missed_ids = self.memberships_2lc.GetAll(
+        cnxn, user_ids, use_cache=use_cache)
+    # FetchItems guarantees an entry for every key, so nothing can be missed.
+    assert not missed_ids
+    return result_dict
+
+  def LookupMemberships(self, cnxn, user_id):
+    """Return a set of group_ids that this user is a member of."""
+    membership_dict = self.LookupAllMemberships(cnxn, [user_id])
+    return membership_dict[user_id]
+
+  ### Group member addition, removal, and retrieval
+
+  def RemoveMembers(self, cnxn, group_id, old_member_ids):
+    """Remove the given members/owners from the user group."""
+    self.usergroup_tbl.Delete(
+        cnxn, group_id=group_id, user_id=old_member_ids)
+
+    # Memberships of the removed users, and of anyone nested under them,
+    # may have changed, so invalidate their cached membership sets.
+    all_affected = self._GetAllMembersInList(cnxn, old_member_ids)
+
+    self.group_dag.MarkObsolete()
+    self.memberships_2lc.InvalidateAllKeys(cnxn, all_affected)
+
+  def UpdateMembers(self, cnxn, group_id, member_ids, new_role):
+    """Update role for given members/owners to the user group."""
+    # Circle detection: raise before any rows are written.
+    for mid in member_ids:
+      if self.group_dag.IsChild(cnxn, group_id, mid):
+        raise CircularGroupException(
+            '%s is already an ancestor of group %s.' % (mid, group_id))
+
+    # Delete-then-insert so that existing rows get the new role.
+    self.usergroup_tbl.Delete(
+        cnxn, group_id=group_id, user_id=member_ids)
+    rows = [(member_id, group_id, new_role) for member_id in member_ids]
+    self.usergroup_tbl.InsertRows(
+        cnxn, ['user_id', 'group_id', 'role'], rows)
+
+    all_affected = self._GetAllMembersInList(cnxn, member_ids)
+
+    self.group_dag.MarkObsolete()
+    self.memberships_2lc.InvalidateAllKeys(cnxn, all_affected)
+
+  def _GetAllMembersInList(self, cnxn, group_ids):
+    """Get all direct/indirect members/owners in a list."""
+    children_member_ids, children_owner_ids = self.LookupAllMembers(
+        cnxn, group_ids)
+    all_members_owners = set()
+    all_members_owners.update(group_ids)
+    for users in children_member_ids.itervalues():
+      all_members_owners.update(users)
+    for users in children_owner_ids.itervalues():
+      all_members_owners.update(users)
+    return list(all_members_owners)
+
+  def LookupAllMembers(self, cnxn, group_ids):
+    """Retrieve user IDs of members/owners of any of the given groups
+    transitively.
+
+    Returns:
+      A pair of dicts ({group_id: member_ids}, {group_id: owner_ids}).
+      Members include indirect members from nested groups; owners are
+      direct owners only.
+    """
+    direct_member_rows = self.usergroup_tbl.Select(
+        cnxn, cols=['user_id', 'group_id', 'role'], distinct=True,
+        group_id=group_ids)
+    member_ids_dict = {}
+    owner_ids_dict = {}
+    for gid in group_ids:
+      all_descendants = self.group_dag.GetAllDescendants(cnxn, gid, True)
+      indirect_member_rows = self.usergroup_tbl.Select(
+          cnxn, cols=['user_id'], distinct=True,
+          group_id=all_descendants)
+
+      # Owners must have direct membership. All indirect users are members.
+      owner_ids_dict[gid] = [m[0] for m in direct_member_rows
+                             if m[1] == gid and m[2] == 'owner']
+      member_ids_list = [r[0] for r in indirect_member_rows]
+      member_ids_list.extend([m[0] for m in direct_member_rows
+                              if m[1] == gid and m[2] == 'member'])
+      member_ids_dict[gid] = list(set(member_ids_list))
+    return member_ids_dict, owner_ids_dict
+
+  def LookupMembers(self, cnxn, group_ids):
+    """Retrieve user IDs of direct members/owners of any of the given groups.
+
+    Args:
+      cnxn: connection to SQL database.
+      group_ids: list of int user IDs for all user groups to be examined.
+
+    Returns:
+      A dict of member IDs, and a dict of owner IDs keyed by group id.
+      Every requested group id gets an entry, possibly an empty list.
+    """
+    member_rows = self.usergroup_tbl.Select(
+        cnxn, cols=['user_id', 'group_id', 'role'], distinct=True,
+        group_id=group_ids)
+    member_ids_dict = {}
+    owner_ids_dict = {}
+    for gid in group_ids:
+      member_ids_dict[gid] = [row[0] for row in member_rows
+                              if row[1] == gid and row[2] == 'member']
+      owner_ids_dict[gid] = [row[0] for row in member_rows
+                             if row[1] == gid and row[2] == 'owner']
+    return member_ids_dict, owner_ids_dict
+
+  def ExpandAnyUserGroups(self, cnxn, user_ids):
+    """Transitively expand any user groups and return member user IDs.
+
+    Args:
+      cnxn: connection to SQL database.
+      user_ids: list of user IDs to check.
+
+    Returns:
+      A pair (individual_user_ids, transitive_ids). individual_user_ids
+      is a list of user IDs that were in the given user_ids list and
+      that identify individual members. transitive_ids is a list of
+      user IDs of the members of any user group in the given list of
+      user_ids and the individual members of any nested groups.
+    """
+    group_ids = self.DetermineWhichUserIDsAreGroups(cnxn, user_ids)
+    direct_ids = [uid for uid in user_ids if uid not in group_ids]
+    member_ids_dict, owner_ids_dict = self.LookupAllMembers(cnxn, group_ids)
+    indirect_ids = set()
+    for gid in group_ids:
+      indirect_ids.update(member_ids_dict[gid])
+      indirect_ids.update(owner_ids_dict[gid])
+
+    # Note: we return direct and indirect member IDs separately so that
+    # the email notification footer can give more a specific reason for
+    # why the user got an email. E.g., "You were Cc'd" vs. "You are a
+    # member of a user group that was Cc'd".
+    return direct_ids, list(indirect_ids)
+
+  def LookupVisibleMembers(
+      self, cnxn, group_id_list, perms, effective_ids, services):
+    """Retrieve the list of user group direct member/owner IDs that the user
+    may see.
+
+    Args:
+      cnxn: connection to SQL database.
+      group_id_list: list of int user IDs for all user groups to be examined.
+      perms: optional PermissionSet for the user viewing this page.
+      effective_ids: set of int user IDs for that user and all
+        his/her group memberships.
+      services: backend services.
+
+    Returns:
+      A pair of dicts ({group_id: member_ids}, {group_id: owner_ids})
+      restricted to the groups that the requester is allowed to view.
+    """
+    settings_dict = self.GetAllGroupSettings(cnxn, group_id_list)
+    group_ids = settings_dict.keys()
+    (owned_project_ids, membered_project_ids,
+     contrib_project_ids) = services.project.GetUserRolesInAllProjects(
+         cnxn, effective_ids)
+    project_ids = owned_project_ids.union(
+        membered_project_ids).union(contrib_project_ids)
+    # We need to fetch all members/owners to determine whether the requester
+    # has permission to view.
+    direct_member_ids_dict, direct_owner_ids_dict = self.LookupMembers(
+        cnxn, group_ids)
+    all_member_ids_dict, all_owner_ids_dict = self.LookupAllMembers(
+        cnxn, group_ids)
+    visible_member_ids = {}
+    visible_owner_ids = {}
+    for gid in group_ids:
+      member_ids = all_member_ids_dict[gid]
+      owner_ids = all_owner_ids_dict[gid]
+
+      # Permission is checked against all (transitive) members, but only
+      # direct members/owners are returned for display.
+      if permissions.CanViewGroup(perms, effective_ids, settings_dict[gid],
+                                  member_ids, owner_ids, project_ids):
+        visible_member_ids[gid] = direct_member_ids_dict[gid]
+        visible_owner_ids[gid] = direct_owner_ids_dict[gid]
+
+    return visible_member_ids, visible_owner_ids
+
+  ### Group settings
+
+  def GetAllUserGroupsInfo(self, cnxn):
+    """Fetch (addr, member_count, usergroup_settings) for all user groups."""
+    # Join against User to recover each group's email address.
+    group_rows = self.usergroupsettings_tbl.Select(
+        cnxn, cols=['group_id', 'email', 'who_can_view_members',
+                    'external_group_type', 'last_sync_time'],
+        left_joins=[('User ON UserGroupSettings.group_id = User.user_id', [])])
+    count_rows = self.usergroup_tbl.Select(
+        cnxn, cols=['group_id', 'COUNT(*)'],
+        group_by=['group_id'])
+    count_dict = dict(count_rows)
+    group_ids = [g[0] for g in group_rows]
+    friends_dict = self.GetAllGroupFriendProjects(cnxn, group_ids)
+
+    user_group_info_tuples = [
+        (email, count_dict.get(group_id, 0),
+         usergroup_pb2.MakeSettings(visiblity, group_type, last_sync_time,
+                                    friends_dict.get(group_id, [])),
+         group_id)
+        for (group_id, email, visiblity, group_type, last_sync_time)
+        in group_rows]
+    return user_group_info_tuples
+
+  def GetAllGroupSettings(self, cnxn, group_ids):
+    """Fetch {group_id: group_settings} for the specified groups."""
+    # TODO(jrobbins): add settings to control who can join, etc.
+    rows = self.usergroupsettings_tbl.Select(
+        cnxn, cols=USERGROUPSETTINGS_COLS, group_id=group_ids)
+    friends_dict = self.GetAllGroupFriendProjects(cnxn, group_ids)
+    settings_dict = {
+        group_id: usergroup_pb2.MakeSettings(
+            vis, group_type, last_sync_time, friends_dict.get(group_id, []))
+        for group_id, vis, group_type, last_sync_time in rows}
+    return settings_dict
+
+  def GetGroupSettings(self, cnxn, group_id):
+    """Retrieve group settings for the specified user group.
+
+    Args:
+      cnxn: connection to SQL database.
+      group_id: int user ID of the user group.
+
+    Returns:
+      A UserGroupSettings object, or None if no such group exists.
+    """
+    return self.GetAllGroupSettings(cnxn, [group_id]).get(group_id)
+
+  def UpdateSettings(self, cnxn, group_id, group_settings):
+    """Update the visibility settings of the specified group."""
+    who_can_view_members = str(group_settings.who_can_view_members).lower()
+    ext_group_type = group_settings.ext_group_type
+    assert who_can_view_members in ('owners', 'members', 'anyone')
+    if ext_group_type:
+      ext_group_type = str(group_settings.ext_group_type).lower()
+      assert ext_group_type in (
+          'chrome_infra_auth', 'mdb', 'baggins'), (
+              ext_group_type)
+      assert who_can_view_members == 'owners'
+    # replace=True overwrites any existing settings row for this group.
+    self.usergroupsettings_tbl.InsertRow(
+        cnxn, group_id=group_id, who_can_view_members=who_can_view_members,
+        external_group_type=ext_group_type,
+        last_sync_time=group_settings.last_sync_time,
+        replace=True)
+    # Friend projects are fully replaced: delete all, then re-insert.
+    self.usergroupprojects_tbl.Delete(
+        cnxn, group_id=group_id)
+    if group_settings.friend_projects:
+      rows = [(group_id, p_id) for p_id in group_settings.friend_projects]
+      self.usergroupprojects_tbl.InsertRows(
+          cnxn, ['group_id', 'project_id'], rows)
+
+  def GetAllGroupFriendProjects(self, cnxn, group_ids):
+    """Get {group_id: [project_ids]} for the specified user groups."""
+    rows = self.usergroupprojects_tbl.Select(
+        cnxn, cols=USERGROUPPROJECTS_COLS, group_id=group_ids)
+    friends_dict = {}
+    for group_id, project_id in rows:
+      friends_dict.setdefault(group_id, []).append(project_id)
+    return friends_dict
+
+  def GetGroupFriendProjects(self, cnxn, group_id):
+    """Get a list of friend projects for the specified user group."""
+    return self.GetAllGroupFriendProjects(cnxn, [group_id]).get(group_id)
+
+  def ValidateFriendProjects(self, cnxn, services, friend_projects):
+    """Validate friend projects.
+
+    Args:
+      cnxn: connection to SQL database.
+      services: connections to backend services.
+      friend_projects: string of project names separated by ';', ',',
+        and/or whitespace.
+
+    Returns:
+      A (project_ids, None) pair if all names are valid, or a
+      (None, error_message) pair naming the unknown projects.
+    """
+    project_names = filter(None, re.split('; |, | |;|,', friend_projects))
+    id_dict = services.project.LookupProjectIDs(cnxn, project_names)
+    missed_projects = []
+    result = []
+    for p_name in project_names:
+      if p_name in id_dict:
+        result.append(id_dict[p_name])
+      else:
+        missed_projects.append(p_name)
+    error_msg = ''
+    if missed_projects:
+      error_msg = 'Project(s) %s do not exist' % ', '.join(missed_projects)
+      return None, error_msg
+    else:
+      return result, None
+
+  # TODO(jrobbins): re-implement FindUntrustedGroups()
+
+
+class UserGroupDAG(object):
+  """A directed-acyclic graph of potentially nested user groups."""
+
+  def __init__(self, usergroup_service):
+    self.usergroup_service = usergroup_service
+    # Edges of the graph in both directions. Only group-to-group
+    # membership rows are loaded (see Build), so keys and values are
+    # both group IDs.
+    self.user_group_parents = collections.defaultdict(list)
+    self.user_group_children = collections.defaultdict(list)
+    self.initialized = False
+
+  def Build(self, cnxn, circle_detection=False):
+    """Load the graph edges from the DB, unless already built and valid."""
+    if not self.initialized:
+      self.user_group_parents.clear()
+      self.user_group_children.clear()
+      group_ids = self.usergroup_service.usergroupsettings_tbl.Select(
+          cnxn, cols=['group_id'])
+      # Restricting user_id to known group IDs keeps only the
+      # group-nesting edges, not individual user memberships.
+      usergroup_rows = self.usergroup_service.usergroup_tbl.Select(
+          cnxn, cols=['user_id', 'group_id'], distinct=True,
+          user_id=[r[0] for r in group_ids])
+      for user_id, group_id in usergroup_rows:
+        self.user_group_parents[user_id].append(group_id)
+        self.user_group_children[group_id].append(user_id)
+      self.initialized = True
+
+    if circle_detection:
+      # Cycles are only logged, not repaired; UpdateMembers refuses to
+      # write new cycles in the first place.
+      for child_id, parent_ids in self.user_group_parents.iteritems():
+        for parent_id in parent_ids:
+          if self.IsChild(cnxn, parent_id, child_id):
+            logging.error(
+                'Circle exists between group %d and %d.', child_id, parent_id)
+
+  def GetAllAncestors(self, cnxn, group_id, circle_detection=False):
+    """Return a list of distinct ancestor group IDs for the given group."""
+    self.Build(cnxn, circle_detection)
+    # Breadth-first walk up the parent edges; 'result' doubles as the
+    # visited set so shared ancestors are expanded only once.
+    result = set()
+    child_ids = [group_id]
+    while child_ids:
+      parent_ids = set()
+      for c_id in child_ids:
+        group_ids = self.user_group_parents[c_id]
+        parent_ids.update(g_id for g_id in group_ids if g_id not in result)
+      result.update(parent_ids)
+      child_ids = list(parent_ids)
+    return list(result)
+
+  def GetAllDescendants(self, cnxn, group_id, circle_detection=False):
+    """Return a list of distinct descendant group IDs for the given group."""
+    self.Build(cnxn, circle_detection)
+    # Mirror image of GetAllAncestors: breadth-first down the child edges.
+    result = set()
+    parent_ids = [group_id]
+    while parent_ids:
+      child_ids = set()
+      for p_id in parent_ids:
+        group_ids = self.user_group_children[p_id]
+        child_ids.update(g_id for g_id in group_ids if g_id not in result)
+      result.update(child_ids)
+      parent_ids = list(child_ids)
+    return list(result)
+
+  def IsChild(self, cnxn, child_id, parent_id):
+    """Returns True if child_id is a direct/indirect child of parent_id."""
+    all_descendants = self.GetAllDescendants(cnxn, parent_id)
+    return child_id in all_descendants
+
+  def MarkObsolete(self):
+    """Mark the DAG as uninitialized so it'll be re-built."""
+    self.initialized = False
+
+  def __repr__(self):
+    result = {}
+    result['parents'] = self.user_group_parents
+    result['children'] = self.user_group_children
+    return str(result)
+
+
+class Error(Exception):
+  """Base class for all errors raised by this module."""
+
+
+class CircularGroupException(Error):
+  """Raised when an operation would create a cycle of nested groups."""
+
+
+class GroupExistsException(Error):
+  """Raised when attempting to create a group that already exists."""
diff --git a/appengine/monorail/settings.py b/appengine/monorail/settings.py
new file mode 100644
index 0000000..5a463fa
--- /dev/null
+++ b/appengine/monorail/settings.py
@@ -0,0 +1,297 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Defines settings for monorail."""
+
+import os
+import re
+
+from google.appengine.api import app_identity
+
+from proto import project_pb2
+from proto import site_pb2
+
+
+# This file is divided into the following parts:
+# 1. Settings you must edit before deploying your site.
+# 2. Settings you would edit on certain occasions while maintaining your site.
+# 3. Settings that enable specific features.
+# 4. Settings that you can usually leave as-is.
+
+# TODO(jrobbins): Store these settings in the database and implement
+# servlets for domain admins to edit them without needing to redeploy the
+# app.
+
+
+####
+# Part 1: settings that you must edit before deploying your site.
+
+# Email address that is offered to users who might need help using the tool.
+feedback_email = 'jrobbins+monorail.feedback@chromium.org'
+
+# For debugging when running in staging: send all outbound
+# email to this address rather than to the actual address that
+# it would normally be sent to.
+send_all_email_to = 'jrobbins+all+%(user)s+%(domain)s@chromium.org'
+
+# For debugging when running the dev server locally: send all outbound
+# email to this address rather than to the actual address that
+# it would normally be sent to.
+send_dev_email_to = (send_all_email_to or
+                     'jrobbins+dev+%(user)s+%(domain)s@chromium.org')
+
+# User to send emails from Monorail as. The reply_to sections of emails will be
+# set to appspotmail addresses.
+# Note: If configuring a new monorail instance without DNS records and reserved
+# email addresses then setting these values to
+# 'reply@${app_id}.appspotmail.com' and 'noreply@{app_id}.appspotmail.com'
+# is likely the best option.
+send_email_as = 'monorail@chromium.org'
+send_noreply_email_as = 'monorail+noreply@chromium.org'
+
+# The default is to look for a database named "monorail" in replicas
+# named "replica-00" .. "replica-09"
+db_database_name = 'monorail'
+db_replica_prefix = 'replica-'
+
+# The number of logical database shards used. Each replica is a complete copy
+# of the master, so any replica DB can answer queries about any logical shard.
+num_logical_shards = 10
+
+# "Learn more" link for the site home page
+# TODO(agable): Update this when we have publicly visible documentation.
+learn_more_link = None
+
+# Site name, displayed above the search box on the site home page.
+site_name = 'Monorail'
+
+# Who is allowed to create new projects? Set to ANYONE or ADMIN_ONLY.
+project_creation_restriction = site_pb2.UserTypeRestriction.ADMIN_ONLY
+
+# Default access level when creating a new project.
+default_access_level = project_pb2.ProjectAccess.ANYONE
+
+# Possible access levels to offer when creating a new project.
+allowed_access_levels = [
+    project_pb2.ProjectAccess.ANYONE,
+    project_pb2.ProjectAccess.MEMBERS_ONLY]
+
+# Who is allowed to create user groups? Set to ANYONE or ADMIN_ONLY.
+group_creation_restriction = site_pb2.UserTypeRestriction.ADMIN_ONLY
+
+####
+# Part 2: Settings you would edit on certain occasions.
+
+# Read-only mode prevents changes while we make server-side changes.
+read_only = False
+
+# Timestamp used to notify users when the read only mode or other status
+# described in the banner message takes effect. It is
+# expressed as a 5-tuple of ints: (year, month, day, hour, minute),
+# e.g. (2009, 3, 20, 21, 45) represents March 20 2009 9:45PM.
+banner_time = None
+
+# Display a site maintenance banner on every monorail page.
+banner_message = ''
+
+# User accounts with email addresses at these domains are all banned.
+banned_user_domains = []
+
+
+####
+# Part 3: Settings that enable specific features
+
+# Enables "My projects" drop down menu
+enable_my_projects_menu = True
+
+# Enables stars in the UI for projects
+enable_project_stars = True
+
+# Enables stars in the UI for users
+enable_user_stars = True
+
+# Enable quick edit mode in issue peek dialog and show dialog on hover
+enable_quick_edit = True
+
+
+####
+# Part 4: Settings that you can usually leave as-is.
+
+# dev_mode makes the server slower and more dynamic for easier debugging.
+# E.g., template files are reloaded on each request.
+# NOTE(review): raises KeyError if SERVER_SOFTWARE is unset -- presumed to
+# always be set by the App Engine runtime and dev server; confirm.
+dev_mode = os.environ['SERVER_SOFTWARE'].startswith('Development')
+unit_test_mode = os.environ['SERVER_SOFTWARE'].startswith('test')
+
+# If we assume 1KB each, then this would be 400 MB for this cache in frontends
+# that have only 1024 MB total.
+issue_cache_max_size = 400 * 1000
+
+# 150K users should be enough for all the frequent daily users plus the
+# occasional users that are mentioned on any popular pages.
+user_cache_max_size = 150 * 1000
+
+# Recompute derived issue fields via work items rather than while
+# the user is waiting for a page to load.
+recompute_derived_fields_in_worker = True
+
+# The issue search SQL queries have a LIMIT clause with this amount.
+search_limit_per_shard = 10 * 1000 # This is more than all open in chromium.
+
+# The GAE search feature is slow, so don't request too many results.
+fulltext_limit_per_shard = 1 * 1000
+
+# Retrieve at most this many issues from the DB when showing an issue grid.
+max_issues_in_grid = 6000
+# This is the most tiles that we show in grid view. If the number of results
+# is larger than this, we display IDs instead.
+max_tiles_in_grid = 1000
+
+# Maximum number of project results to display on a single pagination page
+max_project_search_results_per_page = 100
+
+# Maximum number of results per pagination page, regardless of what
+# the user specified in his/her request. This exists to prevent someone
+# from doing a DoS attack that makes our servers do a huge amount of work.
+max_artifact_search_results_per_page = 1000
+
+# Maximum number of comments to display on a single pagination page
+max_comments_per_page = 500
+
+# Max number of issue starrers to notify via email. Issues with more
+# that this many starrers will only notify the last N of them after a
+# comment from a project member.
+max_starrers_to_notify = 4000
+
+# In projects that have more than this many issues the next and prev
+# links on the issue detail page will not be shown when the user comes
+# directly to an issue without specifying any query terms.
+threshold_to_suppress_prev_next = 10000
+
+# Format string for the name of the FTS index shards for issues.
+search_index_name_format = 'issues%02d'
+
+# Name of the FTS index for projects (not sharded).
+project_search_index_name = 'projects'
+
+# Each backend has this many seconds to respond, otherwise frontend gives up
+# on that shard.
+backend_deadline = 45
+
+# If the initial call to a backend fails, try again this many times.
+# Initial backend calls are failfast, meaning that they fail immediately rather
+# than queue behind other requests. The last 2 retries will wait in queue.
+backend_retries = 3
+
+# Do various extra logging at INFO level.
+enable_profiler_logging = False
+
+# Mail sending domain. Normally set this to None and it will be computed
+# automatically from your AppEngine APP_ID. But, it can be overridden below.
+mail_domain = None
+
+# URL format to browse source code revisions. This can be overridden
+# in specific projects by setting project.revision_url_format.
+# The format string may include "{revnum}" for the revision number.
+revision_url_format = 'https://crrev.com/{revnum}'
+
+# Users with emails in the privileged domains see unobscured email addresses.
+# (The variable name keeps its historical spelling.)
+priviledged_user_domains = [
+  'google.com', 'chromium.org', 'webrtc.org',
+  ]
+
+# Names of projects on code.google.com which we allow cross-linking to.
+recognized_codesite_projects = [
+  'chromium-os',
+  'chrome-os-partner',
+]
+
+####
+# Part 5: Instance-specific settings that override lines above.
+
+# We usually use a DB instance named "master" for writes.
+db_master_name = 'master'
+# This ID is for -staging and other misc deployments. Prod is defined below.
+analytics_id = 'UA-55762617-20'
+
+if unit_test_mode:
+  db_cloud_project = '' # No real database is used during unit testing.
+else:
+  app_id = app_identity.get_application_id()
+
+  if app_id == 'monorail-staging':
+    site_name = 'Monorail Staging'
+    banner_message = 'This staging site does not send emails.'
+    # The Google Cloud SQL databases to use.
+    db_cloud_project = app_id
+    db_replica_prefix = 'replica-7-'
+
+  elif app_id == 'monorail-prod':
+    send_all_email_to = None # Deliver it to the intended users.
+    # The Google Cloud SQL databases to use.
+    db_cloud_project = app_id
+    analytics_id = 'UA-55762617-14'
+
+if dev_mode:
+  site_name = 'Monorail Dev'
+  num_logical_shards = 10
+
+# Combine the customized info above to make the name of the master DB instance.
+db_instance = db_cloud_project + ':' + db_master_name
+
+# Format string for the name of the physical database replicas.
+physical_db_name_format = db_cloud_project + ':' + db_replica_prefix + '%02d'
+
+# preferred domains to display
+preferred_domains = {
+    'monorail-prod.appspot.com': 'bugs.chromium.org',
+    'monorail-staging.appspot.com': 'bugs-staging.chromium.org'}
+
+# Borg robot service account
+borg_service_account = 'chrome-infra-prod-borg@system.gserviceaccount.com'
+
+# Prediction API params.
+classifier_project_id = 'project-id-testing-only'
+
+# Necessary for tests.
+if 'APPLICATION_ID' not in os.environ:
+  os.environ['APPLICATION_ID'] = 'testing app'
+
+if dev_mode:
+  # Use the monorail-staging project; there is no local stub for Cloud
+  # Prediction.
+  classifier_project_id = '52759169022'
+else:
+  classifier_project_id = app_identity.get_application_id()
+
+classifier_model_id = 'spam'
+
+# Number of distinct users who have to flag an issue before it
+# is automatically removed as spam.
+# 5 is an arbitrarily chosen value. Set it to something really high
+# to effectively disable spam flag threshold checking.
+spam_flag_thresh = 5
+
+# If the classifier's confidence is less than this value, the
+# item will show up in the spam moderation queue for manual
+# review.
+classifier_moderation_thresh = 1.0
+
+# If the classifier's confidence is greater than this value,
+# and the label is 'spam', the item will automatically be created
+# with is_spam=True, and will be filtered out from search results.
+classifier_spam_thresh = 1.0
+
+ratelimiting_enabled = False
+
+# Enable cost-based rate limiting. This only applies if
+# ratelimiting_enabled = True
+ratelimiting_cost_enabled = True
+
+# Requests that take longer than this are hit with extra
+# counts added to their bucket at the end of the request.
+ratelimiting_cost_thresh_ms = 2000
+
+# Requests that hit ratelimiting_cost_thresh_ms get this
+# extra amount added to their bucket at the end of the request.
+ratelimiting_cost_penalty = 1
diff --git a/appengine/monorail/sitewide/__init__.py b/appengine/monorail/sitewide/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/sitewide/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/sitewide/custom_404.py b/appengine/monorail/sitewide/custom_404.py
new file mode 100644
index 0000000..29b1a3f
--- /dev/null
+++ b/appengine/monorail/sitewide/custom_404.py
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Page class for generating somewhat informative project-page 404s.
+
+This page class produces a mostly-empty project subpage, which helps
+users find what they're looking for, rather than telling them
+"404. That's an error. That's all we know." which is maddeningly not
+helpful when we already have a project pb loaded.
+"""
+
+from framework import monorailrequest
+from framework import servlet
+
+
+class ErrorPage(servlet.Servlet):
+ """Page class for generating somewhat informative project-page 404s.
+
+ This page class produces a mostly-empty project subpage, which helps
+ users find what they're looking for, rather than telling them
+ "404. That's an error. That's all we know." which is maddeningly not
+ helpful when we already have a project pb loaded.
+ """
+
+ _PAGE_TEMPLATE = 'sitewide/project-404-page.ezt'
+
+ def get(self, **kwargs):
+ servlet.Servlet.get(self, **kwargs)
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page."""
+ if not mr.project_name:
+ raise monorailrequest.InputException('No project specified')
+ return {}
diff --git a/appengine/monorail/sitewide/group_helpers.py b/appengine/monorail/sitewide/group_helpers.py
new file mode 100644
index 0000000..b35351e
--- /dev/null
+++ b/appengine/monorail/sitewide/group_helpers.py
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions used in user group modules."""
+
+from framework import framework_views
+from proto import usergroup_pb2
+
+
+class GroupVisibilityView(object):
+  """Object for group visibility information that can be easily used in EZT."""
+
+  VISIBILITY_NAMES = {
+      usergroup_pb2.MemberVisibility.ANYONE: 'Anyone on the Internet',
+      usergroup_pb2.MemberVisibility.MEMBERS: 'Group Members',
+      usergroup_pb2.MemberVisibility.OWNERS: 'Group Owners'}
+
+  def __init__(self, group_visibility_enum):
+    self.key = int(group_visibility_enum)  # Enum value as an int, usable as a form value.
+    self.name = self.VISIBILITY_NAMES[group_visibility_enum]  # Human-readable label.
+
+
+class GroupTypeView(object):
+  """Object for group type information that can be easily used in EZT."""
+
+  TYPE_NAMES = {
+      usergroup_pb2.GroupType.CHROME_INFRA_AUTH: 'Chrome-infra-auth',
+      usergroup_pb2.GroupType.MDB: 'MDB',
+      usergroup_pb2.GroupType.BAGGINS: 'Baggins'}
+
+  def __init__(self, group_type_enum):
+    self.key = int(group_type_enum)  # Enum value as an int, usable as a form value.
+    self.name = self.TYPE_NAMES[group_type_enum]  # Human-readable label.
+
+
+class GroupMemberView(framework_views.UserView):
+  """Wrapper class to display basic group member information in a template."""
+
+  def __init__(self, user_id, email, obscure_email, group_id, role):
+    assert role in ['member', 'owner']  # Only these two roles exist for group members.
+    super(GroupMemberView, self).__init__(
+        user_id, email, obscure_email)
+    self.group_id = group_id
+    self.role = role  # Either 'member' or 'owner'.
+
+
+def BuildUserGroupVisibilityOptions():
+  """Return a list of user group visibility values for use in an HTML menu.
+
+  Returns:
+    A list of GroupVisibilityView objects that can be used in EZT.
+  """
+  vis_levels = [usergroup_pb2.MemberVisibility.OWNERS,
+                usergroup_pb2.MemberVisibility.MEMBERS,
+                usergroup_pb2.MemberVisibility.ANYONE]  # Most- to least-restrictive.
+
+  return [GroupVisibilityView(vis) for vis in vis_levels]
+
+
+def BuildUserGroupTypeOptions():
+  """Return a list of user group types for use in an HTML menu.
+
+  Returns:
+    A list of GroupTypeView objects that can be used in EZT.
+  """
+  group_types = [usergroup_pb2.GroupType.CHROME_INFRA_AUTH,
+                 usergroup_pb2.GroupType.MDB,
+                 usergroup_pb2.GroupType.BAGGINS]
+
+  return sorted([GroupTypeView(gt) for gt in group_types],
+                key=lambda gtv: gtv.name)  # Alphabetical by display name.
diff --git a/appengine/monorail/sitewide/groupadmin.py b/appengine/monorail/sitewide/groupadmin.py
new file mode 100644
index 0000000..7b98cf4
--- /dev/null
+++ b/appengine/monorail/sitewide/groupadmin.py
@@ -0,0 +1,120 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display user group admin page."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+from proto import usergroup_pb2
+from services import usergroup_svc
+from sitewide import group_helpers
+
+
+class GroupAdmin(servlet.Servlet):
+  """The group admin page."""
+
+  _PAGE_TEMPLATE = 'sitewide/group-admin-page.ezt'
+
+  def AssertBasePermission(self, mr):
+    """Assert that the user has the permissions needed to view this page."""
+    super(GroupAdmin, self).AssertBasePermission(mr)
+
+    _, owner_ids_dict = self.services.usergroup.LookupMembers(
+        mr.cnxn, [mr.viewed_user_auth.user_id])
+    owner_ids = owner_ids_dict[mr.viewed_user_auth.user_id]  # Owners of the viewed group.
+    if not permissions.CanEditGroup(
+        mr.perms, mr.auth.effective_ids, owner_ids):
+      raise permissions.PermissionException(
+          'User is not allowed to edit a user group')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    group_id = mr.viewed_user_auth.user_id  # The viewed "user" is the group itself.
+    group_settings = self.services.usergroup.GetGroupSettings(
+        mr.cnxn, group_id)
+    visibility_levels = group_helpers.BuildUserGroupVisibilityOptions()
+    initial_visibility = group_helpers.GroupVisibilityView(
+        group_settings.who_can_view_members)
+    group_types = group_helpers.BuildUserGroupTypeOptions()
+    import_group = bool(group_settings.ext_group_type)  # Set ext_group_type marks an imported group.
+    if import_group:
+      initial_group_type = group_helpers.GroupTypeView(
+          group_settings.ext_group_type)
+    else:
+      initial_group_type = ''
+
+    if group_settings.friend_projects:
+      initial_friendprojects = ', '.join(
+          self.services.project.LookupProjectNames(
+              mr.cnxn, group_settings.friend_projects).values())
+    else:
+      initial_friendprojects = ''
+
+    return {
+        'admin_tab_mode': 'st2',
+        'groupadmin': True,
+        'groupid': group_id,
+        'groupname': mr.viewed_username,
+        'group_types': group_types,
+        'import_group': import_group or '',
+        'initial_friendprojects': initial_friendprojects,
+        'initial_group_type': initial_group_type,
+        'initial_visibility': initial_visibility,
+        'offer_membership_editing': True,
+        'visibility_levels': visibility_levels,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form."""
+    # 1. Gather data from the request.
+    group_name = mr.viewed_username
+    group_id = mr.viewed_user_auth.user_id
+
+    if post_data.get('import_group'):
+      vis_level = usergroup_pb2.MemberVisibility.OWNERS  # Imported groups are owner-visible only.
+      ext_group_type = post_data.get('group_type')
+      friend_projects = ''
+      if not ext_group_type:
+        mr.errors.groupimport = 'Please provide external group type'
+      else:
+        ext_group_type = usergroup_pb2.GroupType(int(ext_group_type))  # Form string -> enum.
+    else:
+      vis_level = post_data.get('visibility')
+      ext_group_type = None
+      friend_projects = post_data.get('friendprojects', '')
+      if vis_level:
+        vis_level = usergroup_pb2.MemberVisibility(int(vis_level))  # Form string -> enum.
+      else:
+        mr.errors.groupimport = 'Cannot update settings for imported group'
+
+    if not mr.errors.AnyErrors():
+      project_ids, error = self.services.usergroup.ValidateFriendProjects(
+          mr.cnxn, self.services, friend_projects)
+      if error:
+        mr.errors.friendprojects = error
+
+    # 2. Call services layer to save changes.
+    if not mr.errors.AnyErrors():
+      group_settings = usergroup_pb2.UserGroupSettings(
+        who_can_view_members=vis_level,
+        ext_group_type=ext_group_type,
+        friend_projects=project_ids)
+      self.services.usergroup.UpdateSettings(
+          mr.cnxn, group_id, group_settings)
+
+    # 3. Determine the next page in the UI flow.
+    if mr.errors.AnyErrors():
+      self.PleaseCorrect(mr, initial_name=group_name)
+    else:
+      return framework_helpers.FormatAbsoluteURL(
+          mr, '/g/%s%s' % (group_name, urls.GROUP_ADMIN),
+          include_project=False, saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/sitewide/groupcreate.py b/appengine/monorail/sitewide/groupcreate.py
new file mode 100644
index 0000000..6a24687
--- /dev/null
+++ b/appengine/monorail/sitewide/groupcreate.py
@@ -0,0 +1,96 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A page for site admins to create a new user group."""
+
+import logging
+import re
+
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from proto import usergroup_pb2
+from services import user_svc
+from sitewide import group_helpers
+
+
+class GroupCreate(servlet.Servlet):
+  """Shows a page with a simple form to create a user group."""
+
+  _PAGE_TEMPLATE = 'sitewide/group-create-page.ezt'
+
+  def AssertBasePermission(self, mr):
+    """Assert that the user has the permissions needed to view this page."""
+    super(GroupCreate, self).AssertBasePermission(mr)
+
+    if not permissions.CanCreateGroup(mr.perms):
+      raise permissions.PermissionException(
+          'User is not allowed to create a user group')
+
+  def GatherPageData(self, _mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    visibility_levels = group_helpers.BuildUserGroupVisibilityOptions()
+    initial_visibility = group_helpers.GroupVisibilityView(
+        usergroup_pb2.MemberVisibility.ANYONE)
+    group_types = group_helpers.BuildUserGroupTypeOptions()
+
+    return {
+        'groupadmin': '',
+        'group_types': group_types,
+        'import_group': '',
+        'initial_friendprojects': '',
+        'initial_group_type': '',
+        'initial_name': '',
+        'initial_visibility': initial_visibility,
+        'visibility_levels': visibility_levels,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form."""
+    # 1. Gather data from the request.
+    group_name = post_data.get('groupname')
+    try:
+      existing_group_id = self.services.user.LookupUserID(mr.cnxn, group_name)
+      existing_settings = self.services.usergroup.GetGroupSettings(
+          mr.cnxn, existing_group_id)
+      if existing_settings:  # The name is already taken by an existing group.
+        mr.errors.groupname = 'That user group already exists'
+    except user_svc.NoSuchUserException:
+      pass
+
+    if post_data.get('import_group'):
+      vis = usergroup_pb2.MemberVisibility.OWNERS  # Imported groups are owner-visible only.
+      ext_group_type = post_data.get('group_type')
+      friend_projects = ''
+      if not ext_group_type:
+        mr.errors.groupimport = 'Please provide external group type'
+      else:
+        ext_group_type = str(
+            usergroup_pb2.GroupType(int(ext_group_type))).lower()  # Enum name as lowercase string.
+    else:
+      vis = usergroup_pb2.MemberVisibility(int(post_data['visibility']))
+      ext_group_type = None
+      friend_projects = post_data.get('friendprojects', '')
+    who_can_view_members = str(vis).lower()  # Enum name as lowercase string.
+
+    if not mr.errors.AnyErrors():
+      project_ids, error = self.services.usergroup.ValidateFriendProjects(
+          mr.cnxn, self.services, friend_projects)
+      if error:
+        mr.errors.friendprojects = error
+
+    # 2. Call services layer to save changes.
+    if not mr.errors.AnyErrors():
+      group_id = self.services.usergroup.CreateGroup(
+          mr.cnxn, self.services, group_name, who_can_view_members,
+          ext_group_type, project_ids)
+
+    # 3. Determine the next page in the UI flow.
+    if mr.errors.AnyErrors():
+      self.PleaseCorrect(mr, initial_name=group_name)
+    else:
+      # Go to the new user group's detail page.
+      return framework_helpers.FormatAbsoluteURL(
+          mr, '/g/%s/' % group_id, include_project=False)
diff --git a/appengine/monorail/sitewide/groupdetail.py b/appengine/monorail/sitewide/groupdetail.py
new file mode 100644
index 0000000..4d66e03
--- /dev/null
+++ b/appengine/monorail/sitewide/groupdetail.py
@@ -0,0 +1,196 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display a user group, including a paginated list of members."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import framework_views
+from framework import paginate
+from framework import permissions
+from framework import servlet
+from project import project_helpers
+from proto import usergroup_pb2
+from services import usergroup_svc
+from sitewide import group_helpers
+from sitewide import sitewide_views
+
+MEMBERS_PER_PAGE = 50  # Page size for the group member list pagination.
+
+
+class GroupDetail(servlet.Servlet):
+  """The group detail page presents information about one user group."""
+
+  _PAGE_TEMPLATE = 'sitewide/group-detail-page.ezt'
+
+  def AssertBasePermission(self, mr):
+    """Assert that the user has the permissions needed to view this page."""
+    super(GroupDetail, self).AssertBasePermission(mr)
+
+    group_id = mr.viewed_user_auth.user_id
+    group_settings = self.services.usergroup.GetGroupSettings(
+        mr.cnxn, group_id)
+
+    member_ids, owner_ids = self.services.usergroup.LookupAllMembers(
+        mr.cnxn, [group_id])
+    (owned_project_ids, membered_project_ids,
+     contrib_project_ids) = self.services.project.GetUserRolesInAllProjects(
+        mr.cnxn, mr.auth.effective_ids)
+    project_ids = owned_project_ids.union(
+        membered_project_ids).union(contrib_project_ids)  # All projects the requester belongs to.
+    if not permissions.CanViewGroup(
+        mr.perms, mr.auth.effective_ids, group_settings, member_ids[group_id],
+        owner_ids[group_id], project_ids):
+      raise permissions.PermissionException(
+          'User is not allowed to view a user group')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    group_id = mr.viewed_user_auth.user_id
+    group_settings = self.services.usergroup.GetGroupSettings(
+        mr.cnxn, group_id)
+
+    member_ids_dict, owner_ids_dict = (
+        self.services.usergroup.LookupVisibleMembers(
+            mr.cnxn, [group_id], mr.perms, mr.auth.effective_ids,
+            self.services))
+    member_ids = member_ids_dict[group_id]
+    owner_ids = owner_ids_dict[group_id]
+    member_pbs_dict = self.services.user.GetUsersByIDs(
+        mr.cnxn, member_ids)
+    owner_pbs_dict = self.services.user.GetUsersByIDs(
+        mr.cnxn, owner_ids)
+    member_dict = {}
+    for user_id, user_pb in member_pbs_dict.iteritems():
+      member_view = group_helpers.GroupMemberView(
+          user_id, user_pb.email, user_pb.obscure_email, group_id, 'member')
+      member_dict[user_id] = member_view
+    owner_dict = {}
+    for user_id, user_pb in owner_pbs_dict.iteritems():
+      member_view = group_helpers.GroupMemberView(
+          user_id, user_pb.email, user_pb.obscure_email, group_id, 'owner')
+      owner_dict[user_id] = member_view
+
+    member_user_views = []
+    member_user_views.extend(
+        sorted(owner_dict.values(), key=lambda u: u.email))  # Owners listed first.
+    member_user_views.extend(
+        sorted(member_dict.values(), key=lambda u: u.email))
+
+    group_view = sitewide_views.GroupView(
+        mr.viewed_user_auth.email, len(member_ids), group_settings,
+        mr.viewed_user_auth.user_id)
+    pagination = paginate.ArtifactPagination(
+        mr, member_user_views, MEMBERS_PER_PAGE, group_view.detail_url)
+
+    is_imported_group = bool(group_settings.ext_group_type)  # Imported groups are read-only.
+
+    offer_membership_editing = permissions.CanEditGroup(
+        mr.perms, mr.auth.effective_ids, owner_ids) and not is_imported_group
+
+    return {
+        'admin_tab_mode': self.ADMIN_TAB_META,
+        'offer_membership_editing': ezt.boolean(offer_membership_editing),
+        'initial_add_members': '',
+        'initially_expand_form': ezt.boolean(False),
+        'groupid': group_id,
+        'groupname': mr.viewed_username,
+        'settings': group_settings,
+        'pagination': pagination,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form."""
+    _, owner_ids_dict = self.services.usergroup.LookupMembers(
+        mr.cnxn, [mr.viewed_user_auth.user_id])
+    owner_ids = owner_ids_dict[mr.viewed_user_auth.user_id]
+    permit_edit = permissions.CanEditGroup(
+        mr.perms, mr.auth.effective_ids, owner_ids)
+    if not permit_edit:
+      raise permissions.PermissionException(
+          'User is not permitted to edit group membership')
+
+    group_settings = self.services.usergroup.GetGroupSettings(
+        mr.cnxn, mr.viewed_user_auth.user_id)
+    if bool(group_settings.ext_group_type):
+      raise permissions.PermissionException(
+          'Imported groups are read-only')
+
+    if 'addbtn' in post_data:
+      return self.ProcessAddMembers(mr, post_data)
+    elif 'removebtn' in post_data:
+      return self.ProcessRemoveMembers(mr, post_data)
+
+  def ProcessAddMembers(self, mr, post_data):
+    """Process the user's request to add members.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      post_data: dictionary of form data.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    # 1. Gather data from the request.
+    group_id = mr.viewed_user_auth.user_id
+    add_members_str = post_data.get('addmembers')
+    new_member_ids = project_helpers.ParseUsernames(
+        mr.cnxn, self.services.user, add_members_str)
+    role = post_data['role']  # Expected to be 'member' or 'owner'.
+
+    # 2. Call services layer to save changes.
+    if not mr.errors.AnyErrors():
+      try:
+        self.services.usergroup.UpdateMembers(
+            mr.cnxn, group_id, new_member_ids, role)
+      except usergroup_svc.CircularGroupException:
+        mr.errors.addmembers = (
+            'The members are already ancestors of current group.')
+
+    # 3. Determine the next page in the UI flow.
+    if mr.errors.AnyErrors():
+      self.PleaseCorrect(
+          mr, initial_add_members=add_members_str,
+          initially_expand_form=ezt.boolean(True))
+    else:
+      return framework_helpers.FormatAbsoluteURL(
+          mr, '/g/%s/' % mr.viewed_username, include_project=False,
+          saved=1, ts=int(time.time()))
+
+  def ProcessRemoveMembers(self, mr, post_data):
+    """Process the user's request to remove members.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      post_data: dictionary of form data.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    # 1. Gather data from the request.
+    remove_strs = post_data.getall('remove')
+    logging.info('remove_strs = %r', remove_strs)
+
+    if not remove_strs:
+      mr.errors.remove = 'No users specified'
+
+    # 2. Call services layer to save changes.
+    if not mr.errors.AnyErrors():
+      remove_ids = set(
+          self.services.user.LookupUserIDs(mr.cnxn, remove_strs).values())
+      self.services.usergroup.RemoveMembers(
+          mr.cnxn, mr.viewed_user_auth.user_id, remove_ids)
+
+    # 3. Determine the next page in the UI flow.
+    if mr.errors.AnyErrors():
+      self.PleaseCorrect(mr)
+    else:
+      return framework_helpers.FormatAbsoluteURL(
+          mr, '/g/%s/' % mr.viewed_username, include_project=False,
+          saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/sitewide/grouplist.py b/appengine/monorail/sitewide/grouplist.py
new file mode 100644
index 0000000..a8852e9
--- /dev/null
+++ b/appengine/monorail/sitewide/grouplist.py
@@ -0,0 +1,67 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes to list user groups."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+from framework import xsrf
+from sitewide import sitewide_views
+
+
+class GroupList(servlet.Servlet):
+  """Shows a page listing all user groups visible to the requester."""
+
+  _PAGE_TEMPLATE = 'sitewide/group-list-page.ezt'
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    group_views = [
+        sitewide_views.GroupView(*groupinfo) for groupinfo in
+        self.services.usergroup.GetAllUserGroupsInfo(mr.cnxn)]
+    group_views.sort(key=lambda gv: gv.name)  # Alphabetical by group name.
+    offer_group_deletion = mr.perms.CanUsePerm(
+        permissions.DELETE_GROUP, mr.auth.effective_ids, None, [])
+    offer_group_creation = mr.perms.CanUsePerm(
+        permissions.CREATE_GROUP, mr.auth.effective_ids, None, [])
+
+    return {
+        'form_token': xsrf.GenerateToken(
+            mr.auth.user_id, '%s.do' % urls.GROUP_DELETE),
+        'groups': group_views,
+        'offer_group_deletion': ezt.boolean(offer_group_deletion),
+        'offer_group_creation': ezt.boolean(offer_group_creation),
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form."""
+    if 'removebtn' in post_data:
+      return self.ProcessDeleteGroups(mr, post_data)
+
+  def ProcessDeleteGroups(self, mr, post_data):
+    """Process request to delete groups."""
+    if not mr.perms.CanUsePerm(
+        permissions.DELETE_GROUP, mr.auth.effective_ids, None, []):
+      raise permissions.PermissionException(
+          'User is not permitted to delete groups')
+
+    remove_groups = [int(g) for g in post_data.getall('remove')]  # Group ids to delete.
+
+    if not mr.errors.AnyErrors():
+      self.services.usergroup.DeleteGroups(mr.cnxn, remove_groups)
+
+    if mr.errors.AnyErrors():
+      self.PleaseCorrect(mr)
+    else:
+      return framework_helpers.FormatAbsoluteURL(
+          mr, '/g', include_project=False,
+          saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/sitewide/hostinghome.py b/appengine/monorail/sitewide/hostinghome.py
new file mode 100644
index 0000000..5f15a0e
--- /dev/null
+++ b/appengine/monorail/sitewide/hostinghome.py
@@ -0,0 +1,105 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display the hosting home page."""
+
+import logging
+from third_party import ezt
+
+import settings
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from framework import urls
+from project import project_views
+from sitewide import projectsearch
+from sitewide import sitewide_helpers
+
+
+class HostingHome(servlet.Servlet):
+  """HostingHome shows the Monorail site homepage and link to create a project.
+
+  This needs to be a full Servlet rather than just a static page
+  because we need to check permissions before offering the link to create
+  a project.
+  """
+
+  _PAGE_TEMPLATE = 'sitewide/hosting-home-page.ezt'
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    can_create_project = permissions.CanCreateProject(mr.perms)
+
+    # Kick off the search pipeline, it has its own promises for parallelism.
+    pipeline = projectsearch.ProjectSearchPipeline(
+        mr, self.services, self.profiler)
+
+    # Meanwhile, determine which projects the signed-in user has starred.
+    starred_project_ids = set()
+    # A dict of project id to the user's membership status.
+    project_memberships = {}
+    if mr.auth.user_id:
+      starred_projects = sitewide_helpers.GetViewableStarredProjects(
+          mr.cnxn, self.services, mr.auth.user_id,
+          mr.auth.effective_ids, mr.auth.user_pb)
+      starred_project_ids = {p.project_id for p in starred_projects}
+
+      owned, _archive_owned, member_of, contrib_of = (
+          sitewide_helpers.GetUserProjects(
+              mr.cnxn, self.services, mr.auth.user_pb, mr.auth.effective_ids,
+              mr.auth.effective_ids))
+      project_memberships.update({proj.project_id: 'Owner' for proj in owned})
+      project_memberships.update(
+          {proj.project_id: 'Member' for proj in member_of})
+      project_memberships.update(
+          {proj.project_id: 'Contributor' for proj in contrib_of})
+
+    # Finish the project search pipeline.
+    pipeline.SearchForIDs()
+    pipeline.GetProjectsAndPaginate(mr.cnxn, urls.HOSTING_HOME)
+    project_ids = [p.project_id for p in pipeline.visible_results]
+    star_count_dict = self.services.project_star.CountItemsStars(
+        mr.cnxn, project_ids)  # Star counts keyed by project id (see use below).
+
+    # Make ProjectView objects
+    project_view_list = [
+        project_views.ProjectView(
+            p, starred=p.project_id in starred_project_ids,
+            num_stars=star_count_dict.get(p.project_id),
+            membership_desc=project_memberships.get(p.project_id))
+        for p in pipeline.visible_results]
+    return {
+        'can_create_project': ezt.boolean(can_create_project),
+        'learn_more_link': settings.learn_more_link,
+        'projects': project_view_list,
+        'pagination': pipeline.pagination,
+        }
+
+
+def _MakeExampleLabelGrid(label_list):
+  """Return a list of EZTItems to make it easy to display example searches."""
+  labels = label_list[:]  # Don't mess with the given labels.
+
+  if len(labels) < 15:  # Use fewer columns for shorter lists.
+    cols = 4
+  elif len(labels) < 20:
+    cols = 5
+  else:
+    cols = 6
+
+  rows = []
+  while labels:
+    current_row = labels[:cols]  # Consume the list one row at a time.
+    labels = labels[cols:]
+    rows.append(template_helpers.EZTItem(labels=current_row))
+
+  return rows
diff --git a/appengine/monorail/sitewide/moved.py b/appengine/monorail/sitewide/moved.py
new file mode 100644
index 0000000..b5eef52
--- /dev/null
+++ b/appengine/monorail/sitewide/moved.py
@@ -0,0 +1,57 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to display a message explaining that a project has moved.
+
+When a project moves, we just display a link to the new location.
+"""
+
+import logging
+
+from framework import framework_bizobj
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import servlet
+
+
+class ProjectMoved(servlet.Servlet):
+  """The ProjectMoved page explains that the project has moved."""
+
+  _PAGE_TEMPLATE = 'sitewide/moved-page.ezt'
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+
+    # We are not actually in /p/PROJECTNAME, so mr.project_name is None.
+    # Putting the ProjectMoved page inside a moved project would make
+    # the redirect logic much more complicated.
+    if not mr.specified_project:
+      raise monorailrequest.InputException('No project specified')
+
+    project = self.services.project.GetProjectByName(
+        mr.cnxn, mr.specified_project)
+    if not project:
+      self.abort(404, 'project not found')
+
+    if not project.moved_to:
+      # Only show this page for projects that are actually moved.
+      # Don't allow hackers to construct misleading links to this servlet.
+      logging.info('attempt to view ProjectMoved for non-moved project: %s',
+                   mr.specified_project)
+      self.abort(400, 'This project has not been moved')
+
+    if framework_bizobj.RE_PROJECT_NAME.match(project.moved_to):  # Moved to another on-site project.
+      moved_to_url = framework_helpers.FormatMovedProjectURL(
+          mr, project.moved_to)
+    elif project.moved_to.startswith('http'):
+      moved_to_url = project.moved_to
+    else:
+      # Prevent users from using javascript: or any other tricky URL scheme.
+      moved_to_url = '#invalid-destination-url'
+
+    return {
+        'project_name': mr.specified_project,
+        'moved_to_url': moved_to_url,
+        }
diff --git a/appengine/monorail/sitewide/projectcreate.py b/appengine/monorail/sitewide/projectcreate.py
new file mode 100644
index 0000000..a31afea
--- /dev/null
+++ b/appengine/monorail/sitewide/projectcreate.py
@@ -0,0 +1,203 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes for users to create a new project."""
+
+
+import logging
+from third_party import ezt
+
+import settings
+from framework import actionlimit
+from framework import filecontent
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import gcs_helpers
+from framework import jsonfeed
+from framework import permissions
+from framework import servlet
+from framework import urls
+from project import project_helpers
+from project import project_views
+from services import project_svc
+from tracker import tracker_bizobj
+from tracker import tracker_views
+
+
+_MSG_PROJECT_NAME_NOT_AVAIL = 'That project name is not available.'  # User-visible validation messages.
+_MSG_MISSING_PROJECT_NAME = 'Missing project name'
+_MSG_INVALID_PROJECT_NAME = 'Invalid project name'
+_MSG_MISSING_PROJECT_SUMMARY = 'Missing project summary'
+
+
+class ProjectCreate(servlet.Servlet):
+  """Shows a page with a simple form to create a project."""
+
+  _PAGE_TEMPLATE = 'sitewide/project-create-page.ezt'
+
+  _CAPTCHA_ACTION_TYPES = [actionlimit.PROJECT_CREATION]  # Actions that may require a CAPTCHA.
+
+  def AssertBasePermission(self, mr):
+    """Assert that the user has the permissions needed to view this page."""
+    super(ProjectCreate, self).AssertBasePermission(mr)
+
+    if not permissions.CanCreateProject(mr.perms):
+      raise permissions.PermissionException(
+          'User is not allowed to create a project')
+
+  def GatherPageData(self, _mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    available_access_levels = project_helpers.BuildProjectAccessOptions(None)
+    offer_access_level = len(available_access_levels) > 1  # Hide the menu when there is no choice.
+    if settings.default_access_level:
+      access_view = project_views.ProjectAccessView(
+          settings.default_access_level)
+    else:
+      access_view = None
+
+    return {
+        'initial_name': '',
+        'initial_summary': '',
+        'initial_description': '',
+        'initial_project_home': '',
+        'initial_docs_url': '',
+        'initial_logo_gcs_id': '',
+        'initial_logo_file_name': '',
+        'logo_view': tracker_views.LogoView(None),
+        'labels': [],
+        'max_project_name_length': framework_constants.MAX_PROJECT_NAME_LENGTH,
+        'offer_access_level': ezt.boolean(offer_access_level),
+        'initial_access': access_view,
+        'available_access_levels': available_access_levels,
+        }
+
+  def GatherHelpData(self, mr, _page_data):
+    """Return a dict of values to drive on-page user help.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      _page_data: Dictionary of base and page template data.
+
+    Returns:
+      A dict of values to drive on-page user help, to be added to page_data.
+    """
+    cue_remaining_projects = None
+
+    (_period, _soft, _hard,
+     life_max) = actionlimit.ACTION_LIMITS[actionlimit.PROJECT_CREATION]
+    actionlimit_pb = actionlimit.GetLimitPB(
+        mr.auth.user_pb, actionlimit.PROJECT_CREATION)
+    if actionlimit_pb.get_assigned_value('lifetime_limit'):  # Per-user override, if set.
+      life_max = actionlimit_pb.lifetime_limit
+    if life_max is not None:
+      if (actionlimit_pb.lifetime_count + 10 >= life_max
+          and actionlimit_pb.lifetime_count < life_max):
+        cue_remaining_projects = life_max - actionlimit_pb.lifetime_count  # Warn within 10 of the cap.
+
+    return {
+        'cue': None,
+        'cue_remaining_projects': cue_remaining_projects,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form."""
+    # 1. Parse and validate user input.
+    # Project name is taken from post_data because we are creating it.
+    project_name = post_data.get('projectname')
+    if not project_name:
+      mr.errors.projectname = _MSG_MISSING_PROJECT_NAME
+    elif not framework_bizobj.IsValidProjectName(project_name):
+      mr.errors.projectname = _MSG_INVALID_PROJECT_NAME
+
+    summary = post_data.get('summary')
+    if not summary:
+      mr.errors.summary = _MSG_MISSING_PROJECT_SUMMARY
+    description = post_data.get('description', '')
+
+    access = project_helpers.ParseProjectAccess(None, post_data.get('access'))
+    home_page = post_data.get('project_home')
+    if home_page and not (
+        home_page.startswith('http://') or home_page.startswith('https://')):
+      mr.errors.project_home = 'Home page link must start with http(s)://'
+    docs_url = post_data.get('docs_url')
+    if docs_url and not (
+        docs_url.startswith('http:') or docs_url.startswith('https:')):
+      mr.errors.docs_url = 'Documentation link must start with http: or https:'
+
+    self.CheckCaptcha(mr, post_data)
+
+    # These are not specified on via the ProjectCreate form,
+    # the user must edit the project after creation to set them.
+    committer_ids = []
+    contributor_ids = []
+
+    # Validate that provided logo is supported.
+    logo_provided = 'logo' in post_data and not isinstance(
+        post_data['logo'], basestring)  # Treat a plain string as "no file uploaded".
+    if logo_provided:
+      item = post_data['logo']
+      try:
+        gcs_helpers.CheckMimeTypeResizable(
+            filecontent.GuessContentTypeFromFilename(item.filename))
+      except gcs_helpers.UnsupportedMimeType, e:
+        mr.errors.logo = e.message
+
+    # 2. Call services layer to save changes.
+    if not mr.errors.AnyErrors():
+      try:
+        project_id = self.services.project.CreateProject(
+            mr.cnxn, project_name, [mr.auth.user_id],
+            committer_ids, contributor_ids, summary, description,
+            access=access, home_page=home_page, docs_url=docs_url)
+
+        config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
+        self.services.config.StoreConfig(mr.cnxn, config)
+        # Note: No need to store any canned queries or rules yet.
+        self.services.issue.InitializeLocalID(mr.cnxn, project_id)
+
+        # Update project with logo if specified.
+        if logo_provided:
+          item = post_data['logo']
+          logo_file_name = item.filename
+          logo_gcs_id = gcs_helpers.StoreLogoInGCS(
+              logo_file_name, item.value, project_id)
+          self.services.project.UpdateProject(
+              mr.cnxn, project_id, logo_gcs_id=logo_gcs_id,
+              logo_file_name=logo_file_name)
+
+        self.CountRateLimitedActions(
+            mr, {actionlimit.PROJECT_CREATION: 1})
+      except project_svc.ProjectAlreadyExists:
+        mr.errors.projectname = _MSG_PROJECT_NAME_NOT_AVAIL
+
+    # 3. Determine the next page in the UI flow.
+    if mr.errors.AnyErrors():
+      access_view = project_views.ProjectAccessView(access)
+      self.PleaseCorrect(
+          mr, initial_summary=summary, initial_description=description,
+          initial_name=project_name, initial_access=access_view)
+    else:
+      # Go to the new project's introduction page.
+      return framework_helpers.FormatAbsoluteURL(
+          mr, urls.ADMIN_INTRO, project_name=project_name)
+
+
+class CheckProjectNameJSON(jsonfeed.JsonFeed):
+  """JSON data for handling project name checks when creating a project."""
+
+  def HandleRequest(self, mr):
+    """Provide the UI with info about the availability of the project name.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format.
+    """
+    if self.services.project.LookupProjectIDs(mr.cnxn, [mr.specified_project]):  # Any hit means taken.
+      return {'error_message': _MSG_PROJECT_NAME_NOT_AVAIL}
+
+    return {'error_message': ''}
diff --git a/appengine/monorail/sitewide/projectsearch.py b/appengine/monorail/sitewide/projectsearch.py
new file mode 100644
index 0000000..ef7b070
--- /dev/null
+++ b/appengine/monorail/sitewide/projectsearch.py
@@ -0,0 +1,55 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used when searching for projects."""
+
+import logging
+
+from framework import framework_helpers
+from framework import paginate
+from framework import permissions
+
+
+DEFAULT_RESULTS_PER_PAGE = 10
+MAXIMUM_RESULT_PAGES_OFFERED = 10
+
+
+class ProjectSearchPipeline(object):
+ """Manage the process of project search, filter, fetch, and pagination."""
+
+ def __init__(self, mr, services, prof,
+ default_results_per_page=DEFAULT_RESULTS_PER_PAGE):
+
+ self.mr = mr
+ self.services = services
+ self.profiler = prof
+ self.default_results_per_page = default_results_per_page
+ self.pagination = None
+ self.allowed_project_ids = None
+ self.visible_results = None
+
+ def SearchForIDs(self):
+ """Get project IDs the user has permission to view."""
+ with self.profiler.Phase('getting user visible projects'):
+ self.allowed_project_ids = self.services.project.GetVisibleLiveProjects(
+ self.mr.cnxn, self.mr.auth.user_pb, self.mr.auth.effective_ids)
+ logging.info('allowed_project_ids is %r', self.allowed_project_ids)
+
+ def GetProjectsAndPaginate(self, cnxn, list_page_url):
+ """Paginate the filtered list of project names and retrieve Project PBs.
+
+ Args:
+ cnxn: connection to SQL database.
+ list_page_url: string page URL for prev and next links.
+ """
+ self.pagination = paginate.ArtifactPagination(
+ self.mr, self.allowed_project_ids, self.default_results_per_page,
+ list_page_url)
+ with self.profiler.Phase('getting projects on current pagination page'):
+ project_dict = self.services.project.GetProjects(
+ cnxn, self.pagination.visible_results)
+ self.visible_results = [
+ project_dict[pid] for pid in self.pagination.visible_results]
+ logging.info('visible_results is %r', self.visible_results)
diff --git a/appengine/monorail/sitewide/sitewide_helpers.py b/appengine/monorail/sitewide/sitewide_helpers.py
new file mode 100644
index 0000000..9ebb983
--- /dev/null
+++ b/appengine/monorail/sitewide/sitewide_helpers.py
@@ -0,0 +1,122 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions used in sitewide servlets."""
+
+import logging
+
+from framework import permissions
+from proto import project_pb2
+
+
+def GetViewableStarredProjects(
+ cnxn, services, viewed_user_id, effective_ids, logged_in_user):
+ """Returns a list of viewable starred projects."""
+ starred_project_ids = services.project_star.LookupStarredItemIDs(
+ cnxn, viewed_user_id)
+ projects = services.project.GetProjects(cnxn, starred_project_ids).values()
+ viewable_projects = FilterViewableProjects(
+ projects, logged_in_user, effective_ids)
+ return viewable_projects
+
+
+def FilterViewableProjects(project_list, logged_in_user, effective_ids):
+ """Return subset of LIVE project protobufs viewable by the given user."""
+ viewable_projects = []
+ for project in project_list:
+ if (project.state == project_pb2.ProjectState.LIVE and
+ permissions.UserCanViewProject(
+ logged_in_user, effective_ids, project)):
+ viewable_projects.append(project)
+
+ return viewable_projects
+
+
+def GetUserProjects(
+ cnxn, services, user, effective_ids, viewed_user_effective_ids):
+ """Get the projects to display in the user's profile.
+
+ Args:
+ cnxn: connection to the SQL database.
+ services: An instance of services
+ user: The user doing the viewing.
+ effective_ids: set of int user IDs of the user viewing the projects
+ (including any user group IDs).
+ viewed_user_effective_ids: set of int user IDs of the user being viewed.
+
+ Returns:
+ A 4-tuple of lists of PBs:
+ - live projects the viewed user owns
+ - archived projects the viewed user owns
+ - live projects the viewed user is a member of
+ - live projects the viewed user is a contributor to
+
+ Any projects the viewing user should not be able to see are filtered out.
+ Admins can see everything, while other users can see all non-locked
+ projects they own or are a member of, as well as all live projects.
+ """
+ (owned_project_ids, membered_project_ids,
+ contrib_project_ids) = services.project.GetUserRolesInAllProjects(
+ cnxn, viewed_user_effective_ids)
+
+ # Each project should only be considered for at most one role category.
+ # We keep the highest ranking roles and discard lower-ranking ones.
+ membered_project_ids.difference_update(owned_project_ids)
+ contrib_project_ids.difference_update(owned_project_ids)
+ contrib_project_ids.difference_update(membered_project_ids)
+
+ # Build a dictionary of (project_id -> project)
+ # so that we can check permissions.
+ combined = owned_project_ids.union(membered_project_ids).union(
+ contrib_project_ids)
+ projects_dict = services.project.GetProjects(cnxn, combined)
+ projects_dict = _FilterProjectDict(user, effective_ids, projects_dict)
+
+ visible_ownership = _PickProjects(owned_project_ids, projects_dict)
+ visible_archived = _PickProjects(
+ owned_project_ids, projects_dict, archived=True)
+ visible_membership = _PickProjects(membered_project_ids, projects_dict)
+ visible_contrib = _PickProjects(contrib_project_ids, projects_dict)
+
+ return (_SortProjects(visible_ownership), _SortProjects(visible_archived),
+ _SortProjects(visible_membership), _SortProjects(visible_contrib))
+
+
+def _SortProjects(projects):
+ return sorted(projects, key=lambda p: p.project_name)
+
+
+def _PickProjects(project_ids, projects_dict, archived=False):
+ """Select the projects named in project_ids from a preloaded dictionary.
+
+ Args:
+ project_ids: list of project_ids for the desired projects.
+ projects_dict: dict {project_id: ProjectPB, ...} of a lot
+ of preloaded projects, including all the desired ones that exist.
+ archived: set to True if you want to return projects that are in a
+ ARCHIVED state instead of those that are not.
+
+ Returns:
+ A list of Project PBs for the desired projects. If one of them is
+ not found in projects_dict, it is ignored.
+ """
+ # Done in 3 steps: lookup all existing requested projects, filter out
+ # DELETABLE ones, then filter out ARCHIVED or non-ARCHIVED.
+ results = [projects_dict.get(pid) for pid in project_ids
+ if pid in projects_dict]
+ results = [proj for proj in results
+ if proj.state != project_pb2.ProjectState.DELETABLE]
+ results = [proj for proj in results
+ if archived == (proj.state == project_pb2.ProjectState.ARCHIVED)]
+ return results
+
+
+def _FilterProjectDict(user, effective_ids, projects_dict):
+ """Return a new project dictionary which contains only viewable projects."""
+ return {
+ pid: project
+ for pid, project in projects_dict.iteritems()
+ if permissions.UserCanViewProject(user, effective_ids, project)
+ }
diff --git a/appengine/monorail/sitewide/sitewide_views.py b/appengine/monorail/sitewide/sitewide_views.py
new file mode 100644
index 0000000..0cc125d
--- /dev/null
+++ b/appengine/monorail/sitewide/sitewide_views.py
@@ -0,0 +1,20 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""View objects to help display users and groups in UI templates."""
+
+import logging
+
+
+class GroupView(object):
+ """Class to make it easier to display user group metadata."""
+
+ def __init__(self, name, num_members, group_settings, group_id):
+ self.name = name
+ self.num_members = num_members
+ self.who_can_view_members = str(group_settings.who_can_view_members)
+ self.group_id = group_id
+
+ self.detail_url = '/g/%s/' % group_id
diff --git a/appengine/monorail/sitewide/test/__init__.py b/appengine/monorail/sitewide/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/sitewide/test/__init__.py
diff --git a/appengine/monorail/sitewide/test/group_helpers_test.py b/appengine/monorail/sitewide/test/group_helpers_test.py
new file mode 100644
index 0000000..5709b08
--- /dev/null
+++ b/appengine/monorail/sitewide/test/group_helpers_test.py
@@ -0,0 +1,48 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit test for User Group helpers."""
+
+import unittest
+
+from proto import usergroup_pb2
+from sitewide import group_helpers
+
+
+class GroupHelpersTest(unittest.TestCase):
+
+ def testGroupVisibilityView(self):
+ gvv_anyone = group_helpers.GroupVisibilityView(
+ usergroup_pb2.MemberVisibility.ANYONE)
+ gvv_members = group_helpers.GroupVisibilityView(
+ usergroup_pb2.MemberVisibility.MEMBERS)
+ gvv_owners = group_helpers.GroupVisibilityView(
+ usergroup_pb2.MemberVisibility.OWNERS)
+ self.assertEqual('Anyone on the Internet', gvv_anyone.name)
+ self.assertEqual('Group Members', gvv_members.name)
+ self.assertEqual('Group Owners', gvv_owners.name)
+
+ def testGroupMemberView(self):
+ gmv = group_helpers.GroupMemberView(
+ 1L, 'test@example.com', 't...@example.com', 888L, 'member')
+ self.assertEqual(888L, gmv.group_id)
+ self.assertEqual('member', gmv.role)
+
+ def testBuildUserGroupVisibilityOptions(self):
+ vis_views = group_helpers.BuildUserGroupVisibilityOptions()
+ self.assertEqual(3, len(vis_views))
+
+ def testGroupTypeView(self):
+ gt_cia = group_helpers.GroupTypeView(
+ usergroup_pb2.GroupType.CHROME_INFRA_AUTH)
+ gt_mdb = group_helpers.GroupTypeView(
+ usergroup_pb2.GroupType.MDB)
+ self.assertEqual('Chrome-infra-auth', gt_cia.name)
+ self.assertEqual('MDB', gt_mdb.name)
+
+ def testBuildUserGroupTypeOptions(self):
+ group_types = group_helpers.BuildUserGroupTypeOptions()
+ self.assertEqual(3, len(group_types))
+
\ No newline at end of file
diff --git a/appengine/monorail/sitewide/test/groupadmin_test.py b/appengine/monorail/sitewide/test/groupadmin_test.py
new file mode 100644
index 0000000..2078d53
--- /dev/null
+++ b/appengine/monorail/sitewide/test/groupadmin_test.py
@@ -0,0 +1,82 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit test for User Group admin servlet."""
+
+import unittest
+
+from framework import permissions
+from proto import usergroup_pb2
+from services import service_manager
+from sitewide import groupadmin
+from testing import fake
+from testing import testing_helpers
+
+
+class GroupAdminTest(unittest.TestCase):
+ """Tests for the GroupAdmin servlet."""
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService())
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.services.user.TestAddUser('b@example.com', 222L)
+ self.services.user.TestAddUser('c@example.com', 333L)
+ self.services.user.TestAddUser('group@example.com', 888L)
+ self.services.user.TestAddUser('importgroup@example.com', 999L)
+ self.services.usergroup.TestAddGroupSettings(888L, 'group@example.com')
+ self.services.usergroup.TestAddGroupSettings(
+ 999L, 'importgroup@example.com', external_group_type='mdb')
+ self.servlet = groupadmin.GroupAdmin(
+ 'req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest()
+ self.mr.viewed_username = 'group@example.com'
+ self.mr.viewed_user_auth.user_id = 888L
+
+ def testAssertBasePermission(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ mr.viewed_user_auth.user_id = 888L
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ self.services.usergroup.TestAddMembers(888L, [111L], 'owner')
+ self.servlet.AssertBasePermission(self.mr)
+
+ def testGatherPageData_Normal(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual('group@example.com', page_data['groupname'])
+ self.assertEqual('Group Members', page_data['initial_visibility'].name)
+ self.assertEqual(3, len(page_data['visibility_levels']))
+
+ def testGatherPageData_Import(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.viewed_username = 'importgroup@example.com'
+ mr.viewed_user_auth.user_id = 999L
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual('importgroup@example.com', page_data['groupname'])
+ self.assertTrue(page_data['import_group'])
+ self.assertEqual('MDB', page_data['initial_group_type'].name)
+
+ def testProcessFormData_Normal(self):
+ post_data = fake.PostData(visibility='0')
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertIn('/g/group@example.com/groupadmin', url)
+ group_settings = self.services.usergroup.GetGroupSettings(None, 888L)
+ self.assertEqual(usergroup_pb2.MemberVisibility.OWNERS,
+ group_settings.who_can_view_members)
+
+ def testProcessFormData_Import(self):
+ post_data = fake.PostData(
+ group_type='1', import_group=['on'])
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertIn('/g/group@example.com/groupadmin', url)
+ group_settings = self.services.usergroup.GetGroupSettings(None, 888L)
+ self.assertEqual(usergroup_pb2.MemberVisibility.OWNERS,
+ group_settings.who_can_view_members)
+ self.assertEqual(usergroup_pb2.GroupType.MDB,
+ group_settings.ext_group_type)
diff --git a/appengine/monorail/sitewide/test/groupcreate_test.py b/appengine/monorail/sitewide/test/groupcreate_test.py
new file mode 100644
index 0000000..1b8575b
--- /dev/null
+++ b/appengine/monorail/sitewide/test/groupcreate_test.py
@@ -0,0 +1,102 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit test for User Group creation servlet."""
+
+import unittest
+
+import settings
+from framework import permissions
+from proto import site_pb2
+from proto import usergroup_pb2
+from services import service_manager
+from sitewide import groupcreate
+from testing import fake
+from testing import testing_helpers
+
+
+class GroupCreateTest(unittest.TestCase):
+ """Tests for the GroupCreate servlet."""
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project=fake.ProjectService())
+ self.servlet = groupcreate.GroupCreate(
+ 'req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest()
+
+ def CheckAssertBasePermissions(
+ self, restriction, expect_admin_ok, expect_nonadmin_ok):
+ old_group_creation_restriction = settings.group_creation_restriction
+ settings.group_creation_restriction = restriction
+
+ # Anon users can never do it
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr = testing_helpers.MakeMonorailRequest()
+ if expect_admin_ok:
+ self.servlet.AssertBasePermission(mr)
+ else:
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(mr.auth.user_pb, {111L}, None))
+ if expect_nonadmin_ok:
+ self.servlet.AssertBasePermission(mr)
+ else:
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ settings.group_creation_restriction = old_group_creation_restriction
+
+ def testAssertBasePermission(self):
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.ANYONE, True, True)
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.ADMIN_ONLY, True, False)
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.NO_ONE, False, False)
+
+ def testGatherPageData(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual('', page_data['initial_name'])
+
+ def testProcessFormData_Normal(self):
+ post_data = fake.PostData(
+ groupname=['group@example.com'], visibility='1')
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertIn('/g/3444127190/', url)
+ group_id = self.services.user.LookupUserID('cnxn', 'group@example.com')
+ group_settings = self.services.usergroup.GetGroupSettings('cnxn', group_id)
+ self.assertIsNotNone(group_settings)
+ members_after, owners_after = self.services.usergroup.LookupMembers(
+ 'cnxn', [group_id])
+ self.assertEqual(0, len(members_after[group_id] + owners_after[group_id]))
+
+ def testProcessFormData_Import(self):
+ post_data = fake.PostData(
+ groupname=['group@example.com'], group_type='1',
+ import_group=['on'])
+ self.servlet.ProcessFormData(self.mr, post_data)
+ group_id = self.services.user.LookupUserID('cnxn', 'group@example.com')
+ group_settings = self.services.usergroup.GetGroupSettings('cnxn', group_id)
+ self.assertIsNotNone(group_settings)
+ self.assertEqual(usergroup_pb2.MemberVisibility.OWNERS,
+ group_settings.who_can_view_members)
+ self.assertEqual(usergroup_pb2.GroupType.MDB,
+ group_settings.ext_group_type)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/groupdetail_test.py b/appengine/monorail/sitewide/test/groupdetail_test.py
new file mode 100644
index 0000000..98119f6
--- /dev/null
+++ b/appengine/monorail/sitewide/test/groupdetail_test.py
@@ -0,0 +1,128 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit test for User Group Detail servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from sitewide import groupdetail
+from testing import fake
+from testing import testing_helpers
+
+
+class GroupDetailTest(unittest.TestCase):
+ """Tests for the GroupDetail servlet."""
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.services.user.TestAddUser('b@example.com', 222L)
+ self.services.user.TestAddUser('c@example.com', 333L)
+ self.services.user.TestAddUser('group@example.com', 888L)
+ self.services.usergroup.TestAddGroupSettings(888L, 'group@example.com')
+ self.servlet = groupdetail.GroupDetail(
+ 'req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest()
+ self.mr.viewed_username = 'group@example.com'
+ self.mr.viewed_user_auth.user_id = 888L
+
+ def testAssertBasePermission(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ mr.viewed_user_auth.user_id = 888L
+ mr.auth.effective_ids = set([111L])
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ self.services.usergroup.TestAddMembers(888L, [111L], 'member')
+ self.servlet.AssertBasePermission(mr)
+
+ def testAssertBasePermission_IndirectMembership(self):
+ self.services.usergroup.TestAddGroupSettings(999L, 'subgroup@example.com')
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ mr.viewed_user_auth.user_id = 888L
+ mr.auth.effective_ids = set([111L])
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ self.services.usergroup.TestAddMembers(888L, [999L], 'member')
+ self.services.usergroup.TestAddMembers(999L, [111L], 'member')
+ self.servlet.AssertBasePermission(mr)
+
+  def testGatherPageData_ZeroMembers(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ pagination = page_data['pagination']
+ self.assertEqual(0, len(pagination.visible_results))
+
+  def testGatherPageData_NonzeroMembers(self):
+ self.services.usergroup.TestAddMembers(888L, [111L, 222L, 333L])
+ page_data = self.servlet.GatherPageData(self.mr)
+ pagination = page_data['pagination']
+ self.assertEqual(3, len(pagination.visible_results))
+ self.assertEqual(3, pagination.total_count)
+ self.assertEqual(1, pagination.start)
+ self.assertEqual(3, pagination.last)
+ user_view_a, user_view_b, user_view_c = pagination.visible_results
+ self.assertEqual('a@example.com', user_view_a.email)
+ self.assertEqual('b@example.com', user_view_b.email)
+ self.assertEqual('c@example.com', user_view_c.email)
+
+ def testProcessAddMembers_NoneAdded(self):
+ post_data = fake.PostData(addmembers=[''], role=['member'])
+ url = self.servlet.ProcessAddMembers(self.mr, post_data)
+ self.assertIn('/g/group@example.com/?', url)
+ members_after, _ = self.services.usergroup.LookupMembers('cnxn', [888L])
+ self.assertEqual(0, len(members_after[888L]))
+
+ self.services.usergroup.TestAddMembers(888L, [111L, 222L, 333L])
+ url = self.servlet.ProcessAddMembers(self.mr, post_data)
+ self.assertIn('/g/group@example.com/?', url)
+ members_after, _ = self.services.usergroup.LookupMembers('cnxn', [888L])
+ self.assertEqual(3, len(members_after[888L]))
+
+ def testProcessAddMembers_SomeAdded(self):
+ self.services.usergroup.TestAddMembers(888L, [111L])
+ post_data = fake.PostData(
+ addmembers=['b@example.com, c@example.com'], role=['member'])
+ url = self.servlet.ProcessAddMembers(self.mr, post_data)
+ self.assertIn('/g/group@example.com/?', url)
+ members_after, _ = self.services.usergroup.LookupMembers('cnxn', [888L])
+ self.assertEqual(3, len(members_after[888L]))
+
+ def testProcessRemoveMembers_SomeRemoved(self):
+ self.services.usergroup.TestAddMembers(888L, [111L, 222L, 333L])
+ post_data = fake.PostData(remove=['b@example.com', 'c@example.com'])
+ url = self.servlet.ProcessRemoveMembers(self.mr, post_data)
+ self.assertIn('/g/group@example.com/?', url)
+ members_after, _ = self.services.usergroup.LookupMembers('cnxn', [888L])
+ self.assertEqual(1, len(members_after[888L]))
+
+ def testProcessFormData_NoPermission(self):
+ """Group members cannot edit group."""
+ self.services.usergroup.TestAddMembers(888L, [111L], 'member')
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ mr.viewed_user_auth.user_id = 888L
+ mr.auth.effective_ids = set([111L])
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, {})
+
+ def testProcessFormData_OwnerPermission(self):
+    """Group owners can edit group."""
+ self.services.usergroup.TestAddMembers(888L, [111L], 'owner')
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ mr.viewed_user_auth.user_id = 888L
+ mr.auth.effective_ids = set([111L])
+ self.servlet.ProcessFormData(mr, {})
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/grouplist_test.py b/appengine/monorail/sitewide/test/grouplist_test.py
new file mode 100644
index 0000000..e13407a
--- /dev/null
+++ b/appengine/monorail/sitewide/test/grouplist_test.py
@@ -0,0 +1,62 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit test for User Group List servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from sitewide import grouplist
+from testing import fake
+from testing import testing_helpers
+
+
+class GroupListTest(unittest.TestCase):
+ """Tests for the GroupList servlet."""
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ usergroup=fake.UserGroupService())
+ self.servlet = grouplist.GroupList('req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest()
+
+  def testGatherPageData_ZeroGroups(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual([], page_data['groups'])
+
+  def testGatherPageData_NonzeroGroups(self):
+ self.services.usergroup.TestAddGroupSettings(777L, 'group_a@example.com')
+ self.services.usergroup.TestAddGroupSettings(888L, 'group_b@example.com')
+ self.services.usergroup.TestAddMembers(888L, [111L, 222L, 333L])
+ page_data = self.servlet.GatherPageData(self.mr)
+ group_view_a, group_view_b = page_data['groups']
+ self.assertEqual('group_a@example.com', group_view_a.name)
+ self.assertEqual(0, group_view_a.num_members)
+ self.assertEqual('group_b@example.com', group_view_b.name)
+ self.assertEqual(3, group_view_b.num_members)
+
+ def testProcessFormData_NoPermission(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.USER_PERMISSIONSET)
+ post_data = fake.PostData(
+ removebtn=[1])
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, post_data)
+
+ def testProcessFormData_Normal(self):
+ self.services.usergroup.TestAddGroupSettings(
+        888L, 'group_b@example.com', friend_projects=[789])
+ self.services.usergroup.TestAddMembers(888L, [111L, 222L, 333L])
+
+ post_data = fake.PostData(
+ remove=[888],
+ removebtn=[1])
+ self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertNotIn(888L, self.services.usergroup.group_settings)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/hostinghome_test.py b/appengine/monorail/sitewide/test/hostinghome_test.py
new file mode 100644
index 0000000..a7d86f8
--- /dev/null
+++ b/appengine/monorail/sitewide/test/hostinghome_test.py
@@ -0,0 +1,130 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the Monorail home page."""
+
+import unittest
+
+from third_party import ezt
+
+import settings
+from framework import permissions
+from proto import site_pb2
+from services import service_manager
+from sitewide import hostinghome
+from sitewide import projectsearch
+from testing import fake
+from testing import testing_helpers
+
+
+class MockProjectSearchPipeline(object):
+
+ def __init__(self, _mr, services, _profiler):
+ self.visible_results = services.mock_visible_results
+ self.pagination = None
+
+ def SearchForIDs(self):
+ pass
+
+ def GetProjectsAndPaginate(self, cnxn, list_page_url):
+ pass
+
+
+class HostingHomeTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ project_star=fake.ProjectStarService())
+ self.services.mock_visible_results = []
+ self.project_a = self.services.project.TestAddProject('a', project_id=1)
+ self.project_b = self.services.project.TestAddProject('b', project_id=2)
+
+ self.servlet = hostinghome.HostingHome('req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest(user_info={'user_id': 111L})
+
+ self.orig_pipeline_class = projectsearch.ProjectSearchPipeline
+ projectsearch.ProjectSearchPipeline = MockProjectSearchPipeline
+
+ def tearDown(self):
+ projectsearch.ProjectSearchPipeline = self.orig_pipeline_class
+
+ def testSearch_ZeroResults(self):
+ self.services.mock_visible_results = []
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual([], page_data['projects'])
+
+ def testSearch_NonzeroResults(self):
+ self.services.mock_visible_results = [self.project_a, self.project_b]
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(['a', 'b'],
+ [pv.project_name for pv in page_data['projects']])
+
+ def testStarCounts(self):
+ """Test the display of star counts on each displayed project."""
+ self.services.mock_visible_results = [self.project_a, self.project_b]
+ self.services.project_star.SetStar('fake cnxn', 1, 111L, True)
+ self.services.project_star.SetStar('fake cnxn', 1, 222L, True)
+ page_data = self.servlet.GatherPageData(self.mr)
+ project_view_a, project_view_b = page_data['projects']
+ self.assertEqual(2, project_view_a.num_stars)
+ self.assertEqual(0, project_view_b.num_stars)
+
+ def testStarredProjects(self):
+ self.services.mock_visible_results = [self.project_a, self.project_b]
+ self.services.project_star.SetStar('fake cnxn', 1, 111L, True)
+ page_data = self.servlet.GatherPageData(self.mr)
+ project_view_a, project_view_b = page_data['projects']
+ self.assertTrue(project_view_a.starred)
+ self.assertFalse(project_view_b.starred)
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(settings.learn_more_link, page_data['learn_more_link'])
+
+ def testGatherPageData_CanCreateProject(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.perms = permissions.PermissionSet([permissions.CREATE_PROJECT])
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(
+ ezt.boolean(settings.project_creation_restriction ==
+ site_pb2.UserTypeRestriction.ANYONE),
+ page_data['can_create_project'])
+
+ mr.perms = permissions.PermissionSet([])
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(ezt.boolean(False), page_data['can_create_project'])
+
+ def testMakeExampleLabelGrid(self):
+ self.assertEqual([], hostinghome._MakeExampleLabelGrid([]))
+
+ grid = hostinghome._MakeExampleLabelGrid(
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
+ self.assertEqual(2, len(grid))
+ self.assertEqual(['a', 'b', 'c', 'd'], grid[0].labels)
+ self.assertEqual(['e', 'f', 'g', 'h'], grid[1].labels)
+
+ grid = hostinghome._MakeExampleLabelGrid(
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g'])
+ self.assertEqual(2, len(grid))
+ self.assertEqual(['a', 'b', 'c', 'd'], grid[0].labels)
+ self.assertEqual(['e', 'f', 'g'], grid[1].labels)
+
+ grid = hostinghome._MakeExampleLabelGrid(['lab'] * 15)
+ self.assertEqual(3, len(grid))
+ self.assertEqual(5, len(grid[0].labels))
+ self.assertEqual(5, len(grid[1].labels))
+ self.assertEqual(5, len(grid[2].labels))
+
+ grid = hostinghome._MakeExampleLabelGrid(['lab'] * 24)
+ self.assertEqual(4, len(grid))
+ self.assertEqual(6, len(grid[0].labels))
+ self.assertEqual(6, len(grid[1].labels))
+ self.assertEqual(6, len(grid[2].labels))
+ self.assertEqual(6, len(grid[3].labels))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/moved_test.py b/appengine/monorail/sitewide/test/moved_test.py
new file mode 100644
index 0000000..67e205d
--- /dev/null
+++ b/appengine/monorail/sitewide/test/moved_test.py
@@ -0,0 +1,65 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the moved project notification page servlet."""
+
+import unittest
+
+import webapp2
+
+from services import service_manager
+from sitewide import moved
+from testing import fake
+from testing import testing_helpers
+
+
+class MovedTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService())
+ self.servlet = moved.ProjectMoved('req', 'res', services=self.services)
+
+ def testGatherPageData(self):
+ project_name = 'my-project'
+ moved_to = 'http://we-are-outta-here.com/'
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/hosting/moved?project=my-project')
+
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+      self.assertEqual(404, e.code)
+
+ project = self.services.project.TestAddProject(project_name)
+ # Project exists but has not been moved, so 400 BAD_REQUEST.
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+      self.assertEqual(400, e.code)
+
+ # Display the moved_to url if it is valid.
+ project.moved_to = moved_to
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertItemsEqual(
+ ['project_name', 'moved_to_url'],
+ page_data.keys())
+ self.assertEqual(project_name, page_data['project_name'])
+ self.assertEqual(moved_to, page_data['moved_to_url'])
+
+ # We only display URLs that start with 'http'.
+ project.moved_to = 'javascript:alert(1)'
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertItemsEqual(
+ ['project_name', 'moved_to_url'],
+ page_data.keys())
+ self.assertEqual(project_name, page_data['project_name'])
+ self.assertEqual('#invalid-destination-url', page_data['moved_to_url'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/projectcreate_test.py b/appengine/monorail/sitewide/test/projectcreate_test.py
new file mode 100644
index 0000000..f4a4f9c
--- /dev/null
+++ b/appengine/monorail/sitewide/test/projectcreate_test.py
@@ -0,0 +1,123 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the Project Creation servlet."""
+
+import unittest
+
+import settings
+from framework import permissions
+from proto import project_pb2
+from proto import site_pb2
+from services import service_manager
+from sitewide import projectcreate
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectCreateTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services()
+ self.servlet = projectcreate.ProjectCreate('req', 'res', services=services)
+
+ def CheckAssertBasePermissions(
+ self, restriction, expect_admin_ok, expect_nonadmin_ok):
+ old_project_creation_restriction = settings.project_creation_restriction
+ settings.project_creation_restriction = restriction
+
+ # Anon users can never do it
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(None, {}, None))
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr = testing_helpers.MakeMonorailRequest()
+ if expect_admin_ok:
+ self.servlet.AssertBasePermission(mr)
+ else:
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.GetPermissions(mr.auth.user_pb, {111L}, None))
+ if expect_nonadmin_ok:
+ self.servlet.AssertBasePermission(mr)
+ else:
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ settings.project_creation_restriction = old_project_creation_restriction
+
+ def testAssertBasePermission(self):
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.ANYONE, True, True)
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.ADMIN_ONLY, True, False)
+ self.CheckAssertBasePermissions(
+ site_pb2.UserTypeRestriction.NO_ONE, False, False)
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual('', page_data['initial_name'])
+ self.assertEqual('', page_data['initial_summary'])
+ self.assertEqual('', page_data['initial_description'])
+ self.assertEqual([], page_data['labels'])
+
+ def testGatherHelpData(self):
+ project = project_pb2.Project()
+ mr = testing_helpers.MakeMonorailRequest(project=project)
+
+ # Users not near the lifetime limit see no cue card.
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue_remaining_projects'])
+
+ # User who is near the lifetime limit will see a cue card.
+ mr.auth.user_pb.project_creation_limit.lifetime_count = 20
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(5, help_data['cue_remaining_projects'])
+
+ # User far under custom lifetime limit won't see a cue card.
+ mr.auth.user_pb.project_creation_limit.lifetime_limit = 100
+ mr.auth.user_pb.project_creation_limit.lifetime_count = 20
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue_remaining_projects'])
+
+ # User near custom lifetime limit will see a cue card.
+ mr.auth.user_pb.project_creation_limit.lifetime_limit = 100
+ mr.auth.user_pb.project_creation_limit.lifetime_count = 91
+ help_data = self.servlet.GatherHelpData(mr, {})
+ self.assertEqual(9, help_data['cue_remaining_projects'])
+
+
+class CheckProjectNameJSONTest(unittest.TestCase):
+ def setUp(self):
+ services = service_manager.Services(project=fake.ProjectService())
+ self.project = services.project.TestAddProject('proj')
+ self.servlet = projectcreate.CheckProjectNameJSON(
+ 'req', 'res', services=services)
+
+ def testHandleRequestNameTaken(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, user_info={'user_id': 222L}, method='POST',
+ params={'project': self.project.project_name})
+ json_data = self.servlet.HandleRequest(mr)
+ self.assertEqual('That project name is not available.',
+ json_data['error_message'])
+
+ def testHandleRequestNameNotTaken(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, user_info={'user_id': 222L}, method='POST',
+ params={'project': 'not-taken'})
+ json_data = self.servlet.HandleRequest(mr)
+ self.assertEqual('', json_data['error_message'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/projectsearch_test.py b/appengine/monorail/sitewide/test/projectsearch_test.py
new file mode 100644
index 0000000..1e71076
--- /dev/null
+++ b/appengine/monorail/sitewide/test/projectsearch_test.py
@@ -0,0 +1,101 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the projectsearch module."""
+
+import unittest
+
+import mox
+
+from framework import profiler
+from proto import project_pb2
+from services import service_manager
+from sitewide import projectsearch
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectSearchTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService())
+
+ for idx, letter in enumerate('abcdefghijklmnopqrstuvwxyz'):
+ self.services.project.TestAddProject(letter, project_id=idx + 1)
+
+ self.mox = mox.Mox()
+ self.mox.StubOutWithMock(self.services.project, 'GetVisibleLiveProjects')
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def TestPipeline(
+ self, mr=None, expected_start=1, expected_last=None,
+ expected_len=None):
+ if not mr:
+ mr = testing_helpers.MakeMonorailRequest()
+
+ if expected_last is None and expected_len is not None:
+ expected_last = expected_len
+
+ mr.can = 1
+ prof = profiler.Profiler()
+
+ pipeline = projectsearch.ProjectSearchPipeline(mr, self.services, prof)
+ pipeline.SearchForIDs()
+ pipeline.GetProjectsAndPaginate('fake cnxn', '/hosting/search')
+ self.assertEqual(expected_start, pipeline.pagination.start)
+ if expected_last is not None:
+ self.assertEqual(expected_last, pipeline.pagination.last)
+ if expected_len is not None:
+ self.assertEqual(expected_len, len(pipeline.visible_results))
+
+ return pipeline
+
+ def SetUpZeroResults(self):
+ self.services.project.GetVisibleLiveProjects(
+ mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn([])
+
+ def testZeroResults(self):
+ self.SetUpZeroResults()
+ self.mox.ReplayAll()
+ pipeline = self.TestPipeline(
+ expected_last=0, expected_len=0)
+ self.mox.VerifyAll()
+ self.assertListEqual([], pipeline.visible_results)
+
+ def SetUpNonzeroResults(self):
+ self.services.project.GetVisibleLiveProjects(
+ mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn([1, 2, 3])
+
+ def testNonzeroResults(self):
+ self.SetUpNonzeroResults()
+ self.mox.ReplayAll()
+ pipeline = self.TestPipeline(
+ expected_last=3, expected_len=3)
+ self.mox.VerifyAll()
+ self.assertListEqual(
+ [1, 2, 3], [p.project_id for p in pipeline.visible_results])
+
+ def SetUpTwoPageResults(self):
+ self.services.project.GetVisibleLiveProjects(
+ mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(
+ range(1, 16))
+
+ def testTwoPageResults(self):
+ """Test more than one pagination page of results."""
+ self.SetUpTwoPageResults()
+ self.mox.ReplayAll()
+ pipeline = self.TestPipeline(
+ expected_last=10, expected_len=10)
+ self.mox.VerifyAll()
+ self.assertEqual(
+ '/hosting/search?num=10&start=10', pipeline.pagination.next_url)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/sitewide_helpers_test.py b/appengine/monorail/sitewide/test/sitewide_helpers_test.py
new file mode 100644
index 0000000..f806820
--- /dev/null
+++ b/appengine/monorail/sitewide/test/sitewide_helpers_test.py
@@ -0,0 +1,261 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the sitewide_helpers module."""
+
+import unittest
+
+from proto import project_pb2
+from services import service_manager
+from sitewide import sitewide_helpers
+from testing import fake
+
+
+REGULAR_USER_ID = 111L
+ADMIN_USER_ID = 222L
+OTHER_USER_ID = 333L
+
+# Test project IDs
+REGULAR_OWNER_LIVE = 1001
+REGULAR_OWNER_ARCHIVED = 1002
+REGULAR_OWNER_DELETABLE = 1003
+REGULAR_COMMITTER_LIVE = 2001
+REGULAR_COMMITTER_ARCHIVED = 2002
+REGULAR_COMMITTER_DELETABLE = 2003
+OTHER_OWNER_LIVE = 3001
+OTHER_OWNER_ARCHIVED = 3002
+OTHER_OWNER_DELETABLE = 3003
+OTHER_COMMITTER_LIVE = 4001
+MEMBERS_ONLY = 5001
+
+
+class HelperFunctionsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ project_star=fake.ProjectStarService())
+ self.cnxn = 'fake cnxn'
+
+ for user_id in (ADMIN_USER_ID, REGULAR_USER_ID, OTHER_USER_ID):
+ self.services.user.TestAddUser('ignored_%s@gmail.com' % user_id, user_id)
+
+ self.regular_owner_live = self.services.project.TestAddProject(
+ 'regular-owner-live', state=project_pb2.ProjectState.LIVE,
+ owner_ids=[REGULAR_USER_ID], project_id=REGULAR_OWNER_LIVE)
+ self.regular_owner_archived = self.services.project.TestAddProject(
+ 'regular-owner-archived', state=project_pb2.ProjectState.ARCHIVED,
+ owner_ids=[REGULAR_USER_ID], project_id=REGULAR_OWNER_ARCHIVED)
+ self.regular_owner_deletable = self.services.project.TestAddProject(
+ 'regular-owner-deletable', state=project_pb2.ProjectState.DELETABLE,
+ owner_ids=[REGULAR_USER_ID], project_id=REGULAR_OWNER_DELETABLE)
+ self.regular_committer_live = self.services.project.TestAddProject(
+ 'regular-committer-live', state=project_pb2.ProjectState.LIVE,
+ committer_ids=[REGULAR_USER_ID], project_id=REGULAR_COMMITTER_LIVE)
+ self.regular_committer_archived = self.services.project.TestAddProject(
+ 'regular-committer-archived', state=project_pb2.ProjectState.ARCHIVED,
+ committer_ids=[REGULAR_USER_ID], project_id=REGULAR_COMMITTER_ARCHIVED)
+ self.regular_committer_deletable = self.services.project.TestAddProject(
+ 'regular-committer-deletable', state=project_pb2.ProjectState.DELETABLE,
+ committer_ids=[REGULAR_USER_ID], project_id=REGULAR_COMMITTER_DELETABLE)
+ self.other_owner_live = self.services.project.TestAddProject(
+ 'other-owner-live', state=project_pb2.ProjectState.LIVE,
+ owner_ids=[OTHER_USER_ID], project_id=OTHER_OWNER_LIVE)
+ self.other_owner_archived = self.services.project.TestAddProject(
+ 'other-owner-archived', state=project_pb2.ProjectState.ARCHIVED,
+ owner_ids=[OTHER_USER_ID], project_id=OTHER_OWNER_ARCHIVED)
+ self.other_owner_deletable = self.services.project.TestAddProject(
+ 'other-owner-deletable', state=project_pb2.ProjectState.DELETABLE,
+ owner_ids=[OTHER_USER_ID], project_id=OTHER_OWNER_DELETABLE)
+ self.other_committer_live = self.services.project.TestAddProject(
+ 'other-committer-live', state=project_pb2.ProjectState.LIVE,
+ committer_ids=[OTHER_USER_ID], project_id=OTHER_COMMITTER_LIVE)
+
+ self.regular_user = self.services.user.GetUser(self.cnxn, REGULAR_USER_ID)
+
+ self.admin_user = self.services.user.TestAddUser(
+ 'administrator@chromium.org', ADMIN_USER_ID)
+ self.admin_user.is_site_admin = True
+
+ self.other_user = self.services.user.GetUser(self.cnxn, OTHER_USER_ID)
+
+ self.members_only_project = self.services.project.TestAddProject(
+ 'members-only', owner_ids=[REGULAR_USER_ID], project_id=MEMBERS_ONLY)
+ self.members_only_project.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+
+ def assertProjectsAnyOrder(self, actual_projects, *expected_projects):
+ # Check names rather than Project objects so that output is easier to read.
+ actual_names = [p.project_name for p in actual_projects]
+ expected_names = [p.project_name for p in expected_projects]
+ self.assertItemsEqual(expected_names, actual_names)
+
+ def testFilterViewableProjects_CantViewArchived(self):
+ projects = list(sitewide_helpers.FilterViewableProjects(
+ self.services.project.test_projects.values(),
+ self.regular_user, {REGULAR_USER_ID}))
+ self.assertProjectsAnyOrder(
+ projects, self.regular_owner_live, self.regular_committer_live,
+ self.other_owner_live, self.other_committer_live,
+ self.members_only_project)
+
+ def testFilterViewableProjects_NonMemberCantViewMembersOnly(self):
+ projects = list(sitewide_helpers.FilterViewableProjects(
+ self.services.project.test_projects.values(),
+ self.other_user, {OTHER_USER_ID}))
+ self.assertProjectsAnyOrder(
+ projects, self.regular_owner_live, self.regular_committer_live,
+ self.other_owner_live, self.other_committer_live)
+
+ def testFilterViewableProjects_AdminCanViewAny(self):
+ projects = list(sitewide_helpers.FilterViewableProjects(
+ self.services.project.test_projects.values(),
+ self.admin_user, {ADMIN_USER_ID}))
+ self.assertProjectsAnyOrder(
+ projects, self.regular_owner_live, self.regular_committer_live,
+ self.other_owner_live, self.other_committer_live,
+ self.members_only_project)
+
+ def testGetStarredProjects_OnlyViewableLiveStarred(self):
+ viewed_user_id = 123
+ for p in self.services.project.test_projects.values():
+ self.services.project_star.SetStar(
+ self.cnxn, p.project_id, viewed_user_id, True)
+
+ self.assertProjectsAnyOrder(
+ sitewide_helpers.GetViewableStarredProjects(
+ self.cnxn, self.services, viewed_user_id,
+ {REGULAR_USER_ID}, self.regular_user),
+ self.regular_owner_live, self.regular_committer_live,
+ self.other_owner_live, self.other_committer_live,
+ self.members_only_project)
+
+ def testGetStarredProjects_MembersOnly(self):
+ # Both users were able to star the project in the past. The stars do not
+ # go away even if access to the project changes.
+ self.services.project_star.SetStar(
+ self.cnxn, self.members_only_project.project_id, REGULAR_USER_ID, True)
+ self.services.project_star.SetStar(
+ self.cnxn, self.members_only_project.project_id, OTHER_USER_ID, True)
+
+ # But now, only one of them is currently a member, so only regular_user
+ # can see the starred project in the lists.
+ self.assertProjectsAnyOrder(
+ sitewide_helpers.GetViewableStarredProjects(
+ self.cnxn, self.services, REGULAR_USER_ID, {REGULAR_USER_ID},
+ self.regular_user),
+ self.members_only_project)
+ self.assertProjectsAnyOrder(
+ sitewide_helpers.GetViewableStarredProjects(
+ self.cnxn, self.services, OTHER_USER_ID, {REGULAR_USER_ID},
+ self.regular_user),
+ self.members_only_project)
+
+ # The other user cannot see the project, so he does not see it in either
+ # list of starred projects.
+ self.assertProjectsAnyOrder(
+ sitewide_helpers.GetViewableStarredProjects(
+ self.cnxn, self.services, REGULAR_USER_ID, {OTHER_USER_ID},
+ self.other_user)) # No expected projects listed.
+ self.assertProjectsAnyOrder(
+ sitewide_helpers.GetViewableStarredProjects(
+ self.cnxn, self.services, OTHER_USER_ID, {OTHER_USER_ID},
+ self.other_user)) # No expected projects listed.
+
+ def testGetUserProjects_OnlyLiveOfOtherUsers(self):
+ """Regular users should only see live projects of other users."""
+ (owned, archived, membered,
+ _contrib) = sitewide_helpers.GetUserProjects(
+ self.cnxn, self.services, self.regular_user, {REGULAR_USER_ID},
+ {OTHER_USER_ID})
+ self.assertProjectsAnyOrder(owned, self.other_owner_live)
+ self.assertEqual([], archived)
+ self.assertProjectsAnyOrder(membered, self.other_committer_live)
+
+ def testGetUserProjects_AdminSeesAll(self):
+ (owned, archived, membered,
+ _contrib) = sitewide_helpers.GetUserProjects(
+ self.cnxn, self.services, self.admin_user, {ADMIN_USER_ID},
+ {OTHER_USER_ID})
+ self.assertProjectsAnyOrder(owned, self.other_owner_live)
+ self.assertProjectsAnyOrder(archived, self.other_owner_archived)
+ self.assertProjectsAnyOrder(membered, self.other_committer_live)
+
+ def testGetUserProjects_UserSeesOwnArchived(self):
+ (owned, archived, membered,
+ _contrib) = sitewide_helpers.GetUserProjects(
+ self.cnxn, self.services, self.regular_user, {REGULAR_USER_ID},
+ {REGULAR_USER_ID})
+ self.assertProjectsAnyOrder(
+ owned, self.regular_owner_live, self.members_only_project)
+ self.assertProjectsAnyOrder(archived, self.regular_owner_archived)
+ self.assertProjectsAnyOrder(membered, self.regular_committer_live)
+
+ def testGetUserProjects_UserSeesGroupArchived(self):
+ # Now imagine that "regular user" is part of a user group, and the "other
+ # user" is the group. Users and user groups are treated
+ # the same: there's no separate way to represent a user group.
+ (owned, archived, membered,
+ _contrib) = sitewide_helpers.GetUserProjects(
+ self.cnxn, self.services, self.regular_user,
+ {REGULAR_USER_ID, OTHER_USER_ID},
+ {REGULAR_USER_ID, OTHER_USER_ID})
+ self.assertProjectsAnyOrder(
+ owned, self.regular_owner_live, self.members_only_project,
+ self.other_owner_live)
+ self.assertProjectsAnyOrder(
+ archived, self.regular_owner_archived, self.other_owner_archived)
+ self.assertProjectsAnyOrder(
+ membered, self.regular_committer_live, self.other_committer_live)
+
+
+class PickProjectsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project_1 = project_pb2.Project()
+ self.project_2 = project_pb2.Project()
+ self.project_archived = project_pb2.Project()
+ self.project_archived.state = project_pb2.ProjectState.ARCHIVED
+
+ def testPickProjects_NoneDesired(self):
+ wanted = []
+ preloaded = {}
+ picked = sitewide_helpers._PickProjects(wanted, preloaded)
+ self.assertEqual([], picked)
+
+ def testPickProjects_NoneAvailable(self):
+ wanted = [234]
+ preloaded = {}
+ picked = sitewide_helpers._PickProjects(wanted, preloaded)
+ self.assertEqual([], picked)
+
+ def testPickProjects_Normal(self):
+ preloaded = {123: self.project_1,
+ 234: self.project_2,
+ 999: self.project_archived}
+
+ # We get the only one we actually want.
+ wanted = [123]
+ picked = sitewide_helpers._PickProjects(wanted, preloaded)
+ self.assertEqual([self.project_1], picked)
+
+ # Archived projects are normally not included.
+ wanted = [123, 999]
+ picked = sitewide_helpers._PickProjects(wanted, preloaded)
+ self.assertEqual([self.project_1], picked)
+
+ # Just archived projects are returned if we ask for that.
+ picked = sitewide_helpers._PickProjects(wanted, preloaded, archived=True)
+ self.assertEqual([self.project_archived], picked)
+
+ # Non-existent projects are ignored. Projects might not exist if there
+ # is data corruption in our dev and staging instances.
+ wanted = [123, 999, 234]
+ picked = sitewide_helpers._PickProjects(wanted, preloaded)
+ self.assertEqual([self.project_1, self.project_2], picked)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/sitewide_views_test.py b/appengine/monorail/sitewide/test/sitewide_views_test.py
new file mode 100644
index 0000000..909a01a
--- /dev/null
+++ b/appengine/monorail/sitewide/test/sitewide_views_test.py
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for sitewide_views module."""
+
+import unittest
+
+from proto import usergroup_pb2
+from sitewide import sitewide_views
+
+
+class GroupViewTest(unittest.TestCase):
+
+ def testConstructor(self):
+ group_settings = usergroup_pb2.MakeSettings('anyone')
+ view = sitewide_views.GroupView('groupname', 123, group_settings, 999)
+
+ self.assertEqual('groupname', view.name)
+ self.assertEqual(123, view.num_members)
+ self.assertEqual('ANYONE', view.who_can_view_members)
+ self.assertEqual('/g/999/', view.detail_url)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/userprofile_test.py b/appengine/monorail/sitewide/test/userprofile_test.py
new file mode 100644
index 0000000..5b9a525
--- /dev/null
+++ b/appengine/monorail/sitewide/test/userprofile_test.py
@@ -0,0 +1,189 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the user profile page."""
+
+import unittest
+
+import mox
+
+from framework import framework_helpers
+from framework import framework_views
+from proto import project_pb2
+from proto import user_pb2
+from services import service_manager
+from sitewide import userprofile
+from testing import fake
+
+
+REGULAR_USER_ID = 111L
+ADMIN_USER_ID = 222L
+OTHER_USER_ID = 333L
+STATES = {
+ 'live': project_pb2.ProjectState.LIVE,
+ 'archived': project_pb2.ProjectState.ARCHIVED,
+}
+
+
+def MakeReqInfo(
+ user_pb, user_id, viewed_user_pb, viewed_user_id, viewed_user_name,
+ _reveal_email=False, _params=None):
+ mr = fake.MonorailRequest()
+ mr.auth.user_pb = user_pb
+ mr.auth.user_id = user_id
+ mr.auth.effective_ids = {user_id}
+ mr.viewed_user_auth.email = viewed_user_name
+ mr.viewed_user_auth.user_pb = viewed_user_pb
+ mr.viewed_user_auth.user_id = viewed_user_id
+ mr.viewed_user_auth.effective_ids = {viewed_user_id}
+ mr.viewed_user_auth.user_view = framework_views.UserView(
+ viewed_user_id, viewed_user_pb.email, viewed_user_pb.obscure_email)
+ mr.viewed_user_name = viewed_user_name
+ return mr
+
+
+class UserProfileTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.mox.StubOutWithMock(
+ framework_helpers.UserSettings, 'GatherUnifiedSettingsPageData')
+
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService(),
+ project_star=fake.ProjectStarService(),
+ user_star=fake.UserStarService())
+ self.servlet = userprofile.UserProfile('req', 'res', services=services)
+
+ for user_id in (
+ REGULAR_USER_ID, ADMIN_USER_ID, OTHER_USER_ID):
+ services.user.TestAddUser('%s@gmail.com' % user_id, user_id)
+
+ for user in ['regular', 'other']:
+ for relation in ['owner', 'member']:
+ for state_name, state in STATES.iteritems():
+ services.project.TestAddProject(
+ '%s-%s-%s' % (user, relation, state_name), state=state)
+
+ # Add projects
+ for state_name, state in STATES.iteritems():
+ services.project.TestAddProject(
+ 'regular-owner-%s' % state_name, state=state,
+ owner_ids=[REGULAR_USER_ID])
+ services.project.TestAddProject(
+ 'regular-member-%s' % state_name, state=state,
+ committer_ids=[REGULAR_USER_ID])
+ services.project.TestAddProject(
+ 'other-owner-%s' % state_name, state=state,
+ owner_ids=[OTHER_USER_ID])
+ services.project.TestAddProject(
+ 'other-member-%s' % state_name, state=state,
+ committer_ids=[OTHER_USER_ID])
+
+ self.regular_user = services.user.GetUser('fake cnxn', REGULAR_USER_ID)
+ self.admin_user = services.user.GetUser('fake cnxn', ADMIN_USER_ID)
+ self.admin_user.is_site_admin = True
+ self.other_user = services.user.GetUser('fake cnxn', OTHER_USER_ID)
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def assertProjectsAnyOrder(self, value_to_test, *expected_project_names):
+ actual_project_names = [project_view.project_name
+ for project_view in value_to_test]
+ self.assertItemsEqual(expected_project_names, actual_project_names)
+
+ # TODO(jrobbins): re-implement captchas to reveal full
+ # email address and add tests for that.
+
+ def testGatherPageData_RegularUserViewingOtherUserProjects(self):
+ """A user can see the other users' live projects, but not archived ones."""
+ mr = MakeReqInfo(
+ self.regular_user, REGULAR_USER_ID, self.other_user,
+ OTHER_USER_ID, 'other@xyz.com')
+
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 111L, mr.viewed_user_auth.user_view,
+ mr.viewed_user_auth.user_pb).AndReturn({'unified': None})
+ self.mox.ReplayAll()
+
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertProjectsAnyOrder(page_data['owner_of_projects'],
+ 'other-owner-live')
+ self.assertProjectsAnyOrder(page_data['committer_of_projects'],
+ 'other-member-live')
+ self.assertFalse(page_data['owner_of_archived_projects'])
+ self.assertEquals('ot...@xyz.com', page_data['viewed_user_display_name'])
+
+ self.mox.VerifyAll()
+
+ def testGatherPageData_RegularUserViewingOwnProjects(self):
+ """A user can see all their own projects: live or archived."""
+ mr = MakeReqInfo(
+ self.regular_user, REGULAR_USER_ID, self.regular_user,
+ REGULAR_USER_ID, 'self@xyz.com')
+
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 111L, mr.viewed_user_auth.user_view,
+ mr.viewed_user_auth.user_pb).AndReturn({'unified': None})
+ self.mox.ReplayAll()
+
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEquals('self@xyz.com', page_data['viewed_user_display_name'])
+ self.assertProjectsAnyOrder(page_data['owner_of_projects'],
+ 'regular-owner-live')
+ self.assertProjectsAnyOrder(page_data['committer_of_projects'],
+ 'regular-member-live')
+ self.assertProjectsAnyOrder(
+ page_data['owner_of_archived_projects'],
+ 'regular-owner-archived')
+
+ self.mox.VerifyAll()
+
+ def testGatherPageData_AdminViewingOtherUserAddress(self):
+ """Site admins always see full email addresses of other users."""
+ mr = MakeReqInfo(
+ self.admin_user, ADMIN_USER_ID, self.other_user,
+ OTHER_USER_ID, 'other@xyz.com')
+
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 222L, mr.viewed_user_auth.user_view,
+ mr.viewed_user_auth.user_pb).AndReturn({'unified': None})
+ self.mox.ReplayAll()
+
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEquals('other@xyz.com', page_data['viewed_user_display_name'])
+
+ self.mox.VerifyAll()
+
+ def testGatherPageData_RegularUserViewingOtherUserAddress(self):
+ """Email should be revealed to others depending on obscure_email."""
+ mr = MakeReqInfo(
+ self.regular_user, REGULAR_USER_ID, self.other_user,
+ OTHER_USER_ID, 'other@xyz.com')
+
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 111L, mr.viewed_user_auth.user_view,
+ mr.viewed_user_auth.user_pb).AndReturn({'unified': None})
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 111L, mr.viewed_user_auth.user_view,
+ mr.viewed_user_auth.user_pb).AndReturn({'unified': None})
+ self.mox.ReplayAll()
+
+ mr.viewed_user_auth.user_view.obscure_email = False
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEquals('other@xyz.com', page_data['viewed_user_display_name'])
+
+ mr.viewed_user_auth.user_view.obscure_email = True
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEquals('ot...@xyz.com', page_data['viewed_user_display_name'])
+
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/userprojects_test.py b/appengine/monorail/sitewide/test/userprojects_test.py
new file mode 100644
index 0000000..1d7ec46
--- /dev/null
+++ b/appengine/monorail/sitewide/test/userprojects_test.py
@@ -0,0 +1,49 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the user projects feed."""
+
+import unittest
+
+from sitewide import userprojects
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+
+
+class UserProjectsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ user=fake.UserService(),
+ project_star=fake.ProjectStarService())
+ self.projects_json_feed = userprojects.ProjectsJsonFeed(
+ 'req', 'res', services=self.services)
+
+ def testGatherProjects(self):
+ self.services.user.TestAddUser('testuser', 1L)
+ self.services.user.TestAddUser('otheruser', 2L)
+ self.services.project.TestAddProject(
+ 'memberof-proj', project_id=1, owner_ids=[2], committer_ids=[1])
+ self.services.project.TestAddProject(
+ 'ownerof-proj', project_id=2, owner_ids=[1])
+ self.services.project.TestAddProject(
+ 'contributorto-proj', project_id=3, owner_ids=[2], contrib_ids=[1])
+ self.services.project.TestAddProject(
+ 'starred-proj', project_id=4)
+ self.services.project_star.SetStar(None, 4, 1, True)
+
+ mr = testing_helpers.MakeMonorailRequest(
+ user_info={'user_id': 1L}, services=self.services)
+ result = self.projects_json_feed.HandleRequest(mr)
+ self.assertEqual(['memberof-proj'], result['memberof'])
+ self.assertEqual(['contributorto-proj'], result['contributorto'])
+ self.assertEqual(['starred-proj'], result['starred_projects'])
+ self.assertEqual(['ownerof-proj'], result['ownerof'])
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/appengine/monorail/sitewide/test/usersettings_test.py b/appengine/monorail/sitewide/test/usersettings_test.py
new file mode 100644
index 0000000..faed831
--- /dev/null
+++ b/appengine/monorail/sitewide/test/usersettings_test.py
@@ -0,0 +1,66 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the user settings page."""
+
+import unittest
+
+import mox
+
+from framework import framework_helpers
+from framework import permissions
+from framework import template_helpers
+from proto import user_pb2
+from services import service_manager
+from sitewide import usersettings
+from testing import fake
+from testing import testing_helpers
+
+
+class UserSettingsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.services = service_manager.Services(user=fake.UserService())
+ self.servlet = usersettings.UserSettings(
+ 'req', 'res', services=self.services)
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+
+ def testAssertBasePermission(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.auth.user_id = 111L
+
+ # The following should return without exception.
+ self.servlet.AssertBasePermission(mr)
+
+ # No logged in user means anonymous access, should raise error.
+ mr.auth.user_id = 0L
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ def testGatherPageData(self):
+ self.mox.StubOutWithMock(
+ framework_helpers.UserSettings, 'GatherUnifiedSettingsPageData')
+ framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+ 0L, None, mox.IsA(user_pb2.User)).AndReturn({'unified': None})
+ self.mox.ReplayAll()
+
+ mr = testing_helpers.MakeMonorailRequest()
+ page_data = self.servlet.GatherPageData(mr)
+
+ self.assertItemsEqual(
+ ['logged_in_user_pb', 'unified', 'user_tab_mode',
+ 'viewed_user', 'offer_saved_queries_subtab', 'viewing_self'],
+ page_data.keys())
+ self.assertEqual(template_helpers.PBProxy(mr.auth.user_pb),
+ page_data['logged_in_user_pb'])
+
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/sitewide/test/userupdates_test.py b/appengine/monorail/sitewide/test/userupdates_test.py
new file mode 100644
index 0000000..c662aba
--- /dev/null
+++ b/appengine/monorail/sitewide/test/userupdates_test.py
@@ -0,0 +1,116 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.sitewide.userupdates."""
+
+import unittest
+
+import mox
+
+from features import activities
+from services import service_manager
+from sitewide import sitewide_helpers
+from sitewide import userupdates
+from testing import fake
+from testing import testing_helpers
+
+
+class ProjectUpdatesTest(unittest.TestCase):
+  """Tests for the three user-updates servlets in sitewide.userupdates."""
+
+  def setUp(self):
+    self.services = service_manager.Services(
+        project=fake.ProjectService(),
+        user_star=fake.UserStarService())
+
+    self.user_id = 2
+    self.project_id = 987
+    self.project = self.services.project.TestAddProject(
+        'proj', project_id=self.project_id)
+
+    self.mr = testing_helpers.MakeMonorailRequest(
+        services=self.services, project=self.project)
+    self.mr.cnxn = 'fake cnxn'
+    # The profile being viewed belongs to user 100, not the requester.
+    self.mr.viewed_user_auth.user_id = 100
+
+    self.mox = mox.Mox()
+
+  def tearDown(self):
+    self.mox.UnsetStubs()
+    self.mox.ResetAll()
+
+  def testUserUpdatesProjects(self):
+    """Activities are gathered for the projects starred by the viewed user."""
+    uup = userupdates.UserUpdatesProjects(None, None, self.services)
+
+    self.mox.StubOutWithMock(sitewide_helpers, 'GetViewableStarredProjects')
+    sitewide_helpers.GetViewableStarredProjects(
+        self.mr.cnxn, self.services, self.mr.viewed_user_auth.user_id,
+        self.mr.auth.effective_ids, self.mr.auth.user_pb).AndReturn(
+            [self.project])
+
+    self.mox.StubOutWithMock(activities, 'GatherUpdatesData')
+    activities.GatherUpdatesData(
+        self.services, self.mr, mox.IgnoreArg(),
+        user_ids=None,
+        project_ids=[self.project_id],
+        ending=uup._ENDING,
+        updates_page_url=uup._UPDATES_PAGE_URL,
+        highlight=uup._HIGHLIGHT).AndReturn({})
+
+    self.mox.ReplayAll()
+
+    page_data = uup.GatherPageData(self.mr)
+    self.mox.VerifyAll()
+    # Only the servlet's own three keys remain, since the stub returned {}.
+    self.assertEqual(3, len(page_data))
+    self.assertEqual('st5', page_data['user_tab_mode'])
+    self.assertEqual('yes', page_data['viewing_user_page'])
+    self.assertEqual(uup._TAB_MODE, page_data['user_updates_tab_mode'])
+
+  def testUserUpdatesDevelopers(self):
+    """Activities are gathered for the users starred by the viewed user."""
+    uud = userupdates.UserUpdatesDevelopers(None, None, self.services)
+
+    self.mox.StubOutWithMock(self.services.user_star, 'LookupStarredItemIDs')
+    self.services.user_star.LookupStarredItemIDs(
+        self.mr.cnxn, self.mr.viewed_user_auth.user_id).AndReturn(
+            [self.user_id])
+
+    self.mox.StubOutWithMock(activities, 'GatherUpdatesData')
+    activities.GatherUpdatesData(
+        self.services, self.mr, mox.IgnoreArg(),
+        user_ids=[self.user_id],
+        project_ids=None,
+        ending=uud._ENDING,
+        updates_page_url=uud._UPDATES_PAGE_URL,
+        highlight=uud._HIGHLIGHT).AndReturn({})
+
+    self.mox.ReplayAll()
+
+    page_data = uud.GatherPageData(self.mr)
+    self.mox.VerifyAll()
+    self.assertEqual(3, len(page_data))
+    self.assertEqual('st5', page_data['user_tab_mode'])
+    self.assertEqual('yes', page_data['viewing_user_page'])
+    self.assertEqual(uud._TAB_MODE, page_data['user_updates_tab_mode'])
+
+  def testUserUpdatesIndividual(self):
+    """Activities are gathered for the viewed user him/herself."""
+    uui = userupdates.UserUpdatesIndividual(None, None, self.services)
+
+    self.mox.StubOutWithMock(activities, 'GatherUpdatesData')
+    activities.GatherUpdatesData(
+        self.services, self.mr, mox.IgnoreArg(),
+        user_ids=[self.mr.viewed_user_auth.user_id],
+        project_ids=None,
+        ending=uui._ENDING,
+        updates_page_url=uui._UPDATES_PAGE_URL,
+        highlight=uui._HIGHLIGHT).AndReturn({})
+
+    self.mox.ReplayAll()
+
+    page_data = uui.GatherPageData(self.mr)
+    self.mox.VerifyAll()
+    self.assertEqual(3, len(page_data))
+    self.assertEqual('st5', page_data['user_tab_mode'])
+    self.assertEqual('yes', page_data['viewing_user_page'])
+    self.assertEqual(uui._TAB_MODE, page_data['user_updates_tab_mode'])
+
diff --git a/appengine/monorail/sitewide/userprofile.py b/appengine/monorail/sitewide/userprofile.py
new file mode 100644
index 0000000..82427e7
--- /dev/null
+++ b/appengine/monorail/sitewide/userprofile.py
@@ -0,0 +1,140 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes for the user profile page ("my page")."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import framework_views
+from framework import permissions
+from framework import servlet
+from project import project_views
+from sitewide import sitewide_helpers
+
+
+class AbstractUserPage(servlet.Servlet):
+  """Base class for UserProfile and UserUpdates pages."""
+
+  # Subclasses must set this to the EZT template for their page.
+  _PAGE_TEMPLATE = None
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    # Abstract: every concrete user page must supply its own page data.
+    raise NotImplementedError()
+
+
+class UserProfile(AbstractUserPage):
+  """Shows a page of information about a user."""
+
+  _PAGE_TEMPLATE = 'sitewide/user-profile-page.ezt'
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request; the profile being
+          shown is identified by mr.viewed_user_auth.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    viewing_self = mr.viewed_user_auth.user_id == mr.auth.user_id
+
+    # If the "user" is actually a user group, show the group page instead.
+    if self.services.usergroup.GetGroupSettings(
+        mr.cnxn, mr.viewed_user_auth.user_id):
+      url = framework_helpers.FormatAbsoluteURL(
+          mr, '/g/%s/' % mr.viewed_user_auth.user_pb.email,
+          include_project=False)
+      self.redirect(url, abort=True)  # Show group page instead.
+
+    with self.profiler.Phase('GetUserProjects'):
+      project_lists = sitewide_helpers.GetUserProjects(
+          mr.cnxn, self.services, mr.auth.user_pb, mr.auth.effective_ids,
+          mr.viewed_user_auth.effective_ids)
+
+    (visible_ownership, visible_archived, visible_membership,
+     visible_contrib) = project_lists
+
+    # Do not obscure email if current user is a site admin. Do not obscure
+    # email if current user is viewing his/her own profile. For all other
+    # cases do whatever obscure_email setting for the user is.
+    email_obscured = (not(mr.auth.user_pb.is_site_admin or viewing_self)
+                      and mr.viewed_user_auth.user_view.obscure_email)
+    if email_obscured:
+      _, domain, obscured_username = framework_views.ParseAndObscureAddress(
+          mr.viewed_user_auth.email)
+      viewed_user_display_name = '%s...@%s' % (obscured_username, domain)
+    else:
+      viewed_user_display_name = mr.viewed_user_auth.email
+
+    with self.profiler.Phase('GetStarredProjects'):
+      starred_projects = sitewide_helpers.GetViewableStarredProjects(
+          mr.cnxn, self.services, mr.viewed_user_auth.user_id,
+          mr.auth.effective_ids, mr.auth.user_pb)
+
+    # Projects starred by the *requester*, used to mark stars in the UI.
+    logged_in_starred_pids = []
+    if mr.auth.user_id:
+      logged_in_starred_pids = self.services.project_star.LookupStarredItemIDs(
+          mr.cnxn, mr.auth.user_id)
+
+    # Users that the *viewed* user has starred, shown on his/her profile.
+    starred_user_ids = self.services.user_star.LookupStarredItemIDs(
+        mr.cnxn, mr.viewed_user_auth.user_id)
+    starred_user_dict = framework_views.MakeAllUserViews(
+        mr.cnxn, self.services.user, starred_user_ids)
+    starred_users = starred_user_dict.values()
+
+    is_user_starred = self._IsUserStarred(
+        mr.cnxn, mr.auth.user_id, mr.viewed_user_auth.user_id)
+
+    page_data = {
+        'user_tab_mode': 'st2',
+        'viewed_user_display_name': viewed_user_display_name,
+        'viewed_user_is_banned': ezt.boolean(
+            mr.viewed_user_auth.user_pb.banned),
+        'viewed_user_ignore_action_limits': (
+            ezt.boolean(mr.viewed_user_auth.user_pb.ignore_action_limits)),
+        'owner_of_projects': [
+            project_views.ProjectView(
+                p, starred=p.project_id in logged_in_starred_pids)
+            for p in visible_ownership],
+        'committer_of_projects': [
+            project_views.ProjectView(
+                p, starred=p.project_id in logged_in_starred_pids)
+            for p in visible_membership],
+        'contributor_to_projects': [
+            project_views.ProjectView(
+                p, starred=p.project_id in logged_in_starred_pids)
+            for p in visible_contrib],
+        'owner_of_archived_projects': [
+            project_views.ProjectView(p) for p in visible_archived],
+        'starred_projects': [
+            project_views.ProjectView(
+                p, starred=p.project_id in logged_in_starred_pids)
+            for p in starred_projects],
+        'starred_users': starred_users,
+        'is_user_starred': ezt.boolean(is_user_starred),
+        'viewing_user_page': ezt.boolean(True),
+    }
+
+    # Merge in the unified settings section shown on the profile page.
+    settings = framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+        mr.auth.user_id, mr.viewed_user_auth.user_view,
+        mr.viewed_user_auth.user_pb)
+    page_data.update(settings)
+
+    return page_data
+
+  def _IsUserStarred(self, cnxn, logged_in_user_id, viewed_user_id):
+    """Return whether the logged in user starred the viewed user."""
+    # Anonymous requesters (id 0/None) can never have starred anyone.
+    if logged_in_user_id:
+      return self.services.user_star.IsItemStarredBy(
+          cnxn, viewed_user_id, logged_in_user_id)
+    return False
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: HTML form data from the request.
+
+    Returns:
+      String URL to redirect the user to after processing, with saved=1
+      and a timestamp query param to defeat caching.
+    """
+    has_admin_perm = mr.perms.HasPerm(permissions.ADMINISTER_SITE, None, None)
+    framework_helpers.UserSettings.ProcessSettingsForm(
+        mr.cnxn, self.services.user, post_data, mr.viewed_user_auth.user_id,
+        mr.viewed_user_auth.user_pb, admin=has_admin_perm)
+
+    # TODO(jrobbins): Check all calls to FormatAbsoluteURL for include_project.
+    return framework_helpers.FormatAbsoluteURL(
+        mr, mr.viewed_user_auth.user_view.profile_url, include_project=False,
+        saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/sitewide/userprojects.py b/appengine/monorail/sitewide/userprojects.py
new file mode 100644
index 0000000..3dd52c2
--- /dev/null
+++ b/appengine/monorail/sitewide/userprojects.py
@@ -0,0 +1,55 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes for the user projects feed."""
+
+from framework import jsonfeed
+from sitewide import sitewide_helpers
+
+
+class ProjectsJsonFeed(jsonfeed.JsonFeed):
+  """Servlet to get all of a user's projects in JSON format."""
+
+  def HandleRequest(self, mr):
+    """Retrieve list of a user's projects for the "My projects" menu.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format, or an {'error': ...} dict if the
+      requester is not signed in.
+    """
+    if not mr.auth.user_id:
+      return {'error': 'User is not logged in.'}
+
+    json_data = {}
+
+    with self.profiler.Phase('page processing'):
+      json_data.update(self._GatherProjects(mr))
+
+    return json_data
+
+  def _GatherProjects(self, mr):
+    """Return a dict of project names the current user is involved in."""
+    with self.profiler.Phase('GetUserProjects'):
+      project_lists = sitewide_helpers.GetUserProjects(
+          mr.cnxn, self.services, mr.auth.user_pb, mr.auth.effective_ids,
+          mr.auth.effective_ids)
+    # NOTE(review): archived projects (_visible_deleted) are not included
+    # in the response -- presumably intentional for this menu; confirm.
+    (visible_ownership, _visible_deleted, visible_membership,
+     visible_contrib) = project_lists
+
+    with self.profiler.Phase('GetStarredProjects'):
+      starred_projects = sitewide_helpers.GetViewableStarredProjects(
+          mr.cnxn, self.services, mr.auth.user_id,
+          mr.auth.effective_ids, mr.auth.user_pb)
+
+    projects_dict = {
+      'memberof': [p.project_name for p in visible_membership],
+      'ownerof': [p.project_name for p in visible_ownership],
+      'contributorto': [p.project_name for p in visible_contrib],
+      'starred_projects': [p.project_name for p in starred_projects],
+    }
+
+    return projects_dict
diff --git a/appengine/monorail/sitewide/usersettings.py b/appengine/monorail/sitewide/usersettings.py
new file mode 100644
index 0000000..23841b2
--- /dev/null
+++ b/appengine/monorail/sitewide/usersettings.py
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes for the user settings (preferences) page."""
+
+import time
+import urllib
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from framework import urls
+
+
+class UserSettings(servlet.Servlet):
+  """Shows a page with a simple form to edit user preferences."""
+
+  _PAGE_TEMPLATE = 'sitewide/user-settings-page.ezt'
+
+  def AssertBasePermission(self, mr):
+    """Assert that the user has the permissions needed to view this page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Raises:
+      PermissionException: if the requester is not signed in.
+    """
+    super(UserSettings, self).AssertBasePermission(mr)
+
+    if not mr.auth.user_id:
+      raise permissions.PermissionException(
+          'Anonymous users are not allowed to edit user settings')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    page_data = {
+        'user_tab_mode': 'st3',
+        'logged_in_user_pb': template_helpers.PBProxy(mr.auth.user_pb),
+        # When on /hosting/settings, the logged-in user is the viewed user.
+        'viewed_user': mr.auth.user_view,
+        'offer_saved_queries_subtab': ezt.boolean(True),
+        'viewing_self': ezt.boolean(True),
+        }
+    page_data.update(
+        framework_helpers.UserSettings.GatherUnifiedSettingsPageData(
+            mr.auth.user_id, mr.auth.user_view, mr.auth.user_pb))
+    return page_data
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: HTML form data from the request.
+
+    Returns:
+      String URL to redirect the user to, with saved=1 and a timestamp
+      query param to defeat caching.
+    """
+    framework_helpers.UserSettings.ProcessSettingsForm(
+        mr.cnxn, self.services.user, post_data,
+        mr.auth.user_id, mr.auth.user_pb)
+
+    url = framework_helpers.FormatAbsoluteURL(
+        mr, urls.USER_SETTINGS, include_project=False,
+        saved=1, ts=int(time.time()))
+
+    return url
diff --git a/appengine/monorail/sitewide/userupdates.py b/appengine/monorail/sitewide/userupdates.py
new file mode 100644
index 0000000..5639524
--- /dev/null
+++ b/appengine/monorail/sitewide/userupdates.py
@@ -0,0 +1,114 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes for user updates pages.
+
+ AbstractUserUpdatesPage: Base class for all user updates pages
+ UserUpdatesProjects: Handles displaying starred projects
+ UserUpdatesDevelopers: Handles displaying starred developers
+ UserUpdatesIndividual: Handles displaying activities by the viewed user
+"""
+
+
+import logging
+
+from third_party import ezt
+
+from features import activities
+from framework import urls
+from sitewide import sitewide_helpers
+from sitewide import userprofile
+
+
+class AbstractUserUpdatesPage(userprofile.AbstractUserPage):
+  """Base class for user updates pages."""
+
+  _PAGE_TEMPLATE = 'sitewide/user-updates-page.ezt'
+
+  # Subclasses should override these constants.
+  _UPDATES_PAGE_URL = None
+  # What to highlight in the middle column on user updates pages - 'project',
+  # 'user', or None
+  _HIGHLIGHT = None
+  # What the ending phrase for activity titles should be - 'by_user',
+  # 'in_project', or None
+  _ENDING = None
+  # Which subtab is active; passed through as user_updates_tab_mode.
+  _TAB_MODE = None
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page, combining this
+      servlet's own keys with activities.GatherUpdatesData results.
+    """
+    # TODO(jrobbins): re-implement
+    # if self.CheckRevelationCaptcha(mr, mr.errors):
+    #   mr.viewed_user_auth.user_view.RevealEmail()
+
+    page_data = {
+        'user_tab_mode': 'st5',
+        'viewing_user_page': ezt.boolean(True),
+        'user_updates_tab_mode': self._TAB_MODE,
+        }
+
+    # Subclass hooks decide which users/projects to pull activities from.
+    user_ids = self._GetUserIDsForUpdates(mr)
+    project_ids = self._GetProjectIDsForUpdates(mr)
+    page_data.update(activities.GatherUpdatesData(
+        self.services, mr, self.profiler, user_ids=user_ids,
+        project_ids=project_ids, ending=self._ENDING,
+        updates_page_url=self._UPDATES_PAGE_URL, highlight=self._HIGHLIGHT))
+
+    return page_data
+
+  def _GetUserIDsForUpdates(self, _mr):
+    """Returns a list of user IDs to retrieve activities from."""
+    return None  # Means any.
+
+  def _GetProjectIDsForUpdates(self, _mr):
+    """Returns a list of project IDs to retrieve activities from."""
+    return None  # Means any.
+
+
+class UserUpdatesProjects(AbstractUserUpdatesPage):
+  """Shows a page of updates from projects starred by a user."""
+
+  _UPDATES_FEED_URL = urls.USER_UPDATES_PROJECTS
+  _UPDATES_PAGE_URL = urls.USER_UPDATES_PROJECTS
+  _HIGHLIGHT = 'project'
+  _ENDING = 'by_user'
+  _TAB_MODE = 'st2'
+
+  def _GetProjectIDsForUpdates(self, mr):
+    """Returns a list of project IDs from which to retrieve activities."""
+    # Only projects that the requester is allowed to view are included.
+    starred_projects = sitewide_helpers.GetViewableStarredProjects(
+        mr.cnxn, self.services, mr.viewed_user_auth.user_id,
+        mr.auth.effective_ids, mr.auth.user_pb)
+    return [project.project_id for project in starred_projects]
+
+
+class UserUpdatesDevelopers(AbstractUserUpdatesPage):
+  """Shows a page of updates from developers starred by a user."""
+
+  _UPDATES_FEED_URL = urls.USER_UPDATES_DEVELOPERS
+  _UPDATES_PAGE_URL = urls.USER_UPDATES_DEVELOPERS
+  _HIGHLIGHT = 'user'
+  _ENDING = 'in_project'
+  _TAB_MODE = 'st3'
+
+  def _GetUserIDsForUpdates(self, mr):
+    """Returns a list of user IDs from whom to retrieve activities."""
+    # The users starred by the profile being viewed, not by the requester.
+    user_ids = self.services.user_star.LookupStarredItemIDs(
+        mr.cnxn, mr.viewed_user_auth.user_id)
+    logging.debug('StarredUsers: %r', user_ids)
+    return user_ids
+
+
+class UserUpdatesIndividual(AbstractUserUpdatesPage):
+  """Shows a page of updates initiated by a user."""
+
+  _UPDATES_FEED_URL = urls.USER_UPDATES_MINE + '/user'
+  _UPDATES_PAGE_URL = urls.USER_UPDATES_MINE
+  _HIGHLIGHT = 'project'
+  # NOTE(review): unlike the sibling classes, _ENDING is not overridden
+  # here, so it stays None (no ending phrase) -- confirm that is intended.
+  _TAB_MODE = 'st1'
+
+  def _GetUserIDsForUpdates(self, mr):
+    """Returns a list of user IDs from whom to retrieve activities."""
+    return [mr.viewed_user_auth.user_id]
diff --git a/appengine/monorail/sql/PRESUBMIT.py b/appengine/monorail/sql/PRESUBMIT.py
new file mode 100644
index 0000000..a0fb30f
--- /dev/null
+++ b/appengine/monorail/sql/PRESUBMIT.py
@@ -0,0 +1,32 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Presubmit script just for Monorail's SQL files."""
+
+
+def AlterTableCheck(input_api, output_api):  # pragma: no cover
+  """Prompt if .sql schema files and alter-table-log.txt change separately.
+
+  Any change to a schema file (every .sql except queries.sql) should come
+  with an entry in alter-table-log.txt, and vice versa, so warn when
+  exactly one side of that pair was modified (hence the XOR below).
+
+  NOTE(review): sql_files holds bare filenames from os_listdir while
+  affected_files holds LocalPath() values (and log_file an os_path.join
+  on PresubmitLocalPath) -- confirm these are comparable in this presubmit
+  environment, otherwise the membership tests may never match.
+  """
+  this_dir = input_api.PresubmitLocalPath()
+  sql_files = set(x for x in input_api.os_listdir(this_dir)
+                  if (x.endswith('.sql') and x != 'queries.sql'))
+  log_file = input_api.os_path.join(this_dir, 'alter-table-log.txt')
+  affected_files = set(f.LocalPath() for f in input_api.AffectedTextFiles())
+
+  if (any(f in affected_files for f in sql_files) ^
+      (log_file in affected_files)):
+    return [output_api.PresubmitPromptOrNotify(
+        'It looks like you have modified the sql schema without updating\n'
+        'the alter-table-log, or vice versa. Are you sure you want to do this?')
+    ]
+  return []
+
+
+def CheckChangeOnUpload(input_api, output_api):  # pragma: no cover
+  """Presubmit hook run at upload time; delegates to AlterTableCheck."""
+  output = AlterTableCheck(input_api, output_api)
+  return output
+
+
+def CheckChangeOnCommit(input_api, output_api):  # pragma: no cover
+  """Presubmit hook run at commit time; delegates to AlterTableCheck."""
+  output = AlterTableCheck(input_api, output_api)
+  return output
diff --git a/appengine/monorail/sql/alter-table-log.txt b/appengine/monorail/sql/alter-table-log.txt
new file mode 100644
index 0000000..ea0e3f4
--- /dev/null
+++ b/appengine/monorail/sql/alter-table-log.txt
@@ -0,0 +1,797 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+This file contains a log of ALTER TABLE statements that need to be executed
+to bring a Monorail SQL database up to the current schema.
+
+================================================================
+2012-05-24: Added more Project fields.
+
+ALTER TABLE Project ADD COLUMN read_only_reason VARCHAR(80);
+ALTER TABLE Project ADD COLUMN issue_notify_address VARCHAR(80);
+ALTER TABLE Project ADD COLUMN attachment_bytes_used INT DEFAULT 0;
+ALTER TABLE Project ADD COLUMN attachment_quota INT DEFAULT 52428800;
+ALTER TABLE Project ADD COLUMN moved_to VARCHAR(250);
+ALTER TABLE Project ADD COLUMN process_inbound_email BOOLEAN DEFAULT FALSE;
+
+================================================================
+2012-06-01: Added inbound_message for issue comments
+
+ALTER TABLE Comment ADD COLUMN inbound_message TEXT;
+
+
+================================================================
+2012-06-05: Removed send_notifications_from_user because Monorail will
+not offer that feature any time soon.
+
+ALTER TABLE ProjectIssueConfig DROP COLUMN send_notifications_from_user;
+
+
+================================================================
+2012-06-05: Add initial subscription options.
+
+ALTER TABLE User2SavedQuery ADD COLUMN subscription_mode
+ ENUM ('noemail', 'immediate') DEFAULT 'noemail' NOT NULL;
+
+
+================================================================
+2012-07-02: Revised project states and added state_reason and delete_time
+
+ALTER TABLE Project MODIFY COLUMN state ENUM ('live', 'archived', 'deletable')
+NOT NULL;
+
+ALTER TABLE Project ADD COLUMN state_reason VARCHAR(80);
+ALTER TABLE Project ADD COLUMN delete_time INT;
+
+
+================================================================
+2012-07-05: Added action limits and dismissed cues
+
+CREATE TABLE ActionLimit (
+ user_id INT NOT NULL AUTO_INCREMENT,
+ action_kind ENUM (
+ 'project_creation', 'issue_comment', 'issue_attachment',
+ 'issue_bulk_edit'),
+ recent_count INT,
+ reset_timestamp INT,
+ lifetime_count INT,
+ lifetime_limit INT,
+
+ PRIMARY KEY (user_id, action_kind)
+) ENGINE=INNODB;
+
+
+CREATE TABLE DismissedCues (
+ user_id INT NOT NULL AUTO_INCREMENT,
+ cue VARCHAR(40), -- names of the cue cards that the user has dismissed.
+
+ INDEX (user_id)
+) ENGINE=INNODB;
+
+
+ALTER TABLE User ADD COLUMN ignore_action_limits BOOLEAN DEFAULT FALSE;
+
+================================================================
+2012-07-11: No longer using Counter table.
+
+DROP TABLE Counter;
+
+================================================================
+2012-09-06: Drop AttachmentContent, put blobkey in Attachment
+and drop some redundant columns.
+
+Note: This loses attachment data that might currently be in your
+instance. Good thing these schema refinements are getting done
+before launch.
+
+ALTER TABLE Attachment DROP COLUMN attachment_id;
+ALTER TABLE Attachment DROP COLUMN comment_created;
+ALTER TABLE Attachment ADD COLUMN blobkey VARCHAR(1024) NOT NULL;
+
+DROP TABLE AttachmentContent;
+
+ALTER TABLE IssueUpdate DROP COLUMN comment_created;
+
+
+================================================================
+2012-11-01: Add Components to IssueUpdate enum.
+
+alter table IssueUpdate modify field ENUM ('summary', 'status', 'owner',
+'cc', 'labels', 'blockedon', 'blocking', 'mergedinto', 'project',
+'components') NOT NULL;
+
+
+================================================================
+2012-12-10: Add template admins and field admins
+
+
+CREATE TABLE FieldDef2Admin (
+ field_id INT NOT NULL,
+ admin_id INT NOT NULL,
+
+ PRIMARY KEY (field_id, admin_id),
+ FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+CREATE TABLE Template2Admin (
+ template_id INT NOT NULL,
+ admin_id INT NOT NULL,
+
+ PRIMARY KEY (template_id, admin_id),
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+================================================================
+2012-12-14: Add a table of custom field values
+
+ALTER TABLE FieldDef MODIFY field_type ENUM (
+ 'enum_type', 'int_type', 'str_type', 'user_type') NOT NULL;
+
+CREATE TABLE Issue2FieldValue (
+ iid INT NOT NULL,
+ field_id INT NOT NULL,
+
+ int_value INT,
+ str_value VARCHAR(1024),
+ user_id INT,
+
+ derived BOOLEAN DEFAULT FALSE,
+
+ INDEX (iid, field_id),
+ INDEX (field_id, int_value),
+ INDEX (field_id, str_value),
+ INDEX (field_id, user_id),
+
+ FOREIGN KEY (iid) REFERENCES Issue(id),
+ -- FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+================================================================
+2012-12-18: persistence for update objects on custom fields
+
+ALTER TABLE IssueUpdate MODIFY field ENUM (
+ 'summary', 'status', 'owner', 'cc', 'labels', 'blockedon', 'blocking', 'mergedinto',
+ 'project', 'components', 'custom' ) NOT NULL;
+
+ALTER TABLE IssueUpdate ADD custom_field_name VARCHAR(255);
+
+
+================================================================
+2012-12-27: Rename component owner to component admin
+
+DROP TABLE Component2Owner;
+
+CREATE TABLE Component2Admin (
+ component_id SMALLINT UNSIGNED NOT NULL,
+ admin_id INT NOT NULL,
+
+ PRIMARY KEY (component_id, admin_id),
+
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+================================================================
+2013-01-20: add field applicability predicate
+
+ALTER TABLE FieldDef ADD applicable_type VARCHAR(80);
+ALTER TABLE FieldDef ADD applicable_predicate TEXT;
+
+================================================================
+2013-01-25: add field validation details
+
+ALTER TABLE FieldDef ADD max_value INT;
+ALTER TABLE FieldDef ADD min_value INT;
+ALTER TABLE FieldDef ADD regex VARCHAR(80);
+ALTER TABLE FieldDef ADD needs_member BOOLEAN;
+ALTER TABLE FieldDef ADD needs_perm VARCHAR(80);
+
+
+================================================================
+2013-02-11: add grant and notify to user-valued fields
+
+ALTER TABLE FieldDef ADD grants_perm VARCHAR(80);
+ALTER TABLE FieldDef ADD notify_on ENUM ('never', 'any_comment') DEFAULT 'never' NOT NULL;
+
+
+================================================================
+2013-03-17: Add Template2FieldValue
+
+CREATE TABLE Template2FieldValue (
+ template_id INT NOT NULL,
+ field_id INT NOT NULL,
+
+ int_value INT,
+ str_value VARCHAR(1024),
+ user_id INT,
+
+ INDEX (template_id, field_id),
+
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ -- FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+================================================================
+2013-05-08: eliminated same_org_only
+
+-- This needs to be done on all shards.
+UPDATE Project SET access = 'members_only' WHERE access = 'same_org_only';
+ALTER TABLE Project MODIFY COLUMN access ENUM ('anyone', 'members_only');
+
+================================================================
+2013-05-08: implemented recent activity timestamp
+
+-- This needs to be done on all shards.
+ALTER TABLE Project ADD recent_activity_timestamp INT;
+
+================================================================
+2013-07-01: use BIGINT for Invalidate timesteps
+
+ALTER TABLE Invalidate MODIFY COLUMN timestep BIGINT NOT NULL AUTO_INCREMENT;
+
+
+================================================================
+2013-07-23: renamed to avoid "participant"
+
+RENAME TABLE ParticipantDuty TO MemberDuty;
+RENAME TABLE ParticipantNotes TO MemberNotes;
+
+================================================================
+2013-08-22: renamed issue_id to local_id
+
+-- On master and all shards
+ALTER TABLE Issue CHANGE issue_id local_id INT NOT NULL;
+
+-- On master only
+ALTER TABLE IssueFormerLocations CHANGE issue_id local_id INT NOT NULL;
+
+================================================================
+2013-08-24: renamed iid to issue_id
+
+-- On master and all shards
+
+ALTER TABLE IssueSummary DROP FOREIGN KEY IssueSummary_ibfk_1;
+ALTER TABLE IssueSummary CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE IssueSummary ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE Issue2Label DROP FOREIGN KEY Issue2Label_ibfk_1;
+ALTER TABLE Issue2Label CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Issue2Label ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE Issue2Component DROP FOREIGN KEY Issue2Component_ibfk_1;
+ALTER TABLE Issue2Component CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Issue2Component ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE Issue2Cc DROP FOREIGN KEY Issue2Cc_ibfk_1;
+ALTER TABLE Issue2Cc CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Issue2Cc ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE Issue2Notify DROP FOREIGN KEY Issue2Notify_ibfk_1;
+ALTER TABLE Issue2Notify CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Issue2Notify ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE IssueStar DROP FOREIGN KEY IssueStar_ibfk_1;
+ALTER TABLE IssueStar CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE IssueStar ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE IssueRelation DROP FOREIGN KEY IssueRelation_ibfk_1;
+ALTER TABLE IssueRelation CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE IssueRelation ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE IssueRelation CHANGE dst_iid dst_issue_id INT NOT NULL;
+
+ALTER TABLE Issue2FieldValue DROP FOREIGN KEY Issue2FieldValue_ibfk_1;
+ALTER TABLE Issue2FieldValue CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Issue2FieldValue ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+-- On master only
+ALTER TABLE Comment DROP FOREIGN KEY Comment_ibfk_2;
+ALTER TABLE Comment CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Comment ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE Attachment DROP FOREIGN KEY Attachment_ibfk_1;
+ALTER TABLE Attachment CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE Attachment ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+ALTER TABLE IssueUpdate DROP FOREIGN KEY IssueUpdate_ibfk_1;
+ALTER TABLE IssueUpdate CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE IssueUpdate ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+-- I was missing a foreign key constraint here. Adding now.
+ALTER TABLE IssueFormerLocations CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE IssueFormerLocations ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+-- I was missing a foreign key constraint here. Adding now.
+ALTER TABLE ReindexQueue CHANGE iid issue_id INT NOT NULL;
+ALTER TABLE ReindexQueue ADD FOREIGN KEY (issue_id) REFERENCES Issue(id);
+
+
+================================================================
+2013-08-30: added per-project email sending flag
+
+-- On master and all shards
+ALTER TABLE Project ADD COLUMN deliver_outbound_email BOOLEAN DEFAULT FALSE;
+
+
+================================================================
+2013-10-30: renamed prompts to templates
+
+ALTER TABLE ProjectIssueConfig
+CHANGE default_prompt_for_developers default_template_for_developers INT NOT NULL;
+
+ALTER TABLE ProjectIssueConfig
+CHANGE default_prompt_for_users default_template_for_users INT NOT NULL;
+
+ALTER TABLE Template
+CHANGE prompt_name name VARCHAR(255) NOT NULL,
+CHANGE prompt_text content TEXT,
+CHANGE prompt_summary summary TEXT,
+CHANGE prompt_summary_must_be_edited summary_must_be_edited BOOLEAN,
+CHANGE prompt_owner_id owner_id INT,
+CHANGE prompt_status status VARCHAR(255),
+CHANGE prompt_members_only members_only BOOLEAN;
+
+
+================================================================
+2013-11-18: add LocalIDCounter to master DB only, and fill in values.
+
+CREATE TABLE LocalIDCounter (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ used_local_id INT NOT NULL,
+
+ PRIMARY KEY (project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+-- Note: this ignores former issue locations, so it can only be run
+-- now, before the "move issue" feature is offered.
+REPLACE INTO LocalIDCounter
+SELECT project_id, MAX(local_id)
+FROM Issue
+GROUP BY project_id;
+
+================================================================
+2015-06-12: add issue_id to Invalidate's enum for kind.
+
+ALTER TABLE Invalidate CHANGE kind kind ENUM('user', 'project', 'issue', 'issue_id');
+
+================================================================
+2015-07-24: Rename blobkey to gcs_object_id because we are using
+Google Cloud storage now.
+
+ALTER TABLE Attachment CHANGE blobkey gcs_object_id VARCHAR(1024) NOT NULL;
+
+===============================================================
+2015-08-14: Use MurmurHash3 to deterministically generate user ids.
+
+-- First, drop foreign key constraints, then alter the keys, then
+-- add back the foreign key constraints.
+
+ALTER TABLE User2Project DROP FOREIGN KEY user2project_ibfk_2;
+ALTER TABLE ExtraPerm DROP FOREIGN KEY extraperm_ibfk_2;
+ALTER TABLE MemberNotes DROP FOREIGN KEY membernotes_ibfk_2;
+ALTER TABLE UserStar DROP FOREIGN KEY userstar_ibfk_1;
+ALTER TABLE UserStar DROP FOREIGN KEY userstar_ibfk_2;
+ALTER TABLE ProjectStar DROP FOREIGN KEY projectstar_ibfk_1;
+ALTER TABLE UserGroup DROP FOREIGN KEY usergroup_ibfk_1;
+ALTER TABLE UserGroup DROP FOREIGN KEY usergroup_ibfk_2;
+ALTER TABLE UserGroupSettings DROP FOREIGN KEY usergroupsettings_ibfk_1;
+ALTER TABLE QuickEditHistory DROP FOREIGN KEY quickedithistory_ibfk_2;
+ALTER TABLE QuickEditMostRecent DROP FOREIGN KEY quickeditmostrecent_ibfk_2;
+ALTER TABLE Issue DROP FOREIGN KEY issue_ibfk_2;
+ALTER TABLE Issue DROP FOREIGN KEY issue_ibfk_3;
+ALTER TABLE Issue DROP FOREIGN KEY issue_ibfk_4;
+ALTER TABLE Issue2Cc DROP FOREIGN KEY issue2cc_ibfk_2;
+ALTER TABLE IssueStar DROP FOREIGN KEY issuestar_ibfk_1; -- ?
+ALTER TABLE Issue2FieldValue DROP FOREIGN KEY issue2fieldvalue_ibfk_2;
+ALTER TABLE Comment DROP FOREIGN KEY comment_ibfk_3;
+ALTER TABLE Comment DROP FOREIGN KEY comment_ibfk_4;
+ALTER TABLE FieldDef2Admin DROP FOREIGN KEY fielddef2admin_ibfk_2;
+ALTER TABLE Template2Admin DROP FOREIGN KEY template2admin_ibfk_2;
+ALTER TABLE Template2FieldValue DROP FOREIGN KEY template2fieldvalue_ibfk_2;
+ALTER TABLE Component2Admin DROP FOREIGN KEY component2admin_ibfk_2;
+ALTER TABLE Component2Cc DROP FOREIGN KEY component2cc_ibfk_2;
+ALTER TABLE User2SavedQuery DROP FOREIGN KEY user2savedquery_ibfk_1;
+
+
+ALTER TABLE User MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE ActionLimit MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE DismissedCues MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE User2Project MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE ExtraPerm MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE MemberNotes MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE UserStar MODIFY starred_user_id INT UNSIGNED NOT NULL,
+ MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE ProjectStar MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE UserGroup MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE UserGroup MODIFY group_id INT UNSIGNED NOT NULL;
+ALTER TABLE UserGroupSettings MODIFY group_id INT UNSIGNED NOT NULL;
+ALTER TABLE QuickEditHistory MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE QuickEditMostRecent MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE Issue MODIFY reporter_id INT UNSIGNED NOT NULL,
+ MODIFY owner_id INT UNSIGNED,
+ MODIFY derived_owner_id INT UNSIGNED;
+ALTER TABLE Issue2Cc MODIFY cc_id INT UNSIGNED NOT NULL;
+ALTER TABLE IssueStar MODIFY user_id INT UNSIGNED NOT NULL;
+ALTER TABLE Issue2FieldValue MODIFY user_id INT UNSIGNED;
+ALTER TABLE Comment MODIFY commenter_id INT UNSIGNED NOT NULL;
+ALTER TABLE Comment MODIFY deleted_by INT UNSIGNED;
+ALTER TABLE IssueUpdate MODIFY added_user_id INT UNSIGNED,
+ MODIFY removed_user_id INT UNSIGNED;
+ALTER TABLE Template MODIFY owner_id INT UNSIGNED;
+ALTER TABLE FieldDef2Admin MODIFY admin_id INT UNSIGNED NOT NULL;
+ALTER TABLE Template2Admin MODIFY admin_id INT UNSIGNED NOT NULL;
+ALTER TABLE Template2FieldValue MODIFY user_id INT UNSIGNED;
+ALTER TABLE Component2Admin MODIFY admin_id INT UNSIGNED NOT NULL;
+ALTER TABLE Component2Cc MODIFY cc_id INT UNSIGNED NOT NULL;
+ALTER TABLE User2SavedQuery MODIFY user_id INT UNSIGNED NOT NULL;
+
+ALTER TABLE User2Project ADD CONSTRAINT user2project_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE ExtraPerm ADD CONSTRAINT extraperm_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE MemberNotes ADD CONSTRAINT membernotes_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE UserStar ADD CONSTRAINT userstar_ibfk_1 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE UserStar ADD CONSTRAINT userstar_ibfk_2 FOREIGN KEY (starred_user_id) REFERENCES User(user_id);
+ALTER TABLE ProjectStar ADD CONSTRAINT projectstar_ibfk_1 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE UserGroup ADD CONSTRAINT usergroup_ibfk_1 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE UserGroup ADD CONSTRAINT usergroup_ibfk_2 FOREIGN KEY (group_id) REFERENCES User(user_id);
+ALTER TABLE UserGroupSettings ADD CONSTRAINT usergroupsettings_ibfk_1 FOREIGN KEY (group_id) REFERENCES User(user_id);
+ALTER TABLE QuickEditHistory ADD CONSTRAINT quickedithistory_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE QuickEditMostRecent ADD CONSTRAINT quickeditmostrecent_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE Issue ADD CONSTRAINT issue_ibfk_2 FOREIGN KEY (reporter_id) REFERENCES User(user_id);
+ALTER TABLE Issue ADD CONSTRAINT issue_ibfk_3 FOREIGN KEY (owner_id) REFERENCES User(user_id);
+ALTER TABLE Issue ADD CONSTRAINT issue_ibfk_4 FOREIGN KEY (derived_owner_id) REFERENCES User(user_id);
+ALTER TABLE Issue2Cc ADD CONSTRAINT issue2cc_ibfk_2 FOREIGN KEY (cc_id) REFERENCES User(user_id);
+ALTER TABLE IssueStar ADD CONSTRAINT issuestar_ibfk_1 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE Issue2FieldValue ADD CONSTRAINT issue2fieldvalue_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE Comment ADD CONSTRAINT comment_ibfk_3 FOREIGN KEY (commenter_id) REFERENCES User(user_id);
+ALTER TABLE Comment ADD CONSTRAINT comment_ibfk_4 FOREIGN KEY (deleted_by) REFERENCES User(user_id);
+ALTER TABLE FieldDef2Admin ADD CONSTRAINT fielddef2admin_ibfk_2 FOREIGN KEY (admin_id) REFERENCES User(user_id);
+ALTER TABLE Template2Admin ADD CONSTRAINT template2admin_ibfk_2 FOREIGN KEY (admin_id) REFERENCES User(user_id);
+ALTER TABLE Template2FieldValue ADD CONSTRAINT template2fieldvalue_ibfk_2 FOREIGN KEY (user_id) REFERENCES User(user_id);
+ALTER TABLE Component2Admin ADD CONSTRAINT component2admin_ibfk_2 FOREIGN KEY (admin_id) REFERENCES User(user_id);
+ALTER TABLE Component2Cc ADD CONSTRAINT component2cc_ibfk_2 FOREIGN KEY (cc_id) REFERENCES User(user_id);
+ALTER TABLE User2SavedQuery ADD CONSTRAINT user2savedquery_ibfk_1 FOREIGN KEY (user_id) REFERENCES User(user_id);
+
+================================================================
+2015-08-20: Add obscure_email column to User.
+
+ALTER TABLE User ADD obscure_email BOOLEAN DEFAULT TRUE;
+
+================================================================
+2015-09-14: Add role column to UserGroup.
+
+ALTER TABLE UserGroup ADD COLUMN role ENUM ('owner', 'member') NOT NULL DEFAULT 'member';
+
+================================================================
+2015-09-14: Remove via_id column from UserGroup.
+
+ALTER TABLE UserGroup DROP COLUMN via_id;
+
+================================================================
+2015-09-14: Add foreign key constraints to Issue2Foo tables
+
+ALTER TABLE Issue ADD CONSTRAINT issue_ibfk_5 FOREIGN KEY (status_id) REFERENCES StatusDef(id);
+ALTER TABLE Issue2Component ADD CONSTRAINT issue2component_ibfk_2 FOREIGN KEY (component_id) REFERENCES ComponentDef(id);
+ALTER TABLE Issue2Label ADD CONSTRAINT issue2label_ibfk_2 FOREIGN KEY (label_id) REFERENCES LabelDef(id);
+ALTER TABLE Issue2FieldValue ADD CONSTRAINT issue2fieldvalue_ibfk_3 FOREIGN KEY (field_id) REFERENCES FieldDef(id);
+
+================================================================
+2015-09-16: Use Binary collation on Varchar unique keys
+
+ALTER TABLE StatusDef MODIFY status VARCHAR(80) BINARY NOT NULL;
+ALTER TABLE ComponentDef MODIFY path VARCHAR(255) BINARY NOT NULL;
+ALTER TABLE LabelDef MODIFY label VARCHAR(80) BINARY NOT NULL;
+ALTER TABLE FieldDef MODIFY field_name VARCHAR(80) BINARY NOT NULL;
+ALTER TABLE Template MODIFY name VARCHAR(255) BINARY NOT NULL;
+
+================================================================
+2015-09-16: Have components use the same ID schema as Labels/Statuses
+
+ALTER TABLE ComponentDef MODIFY id INT NOT NULL AUTO_INCREMENT;
+ALTER TABLE Component2Admin MODIFY component_id INT NOT NULL;
+ALTER TABLE Component2Cc MODIFY component_id INT NOT NULL;
+ALTER TABLE Issue2Component MODIFY component_id INT NOT NULL;
+
+================================================================
+2015-09-17: Introduce DanglingIssueRelation table
+
+ALTER TABLE IssueRelation ADD CONSTRAINT issuerelation_ibfk_2 FOREIGN KEY (dst_issue_id) REFERENCES Issue(id);
+
+CREATE TABLE DanglingIssueRelation (
+ issue_id INT NOT NULL,
+ dst_issue_project VARCHAR(80),
+ dst_issue_local_id INT,
+
+ -- This table uses 'blocking' so that it can guarantee the src issue
+ -- always exists, while the dst issue is always the dangling one.
+ kind ENUM ('blockedon', 'blocking', 'mergedinto') NOT NULL,
+
+ PRIMARY KEY (issue_id, dst_issue_project, dst_issue_local_id),
+ INDEX (issue_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+================================================================
+2015-09-18: Convert table char encodings to utf8.
+
+ALTER DATABASE monorail CHARACTER SET = utf8 COLLATE = utf8_unicode_ci;
+ALTER TABLE Comment CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE ComponentDef CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE FieldDef CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE IssueSummary CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE LabelDef CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE MemberNotes CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE Project CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+ALTER TABLE StatusDef CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci;
+
+================================================================
+2015-09-22: Make IssueRelation primary key more specific
+
+ALTER TABLE IssueRelation DROP PRIMARY KEY, ADD PRIMARY KEY (issue_id, dst_issue_id, kind);
+ALTER TABLE DanglingIssueRelation DROP PRIMARY KEY, ADD PRIMARY KEY (issue_id, dst_issue_project, dst_issue_local_id, kind);
+
+================================================================
+2015-09-29: Make cache_key unsigned so unsigned user ids can be invalidated.
+
+ALTER TABLE Invalidate MODIFY cache_key INT UNSIGNED NOT NULL;
+
+================================================================
+2015-09-29: Add external_group_type and last_sync_time to UserGroupSettings
+
+ALTER TABLE UserGroupSettings ADD COLUMN external_group_type ENUM ('chrome_infra_auth', 'mdb');
+ALTER TABLE UserGroupSettings ADD COLUMN last_sync_time INT;
+
+================================================================
+2015-10-27: Eliminate Project.deliver_outbound_email because we have separate staging and prod instances.
+
+ALTER TABLE Project DROP COLUMN deliver_outbound_email;
+
+================================================================
+2015-10-27: Add SpamReport and is_spam fields to Issue and Comment
+
+ALTER TABLE Issue ADD COLUMN is_spam BOOL DEFAULT FALSE;
+ALTER TABLE Issue ADD INDEX (is_spam, project_id);
+
+ALTER TABLE Comment ADD COLUMN is_spam BOOL DEFAULT FALSE;
+ALTER TABLE Comment ADD INDEX (is_spam, project_id, created);
+
+-- Created whenever a user reports an issue or comment as spam.
+-- Note this is distinct from a SpamVerdict, which is issued by
+-- the system rather than a human user.
+CREATE TABLE SpamReport (
+ -- when this report was generated
+ created TIMESTAMP NOT NULL,
+ -- when the reported content was generated
+ content_created TIMESTAMP NOT NULL,
+ -- id of the reporting user
+ user_id INT UNSIGNED NOT NULL,
+ -- id of the reported user
+ reported_user_id INT UNSIGNED NOT NULL,
+ -- either this or issue_id must be set
+ comment_id INT,
+ -- either this or comment_id must be set
+ issue_id INT,
+
+ INDEX (issue_id),
+ INDEX (comment_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (comment_id) REFERENCES Comment(id)
+);
+
+================================================================
+2015-11-03: Add new external group type chromium_committers
+
+ALTER TABLE UserGroupSettings MODIFY COLUMN external_group_type ENUM ('chrome_infra_auth', 'mdb', 'chromium_committers');
+
+================================================================
+2015-11-4: Add SpamVerdict table.
+
+-- Any time a human or the system sets is_spam to true,
+-- or changes it from true to false, we want to have a
+-- record of who did it and why.
+CREATE TABLE SpamVerdict (
+ -- when this verdict was generated
+ created TIMESTAMP NOT NULL,
+
+ -- id of the reporting user, may be null if it was
+ -- an automatic classification.
+ user_id INT UNSIGNED,
+
+ -- either this or issue_id must be set
+ comment_id INT,
+
+ -- either this or comment_id must be set
+ issue_id INT,
+
+ INDEX (issue_id),
+ INDEX (comment_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (comment_id) REFERENCES Comment(id),
+
+ -- If the classifier issued the verdict, this should
+ -- be set.
+ classifier_confidence FLOAT,
+
+ -- This should reflect the new is_spam value that was applied
+ -- by this verdict, not the value it had prior.
+ is_spam BOOLEAN NOT NULL,
+
+ -- manual: a project owner marked it as spam
+ -- threshold: number of SpamReports from non-members was exceeded.
+ -- classifier: the automatic classifier reports it as spam.
+ reason ENUM ("manual", "threshold", "classifier") NOT NULL
+);
+
+ALTER TABLE LocalIDCounter ADD used_spam_id int(11) NOT NULL;
+
+================================================================
+2015-11-13: Add Template2Component table.
+
+CREATE TABLE Template2Component (
+ template_id INT NOT NULL,
+ component_id INT NOT NULL,
+
+ PRIMARY KEY (template_id, component_id),
+
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id)
+) ENGINE=INNODB;
+
+================================================================
+2015-11-13: Add new external group type baggins
+
+ALTER TABLE UserGroupSettings MODIFY COLUMN external_group_type ENUM ('chrome_infra_auth', 'mdb', 'chromium_committers', 'baggins');
+
+================================================================
+2015-11-18: Add new action kind api_request in ActionLimit
+
+ALTER TABLE ActionLimit MODIFY COLUMN action_kind ENUM ('project_creation', 'issue_comment', 'issue_attachment', 'issue_bulk_edit', 'api_request');
+
+================================================================
+2015-11-24: Add shard column to Issue, add indexes, and UPDATE existing rows.
+
+ALTER TABLE Issue ADD COLUMN shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL;
+
+UPDATE Issue set shard = id % 10;
+
+ALTER TABLE Issue ADD INDEX (shard, status_id);
+ALTER TABLE Issue ADD INDEX (shard, project_id);
+
+================================================================
+2015-11-25: Remove external group type chromium_committers
+
+ALTER TABLE UserGroupSettings MODIFY COLUMN external_group_type ENUM ('chrome_infra_auth', 'mdb', 'baggins');
+
+================================================================
+2015-12-08: Modify handling of hidden well-known labels/statuses
+
+ALTER TABLE StatusDef ADD COLUMN hidden BOOLEAN DEFAULT FALSE;
+ALTER TABLE LabelDef ADD COLUMN hidden BOOLEAN DEFAULT FALSE;
+
+UPDATE StatusDef SET status=TRIM(LEADING '#' FROM status), hidden=TRUE WHERE status COLLATE UTF8_GENERAL_CI LIKE '#%';
+UPDATE LabelDef SET label=TRIM(LEADING '#' FROM label), hidden=TRUE WHERE label COLLATE UTF8_GENERAL_CI LIKE '#%';
+
+================================================================
+2015-12-11: Speed up moderation queue queries.
+
+ALTER TABLE SpamVerdict ADD INDEX(classifier_confidence);
+
+================================================================
+2015-12-14: Give components 'deprecated' col to match labels/statuses
+
+ALTER TABLE StatusDef CHANGE hidden deprecated BOOLEAN DEFAULT FALSE;
+ALTER TABLE LabelDef CHANGE hidden deprecated BOOLEAN DEFAULT FALSE;
+ALTER TABLE ComponentDef ADD COLUMN deprecated BOOLEAN DEFAULT FALSE;
+
+================================================================
+2015-12-14: Add table Group2Project
+
+CREATE TABLE Group2Project (
+ group_id INT UNSIGNED NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (group_id, project_id),
+
+ FOREIGN KEY (group_id) REFERENCES UserGroupSettings(group_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+================================================================
+2015-12-15: Increase maximum attachment quota bytes
+
+ALTER TABLE Project MODIFY attachment_bytes_used BIGINT DEFAULT 0;
+ALTER TABLE Project MODIFY attachment_quota BIGINT DEFAULT 0;
+
+================================================================
+2015-12-15: Simplify moderation queue queries.
+
+ALTER TABLE SpamVerdict ADD COLUMN overruled BOOL NOT NULL;
+ALTER TABLE SpamVerdict ADD COLUMN project_id INT NOT NULL;
+UPDATE SpamVerdict s JOIN Issue i ON i.id=s.issue_id SET s.project_id=i.project_id;
+
+================================================================
+2015-12-17: Add cols home_page and logo to table Project
+
+ALTER TABLE Project ADD COLUMN home_page VARCHAR(250);
+ALTER TABLE Project ADD COLUMN logo_gcs_id VARCHAR(250);
+ALTER TABLE Project ADD COLUMN logo_file_name VARCHAR(250);
+
+================================================================
+2015-12-28: Add component_required col to table Template;
+
+ALTER TABLE Template ADD component_required BOOLEAN DEFAULT FALSE;
+
+================================================================
+2016-01-05: Add issue_shard column to Issue2Label, Issue2Component,
+add indexes, and UPDATE existing rows.
+
+ALTER TABLE Issue2Component ADD COLUMN issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL;
+UPDATE Issue2Component set issue_shard = issue_id % 10;
+ALTER TABLE Issue2Component ADD INDEX (component_id, issue_shard);
+
+ALTER TABLE Issue2Label ADD COLUMN issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL;
+UPDATE Issue2Label set issue_shard = issue_id % 10;
+ALTER TABLE Issue2Label ADD INDEX (label_id, issue_shard);
+
+================================================================
+2016-01-06: Add period_soft_limit and period_hard_limit columns to ActionLimit
+
+ALTER TABLE ActionLimit ADD COLUMN period_soft_limit INT;
+ALTER TABLE ActionLimit ADD COLUMN period_hard_limit INT;
+
+================================================================
+2016-01-08: Add issue_shard column to Issue2FieldValue, Issue2Cc,
+add indexes, and UPDATE existing rows.
+
+ALTER TABLE Issue2FieldValue ADD COLUMN issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL;
+UPDATE Issue2FieldValue SET issue_shard = issue_id % 10;
+ALTER TABLE Issue2FieldValue ADD INDEX (field_id, issue_shard, int_value);
+ALTER TABLE Issue2FieldValue ADD INDEX (field_id, issue_shard, str_value(255));
+ALTER TABLE Issue2FieldValue ADD INDEX (field_id, issue_shard, user_id);
+
+ALTER TABLE Issue2Cc ADD COLUMN issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL;
+UPDATE Issue2Cc SET issue_shard = issue_id % 10;
+ALTER TABLE Issue2Cc ADD INDEX (cc_id, issue_shard);
+
+================================================================
+2015-12-17: Add documentation forwarding for /wiki urls
+
+ALTER TABLE Project ADD COLUMN docs_url VARCHAR(250);
+
+================================================================
+2015-12-17: Ensure SavedQueries never have null ids
+
+ALTER TABLE SavedQuery MODIFY id INT NOT NULL AUTO_INCREMENT;
+
+================================================================
+2016-02-04: Add created, creator_id, modified, modifier_id for components
+
+ALTER TABLE ComponentDef ADD COLUMN created INT;
+ALTER TABLE ComponentDef ADD COLUMN creator_id INT UNSIGNED;
+ALTER TABLE ComponentDef ADD FOREIGN KEY (creator_id) REFERENCES User(user_id);
+ALTER TABLE ComponentDef ADD COLUMN modified INT;
+ALTER TABLE ComponentDef ADD COLUMN modifier_id INT UNSIGNED;
+ALTER TABLE ComponentDef ADD FOREIGN KEY (modifier_id) REFERENCES User(user_id);
+
+================================================================
+2016-02-19: Opt all privileged accounts into displaying full email.
+
+UPDATE User SET obscure_email = FALSE WHERE email LIKE "%@chromium.org";
+UPDATE User SET obscure_email = FALSE WHERE email LIKE "%@webrtc.org";
+UPDATE User SET obscure_email = FALSE WHERE email LIKE "%@google.com";
+
+================================================================
+2016-04-11: Increase email length limit to 255
+
+ALTER TABLE User MODIFY email VARCHAR(255);
\ No newline at end of file
diff --git a/appengine/monorail/sql/framework.sql b/appengine/monorail/sql/framework.sql
new file mode 100644
index 0000000..cfe1c31
--- /dev/null
+++ b/appengine/monorail/sql/framework.sql
@@ -0,0 +1,36 @@
+-- Copyright 2016 The Chromium Authors. All Rights Reserved.
+--
+-- Use of this source code is governed by a BSD-style
+-- license that can be found in the LICENSE file or at
+-- https://developers.google.com/open-source/licenses/bsd
+
+
+-- Create app framework tables in the monorail DB.
+
+ALTER DATABASE monorail CHARACTER SET = utf8 COLLATE = utf8_unicode_ci;
+
+-- This table allows frontends to selectively invalidate their RAM caches.
+-- On each incoming request, the frontend queries this table to get all rows
+-- that are newer than the last row that it saw. Then it processes each such
+-- row by dropping entries from its RAM caches, and remembers the new highest
+-- timestep that it has seen.
+CREATE TABLE Invalidate (
+ -- The time at which the invalidation took effect, by that time new data
+ -- should be available to retrieve to fill local caches as needed.
+ -- This is not a clock value, it is just an integer that counts up by one
+ -- on each change.
+ timestep BIGINT NOT NULL AUTO_INCREMENT,
+
+ -- Which kind of entity was invalidated? Each kind is broad, e.g.,
+ -- invalidating a project also invalidates all issue tracker config within
+ -- that project. But, they do not nest. E.g., invalidating a project does
+ -- not invalidate all issues in the project.
+ kind enum('user', 'project', 'issue', 'issue_id') NOT NULL,
+
+ -- Which cache entry should be invalidated? Special value 0 indicates
+ -- that all entries should be invalidated.
+ cache_key INT UNSIGNED,
+
+ INDEX (timestep)
+) ENGINE=INNODB;
+
diff --git a/appengine/monorail/sql/project.sql b/appengine/monorail/sql/project.sql
new file mode 100644
index 0000000..f0bf38f
--- /dev/null
+++ b/appengine/monorail/sql/project.sql
@@ -0,0 +1,211 @@
+-- Copyright 2016 The Chromium Authors. All Rights Reserved.
+--
+-- Use of this source code is governed by a BSD-style
+-- license that can be found in the LICENSE file or at
+-- https://developers.google.com/open-source/licenses/bsd
+
+
+-- Create project-related tables in the monorail DB.
+
+
+CREATE TABLE User (
+ user_id INT UNSIGNED NOT NULL,
+ email VARCHAR(255) NOT NULL, -- lowercase
+
+ is_site_admin BOOLEAN DEFAULT FALSE,
+ obscure_email BOOLEAN DEFAULT TRUE,
+ notify_issue_change BOOLEAN DEFAULT TRUE,
+ notify_starred_issue_change BOOLEAN DEFAULT TRUE,
+ banned VARCHAR(80),
+ after_issue_update ENUM ('up_to_list', 'stay_same_issue', 'next_in_list'),
+ keep_people_perms_open BOOLEAN DEFAULT FALSE,
+ preview_on_hover BOOLEAN DEFAULT TRUE,
+ ignore_action_limits BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (user_id),
+ UNIQUE KEY (email)
+) ENGINE=INNODB;
+
+
+
+CREATE TABLE Project (
+ project_id SMALLINT UNSIGNED NOT NULL AUTO_INCREMENT,
+ project_name VARCHAR(80) NOT NULL,
+
+ summary TEXT,
+ description TEXT,
+
+ state ENUM ('live', 'archived', 'deletable') NOT NULL,
+ access ENUM ('anyone', 'members_only') NOT NULL,
+ read_only_reason VARCHAR(80), -- normally empty for read-write.
+ state_reason VARCHAR(80), -- optional reason for doomed project.
+ delete_time INT, -- if set, automatically transition to state deletable.
+
+ issue_notify_address VARCHAR(80),
+ attachment_bytes_used BIGINT DEFAULT 0,
+ attachment_quota BIGINT DEFAULT 0, -- 50 MB default set in python code.
+
+ cached_content_timestamp INT,
+ recent_activity_timestamp INT,
+ moved_to VARCHAR(250),
+ process_inbound_email BOOLEAN DEFAULT FALSE,
+
+ only_owners_remove_restrictions BOOLEAN DEFAULT FALSE,
+ only_owners_see_contributors BOOLEAN DEFAULT FALSE,
+
+ revision_url_format VARCHAR(250),
+
+ home_page VARCHAR(250),
+ docs_url VARCHAR(250),
+ logo_gcs_id VARCHAR(250),
+ logo_file_name VARCHAR(250),
+
+ PRIMARY KEY (project_id),
+ UNIQUE KEY (project_name)
+) ENGINE=INNODB;
+
+
+CREATE TABLE ActionLimit (
+ user_id INT UNSIGNED NOT NULL,
+ action_kind ENUM (
+ 'project_creation', 'issue_comment', 'issue_attachment',
+ 'issue_bulk_edit', 'api_request'),
+ recent_count INT,
+ reset_timestamp INT,
+ lifetime_count INT,
+ lifetime_limit INT,
+ period_soft_limit INT,
+ period_hard_limit INT,
+
+ PRIMARY KEY (user_id, action_kind)
+) ENGINE=INNODB;
+
+
+CREATE TABLE DismissedCues (
+ user_id INT UNSIGNED NOT NULL,
+ cue VARCHAR(40), -- names of the cue cards that the user has dismissed.
+
+ INDEX (user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE User2Project (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+ role_name ENUM ('owner', 'committer', 'contributor'),
+
+ PRIMARY KEY (project_id, user_id),
+ INDEX (user_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE ExtraPerm (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+ perm VARCHAR(80),
+
+ PRIMARY KEY (project_id, user_id, perm),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE MemberNotes (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+ notes TEXT,
+
+ PRIMARY KEY (project_id, user_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE UserStar (
+ starred_user_id INT UNSIGNED NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (starred_user_id, user_id),
+ INDEX (user_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id),
+ FOREIGN KEY (starred_user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE ProjectStar (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (project_id, user_id),
+ INDEX (user_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE UserGroup (
+ user_id INT UNSIGNED NOT NULL,
+ group_id INT UNSIGNED NOT NULL,
+ role ENUM ('owner', 'member') NOT NULL DEFAULT 'member',
+
+ PRIMARY KEY (user_id, group_id),
+ INDEX (group_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id),
+ FOREIGN KEY (group_id) REFERENCES User(user_id)
+
+) ENGINE=INNODB;
+
+
+CREATE TABLE UserGroupSettings (
+ group_id INT UNSIGNED NOT NULL,
+
+ who_can_view_members ENUM ('owners', 'members', 'anyone'),
+
+ external_group_type ENUM ('chrome_infra_auth', 'mdb', 'baggins'),
+ -- timestamps in seconds since the epoch.
+ last_sync_time INT,
+
+ PRIMARY KEY (group_id),
+ FOREIGN KEY (group_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Group2Project (
+ group_id INT UNSIGNED NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (group_id, project_id),
+
+ FOREIGN KEY (group_id) REFERENCES UserGroupSettings(group_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+-- These are quick-edit commands that the user can easily repeat.
+CREATE TABLE QuickEditHistory (
+ user_id INT UNSIGNED NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ slot_num SMALLINT UNSIGNED NOT NULL,
+
+ command VARCHAR(255) NOT NULL,
+ comment TEXT NOT NULL,
+
+ PRIMARY KEY (user_id, project_id, slot_num),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+-- This allows us to offer the most recent command to the user again
+-- as the default quick-edit command for next time.
+CREATE TABLE QuickEditMostRecent (
+ user_id INT UNSIGNED NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ slot_num SMALLINT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (user_id, project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
diff --git a/appengine/monorail/sql/queries.sql b/appengine/monorail/sql/queries.sql
new file mode 100644
index 0000000..a84bc70
--- /dev/null
+++ b/appengine/monorail/sql/queries.sql
@@ -0,0 +1,207 @@
+-- Copyright 2016 The Chromium Authors. All Rights Reserved.
+--
+-- Use of this source code is governed by a BSD-style
+-- license that can be found in the LICENSE file or at
+-- https://developers.google.com/open-source/licenses/bsd
+
+
+-- Example queries for common operations.
+
+use monorail;
+
+-- --------------------------
+-- PROJECT-RELATED QUERIES
+
+-- Look up the id of the project mentioned in the URL, and get info to display
+-- in the page header.
+SELECT project_id, summary, state, access
+FROM Project
+WHERE project_name = 'projb';
+
+-- Get one project to display on the project home page.
+SELECT summary, description, state, access
+FROM Project
+WHERE project_id = 1002;
+
+-- Get the list of members in a project for the project people list page.
+SELECT email, role_name
+FROM User2Project NATURAL JOIN User
+WHERE project_id = 1002
+ORDER BY role_name, email;
+
+-- Get the list of all projects where a user has a role for the profile page.
+SELECT project_name, role_name
+FROM User2Project NATURAL JOIN Project
+WHERE user_id = 111 AND state = 'live'
+ORDER BY role_name, project_name;
+
+
+-- TODO: user groups
+
+
+-- --------------------------
+-- ISSUE-RELATED QUERIES
+
+-- Get all issues in a project, ordered by ID, no pagination.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002
+ORDER BY Issue.id;
+
+-- Get the second page of issues in a project, ordered by ID. Pagination size is 10.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002
+ORDER BY Issue.id
+LIMIT 10 OFFSET 10;
+
+-- Get all open issues in a project.
+SELECT Issue.*
+FROM Issue
+ LEFT JOIN StatusDef sd1 ON Issue.project_id = sd1.project_id AND LOWER(Issue.status) = LOWER(sd1.status)
+WHERE Issue.project_id = 1002
+ AND (sd1.means_open = TRUE OR sd1.means_open IS NULL); -- this matches oddball or NULL status values
+
+-- Search based on ID.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002 AND Issue.local_id > 8;
+
+
+-- Search based on status and owner_id.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002 AND LOWER(status) = 'new' AND owner_id = 222;
+
+-- Search based on date modified, opened, and closed.
+-- TODO: Gives an empty result with the current test data.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002 AND modified > '2011-01-01'
+AND opened > '2010-01-01' AND closed > '2010-02-01';
+
+
+-- Search for has:owner and has:status.
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002 AND status != '' AND owner_id IS NOT NULL;
+
+
+-- All issues in a project that have a label Priority-High
+SELECT Issue.*
+FROM Issue NATURAL JOIN Issue2Label
+WHERE project_id = 1002 AND label = 'Priority-High';
+
+-- All issues in a project that DO NOT have a label Priority-High
+SELECT Issue.*
+FROM Issue
+WHERE project_id = 1002
+ AND NOT EXISTS (
+ SELECT *
+ FROM Issue2Label cond1
+ WHERE cond1.project_id = Issue.project_id AND cond1.id = Issue.id
+ AND label = 'Priority-High');
+
+
+-- Search based on priority and milestone.
+SELECT Issue.*
+FROM Issue
+ JOIN Issue2Label cond1 ON Issue.project_id = cond1.project_id AND Issue.id = cond1.id
+ JOIN Issue2Label cond2 ON Issue.project_id = cond2.project_id AND Issue.id = cond2.id
+WHERE Issue.project_id = 1002
+ AND LOWER(cond1.label) = 'priority-medium'
+ AND LOWER(cond2.label) = 'milestone-1.1';
+
+
+-- Permissions checked
+-- TODO: add additional permissions
+
+
+-- Get all comments on an issue.
+-- The join on Issue is required: Comment has no local_id column, so the
+-- issue must be located via Comment.issue_id -> Issue.id.
+SELECT Comment.*
+FROM Comment JOIN Issue ON Comment.issue_id = Issue.id
+WHERE Comment.project_id = 1002 AND Issue.local_id = 3
+ORDER BY Comment.created;
+
+
+-- Get non-deleted comments on an issue.
+-- Joins Issue so local_id can be resolved (Comment itself has no local_id),
+-- and filters out comments that were soft-deleted (deleted_by set).
+SELECT Comment.*
+FROM Comment JOIN Issue ON Comment.issue_id = Issue.id
+WHERE Comment.project_id = 1002 AND Issue.local_id = 3 AND Comment.deleted_by IS NULL
+ORDER BY Comment.created;
+
+-- Cross-project search
+SELECT Issue.*
+FROM Issue
+ JOIN Issue2Label cond1 ON Issue.project_id = cond1.project_id AND Issue.id = cond1.id
+ JOIN Issue2Label cond2 ON Issue.project_id = cond2.project_id AND Issue.id = cond2.id
+WHERE LOWER(cond1.label) = 'priority-medium'
+ AND LOWER(cond2.label) = 'type-defect';
+
+-- All issues in a project, sorted by milestone. Milestone order is defined by the rank field of the well-known labels table.
+-- Issues with oddball milestones sort lexicographically after issues with well known milestones.
+-- Issues which do not have milestone sort last.
+-- Note that table sort_N holds the value needed for the Nth sort directive, and table rank_N holds the ranking
+-- number of that value, if any.
+SELECT Issue.*, sort1.label
+FROM Issue
+ LEFT JOIN (Issue2Label sort1 LEFT JOIN LabelDef rank1
+ ON sort1.project_id = rank1.project_id AND LOWER(sort1.label) = LOWER(rank1.label))
+ ON Issue.project_id = sort1.project_id AND Issue.id = sort1.id
+ AND sort1.label LIKE 'milestone-%'
+WHERE Issue.project_id = 1002
+ORDER BY ISNULL(rank1.rank), rank1.rank, ISNULL(sort1.label), LOWER(sort1.label), Issue.id;
+
+-- *Open* issues, sorted by milestone. Any status that is not known to be closed is considered open.
+SELECT Issue.project_id, Issue.local_id, Issue.summary, Issue.status, sort1.label
+FROM Issue
+ LEFT JOIN (Issue2Label sort1 LEFT JOIN LabelDef rank1
+ ON sort1.project_id = rank1.project_id AND LOWER(sort1.label) = LOWER(rank1.label))
+ ON Issue.project_id = sort1.project_id AND Issue.id = sort1.id
+ AND sort1.label LIKE 'milestone-%'
+ LEFT JOIN StatusDef sd1 ON Issue.project_id = sd1.project_id AND LOWER(Issue.status) = LOWER(sd1.status)
+WHERE Issue.project_id = 1002
+ AND (sd1.means_open = TRUE OR sd1.means_open IS NULL) -- this matches oddball or NULL status values
+ORDER BY ISNULL(rank1.rank), rank1.rank, ISNULL(sort1.label), LOWER(sort1.label),
+ Issue.id; -- tie breaker
+
+-- *Open* issues, sorted by status. Any status that is not known to be closed is considered open.
+SELECT Issue.*
+FROM Issue
+ LEFT JOIN StatusDef rank1 ON Issue.project_id = rank1.project_id AND LOWER(Issue.status) = LOWER(rank1.status)
+ LEFT JOIN StatusDef sr1 ON Issue.project_id = sr1.project_id AND LOWER(Issue.status) = LOWER(sr1.status)
+WHERE Issue.project_id = 1002
+ AND (sr1.means_open = TRUE or sr1.means_open IS NULL) -- this matches oddball or NULL status values
+ORDER BY ISNULL(rank1.rank), rank1.rank, ISNULL(Issue.status), LOWER(Issue.status),
+ Issue.id; -- tie breaker
+
+
+-- Realistic query: Open issues with component != printing, sorted by milestone then priority.
+SELECT Issue.local_id, Issue.summary, Issue.status, sort1.label, sort2.label
+FROM Issue
+ LEFT JOIN (Issue2Label sort1 LEFT JOIN LabelDef rank1
+ ON sort1.project_id = rank1.project_id AND LOWER(sort1.label) = LOWER(rank1.label))
+ ON Issue.project_id = sort1.project_id AND Issue.id = sort1.id
+ AND sort1.label LIKE 'mstone-%'
+ LEFT JOIN (Issue2Label sort2 LEFT JOIN LabelDef rank2
+ ON sort2.project_id = rank2.project_id AND LOWER(sort2.label) = LOWER(rank2.label))
+ ON Issue.project_id = sort2.project_id AND Issue.id = sort2.id
+ AND sort2.label LIKE 'pri-%'
+ LEFT JOIN StatusDef sr1 ON Issue.project_id = sr1.project_id AND LOWER(Issue.status) = LOWER(sr1.status)
+WHERE Issue.project_id = 1002
+ AND (sr1.means_open = TRUE or sr1.means_open IS NULL) -- this matches oddball or NULL status values
+ AND NOT EXISTS (
+ SELECT *
+ FROM Issue2Label cond1
+ WHERE Issue.project_id = cond1.project_id AND Issue.id = cond1.id
+ AND LOWER(cond1.label) = 'component-printing'
+ )
+ORDER BY ISNULL(rank1.rank), rank1.rank, ISNULL(sort1.label), LOWER(sort1.label),
+ ISNULL(rank2.rank), rank2.rank, ISNULL(sort2.label), LOWER(sort2.label),
+ Issue.id; -- tie breaker
+
+
+
+
diff --git a/appengine/monorail/sql/tracker.sql b/appengine/monorail/sql/tracker.sql
new file mode 100644
index 0000000..27c9e44
--- /dev/null
+++ b/appengine/monorail/sql/tracker.sql
@@ -0,0 +1,646 @@
+-- Copyright 2016 The Chromium Authors. All Rights Reserved.
+--
+-- Use of this source code is governed by a BSD-style
+-- license that can be found in the LICENSE file or at
+-- https://developers.google.com/open-source/licenses/bsd
+
+
+-- Create issue-related tables in monorail db.
+
+
+CREATE TABLE StatusDef (
+ id INT NOT NULL AUTO_INCREMENT,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ status VARCHAR(80) BINARY NOT NULL,
+ rank SMALLINT UNSIGNED,
+ means_open BOOLEAN,
+ docstring TEXT,
+ deprecated BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (id),
+ UNIQUE KEY (project_id, status),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE ComponentDef (
+ id INT NOT NULL AUTO_INCREMENT,
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ -- Note: parent components have paths that are prefixes of child components.
+ path VARCHAR(255) BINARY NOT NULL,
+ docstring TEXT,
+ deprecated BOOLEAN DEFAULT FALSE,
+ created INT,
+ creator_id INT UNSIGNED,
+ modified INT,
+ modifier_id INT UNSIGNED,
+
+ PRIMARY KEY (id),
+ UNIQUE KEY (project_id, path),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (creator_id) REFERENCES User(user_id),
+ FOREIGN KEY (modifier_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Component2Admin (
+ component_id INT NOT NULL,
+ admin_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (component_id, admin_id),
+
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Component2Cc (
+ component_id INT NOT NULL,
+ cc_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (component_id, cc_id),
+
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id),
+ FOREIGN KEY (cc_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE LabelDef (
+ id INT NOT NULL AUTO_INCREMENT,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ label VARCHAR(80) BINARY NOT NULL,
+ rank SMALLINT UNSIGNED,
+ docstring TEXT,
+ deprecated BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (id),
+ UNIQUE KEY (project_id, label),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE FieldDef (
+ id INT NOT NULL AUTO_INCREMENT,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ rank SMALLINT UNSIGNED,
+
+ field_name VARCHAR(80) BINARY NOT NULL,
+ -- TODO(jrobbins): more types
+ field_type ENUM ('enum_type', 'int_type', 'str_type', 'user_type') NOT NULL,
+ applicable_type VARCHAR(80), -- No value means: offered for all issue types
+ applicable_predicate TEXT, -- No value means: TRUE
+ is_required BOOLEAN, -- true means required if applicable
+ is_multivalued BOOLEAN,
+ -- TODO(jrobbins): access controls: restrict, grant
+ -- Validation for int_type fields
+ min_value INT,
+ max_value INT,
+ -- Validation for str_type fields
+ regex VARCHAR(80),
+ -- Validation for user_type fields
+ needs_member BOOLEAN, -- User value can only be set to users who are members
+ needs_perm VARCHAR(80), -- User value can only be set to users w/ that perm
+ grants_perm VARCHAR(80), -- User named in this field gains this perm in the issue
+ -- notification options for user_type fields
+ notify_on ENUM ('never', 'any_comment') DEFAULT 'never' NOT NULL,
+
+ -- TODO(jrobbins): default value
+ -- TODO(jrobbins): deprecated boolean?
+ docstring TEXT,
+ is_deleted BOOLEAN, -- If true, reap this field def after all values reaped.
+
+ PRIMARY KEY (id),
+ UNIQUE KEY (project_id, field_name),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE FieldDef2Admin (
+ field_id INT NOT NULL,
+ admin_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (field_id, admin_id),
+ FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue (
+ id INT NOT NULL AUTO_INCREMENT,
+ shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ local_id INT NOT NULL,
+
+ reporter_id INT UNSIGNED NOT NULL,
+ owner_id INT UNSIGNED,
+ status_id INT,
+
+ -- These are each timestamps in seconds since the epoch.
+ modified INT NOT NULL,
+ opened INT,
+ closed INT,
+
+ derived_owner_id INT UNSIGNED,
+ derived_status_id INT,
+
+ deleted BOOLEAN,
+
+ -- These are denormalized fields that should be updated when child
+ -- records are added or removed for stars or attachments. If they
+ -- get out of sync, they can be updated via an UPDATE ... SELECT statement.
+ star_count INT DEFAULT 0,
+ attachment_count INT DEFAULT 0,
+
+ is_spam BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY(id),
+ UNIQUE KEY (project_id, local_id),
+ INDEX (shard, status_id),
+ INDEX (shard, project_id),
+
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (reporter_id) REFERENCES User(user_id),
+ FOREIGN KEY (owner_id) REFERENCES User(user_id),
+ FOREIGN KEY (status_id) REFERENCES StatusDef(id),
+ FOREIGN KEY (derived_owner_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+-- This is a parallel table to the Issue table because we don't want
+-- any very wide columns in the Issue table that would slow it down.
+CREATE TABLE IssueSummary (
+ issue_id INT NOT NULL,
+ summary TEXT,
+
+ PRIMARY KEY (issue_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue2Component (
+ issue_id INT NOT NULL,
+ issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL,
+ component_id INT NOT NULL,
+ derived BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (issue_id, component_id, derived),
+ INDEX (component_id, issue_shard),
+
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue2Label (
+ issue_id INT NOT NULL,
+ issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL,
+ label_id INT NOT NULL,
+ derived BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (issue_id, label_id, derived),
+ INDEX (label_id, issue_shard),
+
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (label_id) REFERENCES LabelDef(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue2FieldValue (
+ issue_id INT NOT NULL,
+ issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL,
+ field_id INT NOT NULL,
+
+ int_value INT,
+ str_value VARCHAR(1024),
+ user_id INT UNSIGNED,
+
+ derived BOOLEAN DEFAULT FALSE,
+
+ INDEX (issue_id, field_id),
+ INDEX (field_id, issue_shard, int_value),
+ INDEX (field_id, issue_shard, str_value(255)),
+ INDEX (field_id, issue_shard, user_id),
+
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue2Cc (
+ issue_id INT NOT NULL,
+ issue_shard SMALLINT UNSIGNED DEFAULT 0 NOT NULL,
+ cc_id INT UNSIGNED NOT NULL,
+ derived BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (issue_id, cc_id),
+ INDEX (cc_id, issue_shard),
+
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (cc_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Issue2Notify (
+ issue_id INT NOT NULL,
+ email VARCHAR(80) NOT NULL,
+
+ PRIMARY KEY (issue_id, email),
+
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE IssueStar (
+ issue_id INT NOT NULL,
+ user_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (issue_id, user_id),
+ INDEX (user_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE IssueRelation (
+ issue_id INT NOT NULL,
+ dst_issue_id INT NOT NULL,
+
+ -- Read as: src issue is blocked on dst issue.
+ kind ENUM ('blockedon', 'mergedinto') NOT NULL,
+
+ PRIMARY KEY (issue_id, dst_issue_id, kind),
+ INDEX (issue_id),
+ INDEX (dst_issue_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (dst_issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE DanglingIssueRelation (
+ issue_id INT NOT NULL,
+ dst_issue_project VARCHAR(80),
+ dst_issue_local_id INT,
+
+ -- This table uses 'blocking' so that it can guarantee the src issue
+ -- always exists, while the dst issue is always the dangling one.
+ kind ENUM ('blockedon', 'blocking', 'mergedinto') NOT NULL,
+
+ PRIMARY KEY (issue_id, dst_issue_project, dst_issue_local_id, kind),
+ INDEX (issue_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Comment (
+ id INT NOT NULL AUTO_INCREMENT,
+ issue_id INT NOT NULL,
+ created INT NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ commenter_id INT UNSIGNED NOT NULL,
+ content TEXT,
+ inbound_message TEXT,
+
+ was_escaped BOOLEAN DEFAULT FALSE,
+ deleted_by INT UNSIGNED,
+ is_spam BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY(id),
+ INDEX (is_spam, project_id, created),
+ INDEX (commenter_id, created),
+
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (commenter_id) REFERENCES User(user_id),
+ FOREIGN KEY (deleted_by) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Attachment (
+ id INT NOT NULL AUTO_INCREMENT,
+
+ issue_id INT NOT NULL,
+ comment_id INT,
+
+ filename VARCHAR(255) NOT NULL,
+ filesize INT NOT NULL,
+ mimetype VARCHAR(255) NOT NULL,
+ deleted BOOLEAN,
+ gcs_object_id VARCHAR(1024) NOT NULL,
+
+ PRIMARY KEY (id),
+ INDEX (issue_id),
+ INDEX (comment_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE IssueUpdate (
+ id INT NOT NULL AUTO_INCREMENT,
+ issue_id INT NOT NULL,
+ comment_id INT,
+
+ field ENUM (
+ 'summary', 'status', 'owner', 'cc', 'labels', 'blockedon', 'blocking', 'mergedinto',
+ 'project', 'components', 'custom', 'is_spam' ) NOT NULL,
+ old_value TEXT,
+ new_value TEXT,
+ added_user_id INT UNSIGNED,
+ removed_user_id INT UNSIGNED,
+ custom_field_name VARCHAR(255),
+ is_spam BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (id),
+ INDEX (issue_id),
+ INDEX (comment_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+ -- FOREIGN KEY (added_user_id) REFERENCES User(user_id),
+ -- FOREIGN KEY (removed_user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE IssueFormerLocations (
+ issue_id INT NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ local_id INT NOT NULL,
+
+ INDEX (issue_id),
+ UNIQUE KEY (project_id, local_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Template (
+ id INT NOT NULL AUTO_INCREMENT,
+ project_id SMALLINT UNSIGNED NOT NULL,
+ name VARCHAR(255) BINARY NOT NULL,
+
+ content TEXT,
+ summary TEXT,
+ summary_must_be_edited BOOLEAN,
+ owner_id INT UNSIGNED,
+ status VARCHAR(255),
+ members_only BOOLEAN,
+ owner_defaults_to_member BOOLEAN,
+ component_required BOOLEAN DEFAULT FALSE,
+
+ PRIMARY KEY (id),
+ UNIQUE KEY (project_id, name),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Template2Label (
+ template_id INT NOT NULL,
+ label VARCHAR(255) NOT NULL,
+
+ PRIMARY KEY (template_id, label),
+ FOREIGN KEY (template_id) REFERENCES Template(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Template2Admin (
+ template_id INT NOT NULL,
+ admin_id INT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (template_id, admin_id),
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ FOREIGN KEY (admin_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Template2FieldValue (
+ template_id INT NOT NULL,
+ field_id INT NOT NULL,
+
+ int_value INT,
+ str_value VARCHAR(1024),
+ user_id INT UNSIGNED,
+
+ INDEX (template_id, field_id),
+
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ FOREIGN KEY (field_id) REFERENCES FieldDef(id),
+ FOREIGN KEY (user_id) REFERENCES User(user_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE Template2Component (
+ template_id INT NOT NULL,
+ component_id INT NOT NULL,
+
+ PRIMARY KEY (template_id, component_id),
+
+ FOREIGN KEY (template_id) REFERENCES Template(id),
+ FOREIGN KEY (component_id) REFERENCES ComponentDef(id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE ProjectIssueConfig (
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ statuses_offer_merge VARCHAR(255) NOT NULL,
+ exclusive_label_prefixes VARCHAR(255) NOT NULL,
+ default_template_for_developers INT NOT NULL,
+ default_template_for_users INT NOT NULL,
+
+ default_col_spec TEXT,
+ default_sort_spec TEXT,
+ default_x_attr TEXT,
+ default_y_attr TEXT,
+
+ custom_issue_entry_url TEXT,
+
+ PRIMARY KEY (project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+CREATE TABLE FilterRule (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ rank SMALLINT UNSIGNED,
+
+ -- TODO: or should this be broken down into structured fields?
+ predicate TEXT NOT NULL,
+ -- TODO: or should this be broken down into structured fields?
+ consequence TEXT NOT NULL,
+
+ INDEX (project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+-- Each row in this table indicates an issue that needs to be reindexed
+-- in the GAE fulltext index by our batch indexing cron job.
+CREATE TABLE ReindexQueue (
+ issue_id INT NOT NULL,
+ created TIMESTAMP,
+
+ PRIMARY KEY (issue_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id)
+) ENGINE=INNODB;
+
+
+-- This holds counters with the highest issue local_id that is
+-- already used in each project. Clients should atomically increment
+-- the value for current project and then use the new counter value
+-- when creating an issue.
+CREATE TABLE LocalIDCounter (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ used_local_id INT NOT NULL,
+ used_spam_id INT NOT NULL,
+
+ PRIMARY KEY (project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id)
+) ENGINE=INNODB;
+
+
+-- This is a saved query. It can be configured by a project owner to
+-- be used by all visitors to that project. Or, it can be a
+-- personal saved query that appears on a user's "Saved queries" page
+-- and executes in the scope of one or more projects.
+CREATE TABLE SavedQuery (
+ id INT NOT NULL AUTO_INCREMENT,
+ name VARCHAR(80) NOT NULL,
+
+  -- For now, we only allow saved queries to be based on one of the built-in
+ -- query scopes, and those can never be deleted, so there can be no nesting,
+ -- dangling references, and thus no need for cascading deletes.
+ base_query_id INT,
+ query TEXT NOT NULL,
+
+ PRIMARY KEY (id)
+) ENGINE=INNODB;
+
+
+-- Rows for built-in queries. These are in the database solely so that
+-- foreign key constraints are satisfied. These rows are never read or updated.
+INSERT IGNORE INTO SavedQuery VALUES
+ (1, 'All issues', 0, ''),
+ (2, 'Open issues', 0, 'is:open'),
+ (3, 'Open and owned by me', 0, 'is:open owner:me'),
+ (4, 'Open and reported by me', 0, 'is:open reporter:me'),
+ (5, 'Open and starred by me', 0, 'is:open is:starred'),
+ (6, 'New issues', 0, 'status:new'),
+ (7, 'Issues to verify', 0, 'status=fixed,done'),
+ (8, 'Open with comment by me', 0, 'is:open commentby:me');
+
+-- The sole purpose of this statement is to force user defined saved queries
+-- to have IDs greater than 100 so that 1-100 are reserved for built-ins.
+INSERT IGNORE INTO SavedQuery VALUES (100, '', 0, '');
+
+
+-- User personal queries default to executing in the context of the
+-- project where they were created, but the user can edit them to make
+-- them into cross-project queries. Project saved queries always
+-- implicitly execute in the context of a project.
+CREATE TABLE SavedQueryExecutesInProject (
+ query_id INT NOT NULL,
+ project_id SMALLINT UNSIGNED NOT NULL,
+
+ PRIMARY KEY (query_id, project_id),
+ INDEX (project_id),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (query_id) REFERENCES SavedQuery(id)
+) ENGINE=INNODB;
+
+
+-- These are the queries edited by the project owner on the project
+-- admin pages.
+CREATE TABLE Project2SavedQuery (
+ project_id SMALLINT UNSIGNED NOT NULL,
+ rank SMALLINT UNSIGNED NOT NULL,
+ query_id INT NOT NULL,
+
+ -- TODO(jrobbins): visibility: owners, committers, contributors, anyone
+
+ PRIMARY KEY (project_id, rank),
+ FOREIGN KEY (project_id) REFERENCES Project(project_id),
+ FOREIGN KEY (query_id) REFERENCES SavedQuery(id)
+) ENGINE=INNODB;
+
+
+-- These are personal saved queries.
+CREATE TABLE User2SavedQuery (
+ user_id INT UNSIGNED NOT NULL,
+ rank SMALLINT UNSIGNED NOT NULL,
+ query_id INT NOT NULL,
+
+ -- TODO(jrobbins): daily and weekly digests, and the ability to have
+ -- certain subscriptions go to username+SOMETHING@example.com.
+ subscription_mode ENUM ('noemail', 'immediate') DEFAULT 'noemail' NOT NULL,
+
+ PRIMARY KEY (user_id, rank),
+ FOREIGN KEY (user_id) REFERENCES User(user_id),
+ FOREIGN KEY (query_id) REFERENCES SavedQuery(id)
+) ENGINE=INNODB;
+
+
+-- Created whenever a user reports an issue or comment as spam.
+-- Note this is distinct from a SpamVerdict, which is issued by
+-- the system rather than a human user.
+CREATE TABLE SpamReport (
+ -- when this report was generated
+ created TIMESTAMP NOT NULL,
+ -- when the reported content was generated
+ -- TODO(jrobbins): needs default current_time in MySQL 5.7.
+ content_created TIMESTAMP NOT NULL,
+ -- id of the reporting user
+ user_id INT UNSIGNED NOT NULL,
+ -- id of the reported user
+ reported_user_id INT UNSIGNED NOT NULL,
+ -- either this or issue_id must be set
+ comment_id INT,
+ -- either this or comment_id must be set
+ issue_id INT,
+
+ INDEX (issue_id),
+ INDEX (comment_id),
+ FOREIGN KEY (issue_id) REFERENCES Issue(id),
+ FOREIGN KEY (comment_id) REFERENCES Comment(id)
+) ENGINE=INNODB;
+
+
+-- Any time a human or the system sets is_spam to true,
+-- or changes it from true to false, we want to have a
+-- record of who did it and why.
+CREATE TABLE SpamVerdict (
+  -- when this verdict was generated
+  created TIMESTAMP NOT NULL,
+
+  -- id of the reporting user, may be null if it was
+  -- an automatic classification.
+  user_id INT UNSIGNED,
+
+  -- id of the containing project.
+  project_id INT NOT NULL,
+
+  -- either this or issue_id must be set.
+  comment_id INT,
+
+  -- either this or comment_id must be set.
+  issue_id INT,
+
+  -- If the classifier issued the verdict, this should be set.
+  classifier_confidence FLOAT,
+
+  -- This should reflect the new is_spam value that was applied
+  -- by this verdict, not the value it had prior.
+  is_spam BOOLEAN NOT NULL,
+
+  -- owner: a project owner marked it as spam
+  -- threshold: number of SpamReports from non-members was exceeded.
+  -- classifier: the automatic classifier reports it as spam.
+  -- Single-quoted literals: double quotes break under ANSI_QUOTES sql_mode.
+  reason ENUM ('manual', 'threshold', 'classifier') NOT NULL,
+
+  overruled BOOLEAN NOT NULL,
+
+  INDEX (issue_id),
+  INDEX (comment_id),
+  INDEX (classifier_confidence),
+  FOREIGN KEY (issue_id) REFERENCES Issue(id),
+  FOREIGN KEY (comment_id) REFERENCES Comment(id)
+) ENGINE=INNODB;
diff --git a/appengine/monorail/static/css/d_sb.css b/appengine/monorail/static/css/d_sb.css
new file mode 100644
index 0000000..f12944d
--- /dev/null
+++ b/appengine/monorail/static/css/d_sb.css
@@ -0,0 +1,181 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/* Style sheet for issue attachment source browsing pages. */
+
+/* List */
+#resultstable {table-layout:fixed}
+#resultstable div {white-space:nowrap; overflow:hidden; text-overflow:ellipsis}
+
+/* Diffs */
+.diff pre {
+ margin:0;
+ padding:0;
+ white-space: pre-wrap;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+ word-wrap: break-word;
+}
+.diff th {padding:0 .6em; text-align:right; font-weight:normal; color:#666}
+.diff b {font-weight: normal}
+.diff .noline {background: #eee; border: 1px solid #888; border-width: 0 1px 0 1px}
+.diff .oldbackrm {background: #f88; border: 1px solid #a33; border-width: 0 1px 0 1px}
+.diff .oldbackeq {background: #ffd8d8; border: 1px solid #a33; border-width: 0 1px 0 1px}
+.diff .newbackadd {background: #9f9; border: 1px solid #3a3; border-width: 0 1px 0 1px}
+.diff .newbackeq {background: #ddf8cc; border: 1px solid #3a3; border-width: 0 1px 0 1px}
+.diff .oldrm {background: #f88;}
+.diff .oldeq {background: #ffd8d8;}
+.diff .newadd {background: #9f9;}
+.diff .neweq {background: #ddf8cc;}
+.diff .first td {border-top-width:1px}
+.diff .last td {border-bottom-width:1px}
+.header td {padding-bottom:.3em; text-align:center; font-family:arial, sans-serif}
+#controls {padding:.5em; white-space:nowrap}
+#controls td {padding:0 2px}
+#controls input, #controls select {font-size:93%; margin:0; padding:0}
+#controls form {margin:0; padding:0 1em}
+#controls a.revchoose {
+ text-decoration: none;
+ color: #000;
+ padding: 4px;
+ border: 1px solid #ebeff9;
+}
+#controls a.revchoose:hover {
+ border: 1px inset #fff;
+}
+
+/* Property Diffs */
+.diff .firstseg {padding-left: 2px}
+.diff .lastseg {padding-right: 2px}
+.diff .samepropback {border: 1px solid #000; border-width: 0 1px 0 1px}
+.diff td.nopropsep {border-bottom-width: 0px}
+.diff .propname td {font-size: 110%; font-weight: bold; padding: 1em 0.5em}
+.diff .bincontent {border-bottom-width: 1px; font-style: italic; font-size: 110%; padding: 0px 0.5em}
+.diff .propspace {font-size: 100%}
+.diff .sectiontitle {padding: 2em 0; font-style: italic; font-size: 110%}
+
+/* Meta bubble */
+#older, #props, #fileinfo {border-top:3px solid white; padding-top:6px; margin-top: 1em}
+#older pre {margin-top:4px; margin-left:1em}
+
+/* File */
+.fc pre, .fc td, .fc tr, .fc table, .fc tbody, #nums, #lines {padding:0; margin:0}
+.fc {position:relative; width:100%; min-height:30em}
+.fc table {border-collapse:collapse; margin:0; padding:0}
+#nums, #lines, #nums th, #lines th, #nums td, #lines td { vertical-align:top }
+pre {
+ font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
+ font-size: 93%;
+}
+#nums {padding-right:.5em; width:3.7em}
+#nums td {text-align:right}
+#nums a {color:#77c; text-decoration:none}
+#nums tr:hover a {color:blue; text-decoration:underline}
+#nums td:target a {color:black; font-weight:bold}
+.sep {visibility:hidden; width:2px}
+#nums span { cursor: pointer; width: 14px; float: left; background-repeat: no-repeat; }
+#lines td {padding-left:4px;}
+
+/* Applies only to sb files and issue attachments */
+.fc #nums, .fc #lines {
+ padding-top: 0.5em;
+}
+.fc #lines {
+ border-left: 3px solid #ebeff9;
+}
+
+#log { position:absolute; top:2px; right:0; width:28em}
+#log p { font-size:120%; margin: 0 0 0.5em 0}
+#log pre { margin-top: 0.3em}
+
+/* IE Whitespace Fix */
+.prettyprint td.source {
+ white-space: pre-wrap;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+}
+
+/* Header */
+.src_nav {
+ height:1.2em;
+ padding-top:0.2em;
+}
+.src_crumbs {
+ padding:0;
+ margin:0;
+}
+#crumb_root {
+ padding:0.2em 0 0.2em 0.2em;
+ margin:0;
+}
+#crumb_links {
+ margin-top:0;
+ margin-right:0;
+ padding:0.2em 1px;
+}
+form.src_nav {
+ padding:0;
+ margin:0;
+ display: inline;
+}
+#src_nav_title {
+ margin-right: 0.5em;
+}
+
+.heading {
+ background:#c3d9ff;
+}
+.sp {
+ color:#555;
+}
+.sourcelabel {
+ margin-left: 20px;
+ white-space: nowrap;
+}
+.sourcelabel select {
+ font-size: 93%;
+}
+#contents {
+ display: none;
+}
+
+/* Branch detail and revision log message */
+pre.wrap {
+ white-space: pre-wrap;
+ white-space: -moz-pre-wrap;
+}
+
+.edit_icon {
+ width: 14px;
+ height: 14px;
+ padding-right: 4px;
+}
+
+/* Source editing */
+.CodeMirror-line-numbers {
+ margin: .4em;
+ padding-right: 0.3em;
+ font-size: 83%;
+ font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ color: #777;
+ text-align: right;
+ border-right: 1px solid #aaa;
+}
+.editbox {
+ border-color: #999 #ccc #ccc;
+ border-width: 1px;
+ border-style: solid;
+ background: white;
+}
+.pending_bubble {
+ background-color: #e5ecf9;
+}
+#pending {
+ padding: 2px 2px 2px 4px;
+}
diff --git a/appengine/monorail/static/css/d_updates_page.css b/appengine/monorail/static/css/d_updates_page.css
new file mode 100644
index 0000000..9e89f85
--- /dev/null
+++ b/appengine/monorail/static/css/d_updates_page.css
@@ -0,0 +1,268 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+.activity-stream-list h4 {
+ font-size: 100%;
+ font-weight: normal;
+ padding: 0;
+ margin: 0;
+ padding-left: 1em;
+ background-color: #e5ecf9;
+ line-height: 160%;
+}
+ul.activity-stream {
+ list-style: none;
+ margin: 0;
+ padding: 0;
+}
+ul.activity-stream li {
+ margin: 0;
+ padding: 0.375em 0;
+ z-index: 0;
+ clear: both;
+}
+ul.activity-stream li.even {
+ background-color: #f8f8f8;
+}
+ul.activity-stream li:hover {
+ background-color: #f2f6ff;
+}
+ul.activity-stream li:last-child {
+ border-bottom: 1px solid #f8f8f8;
+}
+ul.activity-stream span.date {
+ float: left;
+ width: 7.5em;
+ text-align: right;
+ color: #5f5f5f;
+ padding-right: 1em;
+ background-repeat: no-repeat;
+ background-position: 5px center;
+}
+ul.activity-stream span.below-more {
+ background-image: url(/static/images/plus.gif);
+ cursor: pointer;
+}
+ul.activity-stream li.click span.below-more {
+ background-image: url(/static/images/minus.gif);
+}
+ul.activity-stream span.content {
+ display: block;
+ overflow: hidden;
+ white-space: nowrap;
+}
+ul.activity-stream span.content span.highlight-column {
+ padding-right: 1em;
+}
+ul.activity-stream span.details-inline {
+ color: #676767;
+}
+ul.activity-stream span.details-inline pre {
+ display: inline;
+}
+ul.activity-stream span.details-inline div,
+ul.activity-stream span.details-inline span {
+ display: inline;
+}
+ul.activity-stream div.details-wrapper {
+ display: none;
+}
+ul.activity-stream li.click span.details-inline {
+ display: none;
+}
+ul.activity-stream li.click div.details-wrapper {
+ display: block;
+ overflow: hidden;
+}
+ul.activity-stream div.details {
+ color: #5f5f5f;
+ margin-top: 0.3em;
+ padding-top: 0.2em;
+ padding-bottom: 0.2em;
+ margin-left: 0.2em;
+ border-left: 0.3em solid #e5ecf9;
+ padding-left: 0.5em;
+ line-height: 130%;
+}
+ul.activity-stream div.details span.ot-logmessage,
+ul.activity-stream div.details span.ot-issue-comment,
+ul.activity-stream div.details span.ot-project-summary {
+ white-space: pre;
+}
+ul.activity-stream div.details a,
+ul.activity-stream span.details-inline a {
+ color: #7777cc;
+}
+a.showAll,
+a.hideAll {
+ color: #0000CC;
+}
+ul.activity-stream div.details a:visited,
+ul.activity-stream span.details-inline a:visited {
+ color: #a376cc;
+}
+body.detailedInfo_hidden ul.activity-stream a.details {
+ color: #0000cc;
+ text-decoration: underline;
+ cursor: pointer;
+}
+ul.activity-stream div.details pre {
+ font-size: 110%;
+ line-height: 125%;
+ padding: 0;
+ margin: 0;
+}
+ul.activity-stream span.content a.ot-profile-link-1,
+ul.activity-stream span.content a.ot-project-link-1 {
+ color: #00C;
+}
+ul.activity-stream span.content a.ot-profile-link-2,
+ul.activity-stream span.content a.ot-project-link-2 {
+ color: #77C;
+}
+ul.activity-stream div.details span.ot-revlogs-br-1 {
+ display: block;
+ padding: 0;
+ margin: 0;
+}
+ul.activity-stream div.details span.ot-revlogs-br-2,
+ul.activity-stream div.details span.ot-issue-fields-br {
+ display: block;
+ padding: 0;
+ margin: 0.5em;
+}
+ul.activity-stream div.details span.ot-issue-field-wrapper,
+ul.activity-stream div.details span.ot-labels-field-wrapper {
+ font-family: arial, sans-serif;
+}
+ul.activity-stream span.details-inline span.ot-issue-field-wrapper,
+ul.activity-stream span.details-inline span.ot-labels-field-wrapper {
+ font-family: arial, sans-serif;
+}
+ul.activity-stream div.details span.ot-issue-field-name,
+ul.activity-stream div.details span.ot-labels-field-name {
+ font-weight: bold;
+}
+ul.activity-stream span.details-inline span.ot-issue-field-name,
+ul.activity-stream span.details-inline span.ot-labels-field-name {
+ font-weight: bold;
+}
+div.display-error {
+ font-style: italic;
+ text-align: center;
+ padding: 3em;
+}
+.results td a {
+ color: #0000CC;
+ text-decoration: underline;
+}
+.results td a.closed_ref {
+ color: #0000CC;
+ text-decoration: line-through;
+}
+.results td {
+ cursor: auto;
+}
+.highlight-column {
+ overflow: hidden;
+ white-space: nowrap;
+ display: block;
+}
+
+/**
+ * Document container designed for fluid width scaling.
+ * Alternative g-doc- fixed-width classes are in gui-fixed.css.
+ */
+.g-doc {
+ width: 100%;
+ text-align: left;
+}
+
+/* For agents that support the pseudo-element selector syntax. */
+.g-section:after {
+ content: ".";
+ display: block;
+ height: 0;
+ clear: both;
+ visibility: hidden;
+}
+
+/* Disable the clear on nested sections so they'll actually nest. */
+.g-unit .g-section:after {
+ clear: none;
+}
+.g-section {
+ /* Helps with extreme float-drops in nested sections in IE 6 & 7. */
+ width: 100%;
+ /* So nested sections' background-color paints the full height. */
+ overflow: hidden;
+}
+
+/* Forces "hasLayout" for IE. This fixes the usual gamut of peekaboo bugs. */
+.g-section,
+.g-unit {
+ zoom: 1;
+}
+
+/* Used for splitting a template's units text-alignment to the outer edges. */
+.g-split .g-unit {
+ text-align: right;
+}
+.g-split .g-first {
+ text-align: left;
+}
+
+/* Document container designed for 1024x768 */
+.g-doc-1024 {
+ width: 73.074em;
+ *width: 71.313em;
+ min-width: 950px; /* min-width doesn't work in IE6 */
+ margin: 0 auto;
+ text-align: left;
+}
+/* Document container designed for 800x600 */
+.g-doc-800 {
+ width: 57.69em;
+ *width: 56.3em;
+ min-width: 750px; /* min-width doesn't work in IE6 */
+ margin: 0 auto;
+ text-align: left;
+}
+
+.g-tpl-160 .g-unit,
+.g-unit .g-tpl-160 .g-unit,
+.g-unit .g-unit .g-tpl-160 .g-unit,
+.g-unit .g-unit .g-unit .g-tpl-160 .g-unit {
+ margin: 0 0 0 8.5em;
+ width: auto;
+ float: none;
+}
+.g-unit .g-unit .g-unit .g-tpl-160 .g-first,
+.g-unit .g-unit .g-tpl-160 .g-first,
+.g-unit .g-tpl-160 .g-first,
+.g-tpl-160 .g-first {
+ margin: 0;
+ width: 8.5em;
+ float: left;
+}
+
+.g-tpl-300 .g-unit,
+.g-unit .g-tpl-300 .g-unit,
+.g-unit .g-unit .g-tpl-300 .g-unit,
+.g-unit .g-unit .g-unit .g-tpl-300 .g-unit {
+ margin: 0 0 0 19.5em;
+ width: auto;
+ float: none;
+}
+.g-unit .g-unit .g-unit .g-tpl-300 .g-first,
+.g-unit .g-unit .g-tpl-300 .g-first,
+.g-unit .g-tpl-300 .g-first,
+.g-tpl-300 .g-first {
+ margin: 0;
+ width: 19.5em;
+ float: left;
+}
diff --git a/appengine/monorail/static/css/ph_core.css b/appengine/monorail/static/css/ph_core.css
new file mode 100644
index 0000000..fe91619
--- /dev/null
+++ b/appengine/monorail/static/css/ph_core.css
@@ -0,0 +1,835 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+@charset "utf-8";
+
+body {
+ background: #fff;
+ font: 82% arial, sans-serif;
+ margin: 0 0 3px 0;
+ min-width: 768px;
+}
+
+#monobar {
+ background: #f1f1f1;
+ margin: 0;
+ padding: 0;
+ border-bottom: 1px solid #ccc;
+}
+
+#monobar th {
+ border-right: 1px solid #ccc;
+ white-space: nowrap;
+ vertical-align: middle;
+ font-weight: normal;
+}
+
+.padded {
+ padding: 4px 1em;
+}
+
+#monobar a#wordmark {
+ font-family: sans-serif;
+ font-variant: small-caps;
+ font-size: 140%;
+ font-weight: bold;
+ font-style: oblique;
+ color: #822;
+ letter-spacing: 1px;
+ text-decoration: none;
+}
+
+#thumbnail_box {
+ background-color: white;
+ vertical-align: middle;
+}
+
+#thumbnail_box a, #thumbnail_box img {
+ display: block;
+}
+
+.toptabs a:link, .toptabs a:visited {
+ color: #444;
+ padding: 0 .5em;
+ text-decoration: none;
+}
+
+.toptabs a:hover {
+ color: #00c;
+ text-decoration: underline;
+}
+
+.toptabs a.active {
+ font-weight: bold;
+ color: #000;
+ text-decoration: none;
+}
+
+#userbar {
+ text-align: right;
+}
+
+#userbar a {
+ color: #000;
+}
+
+.subt {
+ background: #e3e9ff;
+ margin: 0;
+ padding: 2px 1em;
+ border-bottom: 1px solid #ddd;
+}
+
+a:link, a:focus {
+ color: #00c;
+}
+
+a:active {
+ color: red;
+}
+
+select, input {
+ font-family: arial, sans-serif;
+}
+
+input[type="text"] {
+ border-color: #999 #ccc #ccc;
+ border-style: solid;
+ border-width: 1px;
+ padding: 2px 1px;
+}
+
+input[type=button], input[type=reset], input[type=submit], .buttonify {
+ font-size: 100%;
+ background: url("/static/images/button-bg.gif") repeat-x scroll left top #E3E3E3;
+ background: -webkit-gradient(linear,0% 40%,0% 70%,from(#F9F9F9),to(#E3E3E3));
+ background: -moz-linear-gradient(top, #fff, #ddd);
+
+ vertical-align: baseline;
+ padding: 1px 3px 1px 3px;
+ border: 1px solid #aaa;
+ border-top-color: #ccc;
+ border-bottom-color: #888;
+ border-radius: 3px;
+ cursor: pointer;
+ text-decoration: none;
+}
+
+.buttonify {
+ color: #000 !important;
+}
+
+@-moz-document url-prefix() {
+ .buttonify {
+ padding: 2px 3px 2px 3px;
+ }
+}
+
+input[type=button]:hover, input[type=reset]:hover, input[type=submit]:hover, .buttonify:hover {
+ border-color: #666;
+ text-decoration: none !important;
+}
+
+.capsule_left {
+ border-right: 0;
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
+}
+
+.capsule_right {
+ border-top-left-radius: 0;
+ border-bottom-left-radius: 0;
+}
+
+.capsule_middle {
+ border-radius: 0;
+ border-right: 0;
+}
+
+.mode_button_active, .buttonify:active,
+input[type=button]:active, input[type=reset]:active, input[type=submit]:active {
+ background: url("/static/images/button-bg.gif") repeat-x scroll left bottom #bbb;
+ background: -webkit-gradient(linear,0% 40%,0% 70%,from(#e3e3e3),to(#f9f9f9));
+ background: -moz-linear-gradient(top, #e3e3e3, #f9f9f9);
+ border-color: #aaa;
+}
+
+textarea {
+ border-color: #999 #ccc #ccc;
+ border-style: solid;
+ border-width: 1px;
+}
+
+td td, th th, th td, td th {
+ font-size: 100%;
+}
+
+pre {
+ font-size: 110%;
+}
+
+form {
+ padding: 0;
+ margin: 0;
+}
+
+/* Project tab bar. */
+.gtb {
+ background: #fff;
+ border-bottom: 1px solid #ccc;
+ padding: 5px 10px 0 5px;
+ white-space: nowrap;
+}
+
+.user_bar {
+ cursor: pointer;
+ float: right;
+ margin: 5px 15px 6px 10px;
+}
+
+.gtb .gtbc {
+ clear: left;
+}
+
+table {
+ border-collapse: separate;
+}
+
+.nowrap { white-space: nowrap; }
+
+.bubble_bg {
+ background: #e5ecf9;
+ margin-bottom: 0.6em;
+}
+
+.bubble {
+ padding: 4px;
+}
+
+#bub {
+ padding: 0 1px 0 1px;
+}
+
+.bub-top {
+ margin: 0 2px 2px;
+}
+
+.bub-bottom {
+ margin: 2px 2px 0;
+}
+
+.drop-down-bub {
+ font-size: 80%;
+ margin-top: -1px;
+}
+
+
+h4 {
+ color: #222;
+ font-size: 18pt;
+ margin: 1.5em 0 .5em 0;
+ padding: 0;
+}
+
+.section {
+ margin: 0 4px 1.6em 4px;
+ padding:4px;
+}
+.section .submit {
+ margin: 8px;
+}
+
+#maincol {
+ padding:4px;
+ background: #fff;
+}
+
+.isf a, .at a, .isf a:visited, .at a:visited {
+ color: #0000cc;
+ text-decoration: none;
+}
+
+.at span {
+ margin-right: 1em;
+ white-space: nowrap;
+}
+
+.isf a:hover, .at a:hover {
+ color: #0000cc;
+ text-decoration: underline;
+}
+
+.at {
+ padding-top: 6px;
+ padding-bottom: 3px;
+}
+
+.st1 .inst1 a,
+.st2 .inst2 a,
+.st3 .inst3 a,
+.st4 .inst4 a,
+.st5 .inst5 a,
+.st6 .inst6 a,
+.st7 .inst7 a,
+.st8 .inst8 a,
+.st9 .inst9 a {
+ color: #000;
+ font-weight: bold;
+ text-decoration: none;
+}
+
+.notice, .error {
+ background: #fff1a8;
+ font-weight: bold;
+ padding: 4px 16px;
+ border-radius: 4px;
+}
+
+.adminonly {
+ color: #a00;
+ font-style: italic;
+}
+
+.fielderror {
+ color: #a00;
+ font-weight: bold;
+ padding: 4px;
+}
+
+.tip, .help {
+ background: #e5ecf9;
+ font-size: 92%;
+ margin: 5px;
+ padding: 6px;
+ border-radius: 6px;
+}
+
+.tip {
+ width: 14em;
+}
+
+.help {
+ width: 44em;
+}
+
+/* Google standard */
+.gbh {
+ border-top: 1px solid #C9D7F1;
+ font-size: 1px;
+ height: 0;
+ position: absolute;
+ top: 24px;
+ width: 100%;
+}
+
+#pname {
+ font-size:300%;
+ margin: 0;
+ padding: 0;
+}
+
+#pname a,
+#pname a:visited {
+ text-decoration:none;
+ color: #666;
+}
+
+#project_summary_link {
+ text-decoration: none;
+ color: #444;
+}
+
+.vt td,
+.vt th,
+.vt {
+ vertical-align: top;
+}
+
+.indicator {
+ font-size: x-small;
+ color: #00c;
+}
+
+div.h4, table.h4 {
+ background-color: #e5ecf9;
+ margin-bottom: 2px;
+ border-top: 1px solid #3366cc;
+ padding: 2px;
+ font-weight: bold;
+ position: relative;
+ margin-top: 2px;
+}
+
+.mainhdr {
+ background-color: #ebeff9;
+ border-bottom: 1px solid #6b90da;
+ font-weight: bold;
+ font-size: 133%;
+ padding: 2px;
+}
+
+.secondaryhdr {
+ background-color: #eee;
+ padding: 10px;
+ border-bottom: 1px solid #ddd;
+ border-left: 1px solid #ddd;
+ border-right: 1px solid #ddd;
+}
+
+h1 {
+ font-size: x-large;
+ margin-top: 0px;
+}
+
+h2 {
+ font-size: large;
+}
+
+h3 {
+ font-size: medium;
+ background: #e5ecf9;
+ border-top: 1px solid #3366cc;
+ padding: 0.5ex 0.5em 0.5ex 0.5em;
+ margin-right: 2em;
+}
+
+h4 {
+ font-size: small;
+}
+
+img {
+ border: 0;
+}
+
+#user_bar {
+ text-align: right;
+ margin-bottom: 10px;
+}
+
+#user_bar a {
+ color: #00c;
+ text-decoration: none;
+}
+
+#header {
+ position: relative;
+ height: 55px;
+ padding-top: 6px;
+ margin-bottom: -9px;
+}
+
+#title {
+ margin-left: 171px;
+ border-top: 1px solid #3366cc;
+ background-color: #e5ecf9;
+ font-size: large;
+ font-weight: bold;
+ padding-left: 3px;
+ padding-top: 1px;
+ padding-bottom: 1px;
+}
+
+#footer {
+ clear: both;
+ text-align: right;
+ padding-top: 1em;
+ margin: 3.5em 0em;
+ color: #999;
+}
+
+#footer a,
+#footer a:visited {
+ text-decoration: none;
+ margin-right: 2em;
+}
+
+.label { text-decoration: none; color: green !important; }
+.label:hover { text-decoration: underline; }
+
+.fieldvalue { text-decoration: none; }
+.fieldvalue:hover { text-decoration: underline; }
+
+#colcontrol {
+ padding: 5px;
+}
+
+#cue {
+ margin-top: -4px;
+ padding: 1px;
+ background: #f9edbe;
+ border: 1px solid #f0c36d;
+}
+#cue td span {
+ font-size: 85%;
+ text-align: center;
+ padding: 0 1em;
+ }
+
+.results tr td { border: 0; }
+.results tr:last-child td { border-bottom: 1px solid #f8f8f8 }
+.striped tr:nth-child(even) { background: #f8f8f8; }
+
+.results th, .results_lite th {
+ background: #e3e9ff;
+ text-align: left;
+ padding: 3px;
+ border: 0;
+ border-right: 1px solid #fff;
+}
+.results th:last-child { border-right: 0; }
+
+.results th a, .results th a:visited {
+ color: #0000cc;
+ padding-right: 4px;
+ margin-right: 4px;
+}
+.results td { cursor: pointer }
+.results td { padding: 4px; }
+.results td a { color: #000; text-decoration: none; }
+.results td.id a { color: #0000cc; text-decoration: underline; white-space: nowrap; }
+.results td.id a:visited { color: purple; text-decoration: underline; }
+.results td.id a:hover { color: red; text-decoration: underline; }
+table.results .hoverTarget:hover { color: #009; background-color: #f8f8ff; }
+table.results .hoverTarget:hover a { color: #009; }
+.results .label { font-size: 80% }
+.results .selected { background-color: #ffe; }
+.results .selected:nth-child(even) { background-color: #f8f8e8; }
+.results td tt { color: #999; font-style: italic; font-weight: bold; }
+.results .displayproperties { font-size: 80%; color: #666; }
+
+.results .grid td { border: solid #f1f1f1; border-width: 0 1px 1px 0; }
+.results .grid .gridtile tr { border: 0; }
+.results .grid .gridtile td { border: 0; }
+
+.comptable.all .comprow { display: table-row; }
+.comptable.active .comprow { display: none; }
+.comptable.active .comprow.active { display: table-row; }
+.comptable.toplevel .comprow { display: none; }
+.comptable.toplevel .comprow.toplevel { display: table-row; }
+.comptable.toplevel .comprow.toplevel.deprecated { display: none; }
+.comptable.myadmin .comprow { display: none; }
+.comptable.myadmin .comprow.myadmin { display: table-row; }
+.comptable.mycc .comprow { display: none; }
+.comptable.mycc .comprow.mycc { display: table-row; }
+.comptable.deprecated .comprow { display: none; }
+.comptable.deprecated .comprow.deprecated { display: table-row; }
+
+/* The revision flipper. */
+.flipper { font-family: monospace; font-size: 120%; }
+.flipper ul { list-style-type: none; padding: 0; margin: 0em 0.3em; }
+.flipper b { margin: 0em 0.3em; }
+
+.closed .ifOpened { display: none }
+.closed .opened span.ifOpened { display: inline }
+.opened .ifClosed { display: none }
+.opened .closed span.ifClosed { display: inline }
+
+a.star {
+ text-decoration: none;
+ cursor: pointer;
+ display: inline-block;
+ font-size: 18px;
+}
+
+a.spamflag {
+ text-decoration: none;
+ cursor: pointer;
+}
+
+.h3 {
+ font-size: 130%;
+ font-weight: bolder;
+ font-family: "Geneva", arial, sans-serif;
+}
+input { padding-left: 1px; padding-right: 1px; }
+textarea { padding-left: 1px; padding-right: 1px; }
+
+.pagination { font-size: 100%; float: right; white-space: nowrap; }
+.pagination a { margin-left: 0.3em; margin-right: 0.3em; }
+
+.author { margin-bottom: 1em; }
+
+#searchtips { padding-left: 2em; }
+#searchtips p { margin-left: 2em; }
+
+.issueList .inIssueList span,
+.issueAdvSearch .inIssueAdvSearch a,
+.issueSearchTips .inIssueSearchTips a {
+ font-weight: bold;
+ text-decoration: none;
+ color: #000;
+}
+
+iframe[frameborder="1"] {
+ border: 1px solid #999;
+}
+
+/* For project menu */
+.menuDiv {
+ margin-top: 5px;
+ border-color: #C9D7F1 #3366CC #3366CC #A2BAE7;
+ border-style: solid;
+ border-width: 1px;
+ z-index: 1001;
+ padding: 0;
+ width: 175px;
+ background: #fff;
+ overflow: hidden;
+}
+.menuDiv .menuText {
+ padding: 3px;
+ text-decoration: none;
+ background: #fff;
+}
+.menuDiv .menuItem {
+ color: #0000CC;
+ padding: 3px;
+ text-decoration: none;
+ background: #fff;
+}
+.menuDiv .menuItem:hover {
+ color: #FFF;
+ background: #3366CC;
+}
+.menuDiv .categoryTitle {
+ padding-left: 1px;
+}
+.menuDiv .menuCategory,
+.menuDiv .categoryTitle {
+ margin-top: 4px;
+}
+.menuDiv .menuSeparator {
+ margin: 0 0.5em;
+ border: 0;
+ border-top: 1px solid #C9D7F1;
+}
+
+.hostedBy {
+ text-align: center;
+ vertical-align: middle;
+}
+
+.fullscreen-popup {
+ position: fixed;
+ right: 4%;
+ left: 4%;
+ top: 5%;
+ max-height: 90%;
+ opacity: 0.85;
+ -moz-opacity: 0.85;
+ -khtml-opacity: 0.85;
+ filter: alpha(opacity=85);
+ border-radius: 10px;
+
+ background: #000;
+ color: white;
+ text-shadow: #000 1px 1px 7px;
+
+ padding: 1em;
+ z-index: 10;
+ overflow-x: hidden;
+ overflow-y: hidden;
+}
+
+/* Make links on this dark background a lighter blue. */
+.fullscreen-popup a {
+ color: #dd0;
+}
+
+div#keys_help th {
+ color: yellow;
+ text-align: left;
+}
+
+div#keys_help td {
+ font-weight: normal;
+ color: white;
+}
+
+td.shortcut {
+ text-align: right;
+}
+
+span.keystroke {
+ color: #8d0;
+ font-family: monospace;
+ font-size: medium;
+}
+
+#ac-list {
+ border: 1px solid #bbb;
+ background: #fff;
+ color: #00C;
+ padding: 2px;
+ z-index: 10;
+ max-height: 18em;
+ overflow-x: hidden;
+ overflow-y: auto;
+}
+#ac-list { font-size: 95%; }
+#ac-list tr { margin: 1px; cursor: pointer; padding: 0 10px; }
+#ac-list th { color: #333; text-align: left; }
+#ac-list .selected,
+#ac-list .selected td { background: #c3d9ff; }
+#ac-list td, #ac-list th { white-space: nowrap; padding-right: 22px}
+
+.list {
+ background-color:#fff;
+ padding: 5px;
+}
+
+.list-foot {
+ background-color:#fff;
+ padding: 5px;
+ height: 20px;
+}
+
+.graytext {
+ color: #666;
+}
+
+.vspacer {
+ margin-top: 1em;
+}
+
+.hspacer {
+ margin-right: 1em;
+}
+
+.emphasis {
+ font-weight: bold;
+}
+
+.formrow {
+ vertical-align: top;
+ padding-bottom: .569em;
+ white-space: nowrap;
+ overflow: hidden;
+ padding-top: .2em;
+}
+
+.forminline {
+ display: inline-block;
+ vertical-align: top;
+}
+
+.formlabelgutter {
+ margin-top: 0.3em;
+ text-align: right;
+ vertical-align: top;
+ white-space: normal;
+ width: 13em;
+}
+
+.formlabel {
+ font-weight: bold;
+ text-align: right;
+}
+
+.forminputgutter {
+ margin-top: 0.3em;
+ text-align: left;
+ vertical-align: top;
+ white-space: normal;
+ width: 36em;
+}
+
+.forminput {
+ width: 100%;
+}
+
+.formshortinput {
+ width: 11em;
+}
+
+.formselectgutter {
+ margin-top: 0.3em;
+ text-align: left;
+ vertical-align: top;
+ white-space: normal;
+ width: 18em;
+}
+
+.formselect {
+ width: 18em;
+}
+
+.formqm {
+ margin-left: 0.25em;
+ margin-right: 0.25em;
+}
+
+.formerror {
+ color: #a00;
+ display: block;
+ text-align: left;
+}
+
+.tablerow {
+ vertical-align: top;
+ padding-bottom: .569em;
+ white-space: nowrap;
+ overflow: hidden;
+ padding-top: .2em;
+}
+
+.tablelabelgutter {
+ margin-top: 0.3em;
+ text-align: left;
+ vertical-align: top;
+ white-space: normal;
+ width: 10em;
+}
+
+.tablelabel {
+ font-weight: bold;
+ text-align: left;
+}
+
+/* Gecko */
+html>body .goog-inline-block {
+ display: -moz-inline-box; /* This is ignored by FF3 and later*/
+ display: inline-block; /* This is ignored by pre-FF3 Gecko */
+}
+
+/* Default rule */
+.goog-inline-block {
+ position: relative;
+ display: inline-block;
+}
+
+/* Pre-IE7 */
+* html .goog-inline-block {
+ display: inline;
+}
+
+/* IE7 */
+*:first-child+html .goog-inline-block {
+ display: inline;
+}
+
+#popular {
+ border: solid silver;
+ border-width: 1px 0 1px 0;
+ padding: 0.3em;
+ width: 40em;
+}
+
+#popular table {
+ width: 40em;
+}
+
+#popular td {
+ padding: 2px;
+ white-space: nowrap;
+}
+
+#intro {
+ background:#ada;
+ margin: 3em;
+ width: 52em;
+}
diff --git a/appengine/monorail/static/css/ph_detail.css b/appengine/monorail/static/css/ph_detail.css
new file mode 100644
index 0000000..39bbe79
--- /dev/null
+++ b/appengine/monorail/static/css/ph_detail.css
@@ -0,0 +1,350 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+pre.prettyprint {
+ padding: 0.5em;
+ overflow: auto;
+ font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
+ font-size: 93%;
+}
+
+.role_label {
+ background-color: #8ae;
+ border-radius: 3px;
+ color: white;
+ display: inline-block;
+ padding: 2px 4px;
+ font-size: 75%;
+ font-weight: bold;
+ line-height: 14px;
+}
+
+.issuedescription pre, .issuecomment pre {
+ white-space: pre-wrap;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+}
+
+.issuedescription pre a, .issuecomment pre a {
+ word-wrap: break-word;
+ word-break: break-all;
+}
+
+.closed_ref { text-decoration: line-through }
+.rowmajor { width: 700px; }
+
+.rowmajor th {
+ text-align: right;
+ white-space: nowrap;
+}
+
+#meta-float th { white-space: nowrap; }
+.labelediting input { margin: 0 3px 4px 0; }
+.labelediting input { color: #060; }
+.collapse .ifExpand { display: none }
+.expand .ifCollapse { display: none }
+.inplace input { width: 100%; }
+.inplace td { border: 0; }
+td.issueheader { background: #e5ecf9; }
+
+.closed_colors td.issueheader, .closed_colors td.issueheader a {
+ background: #888; color: #fff;
+}
+
+.issuepage { margin-top: 0; }
+.issuepage td { padding: 2px; }
+.issuecomment { margin-top: 1em;}
+
+.issuecommentheader {
+ background: #e5ecf9;
+ padding: 3px;
+}
+
+.closed_colors .issuecommentheader { background: #eee;}
+
+.issuedescription pre, .issuecomment pre {
+ max-width: 80em;
+ padding: 0 0 3px .7em;
+}
+
+.issuedescription pre b, .issuecomment pre b {
+ font-size: 110%;
+ font-weight: bolder;
+ padding: 3px 0 3px 0;
+}
+
+.issue_text {
+ font-family: monospace;
+}
+
+.author { padding-left: 4px; }
+
+.ichcommands a {
+ color: #aaa;
+ text-decoration: none;
+}
+
+.issuecomment:hover .ichcommands a {
+ text-decoration: underline;
+}
+
+#issuemeta {
+ width: 12em;
+ background: #eef8ee;
+ font-size: 95%;
+ vertical-align: top;
+ border-right: 3px solid #fff;
+ border-top: 3px solid #fff;
+}
+
+#issuemeta td, #issuemeta td div, #issuemeta div.widemeta {
+ max-width: 14em;
+ overflow-x: hidden;
+ text-overflow: ellipsis;
+}
+#issuemeta td.widemeta, #issuemeta td.widemeta div, #issuemeta div.widemeta {
+ max-width: 20em;
+}
+
+.closed_colors #issuemeta { background: #eee; }
+
+.issuemetaheader {
+ padding: 0 5px;
+ background: #eef8ee;
+ border-right: 3px solid #fff;
+}
+
+.closed_colors .issuemetaheader {
+ background:#eee;
+}
+
+#issuemeta table td, #issuemeta table th {
+ margin: 0;
+ padding: 0;
+ padding-top: 5px;
+}
+
+.rel_issues a { white-space: nowrap; }
+
+.issue_restrictions {
+ padding: 2px 4px;
+ background-color: #fed;
+ min-width: 14em;
+ border: 2px solid #fff;
+}
+
+.issue_restrictions .restrictions_header {
+ padding: 0 0 2px 0;
+ text-align: center;
+ font-weight: bold;
+}
+
+.issue_restrictions ul {
+ padding: 0 2px;
+ margin: 0;
+ list-style: none;
+}
+
+.issue_restrictions .other_restriction {
+ white-space: nowrap;
+}
+
+.lock_grey {
+ background: no-repeat url(/static/images/lock.png);
+ width: 15px;
+ height: 16px;
+}
+
+.updates {
+ margin: 0 7px 5px 7px;
+ background: #f2f6ff;
+ font-size: 90%;
+ padding: 5px;
+ border-radius: 0 0 10px 10px;
+}
+
+.closed_colors .updates { background: #f4f4f4;}
+
+.fakelink {
+ color: #0000cc;
+ cursor: pointer;
+ white-space: nowrap;
+ text-decoration: underline;
+}
+
+.undef { color: #666; }
+table.advquery { border: 3px solid #e5ecf9;}
+
+table.advquery td {
+ white-space: nowrap;
+ padding: 2px;
+}
+
+.focus td { background: #e5ecf9; }
+
+.eg {
+ color: #666;
+ font-size: 90%;
+}
+
+#submit { font-weight: bold; }
+div td .novel { color: #430; }
+div td .blockingsubmit { color: #a03; }
+div td .exclconflict { color: #a03; }
+div td .questionmark { color: #a03; }
+.delcom { background: #e8e8e8; }
+.numberentry { text-align: right; }
+
+.rollovercontrol { display: none; }
+.rolloverzone:hover .rollovercontrol { display: inline; }
+
+td u {
+ margin-left: .3em;
+ color: #0000cc;
+ cursor: pointer;
+ white-space: nowrap;
+ text-decoration: none;
+}
+
+td u:hover { text-decoration: underline; }
+#peopledetail input { margin-bottom: 2px; }
+#perm_defs { margin-top: 1em; }
+#perm_defs th { text-align:left; }
+
+#perm_defs td {
+ vertical-align:bottom;
+ padding-left: 1em;
+}
+
+.attachments { width:33%; border-top:2px solid #999; padding-top: 3px; margin-left: .7em;}
+.attachments table { margin-bottom: 0.75em; }
+.attachments table tr td { padding: 0; margin: 0; font-size: 95%; }
+.preview { border: 2px solid #c3d9ff; padding: 1px; }
+.preview:hover { border: 2px solid blue; }
+.label { white-space: nowrap; }
+.derived { font-style: italic; }
+
+.cursor_on .author {
+ background: url(/static/images/show-arrow.gif) no-repeat 2px;
+}
+
+/* Issue Peek Feature */
+
+#infobubble {
+ position: absolute;
+ display: none;
+ border: 1px solid #666;
+ padding: 3px 5px 5px 5px;
+ background: #ebeff9;
+}
+
+#peekarea {
+ min-height: 30em;
+ font-size: 95%;
+ background: #fff;
+}
+
+.perms_EditIssue #peekarea {
+ min-height: 36.4em;
+}
+
+#issuesummary {
+ width: 300px;
+ max-width: 300px;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+}
+
+td.rowwidgets { padding: 2px 2px 0 7px; }
+.cursor_on td.rowwidgets {
+ background-image: url(/static/images/show-arrow.gif);
+ background-repeat: no-repeat;
+ background-position: 2px;
+}
+
+.loading {
+ background-image: url(/static/images/spin_16.gif);
+ background-repeat: no-repeat;
+ background-position: 2px;
+ padding: 4px 20px;
+}
+
+#peekheading {
+ background: #ebeff9;
+ font-size:140%;
+ padding:2px 2px 0; overflow-x: hidden;
+ white-space:nowrap;
+}
+
+.peek #issuemeta, .peek #issuecomments {
+ height: 28em;
+ max-height: 28em;
+ overflow-y: auto;
+ overflow-x: hidden;
+ /* scroll: auto; -- not a standard CSS property; dropped by browsers */
+}
+
+.issuecomment { padding-top: 1px; }
+.updates { margin-bottom: 2px; }
+#hc_controls { float: right; }
+#hc_controls a.paginate { margin-left: 1px; }
+#hc_controls a.close { margin-left: 3px; }
+
+#infobuttons {
+ background: #fff;
+ /* for IE */
+ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffff', endColorstr='#f1f1f1');
+ /* for webkit browsers */
+ background: -webkit-gradient(linear, left top, left bottom, from(#fff), to(#f1f1f1));
+ /* for firefox 3.6+ */
+ background: -moz-linear-gradient(top, #fff, #f1f1f1);
+ border-top: 1px solid #ccc;
+ white-space:nowrap;
+}
+
+#infobuttons td {
+ padding: 0;
+}
+
+.custom_field_value_menu {
+ width: 20em;
+}
+
+#cue.scrim {
+ position: fixed;
+ z-index: 1;
+ left: 0;
+ top: 0;
+ width: 100%;
+ height: 100%;
+ overflow: auto;
+ background-color: rgb(0,0,0);
+ background-color: rgba(0,0,0,0.4);
+}
+
+#privacy_dialog {
+ background: #fefefe;
+ border: 1px solid #888;
+ border-radius: 4px;
+ margin: 15% auto;
+ padding: 20px;
+ width: 80%;
+ max-width: 40em;
+}
+
+#privacy_dialog .actions {
+ margin-top: 2em;
+ text-align: right;
+ font-weight: bold;
+}
+
+#privacy_dialog .actions a {
+ text-decoration: none;
+ margin-left: 2em;
+}
diff --git a/appengine/monorail/static/css/ph_list.css b/appengine/monorail/static/css/ph_list.css
new file mode 100644
index 0000000..c230b16
--- /dev/null
+++ b/appengine/monorail/static/css/ph_list.css
@@ -0,0 +1,183 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+.popup {
+ display: none;
+ background: #fff;
+ border: 2px solid #bbb;
+ border-width: 0 2px 2px 1px;
+ position: absolute;
+ padding: 2px;
+}
+
+.popup a { text-decoration: none; cursor: pointer; }
+.popup td, .popup th {
+ font-size: 90%;
+ font-weight: normal;
+ text-align: left;
+ cursor: pointer;
+}
+.popup td { color: #0000cc; padding: 2px; }
+.popup tr:hover { background: #f9edbe; }
+.subpopup { border-width: 1px 2px 2px 1px; }
+
+.group_row td {
+ background: #f1f1f1;
+ cursor: pointer;
+ cursor: hand;
+}
+
+div.gridtile td.id {
+ width: 5em;
+ text-align: left;
+}
+
+div.gridtile {
+ border: 2px solid #f1f1f1;
+ border-radius: 6px;
+ padding: 1px;
+ background: white;
+}
+
+tr.grid {
+ border: 1px solid #bbb;
+}
+
+.results .grid th {
+ border-top: 1px solid #fff;
+}
+
+tr.grid .idcount {
+ text-align: left;
+}
+
+.results th a.dotdotdot {
+ text-decoration: none;
+ margin-right: 0;
+ padding-right: 0;
+}
+
+tr.grid .idcount a, .results .id a {
+ color: #0000cc;
+ text-decoration: underline;
+}
+
+tr.grid .idcount a {
+ margin-right: 0.6em;
+}
+
+div.gridtile {
+ width: 10em;
+ float: left;
+ margin: 2px;
+}
+
+div.gridtile table, div.projecttile table {
+ width: 100%;
+ table-layout: fixed;
+}
+
+div.gridtile td, div.projecttile td {
+ border: 0;
+ padding: 2px;
+ overflow: hidden;
+}
+
+div.gridtile td div {
+ height: 5.5ex;
+ font-size: 90%;
+ line-height: 100%;
+}
+
+div.gridtile td.status {
+ font-size: 90%;
+ text-align: right;
+ width: 70%;
+}
+
+div.projecttile {
+ width: 14em;
+ height: 90px;
+ margin: 0 1em 2em 1em;
+ float: left;
+ padding: 1px;
+ border: 2px solid #c3d9ff;
+ border-radius: 6px;
+ }
+
+div.projecttile:hover {
+ background: #f1f1f1;
+}
+
+
+.hide_col_0 .col_0, .hide_col_1 .col_1, .hide_col_2 .col_2, .hide_col_3 .col_3,
+.hide_col_4 .col_4, .hide_col_5 .col_5, .hide_col_6 .col_6,
+.hide_col_7 .col_7, .hide_col_8 .col_8, .hide_col_9 .col_9,
+.hide_col_10 .col_10, .hide_col_11 .col_11, .hide_col_12 .col_12,
+.hide_col_13 .col_13, .hide_col_14 .col_14, .hide_col_15 .col_15,
+.hide_col_16 .col_16, .hide_col_17 .col_17, .hide_col_18 .col_18,
+.hide_col_19 .col_19, .hide_col_20 .col_20 { display: none; }
+
+.hide_col_0 .popup span.col_0, .hide_col_1 .popup span.col_1,
+.hide_col_2 .popup span.col_2, .hide_col_3 .popup span.col_3,
+.hide_col_4 .popup span.col_4,
+.hide_col_5 .popup span.col_5, .hide_col_6 .popup span.col_6,
+.hide_col_7 .popup span.col_7, .hide_col_8 .popup span.col_8,
+.hide_col_9 .popup span.col_9, .hide_col_10 .popup span.col_10,
+.hide_col_11 .popup span.col_11, .hide_col_12 .popup span.col_12,
+.hide_col_13 .popup span.col_13, .hide_col_14 .popup span.col_14,
+.hide_col_15 .popup span.col_15,
+.hide_col_16 .popup span.col_16, .hide_col_17 .popup span.col_17,
+.hide_col_18 .popup span.col_18, .hide_col_19 .popup span.col_19,
+.hide_col_20 .popup span.col_20 { display: inline; color: #fff; }
+
+.hide_col_0 .popup tr:hover span.col_0,
+.hide_col_1 .popup tr:hover span.col_1,
+.hide_col_2 .popup tr:hover span.col_2,
+.hide_col_3 .popup tr:hover span.col_3,
+.hide_col_4 .popup tr:hover span.col_4,
+.hide_col_5 .popup tr:hover span.col_5,
+.hide_col_6 .popup tr:hover span.col_6,
+.hide_col_7 .popup tr:hover span.col_7,
+.hide_col_8 .popup tr:hover span.col_8,
+.hide_col_9 .popup tr:hover span.col_9,
+.hide_col_10 .popup tr:hover span.col_10,
+.hide_col_11 .popup tr:hover span.col_11,
+.hide_col_12 .popup tr:hover span.col_12,
+.hide_col_13 .popup tr:hover span.col_13,
+.hide_col_14 .popup tr:hover span.col_14,
+.hide_col_15 .popup tr:hover span.col_15,
+.hide_col_16 .popup tr:hover span.col_16,
+.hide_col_17 .popup tr:hover span.col_17,
+.hide_col_18 .popup tr:hover span.col_18,
+.hide_col_19 .popup tr:hover span.col_19,
+.hide_col_20 .popup tr:hover span.col_20 { color: #fff; }
+
+
+.table_title {
+ font-weight: bold;
+}
+
+.contentarea {
+ position: relative;
+ margin-bottom: 1em;
+}
+
+#resultstable td, #resultstable th {
+ padding-right: 2em;
+}
+
+.labels a:link { color: #080; }
+.labels a:visited { color: #080; }
+.labels a:active { color: #f00; }
+.name { margin-top: 2ex; font-size: 120%; }
+
+.results .id { text-align: right; }
+#resultstable .id { text-align: left; }
+#projecttable .id { text-align: left; }
+#starredtable .id { text-align: left; }
+#archivedtable .id { text-align: left; }
diff --git a/appengine/monorail/static/css/ph_mobile.css b/appengine/monorail/static/css/ph_mobile.css
new file mode 100644
index 0000000..baf7f23
--- /dev/null
+++ b/appengine/monorail/static/css/ph_mobile.css
@@ -0,0 +1,122 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+@media (max-width: 425px) {
+
+ body {
+ min-width: 0; /* get rid of hardcoded width */
+ }
+
+
+ /* Top navigation bar */
+
+ #monobar .toptabs {
+ display: none; /* hide most of the options to save some space */
+ }
+
+ #userbar {
+ padding: 5px;
+ }
+
+ #userbar > span {
+ display: inline-flex;
+ flex-wrap: wrap;
+ }
+
+
+ /* Search toolbar */
+
+ .subt {
+ padding: 5px;
+ }
+
+ .subt .inIssueEntry, .subt .inIssueList {
+ display: block;
+ margin: 10px 0 !important;
+ }
+
+ .subt label[for="searchq"], .subt label[for="can"], #can {
+ display: none; /* hide some labels and search scope helper field to save some space */
+ }
+
+
+ /* Main content */
+
+ #maincol > div > form > table > tbody > tr {
+ display: flex;
+ flex-direction: column;
+ }
+
+ #maincol > div > form > table > tbody > tr > td {
+ display: block;
+ }
+
+ #maincol table.rowmajor {
+ display: flex;
+ flex-direction: column;
+ width: auto; /* get rid of hardcoded width */
+ max-width: 100%;
+ }
+
+ #maincol table.rowmajor tbody {
+ flex-grow: 1;
+ }
+
+ #maincol table.rowmajor tr {
+ display: flex;
+ flex-direction: column;
+ }
+
+ #maincol table.rowmajor tr > th {
+ text-align: left;
+ }
+
+ #maincol table.rowmajor tr > td {
+ display: block;
+ width: 90%;
+ }
+
+ #maincol input, #maincol select, #maincol textarea {
+ font-size: 100%;
+ width: 100%;
+ margin-bottom: 8px;
+ }
+
+ #maincol .labelediting input {
+ max-width: 19%;
+ }
+
+ #maincol div.tip {
+ display: none;
+ }
+
+
+ /* Others */
+
+ #footer {
+ display: flex;
+ margin: 0 5px 5px 5px;
+ text-align: left;
+ }
+
+ #attachprompt {
+ display: block;
+ padding: 10px 0;
+ }
+
+ input[type="button"], input[type="submit"], a.buttonify { /* make all types of buttons easier to click */
+ padding: 5px;
+ }
+
+ table#meta-container,
+ table#meta-container > tbody > tr,
+ table#meta-container > tbody > tr> td {
+ display: block;
+ width: 100%;
+ }
+
+}
diff --git a/appengine/monorail/static/css/prettify.css b/appengine/monorail/static/css/prettify.css
new file mode 100644
index 0000000..d44b3a2
--- /dev/null
+++ b/appengine/monorail/static/css/prettify.css
@@ -0,0 +1 @@
+.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee}
\ No newline at end of file
diff --git a/appengine/monorail/static/database-maintenance.html b/appengine/monorail/static/database-maintenance.html
new file mode 100644
index 0000000..d1b0a87
--- /dev/null
+++ b/appengine/monorail/static/database-maintenance.html
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <link rel="icon" type="image/vnd.microsoft.icon" href="/static/images/monorail.ico">
+ <title>This bug tracker is unavailable due to database issues.</title>
+ <meta name="ROBOTS" content="NOARCHIVE">
+ <link type="text/css" rel="stylesheet" href="/static/css/ph_core.css">
+</head>
+<body>
+ <h2>This bug tracker is currently unavailable due to database issues.</h2>
+ Please try again later.
+</body>
+</html>
diff --git a/appengine/monorail/static/images/button-bg.gif b/appengine/monorail/static/images/button-bg.gif
new file mode 100644
index 0000000..cd2d728
--- /dev/null
+++ b/appengine/monorail/static/images/button-bg.gif
Binary files differ
diff --git a/appengine/monorail/static/images/close_icon.png b/appengine/monorail/static/images/close_icon.png
new file mode 100644
index 0000000..2d3417a
--- /dev/null
+++ b/appengine/monorail/static/images/close_icon.png
Binary files differ
diff --git a/appengine/monorail/static/images/lock.png b/appengine/monorail/static/images/lock.png
new file mode 100644
index 0000000..916f2b0
--- /dev/null
+++ b/appengine/monorail/static/images/lock.png
Binary files differ
diff --git a/appengine/monorail/static/images/minus.gif b/appengine/monorail/static/images/minus.gif
new file mode 100644
index 0000000..5595adf
--- /dev/null
+++ b/appengine/monorail/static/images/minus.gif
Binary files differ
diff --git a/appengine/monorail/static/images/monorail.ico b/appengine/monorail/static/images/monorail.ico
new file mode 100644
index 0000000..4597d7f
--- /dev/null
+++ b/appengine/monorail/static/images/monorail.ico
Binary files differ
diff --git a/appengine/monorail/static/images/new-24.gif b/appengine/monorail/static/images/new-24.gif
new file mode 100644
index 0000000..7106d09
--- /dev/null
+++ b/appengine/monorail/static/images/new-24.gif
Binary files differ
diff --git a/appengine/monorail/static/images/pagination-first.png b/appengine/monorail/static/images/pagination-first.png
new file mode 100644
index 0000000..4ee7f31
--- /dev/null
+++ b/appengine/monorail/static/images/pagination-first.png
Binary files differ
diff --git a/appengine/monorail/static/images/pagination-last.png b/appengine/monorail/static/images/pagination-last.png
new file mode 100644
index 0000000..0dea95d
--- /dev/null
+++ b/appengine/monorail/static/images/pagination-last.png
Binary files differ
diff --git a/appengine/monorail/static/images/pagination-next.png b/appengine/monorail/static/images/pagination-next.png
new file mode 100644
index 0000000..8c8f937
--- /dev/null
+++ b/appengine/monorail/static/images/pagination-next.png
Binary files differ
diff --git a/appengine/monorail/static/images/pagination-prev.png b/appengine/monorail/static/images/pagination-prev.png
new file mode 100644
index 0000000..ac97b8a
--- /dev/null
+++ b/appengine/monorail/static/images/pagination-prev.png
Binary files differ
diff --git a/appengine/monorail/static/images/paperclip.png b/appengine/monorail/static/images/paperclip.png
new file mode 100644
index 0000000..34464c2
--- /dev/null
+++ b/appengine/monorail/static/images/paperclip.png
Binary files differ
diff --git a/appengine/monorail/static/images/plus.gif b/appengine/monorail/static/images/plus.gif
new file mode 100644
index 0000000..116ce91
--- /dev/null
+++ b/appengine/monorail/static/images/plus.gif
Binary files differ
diff --git a/appengine/monorail/static/images/question-16.gif b/appengine/monorail/static/images/question-16.gif
new file mode 100644
index 0000000..948b230
--- /dev/null
+++ b/appengine/monorail/static/images/question-16.gif
Binary files differ
diff --git a/appengine/monorail/static/images/show-arrow.gif b/appengine/monorail/static/images/show-arrow.gif
new file mode 100644
index 0000000..7864453
--- /dev/null
+++ b/appengine/monorail/static/images/show-arrow.gif
Binary files differ
diff --git a/appengine/monorail/static/images/spin_16.gif b/appengine/monorail/static/images/spin_16.gif
new file mode 100644
index 0000000..73a6a86
--- /dev/null
+++ b/appengine/monorail/static/images/spin_16.gif
Binary files differ
diff --git a/appengine/monorail/static/images/tearoff_icon.gif b/appengine/monorail/static/images/tearoff_icon.gif
new file mode 100644
index 0000000..c23734e
--- /dev/null
+++ b/appengine/monorail/static/images/tearoff_icon.gif
Binary files differ
diff --git a/appengine/monorail/static/js/framework/env.js b/appengine/monorail/static/js/framework/env.js
new file mode 100644
index 0000000..7c6db59
--- /dev/null
+++ b/appengine/monorail/static/js/framework/env.js
@@ -0,0 +1,73 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview Defines the type of the CS_env Javascript object
+ * provided by the Codesite server.
+ *
+ * This is marked as an externs file so that any variable defined with a
+ * CS.env type will not have its properties renamed.
+ * @externs
+ */
+
+/** Codesite namespace object. */
+var CS = {};
+
+/**
+ * Javascript object holding basic information about the current page.
+ * This is defined as an interface so that we can use CS.env as a Closure
+ * type name, but it will never be implemented; rather, it will be
+ * made available on every page as the global object CS_env (see
+ * codesite/templates/demetrius/master-header.ezt).
+ *
+ * The type of the CS_env global object will actually be one of
+ * CS.env, CS.project_env, etc. depending on the page
+ * rendered by the server.
+ *
+ * @interface
+ */
+CS.env = function() {};
+
+/**
+ * Like relativeBaseUrl, but a full URL preceded by http://code.google.com
+ * @type {string}
+ */
+CS.env.prototype.absoluteBaseUrl;
+
+/**
+ * Path to versioned static assets (mostly js and css).
+ * @type {string}
+ */
+CS.env.prototype.appVersion;
+
+/**
+ * Request token for the logged-in user, or null for the anonymous user.
+ * @type {?string}
+ */
+CS.env.prototype.token;
+
+/**
+ * Email address of the logged-in user, or null for anon.
+ * @type {?string}
+ */
+CS.env.prototype.loggedInUserEmail;
+
+/**
+ * Url to the logged-in user's profile, or null for anon.
+ * @type {?string}
+ */
+CS.env.prototype.profileUrl;
+
+/**
+ * CS.env specialization for browsing project pages.
+ * @interface
+ * @extends {CS.env}
+ */
+CS.project_env = function() {};
+
+/** @type {string} */
+CS.project_env.prototype.projectName;
diff --git a/appengine/monorail/static/js/framework/externs.js b/appengine/monorail/static/js/framework/externs.js
new file mode 100644
index 0000000..a0375a1
--- /dev/null
+++ b/appengine/monorail/static/js/framework/externs.js
@@ -0,0 +1,25 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/** @type {CS.env} */
+var CS_env;
+
+// Exported functions must be mentioned in this externs file so that JSCompiler
+// will allow exporting functions by writing '_hideID = CS_hideID'.
+var _hideID;
+var _showID;
+var _hideEl;
+var _showEl;
+var _showInstead;
+var _toggleHidden;
+var _toggleCollapse;
+var _CS_dismissCue;
+var _CS_updateProjects;
+var _CP_checkProjectName;
+var _TKR_toggleStar;
+var _TKR_toggleStarLocal;
+var _TKR_syncStarIcons;
diff --git a/appengine/monorail/static/js/framework/framework-accountmenu.js b/appengine/monorail/static/js/framework/framework-accountmenu.js
new file mode 100644
index 0000000..b66058b
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-accountmenu.js
@@ -0,0 +1,30 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview This file initializes the drop down menu attached
+ * to the signed in user's email address. It utilizes the menu
+ * widget defined in framework-menu.js.
+ */
+
+/** @type {Menu} */
+var accountMenu;
+
+(function() {
+ var target = document.getElementById('account-menu');
+
+ if (!target) {
+ return;
+ }
+
+ accountMenu = new Menu(target, function() {});
+ accountMenu.addItem('Switch accounts', CS_env.login_url);
+ accountMenu.addEvent(window, 'load', function() {
+ document.body.appendChild(accountMenu.menu);
+ });
+})();
+
diff --git a/appengine/monorail/static/js/framework/framework-ajax.js b/appengine/monorail/static/js/framework/framework-ajax.js
new file mode 100644
index 0000000..6b1136b
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-ajax.js
@@ -0,0 +1,136 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+
+/**
+ * @fileoverview AJAX-related helper functions.
+ */
+
+
+/**
+ * Builds a POST string from a parameter dictionary.
+ * @param {Object} args parameters to encode.
+ * @param {string|boolean=} opt_token XSRF token to include, or false to omit
+ *     it. If unspecified, defaults to CS_env.token (requires a logged-in user).
+ * @return {string} encoded POST data.
+ */
+function CS_postData(args, opt_token) {
+ var params = [];
+ for (var key in args) {
+ params.push(key + "=" + encodeURIComponent(String(args[key])));
+ }
+ if (opt_token) {
+ params.push('token=' + encodeURIComponent(opt_token));
+ } else if (opt_token !== false) {
+ params.push('token=' + encodeURIComponent(CS_env.token));
+ }
+ return params.join('&');
+}
+
+/**
+ * Helper for an extremely common kind of XHR: a POST with an XSRF token
+ * where we silently ignore server or connectivity errors. If the token
+ * has expired, get a new one and retry the original request with the new
+ * token.
+ * @param {string} url request destination.
+ * @param {function(event)} callback function to be called
+ * upon successful completion of the request.
+ * @param {Object} args parameters to encode as POST data.
+ */
+function CS_doPost(url, callback, args, opt_token, opt_tokenPath) {
+ if (isTokenExpired()) {
+ var refreshXHR = XH_XmlHttpCreate();
+ var refreshURL = '/hosting/tokenRefresh.do';
+ var refreshArgs = {
+ form_token: opt_token || CS_env.token,
+ form_token_path: opt_tokenPath || 'xhr'
+ };
+ var refreshCallback = function(event) {
+ var xhr = event.target;
+ if (xhr.readyState != 4 || xhr.status != 200)
+ return;
+ var resp = CS_parseJSON(xhr);
+ if (opt_tokenPath)
+ CS_env[opt_tokenPath] = resp.form_token;
+ CS_env.tokenExpiresSec = Number(resp.token_expires_sec);
+ var retryXh = XH_XmlHttpCreate();
+ XH_XmlHttpPOST(
+ retryXh, url, CS_postData(args, resp.form_token), callback);
+ };
+ XH_XmlHttpPOST(
+ refreshXHR, refreshURL, CS_postData(refreshArgs), refreshCallback);
+ } else {
+ var xh = XH_XmlHttpCreate();
+ XH_XmlHttpPOST(
+ xh, url,
+ CS_postData(args, CS_env[opt_tokenPath] || opt_token),
+ callback);
+ }
+}
+
+
+/**
+ * Helper function to strip leading junk characters from a JSON response
+ * and then parse it into a JS constant.
+ *
+ * The reason that "}])'\n" is prepended to the response text is that
+ * it makes it impossible for a hacker to hit one of our JSON servlets
+ * via a <script src="..."> tag and do anything with the result. Even
+ * though a JSON response is just a constant, it could be passed into
+ * hacker code by tricks such as overriding the array constructor.
+ */
+function CS_parseJSON(xhr) {
+ return JSON.parse(xhr.responseText.substr(5));
+}
+
+
+function isTokenExpired(opt_tokenExpiresSec) {
+ var expiresSec = opt_tokenExpiresSec || CS_env.tokenExpiresSec;
+ var tokenExpiresDate = new Date(expiresSec * 1000);
+ return tokenExpiresDate <= new Date();
+}
+
+/**
+ * After we refresh the form token, we need to actually submit the form.
+ * formToSubmit keeps track of which form the user was trying to submit.
+ */
+var formToSubmit = null;
+
+/**
+ * If the form token that was generated when the page was served has
+ * now expired, then request a refreshed token from the server, and
+ * don't submit the form until after it arrives.
+ */
+function refreshTokens(event, formToken, formTokenPath, tokenExpiresSec) {
+ if (!isTokenExpired(tokenExpiresSec))
+ return;
+
+ formToSubmit = event.target;
+ event.preventDefault();
+ CS_doPost("/hosting/tokenRefresh.do", gotXSRFToken,
+ {form_token: formToken,
+ form_token_path: formTokenPath});
+}
+
+/**
+ * If we got a new XSRF token from the server, use it to actually
+ * submit the form that the user wanted to submit.
+ */
+function gotXSRFToken(event) {
+ var xhr = event.target;
+ if (xhr.readyState != 4 || xhr.status != 200)
+ return;
+ var resp = CS_parseJSON(xhr);
+ var freshFormToken = resp["form_token"];
+ var tokenFields = document.querySelectorAll("input[name=token]");
+ for (var i = 0; i < tokenFields.length; ++i) {
+ tokenFields[i].value = freshFormToken;
+ }
+ if (formToSubmit) {
+ formToSubmit.submit();
+ }
+}
diff --git a/appengine/monorail/static/js/framework/framework-ajax_test.js b/appengine/monorail/static/js/framework/framework-ajax_test.js
new file mode 100644
index 0000000..6193692
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-ajax_test.js
@@ -0,0 +1,37 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview Tests for framework-ajax.js.
+ */
+
+var CS_env;
+
+function setUp() {
+ CS_env = {'token': 'd34db33f'};
+}
+
+function testPostData() {
+ assertEquals(
+ 'token=d34db33f',
+ CS_postData({}));
+ assertEquals(
+ 'token=d34db33f',
+ CS_postData({}, true));
+ assertEquals(
+ '',
+ CS_postData({}, false));
+ assertEquals(
+ 'a=5&b=foo&token=d34db33f',
+ CS_postData({a: 5, b: 'foo'}));
+
+ var unescaped = {};
+ unescaped['f oo?'] = 'b&ar';
+ assertEquals(
+ 'f%20oo%3F=b%26ar',
+ CS_postData(unescaped, false));
+}
diff --git a/appengine/monorail/static/js/framework/framework-cues.js b/appengine/monorail/static/js/framework/framework-cues.js
new file mode 100644
index 0000000..beaa280
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-cues.js
@@ -0,0 +1,47 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview Simple functions for dismissible on-page help ("cues").
+ */
+
+/**
+ * Dismisses the cue. This both updates the DOM and hits the server to
+ * record the fact that the user has dismissed it, so that it won't
+ * be shown again.
+ *
+ * If no security token is present, only the DOM is updated and
+ * nothing is recorded on the server.
+ *
+ * @param {string} cueId The identifier of the cue to hide.
+ * @return {boolean} false to cancel any event.
+ */
+function CS_dismissCue(cueId) {
+ $('cue').style.display = 'none';
+
+ if (CS_env.token) {
+ CS_setCue(cueId);
+ }
+ return false;
+}
+
+/**
+ * Function to communicate with the server to record the fact that the
+ * user has dismissed a cue. This just passes an object through to the
+ * cues servlet as key-value pairs.
+ *
+ * @param {string} cueId The identifier of the cue to hide.
+ */
+function CS_setCue(cueId) {
+ var setCueUrl = '/hosting/cues.do';
+
+ // Ignore the response, since we can't do anything about failures.
+ CS_doPost(setCueUrl, null, {'cue_id': cueId});
+}
+
+// Exports
+_CS_dismissCue = CS_dismissCue;
diff --git a/appengine/monorail/static/js/framework/framework-display.js b/appengine/monorail/static/js/framework/framework-display.js
new file mode 100644
index 0000000..9ed9c55
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-display.js
@@ -0,0 +1,139 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * Functions used by the Project Hosting to control the display of
+ * elements on the page, rollovers, and popup menus.
+ *
+ * Most of these functions are extracted from dit-display.js
+ */
+
+
+/**
+ * Hide the HTML element with the given ID.
+ * @param {string} id The HTML element ID.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_hideID(id) {
+ $(id).style.display = 'none';
+ return false;
+}
+
+
+/**
+ * Show the HTML element with the given ID.
+ * @param {string} id The HTML element ID.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_showID(id) {
+ $(id).style.display = '';
+ return false;
+}
+
+
+/**
+ * Hide the given HTML element.
+ * @param {Element} el The HTML element.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_hideEl(el) {
+ el.style.display = 'none';
+ return false;
+}
+
+
+/**
+ * Show the given HTML element.
+ * @param {Element} el The HTML element.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_showEl(el) {
+ el.style.display = '';
+ return false;
+}
+
+
+/**
+ * Show one element instead of another. That is to say, show a new element and
+ * hide an old one. Usually the element is the element that the user clicked
+ * on with the intention of "expanding it" to access the new element.
+ * @param {string} newID The ID of the HTML element to show.
+ * @param {Element} oldEl The HTML element to hide.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_showInstead(newID, oldEl) {
+ $(newID).style.display = '';
+ oldEl.style.display = 'none';
+ return false;
+}
+
+/**
+ * Toggle the open/closed state of a section of the page. As a result, CSS
+ * rules will make certain elements displayed and other elements hidden. The
+ * section is some HTML element that encloses the element that the user clicked
+ * on.
+ * @param {Element} el The element that the user clicked on.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_toggleHidden(el) {
+ while (el) {
+ if (el.classList.contains('closed')) {
+ el.classList.remove('closed');
+ el.classList.add('opened');
+ return false;
+ }
+ if (el.classList.contains('opened')) {
+ el.classList.remove('opened');
+ el.classList.add('closed');
+ return false;
+ }
+ el = el.parentNode;
+ }
+}
+
+
+/**
+ * Toggle the expand/collapse state of a section of the page. As a result, CSS
+ * rules will make certain elements displayed and other elements hidden. The
+ * section is some HTML element that encloses the element that the user clicked
+ * on.
+ * TODO(jrobbins): eliminate redundancy with function above.
+ * @param {Element} el The element that the user clicked on.
+ * @return {boolean} Always returns false to cancel the browser event
+ * if used as an event handler.
+ */
+function CS_toggleCollapse(el) {
+ while (el) {
+ if (el.classList.contains('collapse')) {
+ el.classList.remove('collapse');
+ el.classList.add('expand');
+ return false;
+ }
+ if (el.classList.contains('expand')) {
+ el.classList.remove('expand');
+ el.classList.add('collapse');
+ return false;
+ }
+ el = el.parentNode;
+ }
+}
+
+
+// Exports
+_hideID = CS_hideID;
+_showID = CS_showID;
+_hideEl = CS_hideEl;
+_showEl = CS_showEl;
+_showInstead = CS_showInstead;
+_toggleHidden = CS_toggleHidden;
+_toggleCollapse = CS_toggleCollapse;
diff --git a/appengine/monorail/static/js/framework/framework-menu.js b/appengine/monorail/static/js/framework/framework-menu.js
new file mode 100644
index 0000000..14d4e15
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-menu.js
@@ -0,0 +1,583 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview This file represents a standalone, reusable drop down menu
+ * widget that can be attached to any element on a given page. It supports
+ * multiple instances of the widget on a page. It has no dependencies. Usage
+ * is as simple as creating a new Menu object and supplying it with a target
+ * element.
+ */
+
+/**
+ * The entry point and constructor for the Menu object. Creating
+ * a valid instance of this object will insert a drop down menu
+ * near the element supplied as the target, attach all the necessary
+ * events and insert the necessary elements on the page.
+ *
+ * @param {Element} target the target element on the page to which
+ * the drop down menu will be placed near.
+ * @param {Function=} opt_onShow function to execute every time the
+ * menu is made visible, most likely through a click on the target.
+ * @constructor
+ */
+var Menu = function(target, opt_onShow) {
+ this.iid = Menu.instance.length;
+ Menu.instance[this.iid] = this;
+ this.target = target;
+ this.onShow = opt_onShow || null;
+
+ // An optional trigger element on the page that can be used to trigger
+ // the drop-down. Currently hard-coded to be the same as the target element.
+ this.trigger = target;
+ this.items = [];
+ this.onOpenEvents = [];
+ this.menu = this.createElement('div', 'menuDiv instance' + this.iid);
+ this.targetId = this.target.getAttribute('id');
+ var menuId = (this.targetId != null) ?
+ 'menuDiv-' + this.targetId : 'menuDiv-instance' + this.iid;
+ this.menu.setAttribute('id', menuId);
+ this.menu.role = 'listbox';
+ this.hide();
+ this.addCategory('default');
+ this.addEvent(this.trigger, 'click', this.wrap(this.toggle));
+ this.addEvent(window, 'resize', this.wrap(this.adjustSizeAndLocation));
+
+ // Hide the menu if a user clicks outside the menu widget
+ this.addEvent(document, 'click', this.wrap(this.hide));
+ this.addEvent(this.menu, 'click', this.stopPropagation());
+ this.addEvent(this.trigger, 'click', this.stopPropagation());
+};
+
+// A reference to the element or node that the drop down
+// will appear next to
+Menu.prototype.target = null;
+
+// Element ID of the target. ID will be assigned to the newly created
+// menu div based on the target ID. A default ID will be
+// assigned If there is no ID on the target.
+Menu.prototype.targetId = null;
+
+/**
+ * A reference to the element or node that will trigger
+ * the drop down to appear. If not specified, this value
+ * will be the same as <Menu Instance>.target
+ * @type {Element}
+ */
+Menu.prototype.trigger = null;
+
+// A reference to the event type that will "open" the
+// menu div. By default this is the (on)click method.
+Menu.prototype.triggerType = null;
+
+// A reference to the element that will appear when the
+// trigger is clicked.
+Menu.prototype.menu = null;
+
+/**
+ * Function to execute every time the menu is made shown.
+ * @type {Function}
+ */
+Menu.prototype.onShow = null;
+
+// A list of category divs. By default these categories
+// are set to display none until at least one element
+// is placed within them.
+Menu.prototype.categories = null;
+
+// An id used to track timed intervals
+Menu.prototype.thread = -1;
+
+// The static instance id (iid) denoting which menu in the
+// list of Menu.instance items is this instantiated object.
+Menu.prototype.iid = -1;
+
+// A counter to indicate the number of items added with
+// addItem(). After 5 items, a height is set on the menu
+// and a scroll bar will appear.
+Menu.prototype.items = null;
+
+// A flag to detect whether or not a scroll bar has been added
+Menu.prototype.scrolls = false;
+
+// onOpen event handlers; each function in this list will
+// be executed and passed the executing instance as a
+// parameter before the menu is to be displayed.
+Menu.prototype.onOpenEvents = null;
+
+/**
+ * An extended short-cut for document.createElement(); this
+ * method allows the creation of an element, the assignment
+ * of one or more class names and the ability to set the
+ * content of the created element all with one function call.
+ * @param {string} element name of the element to create. Examples would
+ * be 'div' or 'a'.
+ * @param {string} opt_className an optional string to assign to the
+ * newly created element's className property.
+ * @param {string|Element} opt_content either a snippet of HTML or a HTML
+ * element that is to be appended to the newly created element.
+ * @return {Element} a reference to the newly created element.
+ */
+Menu.prototype.createElement = function(element, opt_className, opt_content) {
+ var div = document.createElement(element);
+ div.className = opt_className;
+ if (opt_content) {
+ this.append(opt_content, div);
+ }
+ return div;
+};
+
+/**
+ * Uses a fairly browser agnostic approach to applying a callback to
+ * an element on the page.
+ *
+ * @param {Element|EventTarget} element a reference to an element on the page to
+ * which to attach and event.
+ * @param {string} eventType a browser compatible event type as a string
+ * without the sometimes assumed on- prefix. Examples: 'click',
+ * 'mousedown', 'mouseover', etc...
+ * @param {Function} callback a function reference to invoke when the
+ * the event occurs.
+ */
+Menu.prototype.addEvent = function(element, eventType, callback) {
+ if (element.addEventListener) {
+ element.addEventListener(eventType, callback, false);
+ } else {
+ try {
+ element.attachEvent('on' + eventType, callback);
+ } catch (e) {
+ element['on' + eventType] = callback;
+ }
+ }
+};
+
+/**
+ * Similar to addEvent, this provides a specialized handler for onOpen
+ * events that apply to this instance of the Menu class. The supplied
+ * callbacks are appended to an internal array and called in order
+ * every time the menu is opened. The array can be accessed via
+ * menuInstance.onOpenEvents.
+ */
+Menu.prototype.addOnOpen = function(eventCallback) {
+ var eventIndex = this.onOpenEvents.length;
+ this.onOpenEvents.push(eventCallback);
+ return eventIndex;
+};
+
+/**
+ * Used throughout the code, this method wraps any supplied function
+ * in a closure that calls the supplied function in the context of either
+ * the optional thisObj parameter or instance of the menu this function
+ * is called from.
+ * @param {Function} callback the function to wrap and embed context
+ * within.
+ * @param {Object} opt_thisObj an alternate 'this' object to use instead
+ * of this instance of Menu.
+ */
+Menu.prototype.wrap = function(callback, opt_thisObj) {
+ var closured_callback = callback;
+ var this_object = opt_thisObj || this;
+ return (function() {
+ closured_callback.apply(this_object);
+ });
+};
+
+/**
+ * This method will create a div with the classes .menuCategory and the
+ * name of the category as supplied in the first parameter. It then, if
+ * a title is supplied, creates a title div and appends it as well. The
+ * optional title is styled with the .categoryTitle and category name
+ * class.
+ *
+ * Categories are stored within the menu object instance for programmatic
+ * manipulation in the array, menuInstance.categories. Note also that this
+ * array is doubly linked insofar as that the category div can be accessed
+ * via its index in the array as well as by instance.categories[category]
+ * where category is the string name supplied when creating the category.
+ *
+ * @param {string} category the string name used to create the category;
+ * used as both a class name and a key into the internal array. It
+ * must be a valid JavaScript variable name.
+ * @param {string|Element} opt_title this optional field is used to visibly
+ * denote the category title. It can be either HTML or an element.
+ * @return {Element} the newly created div.
+ */
+Menu.prototype.addCategory = function(category, opt_title) {
+ this.categories = this.categories || [];
+ var categoryDiv = this.createElement('div', 'menuCategory ' + category);
+ categoryDiv._categoryName = category;
+ if (opt_title) {
+ var categoryTitle = this.createElement('b', 'categoryTitle ' +
+ category, opt_title);
+ categoryTitle.style.display = 'block';
+ this.append(categoryTitle);
+ categoryDiv._categoryTitle = categoryTitle;
+ }
+ this.append(categoryDiv);
+ this.categories[this.categories.length] = this.categories[category] =
+ categoryDiv;
+
+ return categoryDiv;
+};
+
+/**
+ * This method removes the contents of a given category but does not
+ * remove the category itself.
+ */
+Menu.prototype.emptyCategory = function(category) {
+ if (!this.categories[category]) {
+ return;
+ }
+ var div = this.categories[category];
+ for (var i = div.childNodes.length - 1; i >= 0; i--) {
+ div.removeChild(div.childNodes[i]);
+ }
+};
+
+/**
+ * This function is the most drastic of the cleansing functions; it removes
+ * all categories and all menu items and all HTML snippets that have been
+ * added to this instance of the Menu class.
+ */
+Menu.prototype.clear = function() {
+ for (var i = 0; i < this.categories.length; i++) {
+ // Prevent memory leaks
+ this.categories[this.categories[i]._categoryName] = null;
+ }
+ this.items.splice(0, this.items.length);
+ this.categories.splice(0, this.categories.length);
+ this.categories = [];
+ this.items = [];
+ for (var i = this.menu.childNodes.length - 1; i >= 0; i--) {
+ this.menu.removeChild(this.menu.childNodes[i]);
+ }
+};
+
+/**
+ * Passed an instance of a menu item, it will be removed from the menu
+ * object, including any residual array links and possible memory leaks.
+ * @param {Element} item a reference to the menu item to remove.
+ * @return {Element} returns the item removed.
+ */
+Menu.prototype.removeItem = function(item) {
+ var result = null;
+ for (var i = 0; i < this.items.length; i++) {
+ if (this.items[i] == item) {
+ result = this.items[i];
+ this.items.splice(i, 1);
+ }
+ // Renumber
+ this.items[i].item._index = i;
+ }
+ return result;
+};
+
+/**
+ * Removes a category from the menu element and all of its children thus
+ * allowing the Element to be collected by the browsers VM.
+ * @param {string} category the name of the category to retrieve and remove.
+ */
+Menu.prototype.removeCategory = function(category) {
+ var div = this.categories[category];
+ if (!div || !div.parentNode) {
+ return;
+ }
+ if (div._categoryTitle) {
+ div._categoryTitle.parentNode.removeChild(div._categoryTitle);
+ }
+ div.parentNode.removeChild(div);
+ for (var i = 0; i < this.categories.length; i++) {
+ if (this.categories[i] === div) {
+ this.categories[this.categories[i]._categoryName] = null;
+ this.categories.splice(i, 1);
+ return;
+ }
+ }
+ for (var i = 0; i < div.childNodes.length; i++) {
+ if (div.childNodes[i]._index) {
+ this.items.splice(div.childNodes[i]._index, 1);
+ } else {
+ this.removeItem(div.childNodes[i]);
+ }
+ }
+};
+
/**
 * This heart of the menu population scheme, the addItem function creates
 * a combination of elements that visually form up a menu item. If no
 * category is supplied, the default category is used. The menu item is
 * an <a> tag with the class .menuItem. The menu item is directly styled
 * as a block element. Other than that, all styling should be done via an
 * external CSS definition.
 *
 * @param {string|Element} html_or_element a string of HTML text or a
 * HTML element denoting the contents of the menu item.
 * @param {string} opt_href the href of the menu item link. This is
 * the most direct way of defining the menu items function.
 * [Default: '#'].
 * @param {string} opt_category the category string name of the category
 * to append the menu item to. If the category doesn't exist, one will
 * be created. [Default: 'default'].
 * @param {string} opt_title used when creating a new category and is
 * otherwise ignored completely. It is also ignored when supplied if
 * the named category already exists.
 * @return {Element} returns the element that was created.
 */
Menu.prototype.addItem = function(html_or_element, opt_href, opt_category,
    opt_title) {
  // Resolve the destination category, creating it on demand; no category
  // argument means the default category.
  var category = opt_category ? (this.categories[opt_category] ||
      this.addCategory(opt_category, opt_title)) :
      this.categories['default'];
  // Only an explicitly falsy href ('' or null) yields inert text; an
  // omitted href defaults to '#' and still produces a link.
  var menuHref = (opt_href == undefined ? '#' : opt_href);
  var menuItem = undefined;
  if (menuHref) {
    menuItem = this.createElement('a', 'menuItem', html_or_element);
  } else {
    menuItem = this.createElement('span', 'menuText', html_or_element);
  }
  // Plain-text form of the item, stored alongside it in this.items.
  var itemText = typeof html_or_element == 'string' ? html_or_element :
      html_or_element.innerText || 'ERROR';

  menuItem.style.display = 'block';
  if (menuHref) {
    menuItem.setAttribute('href', menuHref);
  }
  // Remember the item's position in this.items for later removal.
  menuItem._index = this.items.length;
  // NOTE(review): this sets a plain JS property named 'role', not the ARIA
  // attribute; if ARIA was intended it needs setAttribute('role', 'option').
  menuItem.role = 'option';
  this.append(menuItem, category);
  this.items[this.items.length] = {item: menuItem, text: itemText};

  return menuItem;
};
+
/**
 * Appends an <hr> separator to the menu, creating the target category on
 * demand exactly like addItem().
 * @param {string} opt_category name of the category to append to; created
 * if missing. [Default: 'default'].
 * @param {string} opt_title title used only when a new category is
 * created; ignored otherwise.
 */
Menu.prototype.addSeparator = function(opt_category, opt_title) {
  var target;
  if (opt_category) {
    target = this.categories[opt_category] ||
             this.addCategory(opt_category, opt_title);
  } else {
    target = this.categories['default'];
  }
  this.append(this.createElement('hr', 'menuSeparator'), target);
};
+
/**
 * This method performs all the dirty work of positioning the menu. It is
 * responsible for dynamic sizing, insertion and deletion of scroll bars
 * and calculation of offscreen width considerations.
 */
Menu.prototype.adjustSizeAndLocation = function() {
  var style = this.menu.style;
  style.position = 'absolute';

  // Hide empty categories and tag the first visible one with the 'first'
  // class (stripping any stale 'first' markers beforehand).
  var firstCategory = null;
  for (var i = 0; i < this.categories.length; i++) {
    this.categories[i].className = this.categories[i].className.
      replace(/ first/, '');
    if (this.categories[i].childNodes.length == 0) {
      this.categories[i].style.display = 'none';
    } else {
      this.categories[i].style.display = '';
      if (!firstCategory) {
        firstCategory = this.categories[i];
        firstCategory.className += ' first';
      }
    }
  }

  var alreadyVisible = style.display != 'none' &&
    style.visibility != 'hidden';
  var docElemWidth = document.documentElement.clientWidth;
  var docElemHeight = document.documentElement.clientHeight;
  // NOTE(review): by operator precedence these parse as
  // (innerWidth || (docElemWidth && docElemWidth > 0)) ? docElemWidth : ...,
  // so when window.innerWidth is set the documentElement size is still the
  // one used -- confirm that is intended.
  var pageSize = {
    w: (window.innerWidth || docElemWidth && docElemWidth > 0 ?
        docElemWidth : document.body.clientWidth) || 1,
    h: (window.innerHeight || docElemHeight && docElemHeight > 0 ?
        docElemHeight : document.body.clientHeight) || 1
  };
  var targetPos = this.find(this.target);
  var targetSize = {w: this.target.offsetWidth,
                    h: this.target.offsetHeight};
  var menuSize = {w: this.menu.offsetWidth, h: this.menu.offsetHeight};

  // A hidden menu reports zero offset sizes, so measure it while invisibly
  // displayed, then restore the previous display/visibility state.
  if (!alreadyVisible) {
    var oldVisibility = style.visibility;
    var oldDisplay = style.display;
    style.visibility = 'hidden';
    style.display = '';
    style.height = '';
    style.width = '';
    menuSize = {w: this.menu.offsetWidth, h: this.menu.offsetHeight};
    style.display = oldDisplay;
    style.visibility = oldVisibility;
  }

  // Cap the menu at 80% of the page height and scroll the overflow.
  var addScroll = (this.menu.offsetHeight / pageSize.h) > 0.8;
  if (addScroll) {
    menuSize.h = parseInt((pageSize.h * 0.8), 10);
    style.height = menuSize.h + 'px';
    style.overflowX = 'hidden';
    style.overflowY = 'auto';
  } else {
    style.height = style.overflowY = style.overflowX = '';
  }

  // Anchor the menu immediately below its target element.
  style.top = (targetPos.y + targetSize.h) + 'px';
  style.left = targetPos.x + 'px';

  // Enforce a minimum width; widen by 13px to make room for the scroll bar.
  if (menuSize.w < 175) {
    style.width = '175px';
  }

  if (addScroll) {
    style.width = parseInt(style.width, 10) + 13 + 'px';
  }

  // If the menu would run off the right page edge, right-align it with the
  // target instead.
  if ((targetPos.x + menuSize.w) > pageSize.w) {
    style.left = targetPos.x - (menuSize.w - targetSize.w) + 'px';
  }
};
+
+
/**
 * This function is used heavily, internally. Strings are concatenated onto
 * the container's innerText; elements are inserted via appendChild(). The
 * container is opt_target when supplied (resolved through this.categories
 * when opt_target is the name of an existing category), otherwise the menu
 * div for this instance.
 *
 * @param {string|Element} text_or_element the html or element to insert
 * into opt_target.
 * @param {Element} opt_target the target element it should be appended to.
 */
Menu.prototype.append = function(text_or_element, opt_target) {
  var container;
  if (typeof opt_target == 'string' && this.categories[opt_target]) {
    container = this.categories[opt_target];
  } else {
    container = opt_target || this.menu;
  }
  if (typeof text_or_element == 'string') {
    container.innerText += text_or_element;
  } else {
    container.appendChild(text_or_element);
  }
};
+
/**
 * Mouseover handler: cancels any pending delayed hide so the menu stays
 * open, and re-runs show() when the menu is currently visible.
 */
Menu.prototype.over = function() {
  // Cancel the delayed hide scheduled by out(), if one is pending.
  if (this.thread != -1) {
    clearTimeout(this.thread);
    this.thread = -1;
  }
  // If the menu is not hidden, run show() again to refresh it.
  if (this.menu.style.display != 'none') {
    this.show();
  }
};
+
/**
 * Mouseout handler: schedules the menu to hide shortly, replacing any
 * previously scheduled hide.
 */
Menu.prototype.out = function() {
  if (this.thread != -1) {
    clearTimeout(this.thread);
  }
  // Delay the hide so brief excursions off the menu do not dismiss it.
  this.thread = setTimeout(this.wrap(this.hide), 400);
};
+
/**
 * Returns an event handler that stops propagation of the event it
 * receives, covering both the legacy cancelBubble flag and the DOM
 * stopPropagation() method.
 */
Menu.prototype.stopPropagation = function() {
  return function(e) {
    var evt = e || window.event;
    evt.cancelBubble = true;  // legacy IE bubbling flag
    if (evt.stopPropagation) {
      evt.stopPropagation();
    }
  };
};
+
/**
 * Toggles the menu between hidden and shown.
 */
Menu.prototype.toggle = function() {
  var isHidden = this.menu.style.display == 'none';
  if (isHidden) {
    this.show();
  } else {
    this.hide();
  }
};
+
/**
 * Makes the menu visible, then calls the user-supplied onShow callback.
 */
Menu.prototype.show = function() {
  // Only act when the menu is not already shown (display '' means shown).
  if (this.menu.style.display != '') {
    // Let registered open-listeners run before the menu appears.
    for (var i = 0; i < this.onOpenEvents.length; i++) {
      this.onOpenEvents[i].call(null, this);
    }

    // Invisibly show it first so adjustSizeAndLocation() can measure it.
    this.menu.style.visibility = 'hidden';
    this.menu.style.display = '';
    this.adjustSizeAndLocation();
    // Drop focus from the triggering link so no outline lingers on it.
    if (this.trigger.nodeName && this.trigger.nodeName == 'A') {
      this.trigger.blur();
    }
    this.menu.style.visibility = 'visible';

    // Hide every other registered menu; only one is open at a time.
    for (var i = 0; i < Menu.instance.length; i++) {
      var menuInstance = Menu.instance[i];
      if (menuInstance != this) {
        menuInstance.hide();
      }
    }

    if (this.onShow) {
      this.onShow();
    }
  }
};
+
/**
 * Makes the menu invisible. Counterpart of show(); fires no callbacks.
 */
Menu.prototype.hide = function() {
  this.menu.style.display = 'none';
};
+
/**
 * Computes the page offset of an element by summing offsetLeft/offsetTop
 * up the offsetParent chain, stopping at the first ancestor that is
 * explicitly positioned 'relative' or 'absolute' (or has no style).
 * @param {Element} element the element to locate.
 * @return {Object} an {x, y} pair of pixel offsets.
 */
Menu.prototype.find = function(element) {
  var x = 0;
  var y = 0;
  if (element.offsetParent) {
    while (element) {
      x += element.offsetLeft;
      y += element.offsetTop;
      element = element.offsetParent;
      if (!element || !element.style ||
          element.style.position == 'relative' ||
          element.style.position == 'absolute') {
        break;
      }
    }
  }
  return {x: x, y: y};
};
+
/**
 * A static array of object instances for global reference.
 * show() iterates it to close every other open menu.
 * @type {Array.<Menu>}
 */
Menu.instance = [];
diff --git a/appengine/monorail/static/js/framework/framework-myprojects.js b/appengine/monorail/static/js/framework/framework-myprojects.js
new file mode 100644
index 0000000..8979162
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-myprojects.js
@@ -0,0 +1,131 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview This file initializes the "My favorites" drop down menu in the
+ * user bar. It utilizes the menu widget defined in framework-menu.js.
+ */
+
/** @type {Menu} Drop-down menu instance; unset when the page has no user bar. */
var myprojects;

// Build the menu only when the projects drop-down target is on the page.
(function() {
  var target = document.getElementById('projects-dropdown');

  if (!target) {
    return;
  }

  myprojects = new Menu(target, function() {});

  // Populate on page load and refresh every time the menu is opened.
  myprojects.addEvent(window, 'load', CS_updateProjects);
  myprojects.addOnOpen(CS_updateProjects);
  // Reparent the menu under <body> so it positions against the page.
  myprojects.addEvent(window, 'load', function() {
    document.body.appendChild(myprojects.menu);
  });
})();
+
/**
 * Grabs the list of logged in user's projects to populate the "My favorites"
 * drop down menu.
 */
function CS_updateProjects() {
  if (!myprojects) return;
  // Only ask the server when a request token exists, to prevent XSRF
  // leaking of user project lists.
  // NOTE(review): the token is checked but not passed here; presumably
  // CS_doPost attaches it from CS_env itself -- verify.
  if (CS_env.token) {
    var postUrl = '/hosting/projects.do';
    CS_doPost(postUrl, CS_updateProjectsCallback, {});
  } else {
    // No token (signed out): render the signed-out menu contents directly.
    CS_updateProjectsCallback(null);
  }
}
+
/**
 * Updates the drop down menu based on the JSON data received.
 * @param {event} event with xhr Response with JSON data of list of projects,
 *     or null when the user is signed out.
 */
function CS_updateProjectsCallback(event) {
  var xhr = event ? event.target : null;
  // Grab and show projects if user is signed in
  if (xhr) {
    // Ignore incomplete or failed requests.
    if (xhr.readyState != 4 || xhr.status != 200)
      return;
    var projects = [];
    var starredProjects = [];

    var json = CS_parseJSON(xhr);
    // Flatten membership categories into one list; starred projects are
    // kept separate so they get their own menu section.
    for (var category in json) {
      switch (category) {
        case 'contributorto':
        case 'memberof':
        case 'ownerof':
          for (var i = 0; i < json[category].length; i++) {
            projects.push(json[category][i]);
          }
          break;

        case 'starred_projects':
          for (var i = 0; i < json[category].length; i++) {
            starredProjects.push(json[category][i]);
          }
          break;

        case 'error':
          return;

        default:
          break;
      }
    }

    myprojects.clear();

    projects.sort();
    for (var i = 0; i < projects.length; i++) {
      var url = '/p/' + projects[i] + '/';
      myprojects.addItem(projects[i], url, 'projects', 'Projects');
    }

    starredProjects.sort();
    for (var i = 0; i < starredProjects.length; i++) {
      var url = '/p/' + starredProjects[i] + '/';
      myprojects.addItem(
          starredProjects[i], url, 'starred_projects', 'Starred projects');
    }

    if (projects.length == 0 && starredProjects.length == 0) {
      // If user has no project memberships then add default control.
      CS_addDefaultControl();
    } else {
      // If there is at least one project membership then add an
      // 'All projects' link that goes to hosting/
      myprojects.addCategory('---', '---');
      myprojects.addItem('All projects', '/hosting/', '---');
    }

  // Otherwise, ask the user to sign in
  } else {
    myprojects.clear();

    myprojects.addItem(
        'Sign in to see your favorites',
        CS_env['login_url'],
        'controls');

    CS_addDefaultControl();
  }
}
+
/**
 * Appends the default controls -- a separator followed by a
 * "Find projects..." link to /hosting/ -- to the bottom of the
 * "My favorites" menu.
 */
function CS_addDefaultControl() {
  myprojects.addSeparator('controls', '');
  myprojects.addItem('Find projects...', '/hosting/', 'controls');
}
diff --git a/appengine/monorail/static/js/framework/framework-stars.js b/appengine/monorail/static/js/framework/framework-stars.js
new file mode 100644
index 0000000..f0ed16b
--- /dev/null
+++ b/appengine/monorail/static/js/framework/framework-stars.js
@@ -0,0 +1,147 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS functions that support setting and showing
+ * stars throughout Monorail.
+ */
+
+
/**
 * The character to display when the user has starred an issue
 * (BLACK STAR, U+2605).
 */
var TKR_STAR_ON = '\u2605';


/**
 * The character to display when the user has not starred an issue
 * (WHITE STAR, U+2606).
 */
var TKR_STAR_OFF = '\u2606';
+
+
/**
 * Function to toggle the star on an issue. Does both an update of the
 * DOM and hits the server to record the star.
 *
 * @param {Element} el The star <a> element.
 * @param {String} projectName name of the project to be starred, or name of
 *                 the project containing the issue to be starred.
 * @param {Integer} localId number of the issue to be starred.
 * @param {String} userId id of the user to be starred.
 * @param {string} token The security token.
 */
function TKR_toggleStar(el, projectName, localId, userId, token) {
  // The star still displays the old state, so an "off" glyph means the
  // user is turning the star on.
  var starred = (el.textContent.trim() == TKR_STAR_OFF) ? 1 : 0;
  TKR_toggleStarLocal(el);

  // Classify what is being starred; issues win because they carry both a
  // project name and a local id.
  var type;
  if (userId) type = 'users';
  if (projectName) type = 'projects';
  if (projectName && localId) type = 'issues';

  // Declared with var: the original leaked 'url' and 'args' as globals.
  var url;
  var args = {'starred': starred};
  if (type == 'users' || type == 'projects') {
    url = '/hosting/stars.do';
    args['scope'] = type;
    args['item'] = type == 'projects' ? projectName : userId;
  } else {
    url = '/p/' + projectName + '/issues/setstar.do';
    args['id'] = localId;
  }

  // TKR_setStar takes four parameters; the original passed a spurious
  // fifth argument (url again) that was silently ignored.
  TKR_setStar(el, url, args, token);
}
+
+
/**
 * Updates the display state of a star without contacting the server, and
 * optionally mirrors the new state into a hidden form element. Useful for
 * when a user is entering a new issue and wants to set its initial starred
 * state.
 * @param {Element} el Star element.
 * @param {string} opt_formElementId HTML ID of the hidden form element for
 *     stars.
 */
function TKR_toggleStarLocal(el, opt_formElementId) {
  // An "off" glyph means this toggle turns the star on.
  var turningOn = (el.textContent.trim() == TKR_STAR_OFF) ? 1 : 0;

  if (turningOn) {
    el.textContent = TKR_STAR_ON;
    el.style.color = 'cornflowerblue';
    el.title = 'You have starred this item';
  } else {
    el.textContent = TKR_STAR_OFF;
    el.style.color = 'grey';
    el.title = 'Click to star this item';
  }

  if (opt_formElementId) {
    $(opt_formElementId).value = String(turningOn);  // form wants a string
  }
}
+
+
/**
 * Sends the new star state to the server and registers a callback that
 * reconciles the display with the server's response.
 * @param {Element} el The star <a> element.
 * @param {String} url The server URL to post to.
 * @param {Dict} args The arguments to send in the POST request.
 * @param {String} opt_token The security token to send in the request.
 */
function TKR_setStar(el, url, args, opt_token) {
  var onResponse = function(event) {
    TKR_gotSetStar(el, event);
  };
  if (opt_token) {
    CS_doPost(url, onResponse, args, opt_token, url);
  } else {
    CS_doPost(url, onResponse, args);
  }
}
+
+
/**
 * Evaluates the server response after a starring operation completed: if
 * the server's recorded state disagrees with what is displayed, flips the
 * displayed star back.
 * @param {Element} el <a> element containing the star which was clicked.
 * @param {event} event with xhr JSON response from the server.
 */
function TKR_gotSetStar(el, event) {
  var xhr = event.target;
  // Ignore incomplete or failed requests.
  if (xhr.readyState != 4 || xhr.status != 200)
    return;
  var response = CS_parseJSON(xhr);
  var shownAsStarred = (el.textContent.trim() == TKR_STAR_ON) ? 1 : 0;
  if (shownAsStarred != response['starred']) {
    TKR_toggleStarLocal(el);
  }
}
+
+
/**
 * When we show two star icons on the same details page, keep them
 * in sync with each other. And, update a message about starring
 * that is displayed near the issue update form.
 * @param {Element} clickedStar The star that the user clicked on.
 * @param {string} otherStarId ID of the other star icon.
 */
function TKR_syncStarIcons(clickedStar, otherStarId) {
  var otherStar = document.getElementById(otherStarId);
  if (!otherStar) {
    return;
  }
  // Mirror the clicked star's new state onto its twin.
  TKR_toggleStarLocal(otherStar);

  var vote_feedback = document.getElementById('vote_feedback');
  if (!vote_feedback) {
    return;
  }

  vote_feedback.textContent = (clickedStar.textContent == TKR_STAR_OFF) ?
      'Vote for this issue and get email change notifications.' :
      'Your vote has been recorded.';
}
+
+
// Exports: expose the handlers on the global object (presumably under
// underscore-prefixed names safe from compiler renaming -- verify).
_TKR_toggleStar = TKR_toggleStar;
_TKR_toggleStarLocal = TKR_toggleStarLocal;
_TKR_syncStarIcons = TKR_syncStarIcons;
diff --git a/appengine/monorail/static/js/framework/project-name-check.js b/appengine/monorail/static/js/framework/project-name-check.js
new file mode 100644
index 0000000..8e3981d
--- /dev/null
+++ b/appengine/monorail/static/js/framework/project-name-check.js
@@ -0,0 +1,42 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview Functions that support project name checks when
+ * creating a new project.
+ */
+
/**
 * Asks the server whether the proposed project name is acceptable; the
 * response is handled by nameTaken().
 * @param {string} projectName The proposed project name.
 */
function checkProjectName(projectName) {
  CS_doPost(
      '/hosting/createProject/checkProjectName.do',
      nameTaken,
      {'project': projectName});
}
+
/**
 * Evaluates the server response, shows the error message (if any), and
 * enables or disables the submit button accordingly.
 * @param {event} event with xhr server's JSON response to the AJAX request.
 */
function nameTaken(event) {
  var xhr = event.target;
  // Ignore incomplete or failed requests.
  if (xhr.readyState != 4 || xhr.status != 200)
    return;
  var resp = CS_parseJSON(xhr);
  var errorMessage = resp['error_message'];
  document.getElementById('projectnamefeedback').innerText = errorMessage;
  // Disable submission while the name is invalid, and re-enable it once the
  // user fixes the name. The original only ever disabled the button, so a
  // corrected name could never be submitted without a page reload.
  document.getElementById('submit_btn').disabled = (errorMessage != '');
}
+
// Make this function globally available under its underscore-prefixed name.
_CP_checkProjectName = checkProjectName;
diff --git a/appengine/monorail/static/js/graveyard/common.js b/appengine/monorail/static/js/graveyard/common.js
new file mode 100644
index 0000000..ebc9306
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/common.js
@@ -0,0 +1,719 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+//------------------------------------------------------------------------
+// This file contains common utilities and basic javascript infrastructure.
+//
+// Notes:
+// * Press 'D' to toggle debug mode.
+//
+// Functions:
+//
+// - Assertions
+// DEPRECATED: Use assert.js
+// AssertTrue(): assert an expression. Throws an exception if false.
+// Fail(): Throws an exception. (Mark block of code that should be unreachable)
+// AssertEquals(): assert that two values are equal.
+// AssertType(): assert that a value has a particular type
+//
+// - Cookies
+// SetCookie(): Sets a cookie.
+// ExpireCookie(): Expires a cookie.
+// GetCookie(): Gets a cookie value.
+//
+// - Dynamic HTML/DOM utilities
+// MaybeGetElement(): get an element by its id
+// GetElement(): get an element by its id
+// GetParentNode(): Get the parent of an element
+// GetAttribute(): Get attribute value of a DOM node
+// GetInnerHTML(): get the inner HTML of a node
+// SetCssStyle(): Sets a CSS property of a node.
+// GetStyleProperty(): Get CSS property from a style attribute string
+// GetCellIndex(): Get the index of a table cell in a table row
+// ShowElement(): Show/hide element by setting the "display" css property.
+// ShowBlockElement(): Show/hide block element
+// SetButtonText(): Set the text of a button element.
+// AppendNewElement(): Create and append a html element to a parent node.
+// CreateDIV(): Create a DIV element and append to the document.
+// HasClass(): check if element has a given class
+// AddClass(): add a class to an element
+// RemoveClass(): remove a class from an element
+//
// - Window/Screen utilities
+// GetPageOffsetLeft(): get the X page offset of an element
+// GetPageOffsetTop(): get the Y page offset of an element
+// GetPageOffset(): get the X and Y page offsets of an element
+// GetPageOffsetRight() : get X page offset of the right side of an element
// GetPageOffsetBottom(): get Y page offset of the bottom of an element
+// GetScrollTop(): get the vertical scrolling pos of a window.
+// GetScrollLeft(): get the horizontal scrolling pos of a window
+// IsScrollAtEnd(): check if window scrollbar has reached its maximum offset
+// ScrollTo(): scroll window to a position
+// ScrollIntoView(): scroll window so that an element is in view.
+// GetWindowWidth(): get width of a window.
+// GetWindowHeight(): get height of a window
+// GetAvailScreenWidth(): get available screen width
+// GetAvailScreenHeight(): get available screen height
+// GetNiceWindowHeight(): get a nice height for a new browser window.
+// Open{External/Internal}Window(): open a separate window
+// CloseWindow(): close a window
+//
+// - DOM walking utilities
+// AnnotateTerms(): find terms in a node and decorate them with some tag
+// AnnotateText(): find terms in a text node and decorate them with some tag
+//
// - String utilities
+// HtmlEscape(): html escapes a string
+// HtmlUnescape(): remove html-escaping.
+// QuoteEscape(): escape " quotes.
+// CollapseWhitespace(): collapse multiple whitespace into one whitespace.
+// Trim(): trim whitespace on ends of string
+// IsEmpty(): check if CollapseWhiteSpace(String) == ""
+// IsLetterOrDigit(): check if a character is a letter or a digit
+// ConvertEOLToLF(): normalize the new-lines of a string.
+// HtmlEscapeInsertWbrs(): HtmlEscapes and inserts <wbr>s (word break tags)
+// after every n non-space chars and/or after or before certain special chars
+//
+// - TextArea utilities
+// GetCursorPos(): finds the cursor position of a textfield
+// SetCursorPos(): sets the cursor position in a textfield
+//
+// - Array utilities
+// FindInArray(): do a linear search to find an element value.
+// DeleteArrayElement(): return a new array with a specific value removed.
+// CloneObject(): clone an object, copying its values recursively.
+// CloneEvent(): clone an event; cannot use CloneObject because it
+// suffers from infinite recursion
+//
+// - Formatting utilities
+// PrintArray(): used to print/generate HTML by combining static text
+// and dynamic strings.
+// ImageHtml(): create html for an img tag
+// FormatJSLink(): formats a link that invokes js code when clicked.
+// MakeId3(): formats an id that has two id numbers, eg, foo_3_7
+//
+// - Timeouts
+// SafeTimeout(): sets a timeout with protection against ugly JS-errors
+// CancelTimeout(): cancels a timeout with a given ID
+// CancelAllTimeouts(): cancels all timeouts on a given window
+//
+// - Miscellaneous
+// IsDefined(): returns true if argument is not undefined
+//------------------------------------------------------------------------
+
// Browser detection helpers.
// Results of the userAgent substring test are memoized: this gets us a 10x
// benefit in Gecko, 8x in Safari and 4x in MSIE across the browser checks.
var BR_AgentContains_cache_ = {};

/**
 * Returns whether the lowercased userAgent contains the given substring,
 * caching the answer per substring.
 */
function BR_AgentContains_(str) {
  if (!(str in BR_AgentContains_cache_)) {
    BR_AgentContains_cache_[str] =
        (navigator.userAgent.toLowerCase().indexOf(str) != -1);
  }
  return BR_AgentContains_cache_[str];
}
+
/** @return {boolean} whether this is Internet Explorer (MSIE or Trident), excluding Opera. */
function BR_IsIE() {
  var ieAgent = BR_AgentContains_('msie') || BR_AgentContains_('trident');
  return ieAgent && !window.opera;
}
+
/** @return {boolean} whether the user agent identifies as Konqueror. */
function BR_IsKonqueror() {
  var isKonq = BR_AgentContains_('konqueror');
  return isKonq;
}
+
/** @return {boolean} whether this is Safari or Konqueror. */
function BR_IsSafari() {
  if (BR_AgentContains_('safari')) {
    return true;
  }
  return BR_IsKonqueror();
}
+
/** @return {boolean} whether this is a Mozilla-family browser that is neither IE nor Safari. */
function BR_IsNav() {
  if (BR_IsIE()) {
    return false;
  }
  if (BR_IsSafari()) {
    return false;
  }
  return BR_AgentContains_('mozilla');
}
+
// Keyboard event keyCode values used by UI event handlers.
var BACKSPACE_KEYCODE = 8;
var COMMA_KEYCODE = 188; // ',' key
var DEBUG_KEYCODE = 68; // 'D' key
var DELETE_KEYCODE = 46;
var DOWN_KEYCODE = 40; // DOWN arrow key
var ENTER_KEYCODE = 13; // ENTER key
var ESC_KEYCODE = 27; // ESC key
var LEFT_KEYCODE = 37; // LEFT arrow key
var RIGHT_KEYCODE = 39; // RIGHT arrow key
var SPACE_KEYCODE = 32; // space bar
var TAB_KEYCODE = 9; // TAB key
var UP_KEYCODE = 38; // UP arrow key
var SHIFT_KEYCODE = 16;
var PAGE_DOWN_KEYCODE = 34;
var PAGE_UP_KEYCODE = 33;

// Input length limits.
var MAX_EMAIL_ADDRESS_LENGTH = 320; // 64 + '@' + 255
var MAX_SIGNATURE_LENGTH = 1000; // 1000 chars of maximum signature
+
+//------------------------------------------------------------------------
+// Assertions
+// DEPRECATED: Use assert.js
+//------------------------------------------------------------------------
/**
 * DEPRECATED: Use assert.js
 *
 * Throws an Error carrying the given message (defaulting to
 * 'Assertion Failed'), or the bare message value on very old engines
 * that lack the Error constructor.
 * @param {string} msg failure message.
 */
function raise(msg) {
  if (typeof Error == 'undefined') {
    throw (msg);
  }
  throw new Error(msg || 'Assertion Failed');
}
+
/**
 * DEPRECATED: Use assert.js
 *
 * Fail() is useful for marking logic paths that should
 * not be reached. For example, if you have a class that uses
 * ints for enums:
 *
 * MyClass.ENUM_FOO = 1;
 * MyClass.ENUM_BAR = 2;
 * MyClass.ENUM_BAZ = 3;
 *
 * And a switch statement elsewhere in your code that
 * has cases for each of these enums, then you can
 * "protect" your code as follows:
 *
 * switch(type) {
 *   case MyClass.ENUM_FOO: doFooThing(); break;
 *   case MyClass.ENUM_BAR: doBarThing(); break;
 *   case MyClass.ENUM_BAZ: doBazThing(); break;
 *   default:
 *     Fail("No enum in MyClass with value: " + type);
 * }
 *
 * This way, if someone introduces a new value for this enum
 * without noticing this switch statement, then the code will
 * fail if the logic allows it to reach the switch with the
 * new value, alerting the developer that a case should be
 * added to the switch to handle the new value.
 *
 * @param {string} opt_msg to display for failure
 *                 DEFAULT: "Assertion failed"
 */
function Fail(opt_msg) {
  opt_msg = opt_msg || 'Assertion failed';
  // NOTE(review): IsDefined(DumpError) throws a ReferenceError if DumpError
  // was never declared anywhere; presumably a debug script declares it --
  // verify.
  if (IsDefined(DumpError)) DumpError(opt_msg + '\n');
  raise(opt_msg);
}
+
/**
 * DEPRECATED: Use assert.js
 *
 * Asserts that an expression is true (non-zero and non-null).
 *
 * Note that it is critical not to pass logic
 * with side-effects as the expression for AssertTrue
 * because if the assertions are removed by the
 * JSCompiler, then the expression will be removed
 * as well, in which case the side-effects will
 * be lost. So instead of this:
 *
 *  AssertTrue( criticalComputation() );
 *
 * Do this:
 *
 *  var result = criticalComputation();
 *  AssertTrue(result);
 *
 * @param expression to evaluate
 * @param {string} opt_msg to display if the assertion fails
 *
 */
function AssertTrue(expression, opt_msg) {
  // Any falsy value (0, '', null, undefined, NaN, false) fails.
  if (!expression) {
    opt_msg = opt_msg || 'Assertion failed';
    Fail(opt_msg);
  }
}
+
/**
 * DEPRECATED: Use assert.js
 *
 * Asserts that a value is of the provided type.
 *
 *   AssertType(6, Number);
 *   AssertType("ijk", String);
 *   AssertType([], Array);
 *   AssertType({}, Object);
 *   AssertType(ICAL_Date.now(), ICAL_Date);
 *
 * @param value
 * @param type A constructor function
 * @param {string} opt_msg to display if the assertion fails
 */
function AssertType(value, type, opt_msg) {
  // For backwards compatibility only: allow a typeof string (e.g. 'number').
  if (typeof value == type) return;

  // Accept either the wrapper type of a primitive or an instanceof match.
  // The == "" comparison deliberately lets the empty string through the
  // truthiness guard.
  if (value || value == "") {
    try {
      if (type == AssertTypeMap[typeof value] || value instanceof type) return;
    } catch (e) { /* failure, type was an illegal argument to instanceof */ }
  }
  // Build a default message naming the constructor when none was supplied.
  var makeMsg = opt_msg === undefined;
  if (makeMsg) {
    if (typeof type == 'function') {
      // Extract the constructor's name from its source text.
      var match = type.toString().match(/^\s*function\s+([^\s\{]+)/);
      if (match) type = match[1];
    }
    opt_msg = "AssertType failed: <" + value + "> not typeof "+ type;
  }
  Fail(opt_msg);
}
+
// Maps typeof strings to their wrapper constructors, for AssertType().
var AssertTypeMap = {
  'string' : String,
  'number' : Number,
  'boolean' : Boolean
};

// Sentinel cookie value; presumably written by ExpireCookie() (listed in
// the header above but not in this chunk) -- verify.
var EXPIRED_COOKIE_VALUE = 'EXPIRED';
+
+
+//------------------------------------------------------------------------
+// Window/screen utilities
+// TODO: these should be renamed (e.g. GetWindowWidth to GetWindowInnerWidth
+// and moved to geom.js)
+//------------------------------------------------------------------------
/**
 * Returns the X page offset of an element, summing offsetLeft along the
 * offsetParent chain.
 */
function GetPageOffsetLeft(el) {
  var x = el.offsetLeft;
  while (el.offsetParent != null) {
    el = el.offsetParent;
    x += el.offsetLeft;
  }
  return x;
}
+
/**
 * Returns the Y page offset of an element, summing offsetTop along the
 * offsetParent chain.
 */
function GetPageOffsetTop(el) {
  var y = el.offsetTop;
  while (el.offsetParent != null) {
    el = el.offsetParent;
    y += el.offsetTop;
  }
  return y;
}
+
/**
 * Returns the {x, y} page offset of an element, summing offsetLeft and
 * offsetTop along the offsetParent chain.
 */
function GetPageOffset(el) {
  var x = el.offsetLeft;
  var y = el.offsetTop;
  while (el.offsetParent != null) {
    el = el.offsetParent;
    x += el.offsetLeft;
    y += el.offsetTop;
  }
  return {x: x, y: y};
}
+
// Get the y position scroll offset of a window.
function GetScrollTop(win) {
  return GetWindowPropertyByBrowser_(win, getScrollTopGetters_);
}

// Per-browser implementations of GetScrollTop, selected by
// GetWindowPropertyByBrowser_().
var getScrollTopGetters_ = {
  ieQuirks_: function(win) {
    return win.document.body.scrollTop;
  },
  ieStandards_: function(win) {
    return win.document.documentElement.scrollTop;
  },
  dom_: function(win) {
    return win.pageYOffset;
  }
};
+
// Get the x position scroll offset of a window.
function GetScrollLeft(win) {
  return GetWindowPropertyByBrowser_(win, getScrollLeftGetters_);
}

// Per-browser implementations of GetScrollLeft, selected by
// GetWindowPropertyByBrowser_().
var getScrollLeftGetters_ = {
  ieQuirks_: function(win) {
    return win.document.body.scrollLeft;
  },
  ieStandards_: function(win) {
    return win.document.documentElement.scrollLeft;
  },
  dom_: function(win) {
    return win.pageXOffset;
  }
};
+
// Scroll so that as far as possible the entire element is in view.
// Alignment constants; presumably consumed by ScrollIntoView() (listed in
// the file header but not present in this chunk) -- verify.
var ALIGN_BOTTOM = 'b';
var ALIGN_MIDDLE = 'm';
var ALIGN_TOP = 't';
+
// Per-browser getters for a window's inner width, selected by
// GetWindowPropertyByBrowser_().
// NOTE(review): a GetWindowWidth() wrapper is listed in the file header but
// does not appear in this chunk -- confirm it exists elsewhere.
var getWindowWidthGetters_ = {
  ieQuirks_: function(win) {
    return win.document.body.clientWidth;
  },
  ieStandards_: function(win) {
    return win.document.documentElement.clientWidth;
  },
  dom_: function(win) {
    return win.innerWidth;
  }
};
+
// Get the inner height of a window.
function GetWindowHeight(win) {
  return GetWindowPropertyByBrowser_(win, getWindowHeightGetters_);
}

// Per-browser implementations of GetWindowHeight, selected by
// GetWindowPropertyByBrowser_().
var getWindowHeightGetters_ = {
  ieQuirks_: function(win) {
    return win.document.body.clientHeight;
  },
  ieStandards_: function(win) {
    return win.document.documentElement.clientHeight;
  },
  dom_: function(win) {
    return win.innerHeight;
  }
};
+
/**
 * Allows the easy use of different getters for IE quirks mode, IE standards
 * mode and fully DOM-compliant browsers.
 *
 * @param win window to get the property for
 * @param getters object with various getters. Invoked with the passed window.
 *                There are three properties:
 *                - ieStandards_: IE 6.0 standards mode
 *                - ieQuirks_: IE 6.0 quirks mode and IE 5.5 and older
 *                - dom_: Mozilla, Safari and other fully DOM compliant browsers
 *
 * @private
 */
function GetWindowPropertyByBrowser_(win, getters) {
  try {
    if (BR_IsSafari()) {
      return getters.dom_(win);
    } else if (!window.opera &&
      "compatMode" in win.document &&
      win.document.compatMode == "CSS1Compat") {
      // Standards-mode document (and not Opera): IE standards getter.
      return getters.ieStandards_(win);
    } else if (BR_IsIE()) {
      return getters.ieQuirks_(win);
    }
  } catch (e) {
    // Ignore for now and fall back to DOM method
  }

  // Default for DOM-compliant browsers, and the fallback on any error.
  return getters.dom_(win);
}
+
/** Returns the available screen width for the given window. */
function GetAvailScreenWidth(win) {
  var screenInfo = win.screen;
  return screenInfo.availWidth;
}
+
/**
 * Returns the distance from the left edge of the screen at which a new
 * window of the given width is horizontally centered in the available
 * screen. Set the new window's distance from the left of the screen equal
 * to this function's return value.
 * @param win window whose screen is used for the computation
 * @param width the width of the new window
 */
function GetCenteringLeft(win, width) {
  var slack = win.screen.availWidth - width;
  return slack >> 1;  // integer halving via arithmetic shift
}
+
/**
 * Returns the distance from the top edge of the screen at which a new
 * window of the given height is vertically centered in the available
 * screen. Set the new window's distance from the top of the screen equal
 * to this function's return value.
 * @param win window whose screen is used for the computation
 * @param height the height of the new window
 */
function GetCenteringTop(win, height) {
  var slack = win.screen.availHeight - height;
  return slack >> 1;  // integer halving via arithmetic shift
}
+
/**
 * Opens a child popup window that has no browser toolbar/decorations.
 * (Copied from caribou's common.js library with small modifications.)
 *
 * @param url the URL for the new window (Note: this will be unique-ified)
 * @param opt_name the name of the new window
 * @param opt_width the width of the new window
 * @param opt_height the height of the new window
 * @param opt_center if true, the new window is centered in the available screen
 * @param opt_hide_scrollbars if true, the window hides the scrollbars
 * @param opt_noresize if true, makes window unresizable
 * @param opt_blocked_msg message warning that the popup has been blocked
 * @return {Window} a reference to the new child window
 */
function Popup(url, opt_name, opt_width, opt_height, opt_center,
    opt_hide_scrollbars, opt_noresize, opt_blocked_msg) {
  // Default the height to 80% of the top window's height, and the width to
  // that height capped at the available screen width.
  if (!opt_height) {
    opt_height = Math.floor(GetWindowHeight(window.top) * 0.8);
  }
  if (!opt_width) {
    opt_width = Math.min(GetAvailScreenWidth(window), opt_height);
  }

  // Assemble the window.open feature string from the options.
  var features = "resizable=" + (opt_noresize ? "no" : "yes") + "," +
                 "scrollbars=" + (opt_hide_scrollbars ? "no" : "yes") + "," +
                 "width=" + opt_width + ",height=" + opt_height;
  if (opt_center) {
    features += ",left=" + GetCenteringLeft(window, opt_width) + "," +
                "top=" + GetCenteringTop(window, opt_height);
  }
  return OpenWindow(window, url, opt_name, features, opt_blocked_msg);
}
+
+/**
+ * Opens a new window. Returns the new window handle. Tries to open the new
+ * window using top.open() first. If that doesn't work, then tries win.open().
+ * If that still doesn't work, prints an alert.
+ * (Copied from caribou's common.js library with small modifications.)
+ *
+ * @param win the parent window from which to open the new child window
+ * @param url the URL for the new window (Note: this will be unique-ified)
+ * @param opt_name the name of the new window
+ * @param opt_features the properties of the new window
+ * @param opt_blocked_msg message warning that the popup has been blocked
+ * @return {Window} a reference to the new child window
+ */
+function OpenWindow(win, url, opt_name, opt_features, opt_blocked_msg) {
+ var newwin = OpenWindowHelper(top, url, opt_name, opt_features);
+ if (!newwin || newwin.closed || !newwin.focus) {
+ newwin = OpenWindowHelper(win, url, opt_name, opt_features);
+ }
+ if (!newwin || newwin.closed || !newwin.focus) {
+ if (opt_blocked_msg) alert(opt_blocked_msg);
+ } else {
+ // Make sure that the window has the focus
+ newwin.focus();
+ }
+ return newwin;
+}
+
+/*
+ * Helper for OpenWindow().
+ * (Copied from caribou's common.js library with small modifications.)
+ */
+function OpenWindowHelper(win, url, name, features) {
+ var newwin;
+ if (features) {
+ newwin = win.open(url, name, features);
+ } else if (name) {
+ newwin = win.open(url, name);
+ } else {
+ newwin = win.open(url);
+ }
+ return newwin;
+}
+
+//------------------------------------------------------------------------
+// String utilities
+//------------------------------------------------------------------------
+// Do html escaping
+var amp_re_ = /&/g;
+var lt_re_ = /</g;
+var gt_re_ = />/g;
+
+// converts multiple ws chars to a single space, and strips
+// leading and trailing ws
+var spc_re_ = /\s+/g;
+var beg_spc_re_ = /^ /;
+var end_spc_re_ = / $/;
+
+var newline_re_ = /\r?\n/g;
+var spctab_re_ = /[ \t]+/g;
+var nbsp_re_ = /\xa0/g;
+
+// URL-decodes the string. We need to specially handle '+'s because
+// the javascript library doesn't properly convert them to spaces
+var plus_re_ = /\+/g;
+
+// Converts any instances of "\r" or "\r\n" style EOLs into "\n" (Line Feed),
+// and also trim the extra newlines and whitespaces at the end.
+var eol_re_ = /\r\n?/g;
+var trailingspc_re_ = /[\n\t ]+$/;
+
+// Converts a string to its canonicalized label form.
+var illegal_chars_re_ = /[ \/(){}&|\\\"\000]/g;
+
+//------------------------------------------------------------------------
+// TextArea utilities
+//------------------------------------------------------------------------
+
+// Gets the cursor pos in a text area. Returns -1 if the cursor pos cannot
+// be determined or if the cursor is out of the textfield.
+function GetCursorPos(win, textfield) {
+ try {
+ if (IsDefined(textfield.selectionEnd)) {
+ // Mozilla directly supports this
+ return textfield.selectionEnd;
+
+ } else if (win.document.selection && win.document.selection.createRange) {
+ // IE doesn't export an accessor for the endpoints of a selection.
+ // Instead, it uses the TextRange object, which has an extremely obtuse
+ // API. Here's what seems to work:
+
+ // (1) Obtain a textfield from the current selection (cursor)
+ var tr = win.document.selection.createRange();
+
+ // Check if the current selection is in the textfield
+ if (tr.parentElement() != textfield) {
+ return -1;
+ }
+
+ // (2) Make a text range encompassing the textfield
+ var tr2 = tr.duplicate();
+ tr2.moveToElementText(textfield);
+
+ // (3) Move the end of the copy to the beginning of the selection
+ tr2.setEndPoint("EndToStart", tr);
+
+ // (4) The span of the textrange copy is equivalent to the cursor pos
+ var cursor = tr2.text.length;
+
+ // Finally, perform a sanity check to make sure the cursor is in the
+ // textfield. IE sometimes screws this up when the window is activated
+ if (cursor > textfield.value.length) {
+ return -1;
+ }
+ return cursor;
+ } else {
+ Debug("Unable to get cursor position for: " + navigator.userAgent);
+
+ // Just return the size of the textfield
+ // TODO: Investigate how to get cursor pos in Safari!
+ return textfield.value.length;
+ }
+ } catch (e) {
+ DumpException(e, "Cannot get cursor pos");
+ }
+
+ return -1;
+}
+
+function SetCursorPos(win, textfield, pos) {
+ if (IsDefined(textfield.selectionEnd) &&
+ IsDefined(textfield.selectionStart)) {
+ // Mozilla directly supports this
+ textfield.selectionStart = pos;
+ textfield.selectionEnd = pos;
+
+ } else if (win.document.selection && textfield.createTextRange) {
+ // IE has textranges. A textfield's textrange encompasses the
+ // entire textfield's text by default
+ var sel = textfield.createTextRange();
+
+ sel.collapse(true);
+ sel.move("character", pos);
+ sel.select();
+ }
+}
+
+//------------------------------------------------------------------------
+// Array utilities
+//------------------------------------------------------------------------
+// Find an item in an array, returns the key, or -1 if not found
+function FindInArray(array, x) {
+ for (var i = 0; i < array.length; i++) {
+ if (array[i] == x) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+// Delete an element from an array
+function DeleteArrayElement(array, x) {
+ var i = 0;
+ while (i < array.length && array[i] != x)
+ i++;
+ array.splice(i, 1);
+}
+
+// Clean up email address:
+// - remove extra spaces
+// - Surround name with quotes if it contains special characters
+// to check if we need " quotes
+// Note: do not use /g in the regular expression, otherwise the
+// regular expression cannot be reusable.
+var specialchars_re_ = /[()<>@,;:\\\".\[\]]/;
+
+//------------------------------------------------------------------------
+// Timeouts
+//
+// It is easy to forget to put a try/catch block around a timeout function,
+// and the result is an ugly user visible javascript error.
+// Also, it would be nice if a timeout associated with a window is
+// automatically cancelled when the user navigates away from that window.
+//
+// When storing timeouts in a window, we can't let that variable be renamed
+// since the window could be top.js, and renaming such a property could
+// clash with any of the variables/functions defined in top.js.
+//------------------------------------------------------------------------
+/**
+ * Sets a timeout safely.
+ * @param win the window object. If null is passed in, then a timeout is set
+ * on the js frame. If the window is closed, or freed, the timeout is
+ *     automatically cancelled
+ * @param fn the callback function: fn(win) will be called.
+ * @param ms number of ms the callback should be called later
+ */
+function SafeTimeout(win, fn, ms) {
+ if (!win) win = window;
+ if (!win._tm) {
+ win._tm = [];
+ }
+ var timeoutfn = SafeTimeoutFunction_(win, fn);
+ var id = win.setTimeout(timeoutfn, ms);
+
+ // Save the id so that it can be removed from the _tm array
+ timeoutfn.id = id;
+
+  // Save the timeout id in the _tm array
+ win._tm[id] = 1;
+
+ return id;
+}
+
+/** Creates a callback function for a timeout*/
+function SafeTimeoutFunction_(win, fn) {
+ var timeoutfn = function() {
+ try {
+ fn(win);
+
+ var t = win._tm;
+ if (t) {
+ delete t[timeoutfn.id];
+ }
+ } catch (e) {
+ DumpException(e);
+ }
+ };
+ return timeoutfn;
+}
+
+//------------------------------------------------------------------------
+// Misc
+//------------------------------------------------------------------------
+// Check if a value is defined
+function IsDefined(value) {
+ return (typeof value) != 'undefined';
+}
+
+function GetKeyCode(event) {
+ var code;
+ if (event.keyCode) {
+ code = event.keyCode;
+ } else if (event.which) {
+ code = event.which;
+ }
+ return code;
+}
diff --git a/appengine/monorail/static/js/graveyard/geom.js b/appengine/monorail/static/js/graveyard/geom.js
new file mode 100644
index 0000000..33b67b6
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/geom.js
@@ -0,0 +1,94 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+// functions for dealing with layout and geometry of page elements.
+// Requires shapes.js
+
+/** returns the bounding box of the given DOM node in document space.
+ *
+ * @param {Element?} obj a DOM node.
+ * @return {Rect?}
+ */
+function nodeBounds(obj) {
+ if (!obj) return null;
+
+ function fixRectForScrolling(r) {
+ // Need to take into account scrolling offset of ancestors (IE already does
+ // this)
+ for (var o = obj.offsetParent;
+ o && o.offsetParent;
+ o = o.offsetParent) {
+ if (o.scrollLeft) {
+ r.x -= o.scrollLeft;
+ }
+ if (o.scrollTop) {
+ r.y -= o.scrollTop;
+ }
+ }
+ }
+
+ var refWindow;
+ if (obj.ownerDocument && obj.ownerDocument.parentWindow) {
+ refWindow = obj.ownerDocument.parentWindow;
+ } else if (obj.ownerDocument && obj.ownerDocument.defaultView) {
+ refWindow = obj.ownerDocument.defaultView;
+ } else {
+ refWindow = window;
+ }
+
+ // IE, Mozilla 3+
+ if (obj.getBoundingClientRect) {
+ var rect = obj.getBoundingClientRect();
+
+ return new Rect(rect.left + GetScrollLeft(refWindow),
+ rect.top + GetScrollTop(refWindow),
+ rect.right - rect.left,
+ rect.bottom - rect.top,
+ refWindow);
+ }
+
+ // Mozilla < 3
+ if (obj.ownerDocument && obj.ownerDocument.getBoxObjectFor) {
+ var box = obj.ownerDocument.getBoxObjectFor(obj);
+ var r = new Rect(box.x, box.y, box.width, box.height, refWindow);
+ fixRectForScrolling(r);
+ return r;
+ }
+
+ // Fallback to recursively computing this
+ var left = 0;
+ var top = 0;
+ for (var o = obj; o.offsetParent; o = o.offsetParent) {
+ left += o.offsetLeft;
+ top += o.offsetTop;
+ }
+
+ var r = new Rect(left, top, obj.offsetWidth, obj.offsetHeight, refWindow);
+ fixRectForScrolling(r);
+ return r;
+}
+
+function GetMousePosition(e) {
+ // copied from http://www.quirksmode.org/js/events_compinfo.html
+ var posx = 0;
+ var posy = 0;
+ if (e.pageX || e.pageY) {
+ posx = e.pageX;
+ posy = e.pageY;
+ } else if (e.clientX || e.clientY) {
+ var obj = (e.target ? e.target : e.srcElement);
+ var refWindow;
+ if (obj.ownerDocument && obj.ownerDocument.parentWindow) {
+ refWindow = obj.ownerDocument.parentWindow;
+ } else {
+ refWindow = window;
+ }
+ posx = e.clientX + GetScrollLeft(refWindow);
+ posy = e.clientY + GetScrollTop(refWindow);
+ }
+ return new Point(posx, posy, window);
+}
diff --git a/appengine/monorail/static/js/graveyard/listen.js b/appengine/monorail/static/js/graveyard/listen.js
new file mode 100644
index 0000000..a1e1e21
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/listen.js
@@ -0,0 +1,146 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+var listen;
+var unlisten;
+var unlistenByKey;
+
+(function() {
+ var listeners = {};
+ var nextId = 0;
+
+ function getHashCode_(obj) {
+ if (obj.listen_hc_ == null) {
+ obj.listen_hc_ = ++nextId;
+ }
+ return obj.listen_hc_;
+ }
+
+ /**
+ * Takes a node, event, listener, and capture flag to create a key
+ * to identify the tuple in the listeners hash.
+ *
+ * @param {Element} node The node to listen to events on.
+ * @param {string} event The name of the event without the "on" prefix.
+ * @param {Function} listener A function to call when the event occurs.
+ * @param {boolean} opt_useCapture In DOM-compliant browsers, this determines
+ * whether the listener is fired during the
+ * capture or bubble phase of the event.
+ * @return {string} key to identify this tuple in the listeners hash.
+ */
+ function createKey_(node, event, listener, opt_useCapture) {
+ var nodeHc = getHashCode_(node);
+ var listenerHc = getHashCode_(listener);
+ opt_useCapture = !!opt_useCapture;
+ var key = nodeHc + '_' + event + '_' + listenerHc + '_' + opt_useCapture;
+ return key;
+ }
+
+ /**
+ * Adds an event listener to a DOM node for a specific event.
+ *
+ * Listen() and unlisten() use an indirect lookup of listener functions
+ * to avoid circular references between DOM (in IE) or XPCOM (in Mozilla)
+ * objects which leak memory. This makes it easier to write OO
+ * Javascript/DOM code.
+ *
+ * Examples:
+ * listen(myButton, 'click', myHandler, true);
+ * listen(myButton, 'click', this.myHandler.bind(this), true);
+ *
+ * @param {Element} node The node to listen to events on.
+ * @param {string} event The name of the event without the "on" prefix.
+ * @param {Function} listener A function to call when the event occurs.
+ * @param {boolean} opt_useCapture In DOM-compliant browsers, this determines
+ * whether the listener is fired during the
+ * capture or bubble phase of the event.
+   * @return {string} a unique key to identify this listener.
+ */
+ listen = function(node, event, listener, opt_useCapture) {
+ var key = createKey_(node, event, listener, opt_useCapture);
+
+ // addEventListener does not allow multiple listeners
+ if (key in listeners) {
+ return key;
+ }
+
+ var proxy = handleEvent.bind(null, key);
+ listeners[key] = {
+ listener: listener,
+ proxy: proxy,
+ event: event,
+ node: node,
+ useCapture: opt_useCapture
+ };
+
+ if (node.addEventListener) {
+ node.addEventListener(event, proxy, opt_useCapture);
+ } else if (node.attachEvent) {
+ node.attachEvent('on' + event, proxy);
+ } else {
+ throw new Error('Node {' + node + '} does not support event listeners.');
+ }
+
+ return key;
+ }
+
+ /**
+ * Removes an event listener which was added with listen().
+ *
+ * @param {Element} node The node to stop listening to events on.
+ * @param {string} event The name of the event without the "on" prefix.
+ * @param {Function} listener The listener function to remove.
+ * @param {boolean} opt_useCapture In DOM-compliant browsers, this determines
+ * whether the listener is fired during the
+ * capture or bubble phase of the event.
+ * @return {boolean} indicating whether the listener was there to remove.
+ */
+ unlisten = function(node, event, listener, opt_useCapture) {
+ var key = createKey_(node, event, listener, opt_useCapture);
+
+ return unlistenByKey(key);
+ }
+
+ /**
+ * Variant of {@link unlisten} that takes a key that was returned by
+ * {@link listen} and removes that listener.
+ *
+ * @param {string} key Key of event to be unlistened.
+ * @return {boolean} indicating whether it was there to be removed.
+ */
+ unlistenByKey = function(key) {
+ if (!(key in listeners)) {
+ return false;
+ }
+ var listener = listeners[key];
+ var proxy = listener.proxy;
+ var event = listener.event;
+ var node = listener.node;
+ var useCapture = listener.useCapture;
+
+ if (node.removeEventListener) {
+ node.removeEventListener(event, proxy, useCapture);
+ } else if (node.detachEvent) {
+ node.detachEvent('on' + event, proxy);
+ }
+
+ delete listeners[key];
+ return true;
+ }
+
+ /**
+ * The function which is actually called when the DOM event occurs. This
+ * function is a proxy for the real listener the user specified.
+ */
+ function handleEvent(key) {
+ // pass all arguments which were sent to this function except listenerID
+ // on to the actual listener.
+ var args = Array.prototype.splice.call(arguments, 1, arguments.length);
+ return listeners[key].listener.apply(null, args);
+ }
+
+})();
diff --git a/appengine/monorail/static/js/graveyard/popup_controller.js b/appengine/monorail/static/js/graveyard/popup_controller.js
new file mode 100644
index 0000000..5537cde
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/popup_controller.js
@@ -0,0 +1,145 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * It is common to make a DIV temporarily visible to simulate
+ * a popup window. Often, this is done by adding an onClick
+ * handler to the element that can be clicked on to show the
+ * popup.
+ *
+ * Unfortunately, closing the popup is not as simple.
+ * The popup creator often wants to let the user close
+ * the popup by clicking elsewhere on the window; however,
+ * the popup only receives mouse events that occur
+ * on the popup itself. Thus, popups need a mechanism
+ * that notifies them that the user has clicked elsewhere
+ * to try to get rid of them.
+ *
+ * PopupController is such a mechanism --
+ * it monitors all mousedown events that
+ * occur in the window so that it can notify registered
+ * popups of the mousedown, and the popups can choose
+ * to deactivate themselves.
+ *
+ * For an object to qualify as a popup, it must have a
+ * function called "deactivate" that takes a mousedown event
+ * and returns a boolean indicating that it has deactivated
+ * itself as a result of that event.
+ *
+ * EXAMPLE:
+ *
+ * // popup that attaches itself to the supplied div
+ * function MyPopup(div) {
+ * this._div = div;
+ * this._isVisible = false;
+ * this._innerHTML = ...
+ * }
+ *
+ * MyPopup.prototype.show = function() {
+ * this._div.display = '';
+ * this._isVisible = true;
+ * PC_addPopup(this);
+ * }
+ *
+ * MyPopup.prototype.hide = function() {
+ * this._div.display = 'none';
+ * this._isVisible = false;
+ * }
+ *
+ * MyPopup.prototype.deactivate = function(e) {
+ * if (this._isVisible) {
+ * var p = GetMousePosition(e);
+ * if (nodeBounds(this._div).contains(p)) {
+ *         return false; // user clicked on popup, remain visible
+ * } else {
+ * this.hide();
+ * return true; // clicked outside popup, make invisible
+ * }
+ * } else {
+ * return true; // already deactivated, not visible
+ * }
+ * }
+ *
+ * DEPENDENCIES (from this directory):
+ * bind.js
+ * listen.js
+ * common.js
+ * shapes.js
+ * geom.js
+ *
+ * USAGE:
+ * _PC_Install() must be called after the body is loaded
+ */
+
+/**
+ * PopupController constructor.
+ * @constructor
+ */
+function PopupController() {
+ this.activePopups_ = [];
+}
+
+/**
+ * @param {Document} opt_doc document to add PopupController to
+ * DEFAULT: "document" variable that is currently in scope
+ * @return {boolean} indicating if PopupController installed for the document;
+ * returns false if document already had PopupController
+ */
+function _PC_Install(opt_doc) {
+ if (gPopupControllerInstalled) return false;
+ gPopupControllerInstalled = true;
+ var doc = (opt_doc) ? opt_doc : document;
+
+ // insert _notifyPopups in BODY's onmousedown chain
+ listen(doc.body, 'mousedown', PC_notifyPopups);
+ return true;
+}
+
+/**
+ * Notifies each popup of a mousedown event, giving
+ * each popup the chance to deactivate itself.
+ *
+ * @throws Error if a popup does not have a deactivate function
+ *
+ * @private
+ */
+function PC_notifyPopups(e) {
+ if (gPopupController.activePopups_.length == 0) return false;
+ e = e || window.event;
+ for (var i = gPopupController.activePopups_.length - 1; i >= 0; --i) {
+ var popup = gPopupController.activePopups_[i];
+ PC_assertIsPopup(popup);
+ if (popup.deactivate(e)) {
+ gPopupController.activePopups_.splice(i, 1);
+ }
+ }
+ return true;
+}
+
+/**
+ * Adds the popup to the list of popups to be
+ * notified of a mousedown event.
+ *
+ * @return boolean indicating if added popup; false if already contained
+ * @throws Error if popup does not have a deactivate function
+ */
+function PC_addPopup(popup) {
+ PC_assertIsPopup(popup);
+ for (var i = 0; i < gPopupController.activePopups_.length; ++i) {
+ if (popup === gPopupController.activePopups_[i]) return false;
+ }
+ gPopupController.activePopups_.push(popup);
+ return true;
+}
+
+/** asserts that popup has a deactivate function */
+function PC_assertIsPopup(popup) {
+ AssertType(popup.deactivate, Function, 'popup missing deactivate function');
+}
+
+var gPopupController = new PopupController();
+var gPopupControllerInstalled = false;
diff --git a/appengine/monorail/static/js/graveyard/shapes.js b/appengine/monorail/static/js/graveyard/shapes.js
new file mode 100644
index 0000000..97174e4
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/shapes.js
@@ -0,0 +1,126 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+// shape related classes
+
+/** a point in 2 cartesian dimensions.
+ * @constructor
+ * @param x x-coord.
+ * @param y y-coord.
+ * @param opt_coordinateFrame a key that can be passed to a translation function to
+ * convert from one coordinate frame to another.
+ * Coordinate frames might correspond to things like windows, iframes, or
+ * any element with a position style attribute.
+ */
+function Point(x, y, opt_coordinateFrame) {
+ /** a numeric x coordinate. */
+ this.x = x;
+ /** a numeric y coordinate. */
+ this.y = y;
+ /** a key that can be passed to a translation function to
+ * convert from one coordinate frame to another.
+ * Coordinate frames might correspond to things like windows, iframes, or
+ * any element with a position style attribute.
+ */
+ this.coordinateFrame = opt_coordinateFrame || null;
+}
+Point.prototype.toString = function () {
+ return '[P ' + this.x + ',' + this.y + ']';
+};
+Point.prototype.clone = function() {
+ return new Point(this.x, this.y, this.coordinateFrame);
+}
+
+/** a distance between two points in 2-space in cartesian form.
+ * A delta doesn't have a coordinate frame associated since all the coordinate
+ * frames used in the HTML dom are convertible without rotation/scaling.
+ * If a delta is not being used in pixel-space then it may be annotated with
+ * a coordinate frame, and the undefined coordinate frame can be assumed
+ * to represent pixel space.
+ * @constructor
+ * @param dx distance along x axis
+ * @param dy distance along y axis
+ */
+function Delta(dx, dy) {
+ /** a numeric distance along the x dimension. */
+ this.dx = dx;
+ /** a numeric distance along the y dimension. */
+ this.dy = dy;
+}
+Delta.prototype.toString = function () {
+ return '[D ' + this.dx + ',' + this.dy + ']';
+};
+
+/** a rectangle or bounding region.
+ * @constructor
+ * @param x x-coord of the left edge.
+ * @param y y-coord of the top edge.
+ * @param w width.
+ * @param h height.
+ * @param opt_coordinateFrame a key that can be passed to a translation function to
+ * convert from one coordinate frame to another.
+ * Coordinate frames might correspond to things like windows, iframes, or
+ * any element with a position style attribute.
+ */
+function Rect(x, y, w, h, opt_coordinateFrame) {
+ /** the numeric x coordinate of the left edge. */
+ this.x = x;
+ /** the numeric y coordinate of the top edge. */
+ this.y = y;
+ /** the numeric distance between the right edge and the left. */
+ this.w = w;
+ /** the numeric distance between the top edge and the bottom. */
+ this.h = h;
+ /** a key that can be passed to a translation function to
+ * convert from one coordinate frame to another.
+ * Coordinate frames might correspond to things like windows, iframes, or
+ * any element with a position style attribute.
+ */
+ this.coordinateFrame = opt_coordinateFrame || null;
+}
+
+/**
+ * Determines whether the Rectangle contains the Point.
+ * The Point is considered "contained" if it lies
+ * on the boundary of, or in the interior of, the Rectangle.
+ *
+ * @param {Point} p
+ * @return boolean indicating if this Rect contains p
+ */
+Rect.prototype.contains = function(p) {
+ return this.x <= p.x && p.x < (this.x + this.w) &&
+ this.y <= p.y && p.y < (this.y + this.h);
+}
+
+/**
+ * Determines whether the given rectangle intersects this rectangle.
+ *
+ * @param {Rect} r
+ * @return boolean indicating whether the two rectangles intersect
+ */
+Rect.prototype.intersects = function(r) {
+ var p = function(x, y) {
+ return new Point(x, y, null);
+ }
+
+ return this.contains(p(r.x, r.y)) ||
+ this.contains(p(r.x + r.w, r.y)) ||
+ this.contains(p(r.x + r.w, r.y + r.h)) ||
+ this.contains(p(r.x, r.y + r.h)) ||
+ r.contains(p(this.x, this.y)) ||
+ r.contains(p(this.x + this.w, this.y)) ||
+ r.contains(p(this.x + this.w, this.y + this.h)) ||
+ r.contains(p(this.x, this.y + this.h));
+}
+
+Rect.prototype.toString = function () {
+ return '[R ' + this.w + 'x' + this.h + '+' + this.x + '+' + this.y + ']';
+};
+
+Rect.prototype.clone = function() {
+ return new Rect(this.x, this.y, this.w, this.h, this.coordinateFrame);
+};
diff --git a/appengine/monorail/static/js/graveyard/xmlhttp.js b/appengine/monorail/static/js/graveyard/xmlhttp.js
new file mode 100644
index 0000000..b19fb84
--- /dev/null
+++ b/appengine/monorail/static/js/graveyard/xmlhttp.js
@@ -0,0 +1,141 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * @fileoverview A bunch of XML HTTP recipes used to do RPC from JavaScript
+ */
+
+
+/**
+ * The active x identifier used for ie.
+ * @type String
+ * @private
+ */
+var XH_ieProgId_;
+
+
+// Domain for XMLHttpRequest readyState
+var XML_READY_STATE_UNINITIALIZED = 0;
+var XML_READY_STATE_LOADING = 1;
+var XML_READY_STATE_LOADED = 2;
+var XML_READY_STATE_INTERACTIVE = 3;
+var XML_READY_STATE_COMPLETED = 4;
+
+
+/**
+ * Initialize the private state used by other functions.
+ * @private
+ */
+function XH_XmlHttpInit_() {
+ // The following blog post describes what PROG IDs to use to create the
+ // XMLHTTP object in Internet Explorer:
+ // http://blogs.msdn.com/xmlteam/archive/2006/10/23/using-the-right-version-of-msxml-in-internet-explorer.aspx
+ // However we do not (yet) fully trust that this will be OK for old versions
+ // of IE on Win9x so we therefore keep the last 2.
+ // Versions 4 and 5 have been removed because 3.0 is the preferred "fallback"
+ // per the article above.
+ // - Version 5 was built for Office applications and is not recommended for
+ // web applications.
+ // - Version 4 has been superseded by 6 and is only intended for legacy apps.
+ // - Version 3 has a wide install base and is serviced regularly with the OS.
+
+ /**
+ * Candidate Active X types.
+ * @type Array.<String>
+ * @private
+ */
+ var XH_ACTIVE_X_IDENTS = ["MSXML2.XMLHTTP.6.0", "MSXML2.XMLHTTP.3.0",
+ "MSXML2.XMLHTTP", "Microsoft.XMLHTTP"];
+
+ if (typeof XMLHttpRequest == "undefined" &&
+ typeof ActiveXObject != "undefined") {
+ for (var i = 0; i < XH_ACTIVE_X_IDENTS.length; i++) {
+ var candidate = XH_ACTIVE_X_IDENTS[i];
+
+ try {
+ new ActiveXObject(candidate);
+ XH_ieProgId_ = candidate;
+ break;
+ } catch (e) {
+ // do nothing; try next choice
+ }
+ }
+
+ // couldn't find any matches
+ if (!XH_ieProgId_) {
+ throw Error("Could not create ActiveXObject. ActiveX might be disabled," +
+ " or MSXML might not be installed.");
+ }
+ }
+}
+
+
+XH_XmlHttpInit_();
+
+
+/**
+ * Create and return an xml http request object that can be passed to
+ * {@link #XH_XmlHttpGET} or {@link #XH_XmlHttpPOST}.
+ */
+function XH_XmlHttpCreate() {
+ if (XH_ieProgId_) {
+ return new ActiveXObject(XH_ieProgId_);
+ } else {
+ return new XMLHttpRequest();
+ }
+}
+
+
+/**
+ * Send a get request.
+ * @param {XMLHttpRequest} xmlHttp as from {@link XH_XmlHttpCreate}.
+ * @param {string} url the service to contact
+ * @param {Function} handler function called when the response is received.
+ */
+function XH_XmlHttpGET(xmlHttp, url, handler) {
+ xmlHttp.open("GET", url, true);
+ xmlHttp.onreadystatechange = handler;
+ XH_XmlHttpSend(xmlHttp, null);
+}
+
+/**
+ * Send a post request.
+ * @param {XMLHttpRequest} xmlHttp as from {@link XH_XmlHttpCreate}.
+ * @param {string} url the service to contact
+ * @param {string} data the request content.
+ * @param {Function} handler function called when the response is received.
+ */
+function XH_XmlHttpPOST(xmlHttp, url, data, handler) {
+ xmlHttp.open("POST", url, true);
+ xmlHttp.onreadystatechange = handler;
+ xmlHttp.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
+ XH_XmlHttpSend(xmlHttp, data);
+}
+
+/**
+ * Calls 'send' on the XMLHttpRequest object and calls a function called 'log'
+ * if any error occurred.
+ *
+ * @deprecated This depends on a function called 'log'. You are better off
+ * handling your errors on application level.
+ *
+ * @param {XMLHttpRequest} xmlHttp as from {@link XH_XmlHttpCreate}.
+ * @param {string|null} data the request content.
+ */
+function XH_XmlHttpSend(xmlHttp, data) {
+ try {
+ xmlHttp.send(data);
+ } catch (e) {
+  // You may want to log/debug this error; one that you should be aware of is
+
+ // e.number == -2146697208, which occurs when the 'Languages...' setting in
+ // IE is empty.
+ // This is not entirely true. The same error code is used when the user is
+ // off line.
+ console.log('XMLHttpSend failed ' + e.toString() + '<br>' + e.stack);
+ throw e;
+ }
+}
diff --git a/appengine/monorail/static/js/prettify.js b/appengine/monorail/static/js/prettify.js
new file mode 100644
index 0000000..7b99049
--- /dev/null
+++ b/appengine/monorail/static/js/prettify.js
@@ -0,0 +1,30 @@
+!function(){var q=null;window.PR_SHOULD_USE_CONTINUATION=!0;
+(function(){function S(a){function d(e){var b=e.charCodeAt(0);if(b!==92)return b;var a=e.charAt(1);return(b=r[a])?b:"0"<=a&&a<="7"?parseInt(e.substring(1),8):a==="u"||a==="x"?parseInt(e.substring(2),16):e.charCodeAt(1)}function g(e){if(e<32)return(e<16?"\\x0":"\\x")+e.toString(16);e=String.fromCharCode(e);return e==="\\"||e==="-"||e==="]"||e==="^"?"\\"+e:e}function b(e){var b=e.substring(1,e.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),e=[],a=
+b[0]==="^",c=["["];a&&c.push("^");for(var a=a?1:0,f=b.length;a<f;++a){var h=b[a];if(/\\[bdsw]/i.test(h))c.push(h);else{var h=d(h),l;a+2<f&&"-"===b[a+1]?(l=d(b[a+2]),a+=2):l=h;e.push([h,l]);l<65||h>122||(l<65||h>90||e.push([Math.max(65,h)|32,Math.min(l,90)|32]),l<97||h>122||e.push([Math.max(97,h)&-33,Math.min(l,122)&-33]))}}e.sort(function(e,a){return e[0]-a[0]||a[1]-e[1]});b=[];f=[];for(a=0;a<e.length;++a)h=e[a],h[0]<=f[1]+1?f[1]=Math.max(f[1],h[1]):b.push(f=h);for(a=0;a<b.length;++a)h=b[a],c.push(g(h[0])),
+h[1]>h[0]&&(h[1]+1>h[0]&&c.push("-"),c.push(g(h[1])));c.push("]");return c.join("")}function s(e){for(var a=e.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),c=a.length,d=[],f=0,h=0;f<c;++f){var l=a[f];l==="("?++h:"\\"===l.charAt(0)&&(l=+l.substring(1))&&(l<=h?d[l]=-1:a[f]=g(l))}for(f=1;f<d.length;++f)-1===d[f]&&(d[f]=++x);for(h=f=0;f<c;++f)l=a[f],l==="("?(++h,d[h]||(a[f]="(?:")):"\\"===l.charAt(0)&&(l=+l.substring(1))&&l<=h&&
+(a[f]="\\"+d[l]);for(f=0;f<c;++f)"^"===a[f]&&"^"!==a[f+1]&&(a[f]="");if(e.ignoreCase&&m)for(f=0;f<c;++f)l=a[f],e=l.charAt(0),l.length>=2&&e==="["?a[f]=b(l):e!=="\\"&&(a[f]=l.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return a.join("")}for(var x=0,m=!1,j=!1,k=0,c=a.length;k<c;++k){var i=a[k];if(i.ignoreCase)j=!0;else if(/[a-z]/i.test(i.source.replace(/\\u[\da-f]{4}|\\x[\da-f]{2}|\\[^UXux]/gi,""))){m=!0;j=!1;break}}for(var r={b:8,t:9,n:10,v:11,
+f:12,r:13},n=[],k=0,c=a.length;k<c;++k){i=a[k];if(i.global||i.multiline)throw Error(""+i);n.push("(?:"+s(i)+")")}return RegExp(n.join("|"),j?"gi":"g")}function T(a,d){function g(a){var c=a.nodeType;if(c==1){if(!b.test(a.className)){for(c=a.firstChild;c;c=c.nextSibling)g(c);c=a.nodeName.toLowerCase();if("br"===c||"li"===c)s[j]="\n",m[j<<1]=x++,m[j++<<1|1]=a}}else if(c==3||c==4)c=a.nodeValue,c.length&&(c=d?c.replace(/\r\n?/g,"\n"):c.replace(/[\t\n\r ]+/g," "),s[j]=c,m[j<<1]=x,x+=c.length,m[j++<<1|1]=
+a)}var b=/(?:^|\s)nocode(?:\s|$)/,s=[],x=0,m=[],j=0;g(a);return{a:s.join("").replace(/\n$/,""),d:m}}function H(a,d,g,b){d&&(a={a:d,e:a},g(a),b.push.apply(b,a.g))}function U(a){for(var d=void 0,g=a.firstChild;g;g=g.nextSibling)var b=g.nodeType,d=b===1?d?a:g:b===3?V.test(g.nodeValue)?a:d:d;return d===a?void 0:d}function C(a,d){function g(a){for(var j=a.e,k=[j,"pln"],c=0,i=a.a.match(s)||[],r={},n=0,e=i.length;n<e;++n){var z=i[n],w=r[z],t=void 0,f;if(typeof w==="string")f=!1;else{var h=b[z.charAt(0)];
+if(h)t=z.match(h[1]),w=h[0];else{for(f=0;f<x;++f)if(h=d[f],t=z.match(h[1])){w=h[0];break}t||(w="pln")}if((f=w.length>=5&&"lang-"===w.substring(0,5))&&!(t&&typeof t[1]==="string"))f=!1,w="src";f||(r[z]=w)}h=c;c+=z.length;if(f){f=t[1];var l=z.indexOf(f),B=l+f.length;t[2]&&(B=z.length-t[2].length,l=B-f.length);w=w.substring(5);H(j+h,z.substring(0,l),g,k);H(j+h+l,f,I(w,f),k);H(j+h+B,z.substring(B),g,k)}else k.push(j+h,w)}a.g=k}var b={},s;(function(){for(var g=a.concat(d),j=[],k={},c=0,i=g.length;c<i;++c){var r=
+g[c],n=r[3];if(n)for(var e=n.length;--e>=0;)b[n.charAt(e)]=r;r=r[1];n=""+r;k.hasOwnProperty(n)||(j.push(r),k[n]=q)}j.push(/[\S\s]/);s=S(j)})();var x=d.length;return g}function v(a){var d=[],g=[];a.tripleQuotedStrings?d.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?d.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/,
+q,"'\"`"]):d.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&g.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var b=a.hashComments;b&&(a.cStyleComments?(b>1?d.push(["com",/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,q,"#"]):d.push(["com",/^#(?:(?:define|e(?:l|nd)if|else|error|ifn?def|include|line|pragma|undef|warning)\b|[^\n\r]*)/,q,"#"]),g.push(["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h(?:h|pp|\+\+)?|[a-z]\w*)>/,q])):d.push(["com",
+/^#[^\n\r]*/,q,"#"]));a.cStyleComments&&(g.push(["com",/^\/\/[^\n\r]*/,q]),g.push(["com",/^\/\*[\S\s]*?(?:\*\/|$)/,q]));if(b=a.regexLiterals){var s=(b=b>1?"":"\n\r")?".":"[\\S\\s]";g.push(["lang-regex",RegExp("^(?:^^\\.?|[+-]|[!=]=?=?|\\#|%=?|&&?=?|\\(|\\*=?|[+\\-]=|->|\\/=?|::?|<<?=?|>>?>?=?|,|;|\\?|@|\\[|~|{|\\^\\^?=?|\\|\\|?=?|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*("+("/(?=[^/*"+b+"])(?:[^/\\x5B\\x5C"+b+"]|\\x5C"+s+"|\\x5B(?:[^\\x5C\\x5D"+b+"]|\\x5C"+
+s+")*(?:\\x5D|$))+/")+")")])}(b=a.types)&&g.push(["typ",b]);b=(""+a.keywords).replace(/^ | $/g,"");b.length&&g.push(["kwd",RegExp("^(?:"+b.replace(/[\s,]+/g,"|")+")\\b"),q]);d.push(["pln",/^\s+/,q," \r\n\t\u00a0"]);b="^.[^\\s\\w.$@'\"`/\\\\]*";a.regexLiterals&&(b+="(?!s*/)");g.push(["lit",/^@[$_a-z][\w$@]*/i,q],["typ",/^(?:[@_]?[A-Z]+[a-z][\w$@]*|\w+_t\b)/,q],["pln",/^[$_a-z][\w$@]*/i,q],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,q,"0123456789"],["pln",/^\\[\S\s]?/,
+q],["pun",RegExp(b),q]);return C(d,g)}function J(a,d,g){function b(a){var c=a.nodeType;if(c==1&&!x.test(a.className))if("br"===a.nodeName)s(a),a.parentNode&&a.parentNode.removeChild(a);else for(a=a.firstChild;a;a=a.nextSibling)b(a);else if((c==3||c==4)&&g){var d=a.nodeValue,i=d.match(m);if(i)c=d.substring(0,i.index),a.nodeValue=c,(d=d.substring(i.index+i[0].length))&&a.parentNode.insertBefore(j.createTextNode(d),a.nextSibling),s(a),c||a.parentNode.removeChild(a)}}function s(a){function b(a,c){var d=
+c?a.cloneNode(!1):a,e=a.parentNode;if(e){var e=b(e,1),g=a.nextSibling;e.appendChild(d);for(var i=g;i;i=g)g=i.nextSibling,e.appendChild(i)}return d}for(;!a.nextSibling;)if(a=a.parentNode,!a)return;for(var a=b(a.nextSibling,0),d;(d=a.parentNode)&&d.nodeType===1;)a=d;c.push(a)}for(var x=/(?:^|\s)nocode(?:\s|$)/,m=/\r\n?|\n/,j=a.ownerDocument,k=j.createElement("li");a.firstChild;)k.appendChild(a.firstChild);for(var c=[k],i=0;i<c.length;++i)b(c[i]);d===(d|0)&&c[0].setAttribute("value",d);var r=j.createElement("ol");
+r.className="linenums";for(var d=Math.max(0,d-1|0)||0,i=0,n=c.length;i<n;++i)k=c[i],k.className="L"+(i+d)%10,k.firstChild||k.appendChild(j.createTextNode("\u00a0")),r.appendChild(k);a.appendChild(r)}function p(a,d){for(var g=d.length;--g>=0;){var b=d[g];F.hasOwnProperty(b)?D.console&&console.warn("cannot override language handler %s",b):F[b]=a}}function I(a,d){if(!a||!F.hasOwnProperty(a))a=/^\s*</.test(d)?"default-markup":"default-code";return F[a]}function K(a){var d=a.h;try{var g=T(a.c,a.i),b=g.a;
+a.a=b;a.d=g.d;a.e=0;I(d,b)(a);var s=/\bMSIE\s(\d+)/.exec(navigator.userAgent),s=s&&+s[1]<=8,d=/\n/g,x=a.a,m=x.length,g=0,j=a.d,k=j.length,b=0,c=a.g,i=c.length,r=0;c[i]=m;var n,e;for(e=n=0;e<i;)c[e]!==c[e+2]?(c[n++]=c[e++],c[n++]=c[e++]):e+=2;i=n;for(e=n=0;e<i;){for(var p=c[e],w=c[e+1],t=e+2;t+2<=i&&c[t+1]===w;)t+=2;c[n++]=p;c[n++]=w;e=t}c.length=n;var f=a.c,h;if(f)h=f.style.display,f.style.display="none";try{for(;b<k;){var l=j[b+2]||m,B=c[r+2]||m,t=Math.min(l,B),A=j[b+1],G;if(A.nodeType!==1&&(G=x.substring(g,
+t))){s&&(G=G.replace(d,"\r"));A.nodeValue=G;var L=A.ownerDocument,o=L.createElement("span");o.className=c[r+1];var v=A.parentNode;v.replaceChild(o,A);o.appendChild(A);g<l&&(j[b+1]=A=L.createTextNode(x.substring(t,l)),v.insertBefore(A,o.nextSibling))}g=t;g>=l&&(b+=2);g>=B&&(r+=2)}}finally{if(f)f.style.display=h}}catch(u){D.console&&console.log(u&&u.stack||u)}}var D=window,y=["break,continue,do,else,for,if,return,while"],E=[[y,"auto,case,char,const,default,double,enum,extern,float,goto,inline,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"],
+"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"],M=[E,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,delegate,dynamic_cast,explicit,export,friend,generic,late_check,mutable,namespace,nullptr,property,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"],N=[E,"abstract,assert,boolean,byte,extends,final,finally,implements,import,instanceof,interface,null,native,package,strictfp,super,synchronized,throws,transient"],
+O=[N,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,internal,into,is,let,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var,virtual,where"],E=[E,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"],P=[y,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"],
+Q=[y,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"],W=[y,"as,assert,const,copy,drop,enum,extern,fail,false,fn,impl,let,log,loop,match,mod,move,mut,priv,pub,pure,ref,self,static,struct,true,trait,type,unsafe,use"],y=[y,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"],R=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)\b/,
+V=/\S/,X=v({keywords:[M,O,E,"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",P,Q,y],hashComments:!0,cStyleComments:!0,multiLineStrings:!0,regexLiterals:!0}),F={};p(X,["default-code"]);p(C([],[["pln",/^[^<?]+/],["dec",/^<!\w[^>]*(?:>|$)/],["com",/^<\!--[\S\s]*?(?:--\>|$)/],["lang-",/^<\?([\S\s]+?)(?:\?>|$)/],["lang-",/^<%([\S\s]+?)(?:%>|$)/],["pun",/^(?:<[%?]|[%?]>)/],["lang-",
+/^<xmp\b[^>]*>([\S\s]+?)<\/xmp\b[^>]*>/i],["lang-js",/^<script\b[^>]*>([\S\s]*?)(<\/script\b[^>]*>)/i],["lang-css",/^<style\b[^>]*>([\S\s]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);p(C([["pln",/^\s+/,q," \t\r\n"],["atv",/^(?:"[^"]*"?|'[^']*'?)/,q,"\"'"]],[["tag",/^^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["atn",/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^\s"'>]*(?:[^\s"'/>]|\/(?=\s)))/],["pun",/^[/<->]+/],
+["lang-js",/^on\w+\s*=\s*"([^"]+)"/i],["lang-js",/^on\w+\s*=\s*'([^']+)'/i],["lang-js",/^on\w+\s*=\s*([^\s"'>]+)/i],["lang-css",/^style\s*=\s*"([^"]+)"/i],["lang-css",/^style\s*=\s*'([^']+)'/i],["lang-css",/^style\s*=\s*([^\s"'>]+)/i]]),["in.tag"]);p(C([],[["atv",/^[\S\s]+/]]),["uq.val"]);p(v({keywords:M,hashComments:!0,cStyleComments:!0,types:R}),["c","cc","cpp","cxx","cyc","m"]);p(v({keywords:"null,true,false"}),["json"]);p(v({keywords:O,hashComments:!0,cStyleComments:!0,verbatimStrings:!0,types:R}),
+["cs"]);p(v({keywords:N,cStyleComments:!0}),["java"]);p(v({keywords:y,hashComments:!0,multiLineStrings:!0}),["bash","bsh","csh","sh"]);p(v({keywords:P,hashComments:!0,multiLineStrings:!0,tripleQuotedStrings:!0}),["cv","py","python"]);p(v({keywords:"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",hashComments:!0,multiLineStrings:!0,regexLiterals:2}),["perl","pl","pm"]);p(v({keywords:Q,
+hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["rb","ruby"]);p(v({keywords:E,cStyleComments:!0,regexLiterals:!0}),["javascript","js"]);p(v({keywords:"all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,throw,true,try,unless,until,when,while,yes",hashComments:3,cStyleComments:!0,multilineStrings:!0,tripleQuotedStrings:!0,regexLiterals:!0}),["coffee"]);p(v({keywords:W,cStyleComments:!0,multilineStrings:!0}),["rc","rs","rust"]);
+p(C([],[["str",/^[\S\s]+/]]),["regex"]);var Y=D.PR={createSimpleLexer:C,registerLangHandler:p,sourceDecorator:v,PR_ATTRIB_NAME:"atn",PR_ATTRIB_VALUE:"atv",PR_COMMENT:"com",PR_DECLARATION:"dec",PR_KEYWORD:"kwd",PR_LITERAL:"lit",PR_NOCODE:"nocode",PR_PLAIN:"pln",PR_PUNCTUATION:"pun",PR_SOURCE:"src",PR_STRING:"str",PR_TAG:"tag",PR_TYPE:"typ",prettyPrintOne:D.prettyPrintOne=function(a,d,g){var b=document.createElement("div");b.innerHTML="<pre>"+a+"</pre>";b=b.firstChild;g&&J(b,g,!0);K({h:d,j:g,c:b,i:1});
+return b.innerHTML},prettyPrint:D.prettyPrint=function(a,d){function g(){for(var b=D.PR_SHOULD_USE_CONTINUATION?c.now()+250:Infinity;i<p.length&&c.now()<b;i++){for(var d=p[i],j=h,k=d;k=k.previousSibling;){var m=k.nodeType,o=(m===7||m===8)&&k.nodeValue;if(o?!/^\??prettify\b/.test(o):m!==3||/\S/.test(k.nodeValue))break;if(o){j={};o.replace(/\b(\w+)=([\w%+\-.:]+)/g,function(a,b,c){j[b]=c});break}}k=d.className;if((j!==h||e.test(k))&&!v.test(k)){m=!1;for(o=d.parentNode;o;o=o.parentNode)if(f.test(o.tagName)&&
+o.className&&e.test(o.className)){m=!0;break}if(!m){d.className+=" prettyprinted";m=j.lang;if(!m){var m=k.match(n),y;if(!m&&(y=U(d))&&t.test(y.tagName))m=y.className.match(n);m&&(m=m[1])}if(w.test(d.tagName))o=1;else var o=d.currentStyle,u=s.defaultView,o=(o=o?o.whiteSpace:u&&u.getComputedStyle?u.getComputedStyle(d,q).getPropertyValue("white-space"):0)&&"pre"===o.substring(0,3);u=j.linenums;if(!(u=u==="true"||+u))u=(u=k.match(/\blinenums\b(?::(\d+))?/))?u[1]&&u[1].length?+u[1]:!0:!1;u&&J(d,u,o);r=
+{h:m,c:d,j:u,i:o};K(r)}}}i<p.length?setTimeout(g,250):"function"===typeof a&&a()}for(var b=d||document.body,s=b.ownerDocument||document,b=[b.getElementsByTagName("pre"),b.getElementsByTagName("code"),b.getElementsByTagName("xmp")],p=[],m=0;m<b.length;++m)for(var j=0,k=b[m].length;j<k;++j)p.push(b[m][j]);var b=q,c=Date;c.now||(c={now:function(){return+new Date}});var i=0,r,n=/\blang(?:uage)?-([\w.]+)(?!\S)/,e=/\bprettyprint\b/,v=/\bprettyprinted\b/,w=/pre|xmp/i,t=/^code$/i,f=/^(?:pre|code|xmp)$/i,
+h={};g()}};typeof define==="function"&&define.amd&&define("google-code-prettify",[],function(){return Y})})();}()
diff --git a/appengine/monorail/static/js/tracker/ac.js b/appengine/monorail/static/js/tracker/ac.js
new file mode 100644
index 0000000..503efe4
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/ac.js
@@ -0,0 +1,953 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * An autocomplete library for javascript.
+ * Public API
+ * - _ac_install() install global handlers required for everything else to
+ * function.
+ * - _ac_register(SC) register a store constructor (see below)
+ * - _ac_isCompleting() true iff focus is in an auto complete box and the user
+ * has triggered completion with a keystroke, and completion has not been
+ *   cancelled (programmatically or otherwise).
+ * - _ac_isCompleteListShowing() true if _ac_isCompleting() and the complete list
+ * is visible to the user.
+ * - _ac_cancel() if completing, stop it, otherwise a no-op.
+ *
+ *
+ * A quick example
+ * // an auto complete store
+ * var myFavoritestAutoCompleteStore = new _AC_SimpleStore(
+ * ['some', 'strings', 'to', 'complete']);
+ *
+ * // a store constructor
+ * _ac_register(function (inputNode, keyEvent) {
+ * if (inputNode.id == 'my-auto-completing-check-box') {
+ * return myFavoritestAutoCompleteStore;
+ * }
+ * return null;
+ * });
+ *
+ * <html>
+ * <head>
+ * <script type=text/javascript src=ac.js></script>
+ * </head>
+ * <body onload=_ac_install()>
+ * <!-- the constructor above looks at the id. It could as easily
+ * - look at the class, name, or value.
+ * - The autocomplete=off stops browser autocomplete from
+ * - interfering with our autocomplete
+ * -->
+ * <input type=text id="my-auto-completing-check-box"
+ * autocomplete=off>
+ * </body>
+ * </html>
+ *
+ *
+ * Concepts
+ * - Store Constructor function
+ * A store constructor is a policy function with the signature
+ * _AC_Store myStoreConstructor(
+ * HtmlInputElement|HtmlTextAreaElement inputNode, Event keyEvent)
+ * When a key event is received on a text input or text area, the autocomplete
+ * library will try each of the store constructors in turn until it finds one
+ * that returns an AC_Store which will be used for auto-completion of that
+ * text box until focus is lost.
+ *
+ * - interface _AC_Store
+ * An autocomplete store encapsulates all operations that affect how a
+ * particular text node is autocompleted. It has the following operations:
+ * - String completable(String inputValue, int caret)
+ * This method returns null if not completable or the section of inputValue
+ * that is subject to completion. If autocomplete works on items in a
+ * comma separated list, then the input value "foo, ba" might yield "ba"
+ * as the completable chunk since it is separated from its predecessor by
+ * a comma.
+ * caret is the position of the text cursor (caret) in the text input.
+ * - _AC_Completion[] completions(String completable,
+ * _AC_Completion[] toFilter)
+ * This method returns null if there are no completions. If toFilter is
+ * not null or undefined, then this method may assume that toFilter was
+ * returned as a set of completions that contain completable.
+ * - String substitute(String inputValue, int caret,
+ * String completable, _AC_Completion completion)
+ * returns the inputValue with the given completion substituted for the
+ * given completable. caret has the same meaning as in the
+ * completable operation.
+ * - String oncomplete(boolean completed, int keycode,
+ * HTMLElement element, String text)
+ * This method is called when the user hits a completion key. The default
+ * value is to do nothing, but you can override it if you want. Note that
+ * keycode will be null if the user clicked on it to select
+ * - Boolean autoselectFirstRow()
+ * This method returns True by default, but subclasses can override it
+ * to make autocomplete fields that require the user to press the down
+ * arrow or do a mouseover once before any completion option is considered
+ * to be selected.
+ *
+ * - class _AC_SimpleStore
+ * An implementation of _AC_Store that completes a set of strings given at
+ * construct time in a text field with a comma separated value.
+ *
+ * - struct _AC_Completion
+ * a struct with two fields
+ * - String value : the plain text completion value
+ * - String html : the value, as html, with the completable in bold.
+ *
+ * Key Handling
+ * Several keys affect completion in an autocompleted input.
+ * ESC - the escape key cancels autocompleting. The autocompletion will have
+ * no effect on the focused textbox until it loses focus, regains it, and
+ * a key is pressed.
+ * ENTER - completes using the currently selected completion, or if there is
+ * only one, uses that completion.
+ * UP ARROW - selects the completion above the current selection.
+ * DOWN ARROW - selects the completion below the current selection.
+ *
+ *
+ * CSS styles
+ * The following CSS selector rules can be used to change the completion list
+ * look:
+ * #ac-list style of the auto-complete list
+ * #ac-list .selected style of the selected item
+ * #ac-list b style of the matching text in a candidate completion
+ *
+ * Dependencies
+ * The library depends on the following libraries:
+ * javascript:base for definition of key constants and SetCursorPos
+ * javascript:shapes for nodeBounds()
+ */
+
+/**
+ * install global handlers required for the rest of the module to function.
+ */
+function _ac_install() {
+ ac_addHandler_(document.body, 'onkeydown', ac_keyevent_);
+ ac_addHandler_(document.body, 'onkeypress', ac_keyevent_);
+}
+
+/**
+ * register a store constructor
+ * @param storeConstructor a function like
+ * _AC_Store myStoreConstructor(HtmlInputElement|HtmlTextArea, Event)
+ */
+function _ac_register(storeConstructor) {
+ // check that not already registered
+ for (var i = ac_storeConstructors.length; --i >= 0;) {
+ if (ac_storeConstructors[i] === storeConstructor) { return; }
+ }
+ ac_storeConstructors.push(storeConstructor);
+}
+
+/**
+ * May be attached as an onfocus handler to a text input to popup autocomplete
+ * immediately when the box gains focus, instead of waiting for a keystroke.
+ * @param {Event} event the focus event; forwarded to the shared key handler.
+ */
+function _ac_onfocus(event) {
+  ac_keyevent_(event);
+}
+
+/**
+ * true iff the autocomplete widget is currently active.
+ */
+function _ac_isCompleting() {
+ return !!ac_store && !ac_suppressCompletions;
+}
+
+/**
+ * true iff the completion list is displayed.
+ */
+function _ac_isCompleteListShowing() {
+ return !!ac_store && !ac_suppressCompletions && ac_completions &&
+ ac_completions.length;
+}
+
+/**
+ * Cancel any autocomplete in progress (a no-op otherwise): suppress further
+ * completions for the focused input and dismiss the completion list via
+ * ac_updateCompletionList(false).
+ */
+function _ac_cancel() {
+  ac_suppressCompletions = true;
+  ac_updateCompletionList(false);
+}
+
+/** add a handler without whacking any existing handler. @private */
+function ac_addHandler_(node, handlerName, handler) {
+ var oldHandler = node[handlerName];
+ if (!oldHandler) {
+ node[handlerName] = handler;
+ } else {
+ node[handlerName] = ac_fnchain_(node[handlerName], handler);
+ }
+ return oldHandler;
+}
+
+/** cancel the event. @private */
+function ac_cancelEvent_(event) {
+ if ('stopPropagation' in event) {
+ event.stopPropagation();
+ } else {
+ event.cancelBubble = true;
+ }
+
+ // This is handled in IE by returning false from the handler
+ if ('preventDefault' in event) {
+ event.preventDefault();
+ }
+}
+
+/** Call two functions, a and b, and return false if either one returns
+ false. This is used as a primitive way to attach multiple event
+ handlers to an element without using addEventListener(). This
+ library predates the availablity of addEventListener().
+ @private
+*/
+function ac_fnchain_(a, b) {
+ return function () {
+ var ar = a.apply(this, arguments);
+ var br = b.apply(this, arguments);
+
+ // NOTE 1: (undefined && false) -> undefined
+ // NOTE 2: returning FALSE from a onkeypressed cancels it,
+ // returning UNDEFINED does not.
+ // As such, we specifically look for falses here
+ if (ar === false || br === false) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+}
+
+/** Global key press/down handler: lazily attaches an _AC_Store to the
+ * focused text field via the registered store constructors, then forwards
+ * the key to ac_handleKey_ asynchronously. Returns false iff the event was
+ * consumed by autocomplete. @private */
+function ac_keyevent_(event) {
+  event = event || window.event;
+
+  var source = event.target || event.srcElement;
+  // Only text-like INPUTs and TEXTAREAs participate in autocomplete.
+  if (('INPUT' == source.tagName && source.type.match(/^text|email$/i))
+      || 'TEXTAREA' == source.tagName) {
+    var code = GetKeyCode(event);
+    var isDown = event.type == 'keydown';
+    var isShiftKey = event.shiftKey;
+    var storeFound = true;
+
+    // Focus moved to a new field (or no store yet): look up a store for it.
+    if ((source !== ac_focusedInput) || (ac_store === null)) {
+      ac_focusedInput = source;
+      storeFound = false;
+      // ENTER/ESC on a freshly focused field should not start completion.
+      if (ENTER_KEYCODE !== code && ESC_KEYCODE !== code) {
+        // Try each registered constructor until one claims this field.
+        for (var i = 0; i < ac_storeConstructors.length; ++i) {
+          var store = (ac_storeConstructors[i])(source, event);
+          if (store) {
+            ac_store = store;
+            ac_oldBlurHandler = ac_addHandler_(
+                ac_focusedInput, 'onblur', _ac_ob);
+            storeFound = true;
+            break;
+          }
+        }
+
+        // There exists an odd condition where an edit box with autocomplete
+        // attached can be removed from the DOM without blur being called
+        // In which case we are left with a store around that will try to
+        // autocomplete the next edit box to receive focus. We need to clean
+        // this up
+
+        // If we can't find a store, force a blur
+        if (!storeFound) {
+          _ac_ob(null);
+        }
+      }
+    }
+    if (storeFound) {
+      var isCompletion = ac_store.isCompletionKey(code, isDown, isShiftKey);
+      var hasResults = ac_completions && (ac_completions.length > 0);
+      var cancelEvent = false;
+
+      if (isCompletion && hasResults) {
+        // Cancel any enter keystrokes if something is selected so that the
+        // browser doesn't go submitting the form.
+        cancelEvent = (!ac_suppressCompletions && !!ac_completions &&
+                       (ac_selected != -1));
+        // Defer so the field's own handlers run before we react to the key.
+        window.setTimeout(function () {
+          if (ac_store) { ac_handleKey_(code, isDown, isShiftKey); }
+        }, 0);
+      } else if (!isCompletion) {
+        // Don't want to also blur the field. Up and down move the cursor (in
+        // Firefox) to the start/end of the field. We also don't want that while
+        // the list is showing.
+        cancelEvent = (code == ESC_KEYCODE ||
+                       code == DOWN_KEYCODE ||
+                       code == UP_KEYCODE);
+
+        window.setTimeout(function () {
+          if (ac_store) { ac_handleKey_(code, isDown, isShiftKey); }
+        }, 0);
+
+      } else { // implicit if (isCompletion && !hasResults)
+        if (ac_store.oncomplete) {
+          ac_store.oncomplete(false, code, ac_focusedInput, undefined);
+        }
+      }
+
+      if (cancelEvent) {
+        ac_cancelEvent_(event);
+      }
+
+      return !cancelEvent;
+    }
+  }
+  return true;
+}
+
+/** on blur handler. Since webkit gives spurious onblur events,
+ * so ignore all onblur and use a document-wide onclick instead. */
+function _ac_ob(event) {
+ // WebKit browsers: we use a document-wide on-click rather than blur events.
+ if (!BR_hasExcessBlurEvents()) {
+ _ac_real_onblur(event);
+ }
+}
+
+/** Original autocomplete onblur handler: detaches autocomplete from the
+ * input (restoring its previous blur handler) and resets all per-session
+ * module state. */
+function _ac_real_onblur(event) {
+  if (ac_focusedInput) {
+    // Put back whatever blur handler was installed before ours.
+    ac_focusedInput.onblur = ac_oldBlurHandler;
+  }
+  ac_store = null;
+  ac_focusedInput = null;
+  ac_everTyped = false;
+  ac_oldBlurHandler = null;
+  ac_suppressCompletions = false;
+  ac_updateCompletionList(false);  // dismiss any visible completion list
+}
+
+/** This is a document-wide onclick handler that is only registered
+ * when using webkit browsers. It calls the real onblur handler
+ * when the user clicks away from a text field (or in many other
+ * situations where the user clicks, but that is OK). */
+function _ac_fake_onblur(e) {
+ var targ;
+ if (!e) var e = window.event;
+ if (e.target) targ = e.target;
+ else if (e.srcElement) targ = e.srcElement;
+ if (targ.nodeType == 3) { // Safari
+ targ = targ.parentNode;
+ }
+
+ // If the user clicked anywhere other than one of the
+ // form elements that can have its own autocomplete,
+ // then close any open autocomplete menu.
+ if ('INPUT' != targ.nodeName) {
+ _ac_real_onblur(e);
+ }
+}
+
+
+/** Abstract base class for autocomplete stores; concrete stores override
+ * these methods to control how a particular text input is completed.
+ * @constructor */
+function _AC_Store() {
+}
+/** returns the chunk of the input to treat as completable. */
+_AC_Store.prototype.completable = function (inputValue, caret) {
+  console.log('UNIMPLEMENTED completable');
+};
+/** returns the completions for the given prefix; see the file overview. */
+_AC_Store.prototype.completions = function (prefix, tofilter) {
+  console.log('UNIMPLEMENTED completions');
+};
+/** called when the user commits a completion (keycode is null on click). */
+_AC_Store.prototype.oncomplete = function (completed, keycode, element, text) {
+  // Call the onkeyup handler so that choosing an autocomplete option has
+  // the same side-effect as typing. E.g., exposing the next row of input
+  // fields.
+  element.dispatchEvent(new Event('keyup'));
+};
+/** substitutes a completion for a completable in a text input's value. */
+_AC_Store.prototype.substitute =
+  function (inputValue, caret, completable, completion) {
+  console.log('UNIMPLEMENTED substitute');
+};
+/** true iff hitting a comma key should complete. */
+_AC_Store.prototype.commaCompletes = true;
+/**
+ * true iff the given keystroke should cause a completion (and be consumed in
+ * the process).
+ */
+_AC_Store.prototype.isCompletionKey = function (code, isDown, isShiftKey) {
+  // ENTER (or comma, when commaCompletes) completes, but only on key-up.
+  if (!isDown && (ENTER_KEYCODE === code
+      || (AC_COMMA_KEYCODE == code && this.commaCompletes))) {
+    return true;
+  }
+  if (TAB_KEYCODE === code && !isShiftKey) {
+    // IE doesn't fire an event for tab on click in a text field, and firefox
+    // requires that the onkeypress event for tab be consumed or it navigates
+    // to next field.
+    return false;
+    //JER: return isDown == BR_IsIE();
+  }
+  return false;
+};
+
+function _AC_AddItemToFirstCharMap(firstCharMap, ch, s) {
+ var l = firstCharMap[ch];
+ if (!l) {
+ l = firstCharMap[ch] = [];
+ } else if (l[l.length - 1].value == s) {
+ return;
+ }
+ l.push(new _AC_Completion(s, null, ''));
+}
+
+/**
+ * an _AC_Store implementation suitable for completing lists of email
+ * addresses.
+ * @constructor
+ */
+function _AC_SimpleStore(strings) {
+ this.firstCharMap_ = {};
+
+ for (var i = 0; i < strings.length; ++i) {
+ var s = strings[i];
+ if (!s) { continue; }
+
+ _AC_AddItemToFirstCharMap(
+ this.firstCharMap_, s.charAt(0).toLowerCase(), s);
+
+ var parts = s.split(/\W+/);
+ for (var j = 0; j < parts.length; ++j) {
+ if (parts[j]) {
+ _AC_AddItemToFirstCharMap(
+ this.firstCharMap_, parts[j].charAt(0).toLowerCase(), s);
+ }
+ }
+ }
+
+ // The maximimum number of results that we are willing to show
+ this.countThreshold = 2500;
+ this.docstrings = {};
+}
+_AC_SimpleStore.prototype = new _AC_Store();
+_AC_SimpleStore.prototype.constructor = _AC_SimpleStore;
+_AC_SimpleStore.prototype.completable =
+ function (inputValue, caret) {
+
+ // complete after the last comma not inside ""s
+ var start = 0;
+ var state = 0;
+ for (var i = 0; i < caret; ++i) {
+ var ch = inputValue.charAt(i);
+ switch (state) {
+ case 0:
+ if ('"' == ch) {
+ state = 1;
+ } else if (',' == ch || ' ' == ch) {
+ start = i + 1;
+ }
+ break;
+ case 1:
+ if ('"' == ch) {
+ state = 0;
+ }
+ break;
+ }
+ }
+ while (start < caret &&
+ ' \t\r\n'.indexOf(inputValue.charAt(start)) >= 0) {
+ ++start;
+ }
+ return inputValue.substring(start, caret);
+};
+
+/**
+ * Get all completions matching the given prefix.
+ * @param {string} prefix The prefix of the text to autocomplete on.
+ * @param {List.<string>?} toFilter Optional list to filter on. Otherwise will
+ *     use this.firstCharMap_ using the prefix's first character.
+ * @return {List.<_AC_Completion>} The computed list of completions.
+ */
+_AC_SimpleStore.prototype.completions = function(prefix, toFilter) {
+  if (!prefix) {
+    return [];
+  }
+  // No candidate list supplied: start from everything indexed under the
+  // prefix's first character.
+  if ((toFilter == null) || (toFilter.length == 0)) {
+    toFilter = this.firstCharMap_[prefix.charAt(0).toLowerCase()];
+  }
+
+  // Since we use prefix to build a regular expression, we need to escape RE
+  // characters. We match '-', '{', '$' and others in the prefix and convert
+  // them into "\-", "\{", "\$".
+  var regexForRegexCharacters = /([\^*+\-\$\\\{\}\(\)\[\]\#?\.])/g;
+  var modifiedPrefix = prefix.replace(regexForRegexCharacters, '\\$1');
+
+  // Match the modifiedPrefix anywhere as long as it is either at the very
+  // beginning "Th" -> "The Hobbit", or comes immediately after a word separator
+  // such as "Ga" -> "The-Great-Gatsby".
+  var patternRegex = '^(.*[-=><:@.,])?(' + modifiedPrefix + ')(.*)';
+  var pattern = new RegExp(patternRegex, 'i' /* ignore case */);
+
+  var completions = [];
+  if (toFilter) {
+    var toFilterLength = toFilter.length;
+    for (var i = 0; i < toFilterLength; ++i) {
+      var matches = toFilter[i].value.match(pattern);
+      if (matches) {
+        // Build a <span> with the matched part of the candidate in bold:
+        // matches[1] is the text before the match, [2] the match, [3] after.
+        var compSpan = document.createElement('span');
+        compSpan.appendChild(document.createTextNode(matches[1] || ''));
+        var compBold = document.createElement('b');
+        compSpan.appendChild(compBold);
+        compBold.appendChild(document.createTextNode(matches[2]));
+        compSpan.appendChild(document.createTextNode(matches[3] || ''));
+
+        var newCompletion = new _AC_Completion(
+            toFilter[i].value,
+            compSpan,
+            this.docstrings[toFilter[i].value]);
+
+        completions.push(newCompletion);
+        // NOTE(review): this breaks only after the threshold is exceeded, so
+        // up to countThreshold + 1 results can be returned — confirm intended.
+        if (completions.length > this.countThreshold) {
+          break;
+        }
+      }
+    }
+  }
+
+  return completions;
+};
+
+// Normally, when the user types a few characters, we aggressively
+// select the first possible completion (if any). When the user
+// hits ENTER, that first completion is substituted. When that
+// behavior is not desired, override this to return false.
+/** @return {boolean} whether to auto-select the first completion row. */
+_AC_SimpleStore.prototype.autoselectFirstRow = function () {
+  return true;
+};
+
+// Comparison function for _AC_Completion
+function _AC_CompareACCompletion(a, b) {
+ // convert it to lower case and remove all leading junk
+ var aval = a.value.toLowerCase().replace(/^\W*/,'');
+ var bval = b.value.toLowerCase().replace(/^\W*/,'');
+
+ if (a.value === b.value) {
+ return 0;
+ } else if (aval < bval) {
+ return -1;
+ } else {
+ return 1;
+ }
+}
+
+/**
+ * Default substitution: replace the completable chunk that precedes the
+ * caret with the chosen completion value plus ', ', keeping the text after
+ * the caret intact.
+ */
+_AC_SimpleStore.prototype.substitute =
+function (inputValue, caret, completable, completion) {
+  return inputValue.substring(0, caret - completable.length) +
+      completion.value + ', ' + inputValue.substring(caret);
+};
+
+/**
+ * a possible completion.
+ * @constructor
+ * @param {string} value The plain-text completion value.
+ * @param {Node|string} compSpan Rich display node; a plain string is wrapped
+ *     in a text node.
+ * @param {string} docstr Short description displayed next to the value.
+ */
+function _AC_Completion(value, compSpan, docstr) {
+  /** plain text. */
+  this.value = value;
+  if (typeof compSpan == 'string') compSpan = document.createTextNode(compSpan);
+  this.compSpan = compSpan;
+  this.docstr = docstr;
+}
+/** Debug-friendly representation of this completion. */
+_AC_Completion.prototype.toString = function () {
+  return '(AC_Completion: ' + this.value + ')';
+};
+
+/** registered store constructors. @private */
+var ac_storeConstructors = [];
+/**
+ * the focused text input or textarea whether store is null or not.
+ * A text input may have focus and this may be null iff no key has been typed in
+ * the text input.
+ */
+var ac_focusedInput = null;
+/**
+ * null or the autocomplete store used to complete ac_focusedInput.
+ * @private
+ */
+var ac_store = null;
+/** store handler from ac_focusedInput. @private */
+var ac_oldBlurHandler = null;
+/**
+ * true iff user has indicated completions are unwanted (via ESC key)
+ * @private
+ */
+var ac_suppressCompletions = false;
+/**
+ * chunk of completable text seen last keystroke.
+ * Used to generate ac_completions.
+ * @private
+ */
+var ac_lastCompletable = null;
+/** an array of _AC_Completions. @private */
+var ac_completions = null;
+/** -1 or in [0, _AC_Completions.length). @private */
+var ac_selected = -1;
+
+/**
+ * handles all the key strokes, updating the completion list, tracking selected
+ * element, performing substitutions, etc.
+ * @param {number} code Key code of the event.
+ * @param {boolean} isDown True for keydown events, false for keypress.
+ * @param {boolean} isShiftKey True if shift was held.
+ * @private
+ */
+function ac_handleKey_(code, isDown, isShiftKey) {
+  // check completions
+  ac_checkCompletions();
+
+  var show = true;
+  var numCompletions = ac_completions ? ac_completions.length : 0;
+  // handle enter and tab on key press and the rest on key down
+  if (ac_store.isCompletionKey(code, isDown, isShiftKey)) {
+    // Auto-select the first row if nothing is selected and the store allows.
+    if (ac_selected < 0 && numCompletions >= 1 &&
+        ac_store.autoselectFirstRow()) {
+      ac_selected = 0;
+    }
+    if (ac_selected >= 0) {
+      // ac_complete() resets global state, so capture these first.
+      var backupInput = ac_focusedInput;
+      var completeValue = ac_completions[ac_selected].value;
+      ac_complete();
+      if (ac_store.oncomplete) {
+        ac_store.oncomplete(true, code, backupInput, completeValue);
+      }
+    }
+  } else {
+    switch (code) {
+      case ESC_KEYCODE: // escape
+        //JER?? ac_suppressCompletions = true;
+        ac_selected = -1;
+        show = false;
+        break;
+      case UP_KEYCODE: // up
+        if (isDown) {
+          // firefox fires arrow events on both down and press, but IE only fires
+          // them on press.
+          // NOTE(review): numCompletions >= 0 is always true, so the lower
+          // bound is always 0 here — possibly meant numCompletions > 0.
+          ac_selected = Math.max(numCompletions >= 0 ? 0 : -1, ac_selected - 1);
+        }
+        break;
+      case DOWN_KEYCODE: // down
+        if (isDown) {
+          ac_selected = Math.min(numCompletions - 1, ac_selected + 1);
+        }
+        break;
+    }
+
+    // Track whether the user has typed any real characters (as opposed to
+    // navigation/editing keys); ac_everTyped is read by some completable().
+    if (isDown) {
+      switch (code) {
+        case ESC_KEYCODE:
+        case ENTER_KEYCODE:
+        case UP_KEYCODE:
+        case DOWN_KEYCODE:
+        case RIGHT_KEYCODE:
+        case LEFT_KEYCODE:
+        case TAB_KEYCODE:
+        case SHIFT_KEYCODE:
+        case BACKSPACE_KEYCODE:
+        case DELETE_KEYCODE:
+          break;
+        default: // User typed some new characters.
+          ac_everTyped = true;
+      }
+    }
+
+  }
+
+  if (ac_focusedInput) {
+    ac_updateCompletionList(show);
+  }
+}
+
+/**
+ * called when an option is clicked on to select that option.
+ * Performs the substitution, fires the store's oncomplete callback (if any),
+ * then recomputes and redraws the completion menu.
+ */
+function _ac_select(optionIndex) {
+  ac_selected = optionIndex;
+  ac_complete();
+  if (ac_store.oncomplete) {
+    ac_store.oncomplete(true, null, ac_focusedInput, ac_focusedInput.value);
+  }
+
+  // check completions
+  ac_checkCompletions();
+  ac_updateCompletionList(true);
+}
+
+/** Highlight the hovered option (no substitution is performed). */
+function _ac_mouseover(optionIndex) {
+  ac_selected = optionIndex;
+  ac_updateCompletionList(true);
+}
+
+/** perform the substitution of the currently selected item. */
+function ac_complete() {
+ var caret = ac_getCaretPosition_(ac_focusedInput);
+ var completion = ac_completions[ac_selected];
+
+ ac_focusedInput.value = ac_store.substitute(
+ ac_focusedInput.value, caret,
+ ac_lastCompletable, completion);
+ // When the prefix starts with '*' we want to return the complete set of all
+ // possible completions. We treat the ac_lastCompletable value as empty so
+ // that the caret is correctly calculated (i.e. the caret should not consider
+ // placeholder values like '*member').
+ var new_caret = caret + completion.value.length;
+ if (!ac_lastCompletable.startsWith("*")) {
+ // Only consider the ac_lastCompletable length if it does not start with '*'
+ new_caret = new_caret - ac_lastCompletable.length
+ }
+ // If we inserted something ending in two quotation marks, position
+ // the cursor between the quotation marks. If we inserted a complete term,
+ // skip over the trailing space so that the user is ready to enter the next
+ // term. If we inserted just a search operator, leave the cursor immediately
+ // after the colon or equals and don't skip over the space.
+ if (completion.value.substring(completion.value.length - 2) == '""') {
+ new_caret--;
+ } else if (completion.value.substring(completion.value.length - 1) != ':' &&
+ completion.value.substring(completion.value.length - 1) != '=') {
+ new_caret++; // To account for the comma.
+ new_caret++; // To account for the space after the comma.
+ }
+ ac_selected = -1;
+ ac_completions = null;
+ ac_lastCompletable = null;
+ ac_everTyped = false;
+ SetCursorPos(window, ac_focusedInput, new_caret);
+}
+
+/**
+ * True if the user has ever typed any actual characters in the currently
+ * focused text field. False if they have only clicked, backspaced, and
+ * used the arrow keys. Cleared again by ac_complete() after a substitution.
+ */
+var ac_everTyped = false;
+
+/**
+ * maintains ac_completions, ac_selected, ac_lastCompletable.
+ * Recomputes the completion list for the current caret position, reusing the
+ * previous list as a filter when the user merely extended the prior prefix.
+ * @private
+ */
+function ac_checkCompletions() {
+  if (!ac_suppressCompletions) {
+    var caret = ac_getCaretPosition_(ac_focusedInput);
+    var completable = ac_store.completable(ac_focusedInput.value, caret);
+
+    // If we already have completed, then our work here is done.
+    if (completable == ac_lastCompletable) { return; }
+    var tofilter;
+    // If the new completable extends the previous one, the previous results
+    // can be narrowed instead of recomputed from the full store.
+    if (ac_lastCompletable &&
+        ac_lastCompletable.length < completable.length &&
+        completable.substring(0, ac_lastCompletable.length) ==
+        ac_lastCompletable) {
+      tofilter = ac_completions;
+    } else {
+      ac_completions = null;
+      ac_selected = -1;
+    }
+
+    // Try to keep the same item selected across the recomputation.
+    var oldSelected =
+        (ac_selected >= 0) ? ac_completions[ac_selected].value : null;
+    ac_completions = ac_store.completions(completable, tofilter);
+    ac_selected = -1;
+    for (var i = 0; i < ac_completions.length; ++i) {
+      if (oldSelected == ac_completions[i].value) {
+        ac_selected = i;
+        break;
+      }
+    }
+    ac_lastCompletable = completable;
+    return;
+  }
+  // Completions are suppressed (ESC): clear all completion state.
+  ac_lastCompletable = null;
+  ac_completions = null;
+  ac_selected = -1;
+}
+
+/**
+ * maintains the completion list GUI: builds (or clears) the floating
+ * 'ac-list' DIV under the focused input, with one table row per completion
+ * and optional heading rows.
+ * @param {boolean} show False hides the menu even if completions exist.
+ * @private
+ */
+function ac_updateCompletionList(show) {
+  var clist = document.getElementById('ac-list');
+  if (show && ac_completions && ac_completions.length) {
+    // Lazily create the shared menu container on first use.
+    if (!clist) {
+      clist = document.createElement('DIV');
+      clist.id = 'ac-list';
+      clist.style.position = 'absolute';
+      clist.style.display = 'none';
+      document.body.appendChild(clist);
+    }
+
+    // If no choice is selected, then select the first item, if desired.
+    if (ac_selected < 0 && ac_store && ac_store.autoselectFirstRow()) {
+      ac_selected = 0;
+    }
+
+    var headerCount= 0;
+    var tableEl = document.createElement('table');
+    tableEl.setAttribute('cellpadding', 0);
+    tableEl.setAttribute('cellspacing', 0);
+    for (var i = 0; i < ac_completions.length; ++i) {
+      if (ac_completions[i].heading) {
+        // Heading pseudo-completion: a full-width <th> row.
+        var rowEl = document.createElement('tr');
+        tableEl.appendChild(rowEl);
+        var cellEl = document.createElement('th');
+        rowEl.appendChild(cellEl);
+        cellEl.setAttribute('colspan', 2);
+        if (headerCount) {
+          cellEl.appendChild(document.createElement('br'));
+        }
+        cellEl.appendChild(
+            document.createTextNode(ac_completions[i].heading));
+        headerCount++;
+      } else {
+        var rowEl = document.createElement('tr');
+        tableEl.appendChild(rowEl);
+        if (i == ac_selected) {
+          rowEl.id = "ac-selected-row";
+          rowEl.className = "selected";
+        }
+        // The handlers read the row index back out of data-index, so the
+        // shared closure works for every row.
+        rowEl.setAttribute("data-index", i);
+        rowEl.addEventListener("mouseup", function(event) {
+          var target = event.target;
+          while (target && target.tagName != "TR")
+            target = target.parentNode;
+          var idx = Number(target.getAttribute("data-index"));
+          // NOTE(review): returning from `finally` suppresses any exception
+          // raised by _ac_select — confirm this is intentional.
+          try {
+            _ac_select(idx);
+          } finally {
+            return false;
+          }
+        });
+        rowEl.addEventListener('mouseover', function(event) {
+          var target = event.target;
+          while (target && target.tagName != "TR")
+            target = target.parentNode;
+          var idx = Number(target.getAttribute("data-index"));
+          _ac_mouseover(idx);
+        });
+        var valCellEl = document.createElement('td');
+        rowEl.appendChild(valCellEl);
+        if (ac_completions[i].compSpan) {
+          valCellEl.appendChild(ac_completions[i].compSpan);
+        }
+        var docCellEl = document.createElement('td');
+        rowEl.appendChild(docCellEl);
+        if (ac_completions[i].docstr)
+          docCellEl.appendChild(document.createTextNode(' = ' + ac_completions[i].docstr));
+      }
+    }
+
+    // Replace any previous menu contents with the freshly built table.
+    while (clist.childNodes.length) {
+      clist.removeChild(clist.childNodes[0]);
+    }
+    clist.appendChild(tableEl);
+    // position
+    var inputBounds = nodeBounds(ac_focusedInput);
+    clist.style.left = inputBounds.x + 'px';
+    clist.style.top = (inputBounds.y + inputBounds.h) + 'px';
+
+    // Note - we use '' instead of 'block', since 'block' has odd effects on
+    // the screen in IE, and causes scrollbars to resize
+    clist.style.display = '';
+
+    window.setTimeout(ac_autoscroll, 100);
+
+  } else {
+    // Hide and empty the menu.
+    if (clist) {
+      clist.style.display = 'none';
+      while (clist.childNodes.length) {
+        clist.removeChild(clist.childNodes[0]);
+      }
+    }
+  }
+}
+
+// TODO(jrobbins): make arrow keys and mouse not conflict if they are
+// used at the same time.
+
+
+/** Scroll the autocomplete menu to show the currently selected row. */
+function ac_autoscroll() {
+  var acList = document.getElementById('ac-list');
+  var acSelRow = document.getElementById('ac-selected-row');
+  // Guarded reads: either element may be absent (menu hidden, no selection).
+  var acSelRowTop = acSelRow ? acSelRow.offsetTop : 0;
+  var acSelRowHeight = acSelRow ? acSelRow.offsetHeight : 0;
+
+
+  var EXTRA = 8; // Go an extra few pixels so the next row is partly exposed.
+
+  if (!acList || !acSelRow) return;
+
+  // Autoscroll upward if the selected item is above the visible area,
+  // else autoscroll downward if the selected item is below the visible area.
+  if (acSelRowTop < acList.scrollTop) {
+    acList.scrollTop = acSelRowTop - EXTRA;
+  } else if (acSelRowTop + acSelRowHeight + EXTRA >
+             acList.scrollTop + acList.offsetHeight) {
+    acList.scrollTop = (acSelRowTop + acSelRowHeight -
+                        acList.offsetHeight + EXTRA);
+  }
+}
+
+
+/** the position of the text caret in the given text field.
+ *
+ * @param textField an INPUT node with type=text or a TEXTAREA node
+ * @return an index in [0, textField.value.length]
+ */
+function ac_getCaretPosition_(textField) {
+  if ('INPUT' == textField.tagName) {
+    // Default: end of the field, used if no selection API is available.
+    var caret = textField.value.length;
+
+    // chrome/firefox
+    if (undefined != textField.selectionStart) {
+      caret = textField.selectionEnd;
+
+      // JER: Special treatment for issue status field that makes all
+      // options show up more often
+      if (textField.id.startsWith('status')) {
+        caret = textField.selectionStart;
+      }
+    // ie
+    } else if (document.selection) {
+      // get an empty selection range
+      var range = document.selection.createRange();
+      var origSelectionLength = range.text.length;
+      // Force selection start to 0 position
+      range.moveStart('character', -caret);
+      // the caret end position is the new selection length
+      caret = range.text.length;
+
+      // JER: Special treatment for issue status field that makes all
+      // options show up more often
+      if (textField.id.startsWith('status')) {
+        // The amount that the selection grew when we forced start to
+        // position 0 is == the original start position.
+        caret = range.text.length - origSelectionLength;
+      }
+    }
+
+    return caret;
+  } else {
+    // a textarea
+
+    return GetCursorPos(window, textField);
+  }
+}
+
+/**
+ * on key press, the keycode for comma comes out as 44.
+ * on keydown it comes out as 188.
+ */
+var AC_COMMA_KEYCODE = ','.charCodeAt(0);
+
+/** True for WebKit-based user agents, which fire extra blur events. */
+function BR_hasExcessBlurEvents() {
+  return navigator.userAgent.toLowerCase().indexOf('webkit') != -1;
+}
+
+/** True for WebKit-based user agents, where mousedown is unreliable. */
+function BR_hasUnreliableMouseDown() {
+  return navigator.userAgent.toLowerCase().indexOf('webkit') != -1;
+}
diff --git a/appengine/monorail/static/js/tracker/ac_test.js b/appengine/monorail/static/js/tracker/ac_test.js
new file mode 100644
index 0000000..24a6458
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/ac_test.js
@@ -0,0 +1,40 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+// First-char index under test; reset to a fresh object before each case.
+var firstCharMap;
+
+function setUp() {
+  firstCharMap = new Object();
+}
+
+// One-word labels are indexed under their first character; a negated label
+// ('-Hot') is indexed under both '-' and the first letter of the word.
+function testAddItemToFirstCharMap_OneWordLabel() {
+  _AC_AddItemToFirstCharMap(firstCharMap, 'h', 'Hot');
+  var hArray = firstCharMap['h'];
+  assertEquals(1, hArray.length);
+  assertEquals('Hot', hArray[0].value);
+
+  _AC_AddItemToFirstCharMap(firstCharMap, '-', '-Hot');
+  _AC_AddItemToFirstCharMap(firstCharMap, 'h', '-Hot');
+  var minusArray = firstCharMap['-'];
+  assertEquals(1, minusArray.length);
+  assertEquals('-Hot', minusArray[0].value);
+  hArray = firstCharMap['h'];
+  assertEquals(2, hArray.length);
+  assertEquals('Hot', hArray[0].value);
+  assertEquals('-Hot', hArray[1].value);
+}
+
+// Key-Value labels are indexed under both the key's and the value's
+// first characters.
+function testAddItemToFirstCharMap_KeyValueLabels() {
+  _AC_AddItemToFirstCharMap(firstCharMap, 'p', 'Priority-High');
+  _AC_AddItemToFirstCharMap(firstCharMap, 'h', 'Priority-High');
+  var pArray = firstCharMap['p'];
+  assertEquals(1, pArray.length);
+  assertEquals('Priority-High', pArray[0].value);
+  var hArray = firstCharMap['h'];
+  assertEquals(1, hArray.length);
+  assertEquals('Priority-High', hArray[0].value);
+}
diff --git a/appengine/monorail/static/js/tracker/externs.js b/appengine/monorail/static/js/tracker/externs.js
new file mode 100644
index 0000000..5551e16
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/externs.js
@@ -0,0 +1,116 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+// Extern declarations: bare `var` statements for globals that are defined
+// in other scripts served alongside this bundle, so tooling does not flag
+// them as undefined. No values are assigned here.
+
+// Defined in framework/js:core_scripts
+var _hideID;
+var _showID;
+var _hideEl;
+var _showEl;
+var _showInstead;
+var _toggleHidden;
+
+var _selectAllIssues;
+var _selectNoneIssues;
+
+var _toggleRows;
+var _toggleColumn;
+var _toggleColumnUpdate;
+var _addGroupBy;
+var _addcol;
+var _checkRangeSelect;
+var _setRowLinks;
+var _makeIssueLink;
+
+var _onload;
+
+var _handleListActions;
+var _handleDetailActions;
+
+var _fetchOptions;
+var _setACOptions;
+var _openIssueUpdateForm;
+var _addAttachmentFields;
+
+var _formatContextQueryArgs;
+var _ctxArgs;
+var _ctxCan;
+var _ctxQuery;
+var _ctxSortspec;
+var _ctxGroupBy;
+var _ctxDefaultColspec;
+var _ctxStart;
+var _ctxNum;
+var _ctxResultsPerPage;
+
+var _filterTo;
+var _sortUp;
+var _sortDown;
+
+var _closeAllPopups;
+var _closeSubmenus;
+var _showRight;
+var _showBelow;
+var _highlightRow;
+var _highlightRowCallback;
+var _floatMetadata;
+var _floatVertically;
+var _allColumnNames;
+
+var _setFieldIDs;
+var _selectTemplate;
+var _saveTemplate;
+var _newTemplate;
+var _deleteTemplate;
+var _switchTemplate;
+var _templateNames;
+
+var _confirmNovelStatus;
+var _confirmNovelLabel;
+var _lfidprefix;
+var _allOrigLabels;
+var _vallab;
+var _dirty;
+var _exposeExistingLabelFields;
+var _confirmDiscardEntry;
+var _confirmDiscardUpdate;
+var _checkPlusOne;
+var _checkUnrestrict;
+
+var _clearOnFirstEvent;
+var _forceProperTableWidth;
+
+var _acof;
+var _acmo;
+var _acse;
+var _acstore;
+var _acreg;
+var _accomp;
+var _acrob;
+
+var _d;
+
+var _getColspec;
+var _getSearchColspec;
+
+var issueRefs;
+
+var kibbles;
+var _setupKibblesOnEntryPage;
+var _setupKibblesOnListPage;
+var _setupKibblesOnDetailPage;
+
+var _setPeoplePrefs;
+
+var CS_env;
+
+var _checkFieldNameOnServer;
+var _checkLeafName;
+
+var _addMultiFieldValueWidget;
+var _removeMultiFieldValueWidget;
+var console;
+var _trimCommas;
diff --git a/appengine/monorail/static/js/tracker/tracker-ac.js b/appengine/monorail/static/js/tracker/tracker-ac.js
new file mode 100644
index 0000000..ab98612
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-ac.js
@@ -0,0 +1,1135 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains the autocomplete configuration logic that is
+ * specific to the issue fields of Monorail. It depends on ac.js, our
+ * modified version of the autocomplete library.
+ */
+
+
+/**
+ * This is an autocomplete store that holds well-known issue label
+ * values for the current project.
+ */
+var TKR_labelStore;
+
+/**
+ * This is an autocomplete store that holds issue components.
+ */
+var TKR_componentListStore;
+
+/**
+ * This is an autocomplete store that holds many different kinds of
+ * items that can be shown in the artifact search autocomplete.
+ */
+var TKR_searchStore;
+
+/**
+ * This is similar to TKR_searchStore, but does not include any suggestions
+ * to use the "me" keyword. Using "me" is not a good idea for project canned
+ * queries and filter rules.
+ */
+var TKR_projectQueryStore;
+
+/**
+ * This is an autocomplete store that holds items for the quick edit
+ * autocomplete.
+ */
+// TODO(jrobbins): add options for fields and components.
+var TKR_quickEditStore;
+
+/**
+ * This is a list of label prefixes that each issue should only use once.
+ * E.g., each issue should only have one Priority-* label. We do not prevent
+ * the user from using multiple such labels, we just warn the user before
+ * he/she submits.
+ */
+var TKR_exclPrefixes = [];
+
+/**
+ * This is an autocomplete store that holds custom permission names that
+ * have already been used in this project.
+ */
+var TKR_customPermissionsStore;
+
+
+/**
+ * This is an autocomplete store that holds well-known issue status
+ * values for the current project.
+ */
+var TKR_statusStore;
+
+
+/**
+ * This is an autocomplete store that holds the usernames of all the
+ * members of the current project. This is used for autocomplete in
+ * the cc-list of an issue, where many user names can be entered with
+ * commas between them.
+ */
+var TKR_memberListStore;
+
+
+/**
+ * This is an autocomplete store that holds the projects that the current
+ * user is contributor/member/owner of.
+ */
+var TKR_projectStore;
+
+/**
+ * This is an autocomplete store that holds the usernames of possible
+ * issue owners in the current project. The list of possible issue
+ * owners is the same as the list of project members, but the behavior
+ * of this autocomplete store is different because the issue owner text
+ * field can only accept one value.
+ */
+var TKR_ownerStore;
+
+
+/**
+ * This is an autocomplete store that holds any list of string for choices.
+ */
+var TKR_autoCompleteStore;
+
+
+/**
+ * An array of autocomplete stores used for user-type custom fields.
+ */
+var TKR_userAutocompleteStores = [];
+
+
+/**
+ * This boolean controls whether odd-ball status and labels are treated as
+ * a warning or an error. Normally, it is False.
+ */
+// TODO(jrobbins): split this into one option for statuses and one for labels.
+var TKR_restrict_to_known;
+
+/**
+ * This keeps track of the type of autocomplete feed that will be displayed.
+ * The type determines which search operators are offered to the user. E.g.,
+ * "filename:" only makes sense for downloads.
+ */
+// TODO(jrobbins): remove, this seems unneeded now.
+var TKR_autoCompleteFeedName;
+
+/**
+ * This substitute function should be used for multi-valued autocomplete fields
+ * that are delimited by commas. When we insert an autocomplete value, replace
+ * an entire search term. Add a comma and a space after it if it is a complete
+ * search term.
+ */
+function TKR_acSubstituteWithComma(inputValue, caret, completable, completion) {
+ var nextTerm = caret;
+ while (inputValue.charAt(nextTerm) != ' ' && nextTerm < inputValue.length) {
+ nextTerm++;
+ }
+ while (inputValue.charAt(nextTerm) == ' ' && nextTerm < inputValue.length) {
+ nextTerm++;
+ }
+ return inputValue.substring(0, caret - completable.length) +
+ completion.value + ', ' + inputValue.substring(nextTerm);
+}
+
+/**
+ * When the prefix starts with '*', return the complete set of all
+ * possible completions.
+ * @param {string} prefix If this starts with '*', return all possible
+ * completions. Otherwise return null.
+ * @param {Array} labelDefs The array of label names and docstrings.
+ * @returns Array of new _AC_Completions for each possible completion, or null.
+ */
+function TKR_fullComplete(prefix, labelDefs) {
+ if (!prefix.startsWith('*')) return null;
+ var out = [];
+ for (var i = 0; i < labelDefs.length; i++) {
+ out.push(new _AC_Completion(labelDefs[i].name,
+ labelDefs[i].name,
+ labelDefs[i].doc));
+ }
+ return out;
+}
+
+
+/**
+ * Constructs a list of all completions for both open and closed
+ * statuses, with a header for each group.
+ * @param {string} prefix If starts with '*', return all possible completions,
+ *     else return null.
+ * @param {Array} openStatusDefs The array of open status values and
+ *     docstrings.
+ * @param {Array} closedStatusDefs The array of closed status values
+ *     and docstrings.
+ * @returns Array of new _AC_Completions for each possible completion, or null.
+ */
+function TKR_openClosedComplete(prefix, openStatusDefs, closedStatusDefs) {
+  if (!prefix.startsWith('*')) return null;
+  var out = [];
+  // Heading pseudo-entries are rendered as <th> rows by the menu code.
+  out.push({heading:'Open Statuses:'}); // TODO: i18n
+  for (var i = 0; i < openStatusDefs.length; i++) {
+    out.push(new _AC_Completion(openStatusDefs[i].name,
+                                openStatusDefs[i].name,
+                                openStatusDefs[i].doc));
+  }
+  out.push({heading:'Closed Statuses:'}); // TODO: i18n
+  for (var i = 0; i < closedStatusDefs.length; i++) {
+    out.push(new _AC_Completion(closedStatusDefs[i].name,
+                                closedStatusDefs[i].name,
+                                closedStatusDefs[i].doc));
+  }
+  return out;
+}
+
+
+/**
+ * An array of definitions of all well-known issue statuses. Each
+ * definition has the name of the status value, and a docstring that
+ * describes its meaning. Repopulated by TKR_setUpStatusStore().
+ */
+var TKR_statusWords = [];
+
+
+/**
+ * Construct a new autocomplete store with all the well-known issue
+ * status values. The store has some DIT-specific methods.
+ * TODO(jrobbins): would it be easier to define my own class to use
+ * instead of _AC_Simple_Store?
+ * @param {Array} openStatusDefs An array of definitions of the
+ *     well-known open status values. Each definition has a name and
+ *     docstring.
+ * @param {Array} closedStatusDefs An array of definitions of the
+ *     well-known closed status values. Each definition has a name and
+ *     docstring.
+ */
+function TKR_setUpStatusStore(openStatusDefs, closedStatusDefs) {
+  var docdict = {};
+  TKR_statusWords = [];
+  for (var i = 0; i < openStatusDefs.length; i++) {
+    var status = openStatusDefs[i];
+    TKR_statusWords.push(status.name);
+    docdict[status.name] = status.doc;
+  }
+  for (var i = 0; i < closedStatusDefs.length; i++) {
+    var status = closedStatusDefs[i];
+    TKR_statusWords.push(status.name);
+    docdict[status.name] = status.doc;
+  }
+
+  TKR_statusStore = new _AC_SimpleStore(TKR_statusWords);
+  TKR_statusStore.docstrings = docdict;
+
+  // The status field holds a single value, so a comma does not complete.
+  TKR_statusStore.commaCompletes = false;
+
+  // Replace the whole field with the chosen value (no comma appended).
+  TKR_statusStore.substitute =
+  function(inputValue, cursor, completable, completion) {
+    return completion.value;
+  };
+
+  // Before the user types anything, offer the full menu via the '*' prefix.
+  TKR_statusStore.completable = function(inputValue, cursor) {
+    if (!ac_everTyped) return '*status';
+    return inputValue;
+  }
+
+  TKR_statusStore.completions = function(prefix, tofilter) {
+    var fullList = TKR_openClosedComplete(prefix,
+                                          openStatusDefs,
+                                          closedStatusDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  }
+
+}
+
+
+/**
+ * Simple function to add a given item to the list of items used to construct
+ * an "autocomplete store", and also update the docstring that describes
+ * that item. They are stored separately for backward compatibility with
+ * autocomplete store logic that preceded the introduction of descriptions.
+ */
+function TKR_addACItem(items, docDict, item, docStr) {
+  items.push(item);
+  docDict[item] = docStr;
+}
+
+/**
+ * Add several autocomplete items to a word list that will be used to construct
+ * an autocomplete store. Also, keep track of description strings for each
+ * item. A search operator is prepended to the name of each item. The opt_old
+ * and opt_new parameters are used to transform Key-Value labels into Key=Value
+ * search terms.
+ */
+function TKR_addACItemList(
+ items, docDict, searchOp, acDefs, opt_old, opt_new) {
+ var item;
+ for (var i = 0; i < acDefs.length; i++) {
+ var nameAndDoc = acDefs[i];
+ item = searchOp + nameAndDoc.name;
+ if (opt_old) {
+ // Preserve any leading minus-sign.
+ item = item.slice(0, 1) + item.slice(1).replace(opt_old, opt_new);
+ }
+ TKR_addACItem(items, docDict, item, nameAndDoc.doc)
+ }
+}
+
+
+/**
+ * Use information from an options feed to populate the artifact search
+ * autocomplete menu. The order of sections is: custom fields, labels,
+ * components, people, status, special, dates. Within each section,
+ * options are ordered semantically where possible, or alphabetically
+ * if there is no semantic ordering. Negated options all come after
+ * all normal options.
+ */
+function TKR_setUpSearchStore(
+ labelDefs, memberDefs, openDefs, closedDefs, componentDefs, fieldDefs,
+ indMemberDefs) {
+ var searchWords = [];
+ var searchWordsNeg = [];
+ var docDict = {};
+
+ // Treat Key-Value and OneWord labels separately.
+ var keyValueLabelDefs = [];
+ var oneWordLabelDefs = [];
+ for (var i = 0; i < labelDefs.length; i++) {
+ var nameAndDoc = labelDefs[i];
+ if (nameAndDoc.name.indexOf('-') == -1) {
+ oneWordLabelDefs.push(nameAndDoc)
+ } else {
+ keyValueLabelDefs.push(nameAndDoc)
+ }
+ }
+
+ // Autocomplete for custom fields.
+ for (i = 0; i < fieldDefs.length; i++) {
+ var fieldName = fieldDefs[i]['field_name'];
+ var fieldType = fieldDefs[i]['field_type'];
+ if (fieldType == '1') { // enum type
+ var choices = fieldDefs[i]['choices'];
+ TKR_addACItemList(searchWords, docDict, fieldName + '=', choices);
+ TKR_addACItemList(searchWordsNeg, docDict, '-' + fieldName + '=', choices);
+ } else if (fieldType == '3') { // string types
+ TKR_addACItem(searchWords, docDict, fieldName + ':',
+ fieldDefs[i]['docstring']);
+ } else {
+ TKR_addACItem(searchWords, docDict, fieldName + '=',
+ fieldDefs[i]['docstring']);
+ }
+ TKR_addACItem(searchWords, docDict, 'has:' + fieldName,
+ 'Issues with any ' + fieldName + ' value');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:' + fieldName,
+ 'Issues with no ' + fieldName + ' value');
+ }
+
+ // Add suggestions with "me" first, because otherwise they may be impossible
+ // to reach in a project that has a lot of members with emails starting with
+ // "me".
+ TKR_addACItem(searchWords, docDict, 'owner:me', 'Issues owned by me');
+ TKR_addACItem(searchWordsNeg, docDict, '-owner:me', 'Issues not owned by me');
+ TKR_addACItem(searchWords, docDict, 'cc:me', 'Issues that CC me');
+ TKR_addACItem(searchWordsNeg, docDict, '-cc:me', 'Issues that don\'t CC me');
+ TKR_addACItem(searchWords, docDict, 'reporter:me', 'Issues I reported');
+ TKR_addACItem(searchWordsNeg, docDict, '-reporter:me', 'Issues reported by others');
+ TKR_addACItem(searchWords, docDict, 'commentby:me',
+ 'Issues that I commented on');
+ TKR_addACItem(searchWordsNeg, docDict, '-commentby:me',
+ 'Issues that I didn\'t comment on');
+
+ TKR_addACItemList(searchWords, docDict, '', keyValueLabelDefs, '-', '=');
+ TKR_addACItemList(searchWordsNeg, docDict, '-', keyValueLabelDefs, '-', '=');
+ TKR_addACItemList(searchWords, docDict, 'label:', oneWordLabelDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-label:', oneWordLabelDefs);
+
+ TKR_addACItemList(searchWords, docDict, 'component:', componentDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-component:', componentDefs);
+ TKR_addACItem(searchWords, docDict, 'has:component',
+ 'Issues with any components specified');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:component',
+ 'Issues with no components specified');
+
+ TKR_addACItemList(searchWords, docDict, 'owner:', indMemberDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-owner:', indMemberDefs);
+ TKR_addACItemList(searchWords, docDict, 'cc:', memberDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-cc:', memberDefs);
+ TKR_addACItem(searchWords, docDict, 'has:cc',
+ 'Issues with any cc\'d users');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:cc',
+ 'Issues with no cc\'d users');
+ TKR_addACItemList(searchWords, docDict, 'reporter:', memberDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-reporter:', memberDefs);
+ TKR_addACItemList(searchWords, docDict, 'status:', openDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-status:', openDefs);
+ TKR_addACItemList(searchWords, docDict, 'status:', closedDefs);
+ TKR_addACItemList(searchWordsNeg, docDict, '-status:', closedDefs);
+ TKR_addACItem(searchWords, docDict, 'has:status',
+ 'Issues with any status');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:status',
+ 'Issues with no status');
+
+ TKR_addACItem(searchWords, docDict, 'is:blocked',
+ 'Issues that are blocked');
+ TKR_addACItem(searchWordsNeg, docDict, '-is:blocked',
+ 'Issues that are not blocked');
+ TKR_addACItem(searchWords, docDict, 'has:blockedon',
+ 'Issues that are blocked');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:blockedon',
+ 'Issues that are not blocked');
+ TKR_addACItem(searchWords, docDict, 'has:blocking',
+ 'Issues that are blocking other issues');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:blocking',
+ 'Issues that are not blocking other issues');
+
+ TKR_addACItem(searchWords, docDict, 'is:starred',
+ 'Starred by me');
+ TKR_addACItem(searchWordsNeg, docDict, '-is:starred',
+ 'Not starred by me');
+ TKR_addACItem(searchWords, docDict, 'stars>10',
+ 'More than 10 stars');
+ TKR_addACItem(searchWords, docDict, 'stars>100',
+ 'More than 100 stars');
+ TKR_addACItem(searchWords, docDict, 'summary:',
+ 'Search within the summary field');
+
+ TKR_addACItemList(searchWords, docDict, 'commentby:', memberDefs);
+ TKR_addACItem(searchWords, docDict, 'attachment:',
+ 'Search within attachment names');
+ TKR_addACItem(searchWords, docDict, 'attachments>5',
+ 'Has more than 5 attachments');
+ TKR_addACItem(searchWords, docDict, 'is:open', 'Issues that are open');
+ TKR_addACItem(searchWordsNeg, docDict, '-is:open', 'Issues that are closed');
+ TKR_addACItem(searchWords, docDict, 'has:owner',
+ 'Issues with some owner');
+ TKR_addACItem(searchWordsNeg, docDict, '-has:owner',
+ 'Issues with no owner');
+ TKR_addACItem(searchWords, docDict, 'has:attachment',
+ 'Issues with some attachments');
+ TKR_addACItem(searchWords, docDict, 'id:1,2,3',
+ 'Match only the specified issues');
+ TKR_addACItem(searchWords, docDict, 'id<100000',
+ 'Issues with IDs under 100,000');
+ TKR_addACItem(searchWords, docDict, 'blockedon:1',
+ 'Blocked on the specified issues');
+ TKR_addACItem(searchWords, docDict, 'blocking:1',
+ 'Blocking the specified issues');
+ TKR_addACItem(searchWords, docDict, 'is:spam', 'Issues classified as spam');
+ // We do not suggest -is:spam because it is implicit.
+
+ var today = new Date();
+ var todayStr = (today.getFullYear() + '/' + (today.getMonth() + 1) + '/' +
+ today.getDate());
+ TKR_addACItem(searchWords, docDict, 'opened>today-1',
+ 'Opened within the last N days');
+ TKR_addACItem(searchWords, docDict, 'opened>' + todayStr,
+ 'Opened after the specified date');
+ TKR_addACItem(searchWords, docDict, 'opened<today-1',
+ 'Opened more than N days ago');
+ TKR_addACItem(searchWords, docDict, 'opened<' + todayStr,
+ 'Opened before the specified date');
+ TKR_addACItem(searchWords, docDict, 'modified>today-1',
+ 'Modified within the last N days');
+ TKR_addACItem(searchWords, docDict, 'modified>' + todayStr,
+ 'Modified after the specified date');
+ TKR_addACItem(searchWords, docDict, 'modified<today-1',
+ 'Modified more than N days ago');
+ TKR_addACItem(searchWords, docDict, 'modified<' + todayStr,
+ 'Modified before the specified date');
+ TKR_addACItem(searchWords, docDict, 'closed>today-1',
+ 'Closed within the last N days');
+ TKR_addACItem(searchWords, docDict, 'closed>' + todayStr,
+ 'Closed after the specified date');
+ TKR_addACItem(searchWords, docDict, 'closed<today-1',
+ 'Closed more than N days ago');
+ TKR_addACItem(searchWords, docDict, 'closed<' + todayStr,
+ 'Closed before the specified date');
+
+ TKR_projectQueryStore = new _AC_SimpleStore(searchWords);
+ TKR_projectQueryStore.docstrings = docDict;
+
+ searchWords = searchWords.concat(searchWordsNeg);
+
+ TKR_searchStore = new _AC_SimpleStore(searchWords);
+ TKR_searchStore.docstrings = docDict;
+
+ // When we insert an autocomplete value, replace an entire search term.
+ // Add just a space after it (not a comma) if it is a complete search term,
+ // or leave the caret immediately after the completion if we are just helping
+ // the user with the search operator.
+ TKR_searchStore.substitute =
+ function(inputValue, caret, completable, completion) {
+ var nextTerm = caret;
+ while (inputValue.charAt(nextTerm) != ' ' &&
+ nextTerm < inputValue.length) {
+ nextTerm++;
+ }
+ while (inputValue.charAt(nextTerm) == ' ' &&
+ nextTerm < inputValue.length) {
+ nextTerm++;
+ }
+ return inputValue.substring(0, caret - completable.length) +
+ completion.value + ' ' + inputValue.substring(nextTerm);
+ };
+ TKR_searchStore.autoselectFirstRow =
+ function() {
+ return false;
+ };
+
+ TKR_projectQueryStore.substitute = TKR_searchStore.substitute;
+ TKR_projectQueryStore.autoselectFirstRow = TKR_searchStore.autoselectFirstRow;
+}
+
+
+/**
+ * Use information from an options feed to populate the issue quick edit
+ * autocomplete menu.
+ * @param {Array} labelDefs Well-known label definitions (name + docstring).
+ * @param {Array} memberDefs Definitions of all project members.
+ * @param {Array} openDefs Definitions of the open status values.
+ * @param {Array} closedDefs Definitions of the closed status values.
+ * @param {Array} indMemberDefs Definitions of individual (non-group) members.
+ */
+function TKR_setUpQuickEditStore(
+    labelDefs, memberDefs, openDefs, closedDefs, indMemberDefs) {
+  var qeWords = [];
+  var docDict = {};
+
+  // Treat Key-Value and OneWord labels separately.
+  var keyValueLabelDefs = [];
+  var oneWordLabelDefs = [];
+  for (var i = 0; i < labelDefs.length; i++) {
+    var nameAndDoc = labelDefs[i];
+    if (nameAndDoc.name.indexOf('-') == -1) {
+      oneWordLabelDefs.push(nameAndDoc)
+    } else {
+      keyValueLabelDefs.push(nameAndDoc)
+    }
+  }
+  // Key-Value labels are offered both whole and split at the last dash
+  // (with '=' appended) so the user can complete just the value part.
+  TKR_addACItemList(qeWords, docDict, '', keyValueLabelDefs, '-', '=');
+  TKR_addACItemList(qeWords, docDict, '-', keyValueLabelDefs, '-', '=');
+  TKR_addACItemList(qeWords, docDict, '', oneWordLabelDefs);
+  TKR_addACItemList(qeWords, docDict, '-', oneWordLabelDefs);
+
+  TKR_addACItem(qeWords, docDict, 'owner=me', 'Make me the owner');
+  TKR_addACItem(qeWords, docDict, 'owner=----', 'Clear the owner field');
+  TKR_addACItem(qeWords, docDict, 'cc=me', 'CC me on this issue');
+  TKR_addACItem(qeWords, docDict, 'cc=-me', 'Remove me from CC list');
+  TKR_addACItemList(qeWords, docDict, 'owner=', indMemberDefs);
+  TKR_addACItemList(qeWords, docDict, 'cc=', memberDefs);
+  TKR_addACItemList(qeWords, docDict, 'cc=-', memberDefs);
+  TKR_addACItemList(qeWords, docDict, 'status=', openDefs);
+  TKR_addACItemList(qeWords, docDict, 'status=', closedDefs);
+  TKR_addACItem(qeWords, docDict, 'summary=""', 'Set the summary field');
+
+  TKR_quickEditStore = new _AC_SimpleStore(qeWords);
+  TKR_quickEditStore.docstrings = docDict;
+
+  // When we insert an autocomplete value, replace an entire command part.
+  // Add just a space after it (not a comma) if it is a complete part,
+  // or leave the caret immediately after the completion if we are just helping
+  // the user with the command operator.
+  TKR_quickEditStore.substitute =
+      function(inputValue, caret, completable, completion) {
+        // Advance past the rest of the current term, then past any spaces,
+        // so the completion replaces the whole term under the caret.
+        var nextTerm = caret;
+        while (inputValue.charAt(nextTerm) != ' ' &&
+               nextTerm < inputValue.length) {
+          nextTerm++;
+        }
+        while (inputValue.charAt(nextTerm) == ' ' &&
+               nextTerm < inputValue.length) {
+          nextTerm++;
+        }
+        return inputValue.substring(0, caret - completable.length) +
+               completion.value + ' ' + inputValue.substring(nextTerm);
+      };
+}
+
+
+
+/**
+ * Construct a new autocomplete store with all the project
+ * custom permissions.
+ * @param {Array} customPermissions An array of custom permission names.
+ */
+function TKR_setUpCustomPermissionsStore(customPermissions) {
+  // Built-in permissions are always offered; custom ones are appended.
+  var permWords = ['View', 'EditIssue', 'AddIssueComment', 'DeleteIssue'];
+  var docdict = {
+      'View': '', 'EditIssue': '', 'AddIssueComment': '', 'DeleteIssue': ''};
+  for (var i = 0; i < customPermissions.length; i++) {
+    permWords.push(customPermissions[i]);
+    docdict[customPermissions[i]] = '';
+  }
+
+  TKR_customPermissionsStore = new _AC_SimpleStore(permWords);
+  TKR_customPermissionsStore.docstrings = docdict;
+
+  // Typing a comma should not accept the current completion.
+  TKR_customPermissionsStore.commaCompletes = false;
+
+  // Selecting a completion replaces the entire field value.
+  TKR_customPermissionsStore.substitute =
+      function(inputValue, cursor, completable, completion) {
+        return completion.value;
+      };
+}
+
+
+/**
+ * Construct a new autocomplete store with all the well-known project
+ * member user names and real names.  The store has some
+ * monorail-specific methods.
+ * TODO(jrobbins): would it be easier to define my own class to use
+ * instead of _AC_SimpleStore?
+ * @param {Array} memberDefs An array of definitions of the project
+ *     members.  Each definition has a name and docstring.
+ * @param {Array} indMemerDefs Definitions of individual (non-group)
+ *     members only, used for the owner autocomplete store.
+ *     NOTE(review): parameter name is misspelled ("Memer"); kept as-is to
+ *     match the rest of the file.
+ */
+function TKR_setUpMemberStore(memberDefs, indMemerDefs) {
+  var memberWords = [];
+  var indMemberWords = [];
+  var docdict = {};
+  for (var i = 0; i < memberDefs.length; i++) {
+    var member = memberDefs[i];
+    memberWords.push(member.name);
+    docdict[member.name] = member.doc;
+    // Groups cannot be owners, so only individuals go in indMemberWords.
+    if(!member.is_group) {
+      indMemberWords.push(member.name);
+    }
+  }
+
+  TKR_memberListStore = new _AC_SimpleStore(memberWords);
+  TKR_memberListStore.docstrings = docdict;
+
+  TKR_memberListStore.completions = function(prefix, tofilter) {
+    // Show the full member list for the special "show everything" prefix.
+    var fullList = TKR_fullComplete(prefix, memberDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  }
+
+  TKR_memberListStore.completable = function(inputValue, cursor) {
+    // An empty field means "offer every member".
+    if (inputValue == '') return '*member';
+    return _AC_SimpleStore.prototype.completable.call(this, inputValue, cursor);
+  }
+
+  TKR_memberListStore.substitute = TKR_acSubstituteWithComma;
+
+  // The owner store only offers individual members, not user groups.
+  TKR_ownerStore = new _AC_SimpleStore(indMemberWords);
+  TKR_ownerStore.docstrings = docdict;
+
+  TKR_ownerStore.commaCompletes = false;
+
+  // Selecting a completion replaces the entire field value.
+  TKR_ownerStore.substitute =
+      function(inputValue, cursor, completable, completion) {
+        return completion.value;
+      };
+
+  TKR_ownerStore.completions = function(prefix, tofilter) {
+    var fullList = TKR_fullComplete(prefix, indMemerDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  };
+
+  TKR_ownerStore.completable = function(inputValue, cursor) {
+    // Before the user types anything, offer all possible owners.
+    if (!ac_everTyped) return '*owner';
+    return inputValue;
+  };
+
+}
+
+
+/**
+ * Construct one new autocomplete store for each user-valued custom
+ * field that has a needs_perm validation requirement, and thus a
+ * list of allowed user indexes.
+ * TODO(jrobbins): would it be easier to define my own class to use
+ * instead of _AC_SimpleStore?
+ * @param {Array} fieldDefs An array of field definitions, only some
+ *     of which have a 'user_indexes' entry.
+ * @param {Array} memberDefs An array of definitions of the project
+ *     members.  Each definition has a name and docstring.
+ */
+function TKR_setUpUserAutocompleteStores(fieldDefs, memberDefs) {
+  for (var i = 0; i < fieldDefs.length; i++) {
+    var fieldDef = fieldDefs[i];
+    if (fieldDef['user_indexes']) {
+      // user_indexes holds indexes into memberDefs of the allowed users.
+      var userIndexes = fieldDef['user_indexes'];
+      var qualifiedMembers = [];
+      for (var j = 0; j < userIndexes.length; j++) {
+        var mem = memberDefs[userIndexes[j]];
+        if (mem) qualifiedMembers.push(mem);
+      }
+      var us = makeOneUserAutocompleteStore(fieldDef, qualifiedMembers);
+      // Keyed by 'custom_<field_id>' so field widgets can find their store.
+      TKR_userAutocompleteStores['custom_' + fieldDef['field_id']] = us;
+    }
+  }
+}
+
+/**
+ * Construct an autocomplete store for one user-valued custom field.
+ * @param {Object} fieldDef The field definition (currently unused here,
+ *     kept for symmetry with the caller).
+ * @param {Array} memberDefs Definitions of the users allowed in this field.
+ *     Each definition has a name and docstring.
+ * @return {_AC_SimpleStore} A store offering only the allowed users.
+ */
+function makeOneUserAutocompleteStore(fieldDef, memberDefs) {
+  var memberWords = [];
+  var docdict = {};
+  for (var i = 0; i < memberDefs.length; i++) {
+    var member = memberDefs[i];
+    memberWords.push(member.name);
+    docdict[member.name] = member.doc;
+  }
+
+  var userStore = new _AC_SimpleStore(memberWords);
+  userStore.docstrings = docdict;
+  // Typing a comma should not accept the current completion.
+  userStore.commaCompletes = false;
+
+  // Selecting a completion replaces the entire field value.
+  userStore.substitute =
+      function(inputValue, cursor, completable, completion) {
+        return completion.value;
+      };
+
+  userStore.completions = function(prefix, tofilter) {
+    var fullList = TKR_fullComplete(prefix, memberDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  };
+
+  userStore.completable = function(inputValue, cursor) {
+    // Before the user types anything, offer all allowed users.
+    if (!ac_everTyped) return '*custom';
+    return inputValue;
+  };
+
+  return userStore;
+}
+
+
+/**
+ * Construct a new autocomplete store with all the components.
+ * The store has some monorail-specific methods.
+ * @param {Array} componentDefs An array of definitions of components.
+ *     Each definition has a name and docstring.
+ */
+function TKR_setUpComponentStore(componentDefs) {
+  var componentWords = [];
+  var docdict = {};
+  for (var i = 0; i < componentDefs.length; i++) {
+    var component = componentDefs[i];
+    componentWords.push(component.name);
+    docdict[component.name] = component.doc;
+  }
+
+  TKR_componentListStore = new _AC_SimpleStore(componentWords);
+  TKR_componentListStore.docstrings = docdict;
+
+  TKR_componentListStore.completions = function(prefix, tofilter) {
+    var fullList = TKR_fullComplete(prefix, componentDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  }
+
+  TKR_componentListStore.substitute = TKR_acSubstituteWithComma;
+
+  TKR_componentListStore.completable = function(inputValue, cursor) {
+    // An empty field means "offer every component".
+    if (inputValue == '') return '*component';
+    return _AC_SimpleStore.prototype.completable.call(this, inputValue, cursor);
+  }
+
+}
+
+
+/**
+ * An array of the names of all well-known issue labels for the current
+ * project.  (Docstrings for each label live in TKR_labelStore.docstrings,
+ * not here.)  Rebuilt each time TKR_setUpLabelStore() runs.
+ */
+var TKR_labelWords = [];
+
+
+/**
+ * Construct a new autocomplete store with all the well-known issue
+ * labels for the current project.  The store has some DIT-specific methods.
+ * TODO(jrobbins): would it be easier to define my own class to use
+ * instead of _AC_SimpleStore?
+ * @param {Array} labelDefs An array of definitions of the well-known
+ *     labels.  Each definition has a name and docstring.
+ */
+function TKR_setUpLabelStore(labelDefs) {
+  TKR_labelWords = [];
+  var docdict = {};
+  for (var i = 0; i < labelDefs.length; i++) {
+    var label = labelDefs[i];
+    TKR_labelWords.push(label.name);
+    docdict[label.name] = label.doc;
+  }
+
+  TKR_labelStore = new _AC_SimpleStore(TKR_labelWords);
+  TKR_labelStore.docstrings = docdict;
+
+  TKR_labelStore.commaCompletes = false;
+  // Selecting a completion replaces the entire field value.
+  TKR_labelStore.substitute =
+      function(inputValue, cursor, completable, completion) {
+        return completion.value;
+      };
+
+  /* Given what the user typed, return the part of it that should be used
+   * to determine the auto-complete options offered to the user. */
+  TKR_labelStore.completable = function(inputValue, cursor) {
+    if (cursor == 0) {
+      return '*label';  // Show every well-known label that is not redundant.
+    }
+    // Find the start of the current (space- or comma-separated) term.
+    var start = 0;
+    for (var i = cursor; --i >= 0;) {
+      var c = inputValue.charAt(i)
+      if (c == ' ' || c == ',') {
+        start = i + 1;
+        break;
+      }
+    }
+    var questionPos = inputValue.indexOf('?');
+    if (questionPos >= 0) {
+      // Ignore any "?" character and anything after it.
+      // NOTE(review): this reassigns inputValue relative to `start`, but the
+      // substring calls below still use the original `start`/`cursor`
+      // offsets -- confirm this is intended when a '?' is present.
+      inputValue = inputValue.substring(start, questionPos);
+    }
+    var result = inputValue.substring(start, cursor);
+    if (inputValue.lastIndexOf('-') > 0 && !ac_everTyped) {
+      // Act like a menu: offer all alternative values for the same prefix.
+      result = inputValue.substring(
+          start, Math.min(cursor, inputValue.lastIndexOf('-')));
+    }
+    if (inputValue.startsWith('Restrict-') && !ac_everTyped) {
+      // If user is in the middle of 2nd part, use that to narrow the choices.
+      result = inputValue;
+      // If they completed 2nd part, give all choices matching 2-part prefix.
+      // (8 is the index of the first '-' in 'Restrict-'.)
+      if (inputValue.lastIndexOf('-') > 8) {
+        result = inputValue.substring(
+            start, Math.min(cursor, inputValue.lastIndexOf('-') + 1));
+      }
+    }
+
+    return result;
+  };
+
+  /* Start with all labels or only those that match what the user typed so far,
+   * then filter out any that would lead to conflicts or redundancy. */
+  TKR_labelStore.completions = function(prefix, tofilter) {
+    var comps = TKR_fullComplete(prefix, labelDefs);
+    if (comps == null) {
+      comps = _AC_SimpleStore.prototype.completions.call(
+          this, prefix, tofilter);
+    }
+
+    var filtered_comps = [];
+    for (var i = 0; i < comps.length; i++) {
+      var prefix_parts = comps[i].value.split('-');
+      var label_prefix = prefix_parts[0].toLowerCase();
+      if (comps[i].value.startsWith('Restrict-')) {
+        if (!prefix.toLowerCase().startsWith('r')) {
+          // Consider restriction labels iff user has started typing.
+          continue;
+        }
+        // For Restrict labels, exclusivity applies to the 2-part prefix.
+        if (prefix_parts.length > 1) {
+          label_prefix += '-' + prefix_parts[1].toLowerCase();
+        }
+      }
+      // Offer a label unless its prefix is exclusive and already in use by
+      // some other text field on the page.
+      if (FindInArray(TKR_exclPrefixes, label_prefix) == -1 ||
+          TKR_usedPrefixes[label_prefix] == undefined ||
+          TKR_usedPrefixes[label_prefix].length == 0 ||
+          (TKR_usedPrefixes[label_prefix].length == 1 &&
+           TKR_usedPrefixes[label_prefix][0] == ac_focusedInput)) {
+        // Also skip labels that exactly duplicate another field's value.
+        var uniq = true;
+        for (var p in TKR_usedPrefixes) {
+          var textFields = TKR_usedPrefixes[p];
+          for (var j = 0; j < textFields.length; j++) {
+            var tf = textFields[j];
+            if (tf.value.toLowerCase() == comps[i].value.toLowerCase() &&
+                tf != ac_focusedInput) {
+              uniq = false;
+            }
+          }
+        }
+        if (uniq) {
+          filtered_comps.push(comps[i]);
+        }
+      }
+    }
+
+    return filtered_comps;
+  };
+}
+
+
+/**
+ * Construct a new autocomplete store with the given strings as choices.
+ * @param {Array} choices An array of autocomplete choices.
+ */
+function TKR_setUpAutoCompleteStore(choices) {
+  TKR_autoCompleteStore = new _AC_SimpleStore(choices);
+  var choicesDefs = []
+  for (var i = 0; i < choices.length; ++i) {
+    choicesDefs.push({'name': choices[i], 'doc': ''});
+  }
+
+  /**
+   * Override the default completions() function to return a list of
+   * available choices.  It proactively shows all choices when the user has
+   * not yet typed anything.  It stops offering choices if the text field
+   * has a pretty long string in it already.  It does not offer choices that
+   * have already been chosen.
+   */
+  TKR_autoCompleteStore.completions = function(prefix, tofilter) {
+    // 18 chars is the heuristic cutoff for "pretty long" noted above.
+    if (prefix.length > 18) {
+      return [];
+    }
+    var comps = TKR_fullComplete(prefix, choicesDefs);
+    if (comps == null) {
+      comps = _AC_SimpleStore.prototype.completions.call(
+          this, prefix, tofilter);
+    }
+
+    // Collect values already present in other autocomplete inputs on the
+    // page so we do not offer them again.
+    var usedComps = {}
+    var textFields = document.getElementsByTagName('input');
+    for (var i = 0; i < textFields.length; ++i) {
+      if (textFields[i].classList.contains('autocomplete')) {
+        usedComps[textFields[i].value] = true;
+      }
+    }
+    var unusedComps = []
+    for (i = 0; i < comps.length; ++i) {
+      if (!usedComps[comps[i].value]) {
+        unusedComps.push(comps[i]);
+      }
+    }
+
+    return unusedComps;
+  }
+
+  /**
+   * Override the default completable() function with one that gives a
+   * special value when the user has not yet typed anything.  This
+   * causes TKR_fullComplete() to show all choices.  Also, always consider
+   * the whole textfield value as an input to completion matching.  Otherwise,
+   * it would only consider the part after the last comma (which makes sense
+   * for gmail To: and Cc: address fields).
+   */
+  TKR_autoCompleteStore.completable = function(inputValue, cursor) {
+    if (inputValue == '') {
+      return '*ac';
+    }
+    return inputValue;
+  }
+
+  /**
+   * Override the default substitute() function to completely replace the
+   * contents of the text field when the user selects a completion.  Otherwise,
+   * it would append, much like the Gmail To: and Cc: fields append autocomplete
+   * selections.
+   */
+  TKR_autoCompleteStore.substitute =
+      function(inputValue, cursor, completable, completion) {
+        return completion.value;
+      };
+
+  /**
+   * We consider the whole textfield to be one value, not a comma separated
+   * list.  So, typing a ',' should not trigger an autocomplete selection.
+   */
+  TKR_autoCompleteStore.commaCompletes = false;
+}
+
+
+/**
+ * XMLHTTP object used to fetch autocomplete options from the server.
+ * Set by TKR_fetchOptions() and read by its callback.
+ */
+var TKR_optionsXmlHttp = undefined;
+
+/**
+ * URL used to fetch autocomplete options from the server, WITHOUT the
+ * project's cache content timestamp (the cct is appended per request).
+ */
+var TKR_optionsURL = undefined;
+
+/**
+ * Contact the server to fetch the set of autocomplete options for the
+ * projects the user is contributor/member/owner of.
+ * If multiValue is set to true then the projectStore is configured to
+ * have support for multi-values (useful for example for saved queries where
+ * a query can apply to multiple projects).
+ * @param {boolean} multiValue Whether the resulting project store should
+ *     accept multiple comma-separated values.
+ */
+function TKR_fetchUserProjects(multiValue) {
+  // Set a request token to prevent XSRF leaking of user project lists.
+  // If no token is available, silently skip the fetch.
+  if (CS_env.token) {
+    var postURL = '/hosting/projects.do';
+    var xh = XH_XmlHttpCreate()
+    var data = 'token=' + CS_env.token;
+    var callback = multiValue ? TKR_fetchMultiValProjectsCallback
+                              : TKR_fetchProjectsCallback;
+    XH_XmlHttpPOST(xh, postURL, data, callback);
+  }
+}
+
+/**
+ * Sets up the projectStore based on the json data received.
+ * The projectStore is set up with support for multiple values.
+ * @param {Event} event Load event whose target is the XHR with a JSON
+ *     response describing the user's projects.
+ */
+function TKR_fetchMultiValProjectsCallback(event) {
+  var projects = TKR_getMemberProjects(event)
+  if (projects) {
+    TKR_setUpProjectStore(projects, true);
+  }
+}
+
+/**
+ * Sets up the projectStore based on the json data received.
+ * The projectStore is set up as a single-value store.
+ * @param {Event} event Load event whose target is the XHR with a JSON
+ *     response describing the user's projects.
+ */
+function TKR_fetchProjectsCallback(event) {
+  var projects = TKR_getMemberProjects(event)
+  if (projects) {
+    TKR_setUpProjectStore(projects, false);
+  }
+}
+
+/**
+ * Extract the sorted list of projects that the user belongs to from an
+ * XHR completion event.
+ * @param {Event} event Load event whose target is the XHR.
+ * @return {Array|undefined} Sorted project names, or undefined if the
+ *     request did not complete successfully or the response contains an
+ *     'error' category.
+ */
+function TKR_getMemberProjects(event) {
+  var xhr = event.target;
+  if (xhr) {
+    if (xhr.readyState != 4 || xhr.status != 200)
+      return;
+
+    var projects = [];
+    var json = CS_parseJSON(xhr);
+    // The response groups project names by the user's role in each.
+    for (var category in json) {
+      switch (category) {
+        case 'contributorto':
+        case 'memberof':
+        case 'ownerof':
+          for (var i = 0; i < json[category].length; i++) {
+            projects.push(json[category][i]);
+          }
+          break;
+        case 'error':
+          return;
+        default:
+          break;
+      }
+    }
+    projects.sort();
+    return projects;
+  }
+}
+
+
+/**
+ * Construct a new autocomplete store with all the projects that the
+ * current user has visibility into.  The store has some monorail-specific
+ * methods.
+ * @param {Array} projects An array of project names.
+ * @param {Boolean} multiValue Determines whether the store should support
+ *     multiple values.
+ */
+function TKR_setUpProjectStore(projects, multiValue) {
+  TKR_projectStore = new _AC_SimpleStore(projects);
+  // Single-value fields accept a comma as "complete"; multi-value do not.
+  TKR_projectStore.commaCompletes = !multiValue;
+
+  var projectsDefs = []
+  var docdict = {}
+  for (var i = 0; i < projects.length; ++i) {
+    projectsDefs.push({'name': projects[i], 'doc': ''});
+    docdict[projects[i]] = '';
+  }
+
+  TKR_projectStore.docstrings = docdict;
+  if (multiValue) {
+    TKR_projectStore.substitute = TKR_acSubstituteWithComma;
+  } else {
+    // Selecting a completion replaces the entire field value.
+    TKR_projectStore.substitute =
+        function(inputValue, cursor, completable, completion) {
+          return completion.value;
+        };
+  }
+
+  TKR_projectStore.completions = function(prefix, tofilter) {
+    var fullList = TKR_fullComplete(prefix, projectsDefs);
+    if (fullList) return fullList;
+    return _AC_SimpleStore.prototype.completions.call(this, prefix, tofilter);
+  };
+
+  TKR_projectStore.completable = function(inputValue, cursor) {
+    // An empty field means "offer every project".
+    if (inputValue == '') return '*project';
+    if (multiValue)
+      return _AC_SimpleStore.prototype.completable.call(
+          this, inputValue, cursor);
+    else
+      return inputValue;
+  };
+}
+
+
+/**
+ * Contact the server to fetch the set of autocomplete options for the
+ * current project.  This is done with XMLHTTPRequest because the list
+ * could be long, and most of the time, the user will only view an
+ * issue not edit it.
+ * @param {string} projectName The name of the current project, or a falsy
+ *     value for the site-wide '/hosting' feed.
+ * @param {string} feedName The name of the feed to fetch.
+ * @param {string} token The user's url-command-attack-prevention token.
+ * @param {number} cct The project's cached-content-timestamp.
+ * @param {Object} opt_args Key=value pairs appended as query parameters.
+ */
+function TKR_fetchOptions(projectName, feedName, token, cct, opt_args) {
+  TKR_autoCompleteFeedName = feedName;
+  TKR_optionsXmlHttp = XH_XmlHttpCreate();
+  var projectPart = projectName ? '/p/' + projectName : '/hosting';
+  TKR_optionsURL = (
+      projectPart + '/feeds/' + feedName + '?' +
+      'token=' + token);
+  for (var arg in opt_args) {
+    TKR_optionsURL += '&' + arg + '=' + encodeURIComponent(opt_args[arg]);
+  }
+
+  // The cct is appended here but kept out of TKR_optionsURL itself.
+  XH_XmlHttpGET(
+      TKR_optionsXmlHttp, TKR_optionsURL + '&cct=' + cct,
+      TKR_issueOptionsFeedCallback);
+}
+
+
+/**
+ * The communication with the server has made some progress.  If it is
+ * done (readyState 4) and succeeded (HTTP 200), then process the response.
+ * Other states and statuses are ignored.
+ */
+function TKR_issueOptionsFeedCallback() {
+  if (TKR_optionsXmlHttp.readyState == 4) {
+    if (TKR_optionsXmlHttp.status == 200) {
+      TKR_gotIssueOptionsFeed(TKR_optionsXmlHttp);
+    }
+  }
+}
+
+
+/**
+ * The server has sent the list of all options. Parse them and then set up each
+ * of the label stores.
+ * @param {Object} xhr The JSON response object the server. The response JSON
+ * consists of one large dictionary with four items: open statuses,
+ * closed statuses, issue labels, and project members.
+ */
+function TKR_gotIssueOptionsFeed(xhr) {
+ var json_data = null;
+ try {
+ json_data = CS_parseJSON(xhr);
+ }
+ catch (e) {
+ return null;
+ }
+ indMemerDefs = []
+ for (var i = 0; i < json_data.members.length; i++) {
+ var member = json_data.members[i];
+ if(!member.is_group) {
+ indMemerDefs.push(member);
+ }
+ }
+ TKR_setUpStatusStore(json_data.open, json_data.closed);
+ TKR_setUpSearchStore(
+ json_data.labels, json_data.members, json_data.open, json_data.closed,
+ json_data.components, json_data.fields, indMemerDefs);
+ TKR_setUpQuickEditStore(
+ json_data.labels, json_data.members, json_data.open, json_data.closed,
+ indMemerDefs);
+ TKR_setUpLabelStore(json_data.labels);
+ TKR_setUpComponentStore(json_data.components);
+ TKR_setUpMemberStore(json_data.members, indMemerDefs);
+ TKR_setUpUserAutocompleteStores(json_data.fields, json_data.members);
+ TKR_setUpCustomPermissionsStore(json_data.custom_permissions);
+ TKR_exclPrefixes = json_data.excl_prefixes;
+ TKR_prepLabelAC(TKR_labelFieldIDPrefix);
+ TKR_prepOwnerField(json_data.members);
+ TKR_restrict_to_known = json_data.strict;
+}
diff --git a/appengine/monorail/static/js/tracker/tracker-components.js b/appengine/monorail/static/js/tracker/tracker-components.js
new file mode 100644
index 0000000..8851e36
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-components.js
@@ -0,0 +1,92 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS code for editing components and component definitions.
+ */
+
+var TKR_leafNameXmlHttp;  // XHR used to validate a leaf name on the server.
+
+var TKR_leafNameRE = /^[a-zA-Z]([-_]?[a-zA-Z0-9])+$/;  // Valid leaf names.
+var TKR_oldName = '';  // Last name checked, to avoid redundant re-checks.
+
+/**
+ * Function to validate the component leaf name.  Disables the submit
+ * button and shows inline feedback while the name is invalid or being
+ * checked on the server.
+ * @param {string} projectName Current project name.
+ * @param {string} parentPath Path to this component's parent.
+ * @param {string} originalName Original leaf name, keeping that is always OK.
+ * @param {string} token Security token.
+ */
+function TKR_checkLeafName(projectName, parentPath, originalName, token) {
+  var name = $('leaf_name').value;
+  var feedback = $('leafnamefeedback');
+  if (name == originalName) {
+    // Keeping the existing name is always allowed.
+    $('submit_btn').disabled = '';
+    feedback.innerText = '';
+  } else if (name != TKR_oldName) {
+    // Only re-validate when the name actually changed since last check.
+    $('submit_btn').disabled = 'disabled';
+    if (name == '') {
+      feedback.innerText = 'Please choose a name';
+    } else if (!TKR_leafNameRE.test(name)) {
+      feedback.innerText = 'Invalid component name';
+    } else if (name.length > 30) {
+      feedback.innerText = 'Name is too long';
+    } else {
+      // Locally valid; ask the server whether the name is available.
+      TKR_checkLeafNameOnServer(projectName, parentPath, name, token);
+    }
+  }
+  TKR_oldName = name;
+}
+
+
+
+/**
+ * Function that communicates with the server.
+ * @param {string} projectName Current project name.
+ * @param {string} parentPath Path to this component's parent.
+ * @param {string} leafName The proposed leaf name.
+ * @param {string} token Security token.
+ */
+function TKR_checkLeafNameOnServer(projectName, parentPath, leafName, token) {
+  var url = ('/p/' + projectName + '/components/checkName' +
+             '?parent_path=' + parentPath +
+             '&leaf_name=' + encodeURIComponent(leafName) +
+             '&token=' + token);
+  TKR_leafNameXmlHttp = XH_XmlHttpCreate();
+  XH_XmlHttpGET(TKR_leafNameXmlHttp, url, TKR_leafNameCallback);
+
+}
+
+/**
+ * The communication with the server has made some progress.  If it is
+ * done (readyState 4) and succeeded (HTTP 200), then process the response.
+ * Other states and statuses are ignored.
+ */
+function TKR_leafNameCallback() {
+  if (TKR_leafNameXmlHttp.readyState == 4) {
+    if (TKR_leafNameXmlHttp.status == 200) {
+      TKR_gotLeafNameFeed(TKR_leafNameXmlHttp);
+    }
+  }
+}
+
+
+/**
+ * Function that evaluates the server response and sets the error message.
+ * Also enables or disables the submit button accordingly.
+ * @param {Object} xhr AJAX response object with a JSON body that may
+ *     contain an 'error_message' entry.
+ */
+function TKR_gotLeafNameFeed(xhr) {
+  var json_data = null;
+  try {
+    json_data = CS_parseJSON(xhr);
+  }
+  catch (e) {
+    // A malformed response leaves the current feedback/button state as-is.
+    return;
+  }
+  var errorMessage = json_data['error_message'];
+  $('leafnamefeedback').innerText = errorMessage || '';
+
+  // Only allow submission when the server reported no error.
+  $('submit_btn').disabled = errorMessage ? 'disabled' : '';
+}
diff --git a/appengine/monorail/static/js/tracker/tracker-display.js b/appengine/monorail/static/js/tracker/tracker-display.js
new file mode 100644
index 0000000..91c2b82
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-display.js
@@ -0,0 +1,407 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * Functions used by Monorail to control the display of elements on
+ * the page, rollovers, and popup menus.
+ *
+ */
+
+
+/**
+ * Show a popup menu below a specified element. Optional x and y deltas can be
+ * used to fine-tune placement.
+ * @param {string} id The HTML id of the popup menu.
+ * @param {Element} el The HTML element that the popup should appear near.
+ * @param {number} opt_deltaX Optional X offset to finetune placement.
+ * @param {number} opt_deltaY Optional Y offset to finetune placement.
+ * @param {Element} opt_menuButton The HTML element for a menu button that
+ *     was pressed to open the menu.  When a button was used, we need to ignore
+ *     the first "click" event, otherwise the menu will immediately close.
+ * @returns Always returns false to indicate that the browser should handle the
+ *     event normally.
+ */
+function TKR_showBelow(id, el, opt_deltaX, opt_deltaY, opt_menuButton) {
+  var popupDiv = $(id);
+  var elBounds = nodeBounds(el)
+  var startX = elBounds.x;
+  var startY = elBounds.y + elBounds.h;  // directly under el
+  // Small per-browser nudges so the menu lines up with el's border.
+  if (BR_IsIE()) {
+    startX -= 1;
+    startY -= 2;
+  }
+  if (BR_IsSafari()) {
+    startX += 1;
+  }
+  popupDiv.style.display = 'block'; //needed so that offsetWidth != 0
+
+  // Park the div off-screen while measuring so the user never sees it jump.
+  popupDiv.style.left = '-2000px';
+  if (id == 'pop_dot' || id == 'redoMenu') {
+    // These two menus are right-aligned with el instead of left-aligned.
+    startX = startX - popupDiv.offsetWidth + el.offsetWidth;
+  }
+  if (opt_deltaX) startX += opt_deltaX;
+  if (opt_deltaY) startY += opt_deltaY;
+  popupDiv.style.left = (startX)+'px';
+  popupDiv.style.top = (startY)+'px';
+  var popup = new TKR_MyPopup(popupDiv, opt_menuButton);
+  popup.show();
+  return false;
+}
+
+
+/**
+ * Show a popup menu to the right of a specified element. If there is not
+ * enough space to the right, then it will open to the left side instead.
+ * Optional x and y deltas can be used to fine-tune placement.
+ * TODO(jrobbins): reduce redundancy with function above.
+ * @param {string} id The HTML id of the popup menu.
+ * @param {Element} el The HTML element that the popup should appear near.
+ * @param {number} opt_deltaX Optional X offset to finetune placement.
+ * @param {number} opt_deltaY Optional Y offset to finetune placement.
+ * @returns Always returns false to indicate that the browser should handle the
+ *     event normally.
+ */
+function TKR_showRight(id, el, opt_deltaX, opt_deltaY) {
+  var popupDiv = $(id);
+  var elBounds = nodeBounds(el);
+  var startX = elBounds.x + elBounds.w;
+  var startY = elBounds.y;
+
+  // Calculate pageSize.w and pageSize.h
+  // NOTE(review): '?:' binds looser than '||' here, so window.innerWidth is
+  // only part of the ternary *condition*, not a candidate result -- looks
+  // accidental; confirm the intended precedence before touching it.
+  var docElemWidth = document.documentElement.clientWidth;
+  var docElemHeight = document.documentElement.clientHeight;
+  var pageSize = {
+    w: (window.innerWidth || docElemWidth && docElemWidth > 0 ?
+        docElemWidth : document.body.clientWidth) || 1,
+    h: (window.innerHeight || docElemHeight && docElemHeight > 0 ?
+        docElemHeight : document.body.clientHeight) || 1
+  }
+
+  // We need to make the popupDiv visible in order to capture its width
+  popupDiv.style.display = 'block';
+  var popupDivBounds = nodeBounds(popupDiv);
+
+  // Show popup to the left
+  if (startX + popupDivBounds.w > pageSize.w) {
+    startX = elBounds.x - popupDivBounds.w;
+    if (BR_IsIE()) {
+      startX -= 4;
+      startY -= 2;
+    }
+    if (BR_IsNav()) {
+      startX -= 2;
+    }
+    if (BR_IsSafari()) {
+      startX += -1;
+    }
+
+  // Show popup to the right
+  } else {
+    if (BR_IsIE()) {
+      startY -= 2;
+    }
+    if (BR_IsNav()) {
+      startX += 2;
+    }
+    if (BR_IsSafari()) {
+      startX += 3;
+    }
+  }
+
+  popupDiv.style.left = '-2000px';
+  popupDiv.style.position = 'absolute';
+  if (opt_deltaX) startX += opt_deltaX;
+  if (opt_deltaY) startY += opt_deltaY;
+  popupDiv.style.left = (startX)+'px';
+  popupDiv.style.top = (startY)+'px';
+  var popup = new TKR_MyPopup(popupDiv);
+  popup.show();
+  return false;
+}
+
+
+/**
+ * Close the specified popup menu and unregister it with the popup
+ * controller, otherwise old leftover popup instances can mess with
+ * the future display of menus.
+ * @param {string} id The HTML ID of the element to hide.
+ */
+function TKR_closePopup(id) {
+  var e = $(id);
+  if (e) {
+    // Find the registered popup whose div is this element, hide it, and
+    // drop it from the controller's active list.  (Reaches into the
+    // controller's private activePopups_ array.)
+    for (var i = 0; i < gPopupController.activePopups_.length; ++i) {
+      if (e === gPopupController.activePopups_[i]._div) {
+        var popup = gPopupController.activePopups_[i];
+        popup.hide();
+        gPopupController.activePopups_.splice(i, 1);
+        return;
+      }
+    }
+  }
+}
+
+
+var TKR_allColumnNames = []; // Will be defined in HTML file.
+
+/**
+ * Close all popup menus. Also, reset the hover state of the menu item that
+ * was selected. The list of popup menu names is computed from the list of
+ * columns specified in the HTML for the issue list page.
+ * @param menuItem {Element} The menu item that the user clicked.
+ * @returns Always returns false to indicate that the browser should handle the
+ * event normally.
+ */
+function TKR_closeAllPopups(menuItem) {
+ for (var col_index = 0; col_index < TKR_allColumnNames.length; col_index++) {
+ TKR_closePopup('pop_' + col_index);
+ TKR_closePopup('filter_' + col_index);
+ }
+ TKR_closePopup('pop_dot');
+ TKR_closePopup('redoMenu');
+ menuItem.classList.remove('hover');
+ return false;
+}
+
+
+/**
+ * Close all the submenus (of which, one may be currently open).
+ * @returns Always returns false to indicate that the browser should handle the
+ * event normally.
+ */
+function TKR_closeSubmenus() {
+ for (var col_index = 0; col_index < TKR_allColumnNames.length; col_index++) {
+ TKR_closePopup('filter_' + col_index);
+ }
+ return false;
+}
+
+
+/**
+ * Find the enclosing HTML element that controls this section of the
+ * page and set it to use CSS class "opened". That will make the
+ * section display in the opened state, regardless of what state is
+ * was in before.
+ * @param {Element} el The HTML element that the user clicked on.
+ * @returns Always returns false to indicate that the browser should handle the
+ * event normally.
+ */
+function TKR_showHidden(el) {
+ while (el) {
+ if (el.classList.contains('closed')) {
+ el.classList.remove('closed');
+ el.classList.add('opened');
+ return false;
+ }
+ if (el.classList.contains('opened')) {
+ return false;
+ }
+ el = el.parentNode;
+ }
+}
+
+
+/**
+ * Toggle the display of a column in the issue list page. That is
+ * done by adding or removing a CSS class of an enclosing HTML
+ * element, and by CSS rules that trigger based on that CSS class.
+ * @param {string} colName The name of the column to toggle,
+ * corresponds to a CSS class.
+ * @returns Always returns false to indicate that the browser should
+ * handle the event normally.
+ */
+function TKR_toggleColumn(colName) {
+ var controlDiv = $('colcontrol');
+ if (controlDiv.classList.contains(colName)) {
+ controlDiv.classList.remove(colName);
+ }
+ else {
+ controlDiv.classList.add(colName);
+ }
+ return false;
+}
+
+
+/**
+ * Toggle the display of a set of rows in the issue list page. That is
+ * done by adding or removing a CSS class of an enclosing HTML
+ * element, and by CSS rules that trigger based on that CSS class.
+ * TODO(jrobbins): actually, this automatically hides the other groups.
+ * @param {string} rowClassName The name of the row group to toggle,
+ * corresponds to a CSS class.
+ * @returns Always returns false to indicate that the browser should
+ * handle the event normally.
+ */
+function TKR_toggleRows(rowClassName) {
+ var controlDiv = $('colcontrol');
+ controlDiv.classList.add('hide_pri_groups');
+ controlDiv.classList.add('hide_mile_groups');
+ controlDiv.classList.add('hide_stat_groups');
+ TKR_toggleColumn(rowClassName);
+ return false;
+}
+
+
+/**
+ * A simple class that can manage the display of a popup menu. Instances
+ * of this class are used by popup_controller.js.
+ * @param {Element} div The div that contains the popup menu.
+ * @param {Element} opt_launcherEl The button that launched the popup menu,
+ *     if any.
+ * @constructor
+ */
+function TKR_MyPopup(div, opt_launcherEl) {
+  this._div = div;  // Menu container element.
+  this._launcher = opt_launcherEl;  // Launch button, used once by deactivate().
+  this._isVisible = false;  // Tracks show()/hide() state.
+}
+
+
+/**
+ * Show a popup menu. This method registers the popup with popup_controller
+ * so that a later click elsewhere on the page can close it.
+ */
+TKR_MyPopup.prototype.show = function() {
+  this._div.style.display = 'block';
+  this._isVisible = true;
+  PC_addPopup(this);
+}
+
+
+/**
+ * Hide a popup menu. This method is called from the deactivate method,
+ * which is called by popup_controller.
+ */
+TKR_MyPopup.prototype.hide = function() {
+  this._div.style.display = 'none';
+  this._isVisible = false;
+}
+
+
+/**
+ * When the popup_controller gets a user click, it calls deactivate() on
+ * every active popup to check if the click should close that popup.
+ * @param {Event} e The click event.
+ * @return {boolean} True if this popup is (now) hidden and can be
+ *     unregistered; false if it should remain visible and registered.
+ */
+TKR_MyPopup.prototype.deactivate = function(e) {
+  if (this._isVisible) {
+    var p = GetMousePosition(e);
+    if (nodeBounds(this._div).contains(p)) {
+      return false; // user clicked on popup, remain visible
+    } else if (this._launcher && nodeBounds(this._launcher).contains(p)) {
+      // Ignore the very first click, which lands on the launching button
+      // itself; clear _launcher so only that one click is ignored.
+      this._launcher = null;
+      return false; // mouseup element that launched menu, remain visible
+    } else {
+      this.hide();
+      return true; // clicked outside popup, make invisible
+    }
+  } else {
+    return true; // already deactivated, not visible
+  }
+}
+
+
+/**
+ * Highlight the issue row on the list page that contains the given
+ * checkbox.
+ * @param {Element} el The checkbox that the user changed.
+ * @returns Always returns false to indicate that the browser should
+ *     handle the event normally.
+ */
+function TKR_highlightRow(el) {
+  var checked = el.checked;
+  // Walk up from the checkbox to its enclosing table row.
+  // NOTE(review): assumes a TR ancestor always exists; if none does, el
+  // becomes null and the classList access below would throw -- confirm.
+  while (el && el.tagName != 'TR') {
+    el = el.parentNode;
+  }
+  if (checked) {
+    el.classList.add('selected');
+  }
+  else {
+    el.classList.remove('selected');
+  }
+  return false;
+}
+
+
+/**
+ * Floats the metadata section on the LHS of issue/source detail pages.
+ * It assumes that the metadata <div> has id 'meta-float' and its outer
+ * container has id 'issuemeta'.
+ */
+function TKR_floatMetadata() {
+  var el = $('meta-float');
+  var container = $('issuemeta');
+
+  // Re-position on every scroll event so the metadata stays in view.
+  window.addEventListener('scroll', function() {
+    TKR_floatVertically(el, container);
+  }, false);
+}
+
+/**
+ * Floats the given element vertically within the provided container as user
+ * scrolls up or down the page. It adjusts the width and padding of the parent
+ * element since it sets the 'position' style of the target element to 'fixed'.
+ * @param {Element} el The HTML element to float.
+ * @param {Element} container The container HTML element.
+ */
+function TKR_floatVertically(el, container) {
+  var elBounds = nodeBounds(el);
+  var containerBounds = nodeBounds(container);
+  var scrollTop = GetScrollTop(window);
+
+  // Freeze the element's width once so switching to position:fixed does
+  // not let it reflow to a different width.
+  if (!el.style.width) {
+    el.style.width = elBounds.w + 'px';
+  }
+
+  // Float only while the viewport is inside the container and the element
+  // fits on screen.
+  // NOTE(review): container.style.top is a string (often ''), so the '+'
+  // below is string concatenation, not addition -- this looks like a bug;
+  // confirm whether containerBounds.y was intended before changing it.
+  if ((scrollTop > containerBounds.y) &&
+      (scrollTop - containerBounds.y + elBounds.h <=
+       container.style.top + containerBounds.h) &&
+      (GetWindowHeight(window) > elBounds.h)) {
+    if (el.style.position != 'fixed') {
+      el.style.position = 'fixed';
+      el.style.top = '2px';
+      if (BR_IsIE()) {
+        // IE: reserve the horizontal space via parent padding.
+        el.parentNode.style.paddingRight = elBounds.w + 2 + 'px';
+      } else {
+        el.parentNode.style.minWidth = elBounds.w + 'px';
+      }
+    }
+    el.style.left = (6 - GetScrollLeft(window)) + 'px';
+  } else if (el.style.position != 'relative') {
+    // Back above the floating zone: restore normal document flow.
+    el.style.position = 'relative';
+    el.style.left = '0';
+    if (BR_IsIE()) {
+      el.parentNode.style.paddingRight = '';
+    }
+  }
+}
+
+/**
+ * XMLHTTP object used to remember display preferences on the server.
+ */
+var TKR_prefsXmlHttp = undefined;
+
+
+/**
+ * Contact the server to remember a PeopleDetail display preference.
+ * @param {string} projectName The name of the current project.
+ * @param {number} expand Zero or one for the widget hide/show state.
+ * @param {string} token The security token.
+ */
+function TKR_setPeoplePrefs(projectName, expand, token) {
+ TKR_prefsXmlHttp = XH_XmlHttpCreate()
+ var prefsURL = '/p/' + projectName + '/people/detailPrefs.do';
+ var data = 'perms_expanded=' + expand + '&token=' + token;
+ XH_XmlHttpPOST(
+ TKR_prefsXmlHttp, prefsURL, data, TKR_prefsFeedCallback);
+}
+
+
+/**
+ * The communication with the server has made some progress.  The response
+ * is intentionally ignored: saving the preference is fire-and-forget.
+ */
+function TKR_prefsFeedCallback() {
+  // Actually, we don't use the return value at all, so do nothing.
+}
diff --git a/appengine/monorail/static/js/tracker/tracker-editing.js b/appengine/monorail/static/js/tracker/tracker-editing.js
new file mode 100644
index 0000000..47634fd
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-editing.js
@@ -0,0 +1,1389 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS functions that support various issue editing
+ * features of Monorail. These editing features include: selecting
+ * issues on the issue list page, adding attachments, expanding and
+ * collapsing the issue editing form, and starring issues.
+ *
+ * Browser compatibility: IE6, IE7, FF1.0+, Safari.
+ */
+
+
+/**
+ * Here are some string constants that are used repeatedly in the code.
+ */
+var TKR_SELECTED_CLASS = 'selected';
+var TKR_UNDEF_CLASS = 'undef';
+var TKR_NOVEL_CLASS = 'novel';
+var TKR_EXCL_CONFICT_CLASS = 'exclconflict';
+var TKR_QUESTION_MARK_CLASS = 'questionmark';
+var TKR_ATTACHPROMPT_ID = 'attachprompt';
+var TKR_ATTACHAFILE_ID = 'attachafile';
+var TKR_ATTACHMAXSIZE_ID = 'attachmaxsize';
+var TKR_CURRENT_TEMPLATE_INDEX_ID = 'current_template_index';
+var TKR_PROMPT_MEMBERS_ONLY_CHECKBOX_ID = 'members_only_checkbox';
+var TKR_PROMPT_SUMMARY_EDITOR_ID = 'summary_editor';
+var TKR_PROMPT_SUMMARY_MUST_BE_EDITED_CHECKBOX_ID =
+ 'summary_must_be_edited_checkbox';
+var TKR_PROMPT_CONTENT_EDITOR_ID = 'content_editor';
+var TKR_PROMPT_STATUS_EDITOR_ID = 'status_editor';
+var TKR_PROMPT_OWNER_EDITOR_ID = 'owner_editor';
+var TKR_PROMPT_ADMIN_NAMES_EDITOR_ID = 'admin_names_editor';
+var TKR_OWNER_DEFAULTS_TO_MEMBER_CHECKBOX_ID =
+ 'owner_defaults_to_member_checkbox';
+var TKR_OWNER_DEFAULTS_TO_MEMBER_AREA_ID =
+ 'owner_defaults_to_member_area';
+var TKR_COMPONENT_REQUIRED_CHECKBOX_ID =
+ 'component_required_checkbox';
+var TKR_PROMPT_COMPONENTS_EDITOR_ID = 'components_editor';
+var TKR_FIELD_EDITOR_ID_PREFIX = 'custom_';
+var TKR_PROMPT_LABELS_EDITOR_ID_PREFIX = 'label';
+var TKR_CONFIRMAREA_ID = 'confirmarea';
+var TKR_DISCARD_YOUR_CHANGES = 'Discard your changes?';
+// Note, users cannot enter '<'.
+var TKR_DELETED_PROMPT_NAME = '<DELETED>';
+// Display a warning if labels contain the following prefixes.
+// The following list is the same as tracker_constants.RESERVED_PREFIXES except
+// for the 'hotlist' prefix. 'hotlist' will be added when it becomes a full
+// feature and when projects that use 'Hotlist-*' labels are transitioned off.
+var TKR_LABEL_RESERVED_PREFIXES = [
+ 'id', 'project', 'reporter', 'summary', 'status', 'owner', 'cc',
+ 'attachments', 'attachment', 'component', 'opened', 'closed',
+ 'modified', 'is', 'has', 'blockedon', 'blocking', 'blocked', 'mergedinto',
+ 'stars', 'starredby', 'description', 'comment', 'commentby', 'label',
+ 'rank', 'explicit_status', 'derived_status', 'explicit_owner',
+ 'derived_owner', 'explicit_cc', 'derived_cc', 'explicit_label',
+ 'derived_label', 'last_comment_by', 'exact_component',
+ 'explicit_component', 'derived_component']
+
+/**
+ * Select all the issues on the issue list page.
+ * Thin wrapper around TKR_selectIssues(true).
+ */
+function TKR_selectAllIssues() {
+  TKR_selectIssues(true);
+}
+
+
+/**
+ * Function to deselect all the issues on the issue list page.
+ * Thin wrapper around TKR_selectIssues(false).
+ */
+function TKR_selectNoneIssues() {
+  TKR_selectIssues(false);
+}
+
+
+/**
+ * Function to select or deselect all the issues on the issue list page.
+ * @param {boolean} checked True means select issues, False means deselect.
+ */
+function TKR_selectIssues(checked) {
+ var table = $('resultstable');
+ for (var r = 0; r < table.rows.length; ++r) {
+ var row = table.rows[r];
+ var firstCell = row.cells[0];
+ if (firstCell.tagName == 'TD') {
+ for (var e = 0; e < firstCell.childNodes.length; ++e) {
+ var element = firstCell.childNodes[e];
+ if (element.tagName == 'INPUT' && element.type == 'checkbox') {
+ element.checked = checked ? 'checked' : '';
+ if (checked) {
+ row.classList.add(TKR_SELECTED_CLASS);
+ } else {
+ row.classList.remove(TKR_SELECTED_CLASS);
+ }
+ }
+ }
+ }
+ }
+}
+
+
+/**
+ * The ID number to append to the next dynamically created file upload field.
+ */
+var TKR_nextFileID = 1;
+
+
+/**
+ * Function to dynamically create a new attachment upload field add
+ * insert it into the page DOM.
+ * @param {string} id The id of the parent HTML element.
+ */
+function TKR_addAttachmentFields(id) {
+ if (TKR_nextFileID >= 16) {
+ return;
+ }
+ var el = $(id);
+ el.style.marginTop = '4px';
+ var div = document.createElement('div');
+ var id = 'file' + TKR_nextFileID;
+ var label = TKR_createChild(div, 'label', null, null, 'Attach file:');
+ label.setAttribute('for', id);
+ var input = TKR_createChild(
+ div, 'input', null, id, null, "width:auto;margin-left:17px");
+ input.setAttribute('type', 'file');
+ input.name = id;
+ var removeLink = TKR_createChild(
+ div, 'a', null, null, 'Remove', "font-size:x-small");
+ removeLink.href = '#';
+ removeLink.addEventListener('click', function(event) {
+ console.log(arguments);
+ var target = event.target;
+ $('attachafile').focus();
+ target.parentNode.parentNode.removeChild(target.parentNode);
+ event.preventDefault();
+ });
+ el.appendChild(div);
+ el.querySelector('input').focus();
+ ++TKR_nextFileID;
+ if (TKR_nextFileID < 16) {
+ $(TKR_ATTACHAFILE_ID).innerText = 'Attach another file';
+ } else {
+ $(TKR_ATTACHPROMPT_ID).style.display = 'none';
+ }
+ $(TKR_ATTACHMAXSIZE_ID).style.display = '';
+}
+
+
+/**
+ * Function to display the form so that the user can update an issue.
+ * Reveals the hidden edit section, scrolls to it, then focuses the
+ * comment text area.
+ */
+function TKR_openIssueUpdateForm() {
+  TKR_showHidden($('makechangesarea'));
+  TKR_goToAnchor('makechanges');
+  TKR_forceProperTableWidth();
+  // Delay focusing until after the section has been revealed and scrolled.
+  window.setTimeout(
+      function () { document.getElementById('addCommentTextArea').focus(); },
+      100);
+}
+
+
+/**
+ * The index of the template that is currently selected for editing
+ * on the administration page for issues.
+ */
+var TKR_currentTemplateIndex = 0;
+
+
+/**
+ * Array of field IDs that are defined in the current project, set by call to setFieldIDs().
+ */
+var TKR_fieldIDs = [];
+
+
+/**
+ * Record the custom field IDs defined in the current project.
+ * @param {Array} fieldIDs List of custom field ID numbers.
+ */
+function TKR_setFieldIDs(fieldIDs) {
+  TKR_fieldIDs = fieldIDs;
+}
+
+
+/**
+ * This function displays the appropriate template text in a text field.
+ * It is called after the user has selected one template to view/edit.
+ * Copies each hidden per-template field into the shared editor widgets.
+ * @param {Element} widget The list widget containing the list of templates.
+ */
+function TKR_selectTemplate(widget) {
+  TKR_showHidden($('edit_panel'));
+  TKR_currentTemplateIndex = widget.value;
+  $(TKR_CURRENT_TEMPLATE_INDEX_ID).value = TKR_currentTemplateIndex;
+
+  var content_editor = $(TKR_PROMPT_CONTENT_EDITOR_ID);
+  TKR_makeDefined(content_editor);
+
+  // 'disabled' is assigned straight to element.disabled below; any
+  // non-empty string is truthy, so 'disabled' disables and '' enables.
+  var can_edit = $('can_edit_' + TKR_currentTemplateIndex).value == 'yes';
+  var disabled = can_edit ? '' : 'disabled';
+
+  $(TKR_PROMPT_MEMBERS_ONLY_CHECKBOX_ID).disabled = disabled;
+  $(TKR_PROMPT_MEMBERS_ONLY_CHECKBOX_ID).checked = $(
+      'members_only_' + TKR_currentTemplateIndex).value == 'yes';
+  $(TKR_PROMPT_SUMMARY_EDITOR_ID).disabled = disabled;
+  $(TKR_PROMPT_SUMMARY_EDITOR_ID).value = $(
+      'summary_' + TKR_currentTemplateIndex).value;
+  $(TKR_PROMPT_SUMMARY_MUST_BE_EDITED_CHECKBOX_ID).disabled = disabled;
+  $(TKR_PROMPT_SUMMARY_MUST_BE_EDITED_CHECKBOX_ID).checked = $(
+      'summary_must_be_edited_' + TKR_currentTemplateIndex).value == 'yes';
+  content_editor.disabled = disabled;
+  content_editor.value = $('content_' + TKR_currentTemplateIndex).value;
+  $(TKR_PROMPT_STATUS_EDITOR_ID).disabled = disabled;
+  $(TKR_PROMPT_STATUS_EDITOR_ID).value = $(
+      'status_' + TKR_currentTemplateIndex).value;
+  $(TKR_PROMPT_OWNER_EDITOR_ID).disabled = disabled;
+  $(TKR_PROMPT_OWNER_EDITOR_ID).value = $(
+      'owner_' + TKR_currentTemplateIndex).value;
+  $(TKR_OWNER_DEFAULTS_TO_MEMBER_CHECKBOX_ID).disabled = disabled;
+  $(TKR_OWNER_DEFAULTS_TO_MEMBER_CHECKBOX_ID).checked = $(
+      'owner_defaults_to_member_' + TKR_currentTemplateIndex).value == 'yes';
+  $(TKR_COMPONENT_REQUIRED_CHECKBOX_ID).disabled = disabled;
+  $(TKR_COMPONENT_REQUIRED_CHECKBOX_ID).checked = $(
+      'component_required_' + TKR_currentTemplateIndex).value == 'yes';
+  $(TKR_OWNER_DEFAULTS_TO_MEMBER_AREA_ID).disabled = disabled;
+  // Only show the "defaults to member" area when no explicit owner is set.
+  $(TKR_OWNER_DEFAULTS_TO_MEMBER_AREA_ID).style.display =
+      $(TKR_PROMPT_OWNER_EDITOR_ID).value ? 'none' : '';
+  $(TKR_PROMPT_COMPONENTS_EDITOR_ID).disabled = disabled;
+  $(TKR_PROMPT_COMPONENTS_EDITOR_ID).value = $(
+      'components_' + TKR_currentTemplateIndex).value;
+
+  // Blank out all custom field editors first, then fill them in during the next loop.
+  for (var i = 0; i < TKR_fieldIDs.length; i++) {
+    var fieldEditor = $(TKR_FIELD_EDITOR_ID_PREFIX + TKR_fieldIDs[i]);
+    var holder = $('field_value_' + TKR_currentTemplateIndex + '_' + TKR_fieldIDs[i]);
+    if (fieldEditor) {
+      fieldEditor.disabled = disabled;
+      fieldEditor.value = holder ? holder.value : '';
+    }
+  }
+
+  // Copy each stored label for this template into the label editor fields.
+  var i = 0;
+  while ($(TKR_PROMPT_LABELS_EDITOR_ID_PREFIX + i)) {
+    $(TKR_PROMPT_LABELS_EDITOR_ID_PREFIX + i).disabled = disabled;
+    $(TKR_PROMPT_LABELS_EDITOR_ID_PREFIX + i).value =
+        $('label_' + TKR_currentTemplateIndex + '_' + i).value;
+    i++;
+  }
+
+  $(TKR_PROMPT_ADMIN_NAMES_EDITOR_ID).disabled = disabled;
+  $(TKR_PROMPT_ADMIN_NAMES_EDITOR_ID).value = $(
+      'admin_names_' + TKR_currentTemplateIndex).value;
+
+  // Count surviving templates so the delete button can be gated below.
+  var numNonDeletedTemplates = 0;
+  for (var i = 0; i < TKR_templateNames.length; i++) {
+    if (TKR_templateNames[i] != TKR_DELETED_PROMPT_NAME) {
+      numNonDeletedTemplates++;
+    }
+  }
+  if ($('delbtn')) {
+    if (numNonDeletedTemplates > 1) {
+      $('delbtn').disabled='';
+    }
+    else { // Don't allow the last template to be deleted.
+      $('delbtn').disabled='disabled';
+    }
+  }
+}
+
+
+var TKR_templateNames = []; // Exported in tracker-onload.js
+
+
+/**
+ * Create a new issue template and add the needed form fields to the DOM.
+ */
+function TKR_newTemplate() {
+ var newIndex = TKR_templateNames.length;
+ var templateName = prompt('Name of new template?', '');
+ templateName = templateName.replace(
+ /[&<>"]/g, '' // " help emacs highlighing
+ );
+ if (!templateName) return;
+
+ for (var i = 0; i < TKR_templateNames.length; i++) {
+ if (templateName == TKR_templateNames[i]) {
+ alert('Please choose a unique name.')
+ return;
+ }
+ }
+
+ TKR_addTemplateHiddenFields(newIndex, templateName);
+ TKR_templateNames.push(templateName);
+
+ var templateOption = TKR_createChild(
+ $('template_menu'), 'option', null, null, templateName);
+ templateOption.value = newIndex;
+ templateOption.selected = 'selected';
+
+ var developerOption = TKR_createChild(
+ $('default_template_for_developers'), 'option', null, null, templateName);
+ developerOption.value = templateName;
+
+ var userOption = TKR_createChild(
+ $('default_template_for_users'), 'option', null, null, templateName);
+ userOption.value = templateName;
+
+ TKR_selectTemplate($('template_menu'));
+}
+
+
+/**
+ * Private function to append HTML for new hidden form fields
+ * for a new issue template to the issue admin form.
+ */
+function TKR_addTemplateHiddenFields(templateIndex, templateName) {
+ var parentEl = $('adminTemplates');
+ TKR_appendHiddenField(
+ parentEl, 'template_id_' + templateIndex, 'template_id_' + templateIndex, '0');
+ TKR_appendHiddenField(parentEl, 'name_' + templateIndex,
+ 'name_' + templateIndex, templateName);
+ TKR_appendHiddenField(parentEl, 'members_only_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'summary_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'summary_must_be_edited_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'content_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'status_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'owner_' + templateIndex);
+ TKR_appendHiddenField(
+ parentEl, 'owner_defaults_to_member_' + templateIndex,
+ 'owner_defaults_to_member_' + templateIndex, 'yes');
+ TKR_appendHiddenField(parentEl, 'component_required_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'components_' + templateIndex);
+ TKR_appendHiddenField(parentEl, 'members_only_' + templateIndex);
+
+ var i = 0;
+ while ($('label_0_' + i)) {
+ TKR_appendHiddenField(parentEl, 'label_' + templateIndex,
+ 'label_' + templateIndex + '_' + i);
+ i++;
+ }
+
+ for (var i = 0; i < TKR_fieldIDs.length; i++) {
+ var fieldId = 'field_value_' + templateIndex + '_' + TKR_fieldIDs[i];
+ TKR_appendHiddenField(parentEl, fieldId, fieldId);
+ }
+
+ TKR_appendHiddenField(parentEl, 'admin_names_' + templateIndex);
+ TKR_appendHiddenField(
+ parentEl, 'can_edit_' + templateIndex, 'can_edit_' + templateIndex,
+ 'yes');
+}
+
+
+/**
+ * Utility function to create one hidden <input> form field as a child
+ * of the given parent element.  (The old comment wrongly described this
+ * as appending string parts to an array.)
+ * @param {Element} parentEl Element to append the hidden input to.
+ * @param {string} name Form-field name submitted to the server.
+ * @param {string} opt_id Optional element id; defaults to name.
+ * @param {string} opt_value Optional initial value; defaults to ''.
+ */
+function TKR_appendHiddenField(parentEl, name, opt_id, opt_value) {
+  var input = TKR_createChild(parentEl, 'input', null, opt_id || name);
+  input.setAttribute('type', 'hidden');
+  input.name = name;
+  input.value = opt_value || '';
+}
+
+
+/**
+ * Delete the currently selected issue template, and mark its hidden
+ * form field as deleted so that it will be ignored when submitted.
+ */
+function TKR_deleteTemplate() {
+  // Mark the current template name as deleted.
+  TKR_templateNames.splice(
+      TKR_currentTemplateIndex, 1, TKR_DELETED_PROMPT_NAME);
+  $('name_' + TKR_currentTemplateIndex).value = TKR_DELETED_PROMPT_NAME;
+  _toggleHidden($('edit_panel'));
+  $('delbtn').disabled = 'disabled';
+  // Rebuild all three template menus so the deleted entry disappears.
+  TKR_rebuildTemplateMenu();
+  TKR_rebuildDefaultTemplateMenu('default_template_for_developers');
+  TKR_rebuildDefaultTemplateMenu('default_template_for_users');
+}
+
+/**
+ * Utility function to rebuild the template menu on the issue admin page.
+ */
+function TKR_rebuildTemplateMenu() {
+ var parentEl = $('template_menu');
+ while (parentEl.childNodes.length)
+ parentEl.removeChild(parentEl.childNodes[0]);
+ for (var i = 0; i < TKR_templateNames.length; i++) {
+ if (TKR_templateNames[i] != TKR_DELETED_PROMPT_NAME) {
+ var option = TKR_createChild(
+ parentEl, 'option', null, null, TKR_templateNames[i]);
+ option.value = i;
+ }
+ }
+}
+
+
+/**
+ * Utility function to rebuild a default template drop-down.
+ */
+function TKR_rebuildDefaultTemplateMenu(menuID) {
+ var defaultTemplateName = $(menuID).value;
+ var parentEl = $(menuID);
+ while (parentEl.childNodes.length)
+ parentEl.removeChild(parentEl.childNodes[0]);
+ for (var i = 0; i < TKR_templateNames.length; i++) {
+ if (TKR_templateNames[i] != TKR_DELETED_PROMPT_NAME) {
+ var option = TKR_createChild(
+ parentEl, 'option', null, null, TKR_templateNames[i]);
+ option.values = TKR_templateNames[i];
+ if (defaultTemplateName == TKR_templateNames[i]) {
+ option.setAttribute('selected', 'selected');
+ }
+ }
+ }
+}
+
+
+/**
+ * Change the issue template to the specified one by reloading the entry
+ * page with a template query parameter.
+ * TODO(jrobbins): move to an AJAX implementation that would not reload page.
+ *
+ * @param {string} projectName The name of the current project.
+ * @param {string} templateName The name of the template to switch to.
+ */
+function TKR_switchTemplate(projectName, templateName) {
+  var ok = true;
+  if (TKR_isDirty) {
+    // Warn before discarding text the user already typed.
+    ok = confirm('Switching to a different template will lose the text you entered.');
+  }
+  if (ok) {
+    window.location = '/p/' + projectName +
+        '/issues/entry?template=' + templateName;
+  }
+}
+
+/**
+ * Function to remove a CSS class and initial tip from a text widget.
+ * Some text fields or text areas display gray textual tips to help the user
+ * make use of those widgets. When the user focuses on the field, the tip
+ * disappears and is made ready for user input (in the normal text color).
+ * @param {Element} el The form field that had the gray text tip.
+ */
+function TKR_makeDefined(el) {
+ if (el.classList.contains(TKR_UNDEF_CLASS)) {
+ el.classList.remove(TKR_UNDEF_CLASS);
+ el.value = '';
+ }
+}
+
+
+/**
+ * Save the contents of the visible issue template editors back into the
+ * hidden per-template text fields for later submission.
+ * Called when the user has edited the text of a issue template.
+ * NOTE(review): TKR_currentTemplateIndex is a string once set from the
+ * template menu, so '0' passes this guard; only the initial numeric 0
+ * (nothing selected yet) is skipped -- confirm that is intended.
+ */
+function TKR_saveTemplate() {
+  if (TKR_currentTemplateIndex) {
+    $('members_only_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_MEMBERS_ONLY_CHECKBOX_ID).checked ? 'yes' : '';
+    $('summary_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_SUMMARY_EDITOR_ID).value;
+    $('summary_must_be_edited_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_SUMMARY_MUST_BE_EDITED_CHECKBOX_ID).checked ? 'yes' : '';
+    $('content_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_CONTENT_EDITOR_ID).value;
+    $('status_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_STATUS_EDITOR_ID).value;
+    $('owner_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_OWNER_EDITOR_ID).value;
+    $('owner_defaults_to_member_' + TKR_currentTemplateIndex).value =
+        $(TKR_OWNER_DEFAULTS_TO_MEMBER_CHECKBOX_ID).checked ? 'yes' : '';
+    $('component_required_' + TKR_currentTemplateIndex).value =
+        $(TKR_COMPONENT_REQUIRED_CHECKBOX_ID).checked ? 'yes' : '';
+    $('components_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_COMPONENTS_EDITOR_ID).value;
+    // Keep the "defaults to member" area visibility in sync with owner.
+    $(TKR_OWNER_DEFAULTS_TO_MEMBER_AREA_ID).style.display =
+        $(TKR_PROMPT_OWNER_EDITOR_ID).value ? 'none' : '';
+
+    // Persist each custom field editor's value into its hidden holder.
+    for (var i = 0; i < TKR_fieldIDs.length; i++) {
+      var fieldID = TKR_fieldIDs[i];
+      var fieldEditor = $(TKR_FIELD_EDITOR_ID_PREFIX + fieldID);
+      if (fieldEditor) {
+        _saveFieldValue(fieldID, fieldEditor.value);
+      }
+    }
+
+    // Persist each label editor's value into its hidden label slot.
+    var i = 0;
+    while ($('label_' + TKR_currentTemplateIndex + '_' + i)) {
+      $('label_' + TKR_currentTemplateIndex + '_' + i).value =
+          $(TKR_PROMPT_LABELS_EDITOR_ID_PREFIX + i).value;
+      i++;
+    }
+
+    $('admin_names_' + TKR_currentTemplateIndex).value =
+        $(TKR_PROMPT_ADMIN_NAMES_EDITOR_ID).value;
+  }
+}
+
+
+function _saveFieldValue(fieldID, val) {
+ var fieldValId = 'field_value_' + TKR_currentTemplateIndex + '_' + fieldID;
+ $(fieldValId).value = val;
+}
+
+
+/**
+ * This flag indicates that the user had made some edit to some form
+ * field on the page. Only a few specific pages actually update and use
+ * this flag.
+ */
+var TKR_isDirty = false;
+
+
+/**
+ * This function is called when the user edits a form field on a page that
+ * should offer the user the option to discard his/her edits.
+ * Sets the page-level flag consulted by the TKR_confirmDiscard* functions.
+ */
+function TKR_dirty() {
+  TKR_isDirty = true;
+}
+
+
+/**
+ * The user has clicked the 'Discard' button on the issue update form.
+ * If the form has been edited, ask if he/she is sure about discarding
+ * before then navigating to the given URL. This can go up to some
+ * other page, or reload the current page with a fresh form.
+ * @param {string} nextUrl The page to show after discarding.
+ */
+function TKR_confirmDiscardUpdate(nextUrl) {
+ if (!TKR_isDirty || confirm(TKR_DISCARD_YOUR_CHANGES)) {
+ document.location = nextUrl;
+ }
+}
+
+
+/**
+ * The user has clicked the 'Discard' button on the issue entry form.
+ * If the form has been edited, this function asks if he/she is sure about
+ * discarding before doing it.
+ * @param {Element} discardButton The 'Discard' button (currently unused).
+ */
+function TKR_confirmDiscardEntry(discardButton) {
+  if (!TKR_isDirty || confirm(TKR_DISCARD_YOUR_CHANGES)) {
+    TKR_go('list');
+  }
+}
+
+
/**
 * Normally, we show 2 rows of label editing fields when updating an issue.
 * However, if the issue has more than that many labels already, we make sure
 * to show them all.
 *
 * Rows 2 through 8 hold label inputs 3 through 23, three per row: row r
 * holds labels 3*(r-1) .. 3*(r-1)+2.  A row is revealed (and its "add a
 * row" link hidden) when any of its label fields already has a value.
 */
function TKR_exposeExistingLabelFields() {
  for (var row = 2; row <= 8; ++row) {
    var firstLabel = (row - 1) * 3;
    var rowHasLabel = ($('label' + firstLabel).value ||
                       $('label' + (firstLabel + 1)).value ||
                       $('label' + (firstLabel + 2)).value);
    // Some pages lack the addrow links; the original code only guarded
    // row 2 against that.  Apply the same existence guard to every row.
    if (rowHasLabel && $('addrow' + (row - 1))) {
      _showID('LF_row' + row);
      _hideID('addrow' + (row - 1));
    }
  }
}
+
+
/**
 * Flag to indicate when the user has not yet caused any input events.
 * We use this to clear the placeholder in the new issue summary field
 * exactly once.  Flipped to false by TKR_clearOnFirstEvent().
 */
var TKR_firstEvent = true;
+
+
/**
 * This is called in response to almost any user input event on the
 * issue entry page.  On the very first such event it clears the
 * placeholder text in the new issue summary field (keeping any
 * classification prefixes).
 */
function TKR_clearOnFirstEvent() {
  if (!TKR_firstEvent) {
    return;
  }
  TKR_firstEvent = false;
  var summaryField = $('summary');
  summaryField.value = TKR_keepJustSummaryPrefixes(summaryField.value);
}
+
/**
 * Clear the summary, except for any prefixes of the form "[bracketed text]"
 * or "keyword:".  If there were any, make sure the result ends with a
 * trailing space.  This is useful to people who like to encode issue
 * classification info in the summary line.
 * @param {string} s The current summary text.
 * @return {string} The retained prefix (with trailing space), or ''.
 */
function TKR_keepJustSummaryPrefixes(s) {
  var matches = s.match(/^(\[[^\]]+\])+|^(\S+:\s*)+/);
  if (!matches) {
    return '';
  }
  var prefix = matches[0];
  return prefix.endsWith(' ') ? prefix : prefix + ' ';
}
+
/**
 * An array of label <input>s that start with reserved prefixes.
 * Maintained by TKR_confirmNovelLabel().
 */
var TKR_labelsWithReservedPrefixes = [];

/**
 * An array of label <input>s that are equal to reserved words.
 * Maintained by TKR_confirmNovelLabel().
 */
var TKR_labelsConflictingWithReserved = [];

/**
 * An array of novel issue status values entered by the user on the
 * current page. 'Novel' means that they are not well known and are
 * likely to be typos. Note that this list will always have zero or
 * one element, but a list is used for consistency with the list of
 * novel labels.
 */
var TKR_novelStatuses = [];

/**
 * An array of novel issue label values entered by the user on the
 * current page. 'Novel' means that they are not well known and are
 * likely to be typos.
 */
var TKR_novelLabels = [];

/**
 * A boolean that indicates whether the entered owner value is valid or not.
 * Set by TKR_prepOwnerField() and reported by TKR_updateConfirmBeforeSubmit().
 */
var TKR_invalidOwner = false;
+
/**
 * The user has changed the issue status text field.  Checks whether the
 * value is a well-known status; if not, it is highlighted as a potential
 * typo and recorded so the confirmation message can mention it.
 * @param {Element} textField The issue status text field.
 * @returns Always returns true to indicate that the browser should
 *     continue to process the user input event normally.
 */
function TKR_confirmNovelStatus(textField) {
  var v = textField.value.trim().toLowerCase();
  // Non-empty and not matching any well-known status (case-insensitive).
  var isNovel = v !== '' && !TKR_statusWords.some(function(wk) {
    return v == wk.toLowerCase();
  });
  var idx = TKR_novelStatuses.indexOf(textField);
  if (isNovel) {
    if (idx == -1) {
      TKR_novelStatuses.push(textField);
    }
    textField.classList.add(TKR_NOVEL_CLASS);
  } else {
    if (idx != -1) {
      TKR_novelStatuses.splice(idx, 1);
    }
    textField.classList.remove(TKR_NOVEL_CLASS);
  }
  TKR_updateConfirmBeforeSubmit();
  return true;
}
+
+
/**
 * The user has changed a issue label text field. This function checks
 * whether it is a well-known label value. If not, highlight it as a
 * potential typo.  Also tracks labels that start with or equal a
 * reserved word so a warning can be shown.
 * @param {Element} textField An issue label text field.
 * @returns Always returns true to indicate that the browser should
 *     continue to process the user input event normally.
 *
 * TODO(jrobbins): code duplication with function above.
 */
function TKR_confirmNovelLabel(textField) {
  var v = textField.value.trim().toLowerCase();
  if (v.search('-') == 0) {
    v = v.substr(1);
  }
  var isNovel = (v !== '');
  if (v.indexOf('?') > -1) {
    isNovel = false;  // We don't count labels that the user must edit anyway.
  }
  var wellKnown = TKR_labelWords;
  for (var i = 0; i < wellKnown.length && isNovel; ++i) {
    if (v == wellKnown[i].toLowerCase()) {
      isNovel = false;
    }
  }

  // Add textField to trackedFields iff matches() holds for some reserved
  // prefix; otherwise remove it.  (Replaces two copy-pasted loops that
  // each redeclared `var textFieldWarningDisplayed` in the same scope.)
  function syncReservedWarning(trackedFields, matches) {
    var warningDisplayed = trackedFields.indexOf(textField) != -1;
    var found = false;
    for (var j = 0; j < TKR_LABEL_RESERVED_PREFIXES.length; ++j) {
      if (matches(TKR_LABEL_RESERVED_PREFIXES[j])) {
        if (!warningDisplayed) {
          trackedFields.push(textField);
        }
        found = true;
        break;
      }
    }
    if (!found && warningDisplayed) {
      trackedFields.splice(trackedFields.indexOf(textField), 1);
    }
  }
  syncReservedWarning(TKR_labelsWithReservedPrefixes, function(prefix) {
    return v.startsWith(prefix + '-');
  });
  syncReservedWarning(TKR_labelsConflictingWithReserved, function(prefix) {
    return v == prefix;
  });

  if (isNovel) {
    if (TKR_novelLabels.indexOf(textField) == -1) {
      TKR_novelLabels.push(textField);
    }
    textField.classList.add(TKR_NOVEL_CLASS);
  } else {
    if (TKR_novelLabels.indexOf(textField) != -1) {
      TKR_novelLabels.splice(TKR_novelLabels.indexOf(textField), 1);
    }
    textField.classList.remove(TKR_NOVEL_CLASS);
  }
  TKR_updateConfirmBeforeSubmit();
  return true;
}
+
/**
 * Dictionary { prefix:[textField,...], ...} for all the prefixes of any
 * text that has been entered into any label field. This is used to find
 * duplicate labels and multiple labels that share an single exclusive
 * prefix (e.g., Priority).  Maintained by TKR_updateUsedPrefixes().
 */
var TKR_usedPrefixes = {};

/**
 * This is a prefix to the HTML ids of each label editing field.
 * It varied by page, so it is set in the HTML page. Needed to initialize
 * our validation across label input text fields.
 */
var TKR_labelFieldIDPrefix = '';
+
/**
 * Initialize the set of all used labels on forms that allow users to
 * enter issue labels. Some labels are supplied in the HTML page
 * itself, and we do not want to offer duplicates of those.  Walks
 * label0, label1, ... until a field is missing.
 */
function TKR_prepLabelAC() {
  for (var i = 0; $('label' + i); i++) {
    TKR_validateLabel($('label' + i));
  }
}
+
/**
 * Reads the owner field and determines if the current value is a valid
 * member, recording the result in the TKR_invalidOwner global.
 * @param {Array} validOwners List of {name: ...} objects for members.
 */
function TKR_prepOwnerField(validOwners) {
  if (!$('owneredit')) {
    return;
  }
  // BUG FIX: currentOwner and invalidOwner were implicit globals (missing
  // `var`), and the empty-owner early return never updated TKR_invalidOwner,
  // leaving a stale value from a previous call.
  var currentOwner = $('owneredit').value;
  if (currentOwner == "") {
    // Empty owner field is not an invalid owner.
    TKR_invalidOwner = false;
    return;
  }
  var invalidOwner = true;
  for (var i = 0; i < validOwners.length; i++) {
    if (currentOwner == validOwners[i].name) {
      invalidOwner = false;
      break;
    }
  }
  TKR_invalidOwner = invalidOwner;
}
+
/**
 * Keep track of which label prefixes have been used so that
 * we can not offer the same label twice and so that we can highlight
 * multiple labels that share an exclusive prefix.
 * @param {Element} textField A label input whose value changed.
 */
function TKR_updateUsedPrefixes(textField) {
  // Forget the field's previous prefix, if any.
  if (textField.oldPrefix != undefined) {
    DeleteArrayElement(TKR_usedPrefixes[textField.oldPrefix], textField);
  }

  var prefix = textField.value.split('-')[0].toLowerCase();
  var fieldsForPrefix = TKR_usedPrefixes[prefix];
  if (fieldsForPrefix == undefined) {
    TKR_usedPrefixes[prefix] = [textField];
  } else {
    fieldsForPrefix.push(textField);
  }
  textField.oldPrefix = prefix;
}
+
/**
 * Go through all the label entry fields in our prefix-oriented
 * data structure and highlight any that are part of a conflict
 * (multiple labels with the same exclusive prefix). Unhighlight
 * any label text entry fields that are not in conflict. And, display
 * a warning message to encourage the user to correct the conflict.
 */
function TKR_highlightExclusiveLabelPrefixConflicts() {
  var conflicts = [];
  for (var prefix in TKR_usedPrefixes) {
    var textFields = TKR_usedPrefixes[prefix];
    if (textFields == undefined || textFields.length == 0) {
      // Garbage-collect prefixes that no longer have any fields.
      delete TKR_usedPrefixes[prefix];
      continue;
    }
    var inConflict = (textFields.length > 1 &&
                      FindInArray(TKR_exclPrefixes, prefix) != -1);
    if (inConflict) {
      conflicts.push(prefix);
    }
    for (var i = 0; i < textFields.length; i++) {
      if (inConflict) {
        textFields[i].classList.add(TKR_EXCL_CONFICT_CLASS);
      } else {
        textFields[i].classList.remove(TKR_EXCL_CONFICT_CLASS);
      }
    }
  }
  if (conflicts.length > 0) {
    var severity = TKR_restrict_to_known ? 'Error' : 'Warning';
    var confirm_area = $(TKR_CONFIRMAREA_ID);
    if (confirm_area) {
      $('confirmmsg').innerText = (severity +
          ': Multiple values for: ' + conflicts.join(', '));
      confirm_area.className = TKR_EXCL_CONFICT_CLASS;
      confirm_area.style.display = '';
    }
  }
}
+
/**
 * Keeps track of any label text fields that have a value that
 * is bad enough to prevent submission of the form. When this
 * list is non-empty, the submit button gets disabled.
 * Maintained by TKR_highlightQuestionMarks().
 */
var TKR_labelsBlockingSubmit = [];
+
/**
 * Look for any "?" characters in the label and, if found,
 * make the label text red, prevent form submission, and
 * display on-page help to tell the user to edit those labels.
 * @param {Element} textField An issue label text field.
 */
function TKR_highlightQuestionMarks(textField) {
  var hasQuestionMark = textField.value.indexOf('?') > -1;
  var tfIndex = TKR_labelsBlockingSubmit.indexOf(textField);
  if (hasQuestionMark && tfIndex == -1) {
    TKR_labelsBlockingSubmit.push(textField);
    textField.classList.add(TKR_QUESTION_MARK_CLASS);
  } else if (!hasQuestionMark && tfIndex > -1) {
    TKR_labelsBlockingSubmit.splice(tfIndex, 1);
    textField.classList.remove(TKR_QUESTION_MARK_CLASS);
  }

  var block_submit_msg = $('blocksubmitmsg');
  if (block_submit_msg) {
    block_submit_msg.innerText = TKR_labelsBlockingSubmit.length > 0 ?
        'You must edit labels that contain "?".' : '';
  }
}
+
/**
 * The user has edited a label. Display a warning if the label is
 * not a well known label, or if there are multiple labels that
 * share an exclusive prefix.
 * @param {Element} textField An issue label text field.
 */
function TKR_validateLabel(textField) {
  if (textField == undefined) return;
  TKR_confirmNovelLabel(textField);  // Highlight likely typos.
  TKR_updateUsedPrefixes(textField);  // Record this field's prefix.
  TKR_highlightExclusiveLabelPrefixConflicts();
  TKR_highlightQuestionMarks(textField);  // Block submit on "?" labels.
}
+
+// TODO(jrobbins): what about typos in owner and cc list?
+
/**
 * If there are any novel status or label values, we display a message
 * that explains that to the user so that they can catch any typos before
 * submitting them. If the project is restricting input to only the
 * well-known statuses and labels, then show these as an error instead.
 * In that case, on-page JS will prevent submission.
 */
function TKR_updateConfirmBeforeSubmit() {
  var severity = TKR_restrict_to_known ? 'Error' : 'Note';
  var novelWord = TKR_restrict_to_known ? 'undefined' : 'uncommon';
  var labels = TKR_novelLabels.map(function(item) {
    return item.value;
  });
  var hasNovelStatus = TKR_novelStatuses.length > 0;
  var hasNovelLabels = TKR_novelLabels.length > 0;

  var msg = '';
  if (hasNovelStatus && hasNovelLabels) {
    // TODO: i18n
    msg = severity + ': You are using an ' + novelWord + ' status and ' + novelWord + ' label(s): ' + labels.join(', ') + '.';
  } else if (hasNovelStatus) {
    msg = severity + ': You are using an ' + novelWord + ' status value.';
  } else if (hasNovelLabels) {
    msg = severity + ': You are using ' + novelWord + ' label(s): ' + labels.join(', ') + '.';
  }

  TKR_labelsWithReservedPrefixes.forEach(function(field) {
    msg += "\nNote: The label " + field.value +
        " starts with a reserved word. This is not recommended.";
  });
  TKR_labelsConflictingWithReserved.forEach(function(field) {
    msg += "\nNote: The label " + field.value +
        " conflicts with a reserved word. This is not recommended.";
  });
  // Display the owner is no longer a member note only if an owner error is
  // not already shown on the page.
  if (TKR_invalidOwner && !$('ownererror')) {
    msg += "\nNote: Current owner is no longer a project member.";
  }

  var confirm_area = $(TKR_CONFIRMAREA_ID);
  if (confirm_area) {
    $('confirmmsg').innerText = msg;
    if (msg != '') {
      confirm_area.className = TKR_NOVEL_CLASS;
      confirm_area.style.display = '';
    } else {
      confirm_area.style.display = 'none';
    }
  }
}
+
+
/**
 * The user has selected a command from the 'Actions...' menu
 * on the issue list. This function checks the selected value and carry
 * out the requested action, then resets the menu.
 * @param {Element} actionsMenu The 'Actions...' <select> form element.
 */
function TKR_handleListActions(actionsMenu) {
  var action = actionsMenu.value;
  if (action == 'bulk') {
    TKR_HandleBulkEdit();
  } else if (action == 'colspec') {
    TKR_closeAllPopups(actionsMenu);
    _showID('columnspec');
  } else if (action == 'flagspam') {
    TKR_flagSpam(true);
  } else if (action == 'unflagspam') {
    TKR_flagSpam(false);
  }
  actionsMenu.value = 'moreactions';
}
+
+
/**
 * The user picked an item from the 'More actions...' menu on the issue
 * detail page: delete (with confirmation), or reveal the move/copy form.
 */
function TKR_handleDetailActions() {
  var moreActions = $('more_actions');
  var copyFragment = $('copy_issue_form_fragment');
  var moveFragment = $('move_issue_form_fragment');

  switch (moreActions.value) {
    case 'delete':
      copyFragment.style.display = 'none';
      moveFragment.style.display = 'none';
      var ok = confirm(
          'Normally, you should just close issues by setting their status ' +
          'to a closed value.\n' +
          'Are you sure you want to delete this issue?');
      if (ok) {
        $('delete_form').submit();
        return;
      }
      break;  // Not confirmed: fall through to reset the menu.
    case 'move':
      moveFragment.style.display = '';
      copyFragment.style.display = 'none';
      return;
    case 'copy':
      copyFragment.style.display = '';
      moveFragment.style.display = 'none';
      return;
  }

  // If no action was taken, reset the dropdown to the 'More actions...' item.
  moreActions.value = '0';
}
+
/**
 * The user has selected the "Flag as spam..." menu item.  Collects the
 * checked issues, confirms, and submits the bulk spam form.
 * @param {boolean} isSpam True to flag, false to un-flag.
 */
function TKR_flagSpam(isSpam) {
  var selectedIDs = [];
  for (var i = 0; i < issueRefs.length; i++) {
    var localID = issueRefs[i]['id'];
    var checkbox = document.getElementById('cb_' + localID);
    if (checkbox && checkbox.checked) {
      selectedIDs.push(localID);
    }
  }
  if (selectedIDs.length == 0) {
    alert('Please select some issues to flag as spam');
    return;
  }
  var verb = isSpam ? 'Flag' : 'Un-flag';
  if (!confirm(verb + ' all selected issues as spam?')) {
    return;
  }
  $('bulk_spam_ids').value = selectedIDs.join(',');
  $('bulk_spam_value').value = isSpam;

  $('bulk-action-loading').style.visibility = 'visible';
  $('bulkspam').submit();
}
+
/**
 * The user has selected the "Bulk Edit..." menu item. Go to a page that
 * offers the ability to edit all selected issues.
 */
// TODO(jrobbins): cross-project bulk edit
function TKR_HandleBulkEdit() {
  var selectedIDs = [];
  for (var i = 0; i < issueRefs.length; i++) {
    var localID = issueRefs[i]['id'];
    var checkbox = document.getElementById('cb_' + localID);
    if (checkbox && checkbox.checked) {
      selectedIDs.push(localID);
    }
  }
  if (selectedIDs.length == 0) {
    alert('Please select some issues to edit');
    return;
  }
  TKR_go('bulkedit?ids=' + selectedIDs.join(',') + _ctxArgs);
}
+
+
/**
 * Array of original labels on the served page, so that we can notice
 * when the user submits a form that has any Restrict-* labels removed.
 */
var TKR_allOrigLabels = [];
+
+
/**
 * Prevent users from easily entering "+1" or "me too" comments:
 * detect them and point the user at the star icon instead.
 * @return {boolean} False when the comment looks like noise.
 */
function TKR_checkPlusOne() {
  var comment = $('addCommentTextArea').value;
  var instructions = (
      '\nPlease use the star icon instead.\n' +
      'Stars show your interest without annoying other users.');
  var plusOneRe = new RegExp('^\\s*[-+]+[0-9]+\\s*.{0,30}$', 'm');
  if (plusOneRe.test(comment) && comment.length < 150) {
    alert('This looks like a "+1" comment.' + instructions);
    return false;
  }
  var meTooRe = new RegExp('^\\s*me too.{0,30}$', 'i');
  if (meTooRe.test(comment)) {
    alert('This looks like a "me too" comment.' + instructions);
    return false;
  }
  return true;
}
+
+
/**
 * If the user removes Restrict-* labels, ask them if they are sure
 * (or refuse outright when removal is not permitted).
 * @param {boolean} prevent_restriction_removal When true, removal is
 *     simply not allowed.
 * @return {boolean} True when the form submission may proceed.
 */
function TKR_checkUnrestrict(prevent_restriction_removal) {
  var removedRestrictions = [];

  TKR_allOrigLabels.forEach(function(origLabel) {
    if (origLabel.indexOf('Restrict-') != 0) {
      return;  // Only restriction labels matter here.
    }
    var stillPresent = false;
    for (var j = 0; $('label' + j); j++) {
      if ($('label' + j).value == origLabel) {
        stillPresent = true;
        break;
      }
    }
    if (!stillPresent) {
      removedRestrictions.push(origLabel);
    }
  });

  if (removedRestrictions.length == 0) {
    return true;
  }

  if (prevent_restriction_removal) {
    alert('You may not remove restriction labels.');
    return false;
  }

  return confirm(
      'You are removing these restrictions:\n    ' +
      removedRestrictions.join('\n    ') +
      '\nThis may allow more people to access this issue.' +
      '\nAre you sure?');
}
+
+
/**
 * Add a column to a list view by updating the colspec form element and
 * submitting an invisible <form> to load a new page that includes the column.
 * @param {string} colname The name of the column to start showing.
 */
function TKR_addColumn(colname) {
  var colspec = TKR_getColspecElement();
  colspec.value += ' ' + colname;
  $('colspecform').submit();
}
+
+
/**
 * Allow members to shift-click to select multiple issues. This keeps
 * track of the last row that the user clicked a checkbox on, so a
 * shift-click can select the whole range up to it.
 */
var TKR_lastSelectedRow = undefined;
+
+
/**
 * Return true if an event had the shift-key pressed.
 * @param {Event} evt The mouse click event.
 */
function TKR_hasShiftKey(evt) {
  evt = evt || window.event || '';
  if (!evt) {
    return false;
  }
  if (evt.modifiers) {
    // Legacy event model (old Netscape-style modifiers bitmask).
    return evt.modifiers & Event.SHIFT_MASK;
  }
  return evt.shiftKey;
}
+
+
/**
 * Select one row: check the checkbox and use highlight color.
 * @param {Element} row the row containing the checkbox that the user clicked.
 * @param {boolean} checked True if the user checked the box.
 */
function TKR_rangeSelectRow(row, checked) {
  if (!row) {
    return;
  }
  if (checked) {
    row.classList.add('selected');
  } else {
    row.classList.remove('selected');
  }

  // Walk siblings to find the first child element with the given tag.
  function firstChildWithTag(parent, tagName) {
    var node = parent.firstChild;
    while (node && node.tagName != tagName) {
      node = node.nextSibling;
    }
    return node;
  }

  var td = firstChildWithTag(row, 'TD');
  if (!td) {
    return;
  }
  var checkbox = firstChildWithTag(td, 'INPUT');
  if (!checkbox) {
    return;
  }
  checkbox.checked = checked;
}
+
+
/**
 * If the user shift-clicked a checkbox, (un)select a range.
 * @param {Event} evt The mouse click event.
 * @param {Element} el The checkbox that was clicked.
 */
function TKR_checkRangeSelect(evt, el) {
  var clickedRow = el.parentNode.parentNode.rowIndex;
  if (clickedRow == TKR_lastSelectedRow) {
    return;
  }
  if (TKR_hasShiftKey(evt) && TKR_lastSelectedRow != undefined) {
    var rows = $('resultstable').rows;
    var step = clickedRow > TKR_lastSelectedRow ? 1 : -1;
    for (var i = TKR_lastSelectedRow; i != clickedRow; i += step) {
      TKR_rangeSelectRow(rows[i], el.checked);
    }
  }
  TKR_lastSelectedRow = clickedRow;
}
+
+
/**
 * Make a link to a given issue that includes context parameters that allow
 * the user to see the same list columns, sorting, query, and pagination state
 * if he/she ever navigates up to the list again.
 * @param {{issue_url: string}} issueRef The dict with info about an issue,
 *     including a url to the issue detail page.
 */
function TKR_makeIssueLink(issueRef) {
  return ('/p/' + issueRef['project_name'] +
          '/issues/detail?id=' + issueRef['id'] +
          _ctxArgs);
}
+
+
/**
 * Hide or show a list column in the case where we already have the
 * data for that column on the page.
 * @param {number} colIndex index of the column that is being shown or hidden.
 */
function TKR_toggleColumnUpdate(colIndex) {
  var toggledName = _allColumnNames[colIndex];
  var remainingCols = TKR_getColspecElement().value.split(' ')
      .filter(function(col) {
        return toggledName != col.toLowerCase();
      });

  var newSpec = remainingCols.join(' ');
  TKR_getColspecElement().value = newSpec;
  TKR_getSearchColspecElement().value = newSpec;
  TKR_toggleColumn('hide_col_' + colIndex);
}
+
+
/**
 * Convert a column into a groupby clause by removing it from the column spec
 * and adding it to the groupby spec, then reloading the page.
 * @param {number} colIndex index of the column that is being shown or hidden.
 */
function TKR_addGroupBy(colIndex) {
  var colName = _allColumnNames[colIndex];

  // Keep every non-empty spec word that is not the grouped column.
  var keepSpec = function(spec) {
    return spec && colName != spec.toLowerCase();
  };

  var remainingCols = TKR_getColspecElement().value.split(' ').filter(keepSpec);
  TKR_getColspecElement().value = remainingCols.join(' ');
  TKR_getSearchColspecElement().value = remainingCols.join(' ');

  var groupSpec = $('groupbyspec');
  var groupings = groupSpec.value.split(' ').filter(keepSpec);
  groupings.push(colName);
  groupSpec.value = groupings.join(' ');
  $('colspecform').submit();
}
+
+
/**
 * Add a multi-valued custom field editing widget, inserted just before
 * the element that the user clicked, followed by an 'X' delete button.
 * @param {Element} el The clicked element; new nodes go before it.
 * @param {string|number} field_id ID of the custom field definition.
 * @param {string} field_type One of 'str', 'user', or 'int'.
 * @param {number} opt_validate_1 For 'int' fields, the minimum value.
 * @param {number} opt_validate_2 For 'int' fields, the maximum value.
 */
function TKR_addMultiFieldValueWidget(
    el, field_id, field_type, opt_validate_1, opt_validate_2) {
  var widget = document.createElement('INPUT');
  widget.name = 'custom_' + field_id;
  if (field_type == 'str') {
    widget.size = 90;
  }
  if (field_type == 'user') {
    // BUG FIX: the element .style property is read-only; assigning a string
    // to it ('width:12em') is non-portable.  Set the CSS property directly.
    widget.style.width = '12em';
  }
  if (field_type == 'int') {
    widget.style.textAlign = 'right';
    widget.style.width = '12em';
    widget.type = 'number';
    widget.min = opt_validate_1;
    widget.max = opt_validate_2;
  }
  el.parentNode.insertBefore(widget, el);

  var del_button = document.createElement('U');
  del_button.onclick = function(event) {
    // NOTE(review): presumably an alias of TKR_removeMultiFieldValueWidget
    // defined elsewhere in the page -- confirm.
    _removeMultiFieldValueWidget(event.target);
  };
  del_button.innerText = 'X';
  el.parentNode.insertBefore(del_button, el);
}
+
+
/**
 * Remove one multi-valued custom field widget: the <INPUT> that precedes
 * the clicked 'X' button, and then the 'X' button itself.
 * @param {Element} el The 'X' delete button that was clicked.
 */
function TKR_removeMultiFieldValueWidget(el) {
  var widget = el.previousSibling;
  while (widget && widget.tagName != 'INPUT') {
    widget = widget.previousSibling;
  }
  if (widget) {
    el.parentNode.removeChild(widget);
  }
  el.parentNode.removeChild(el);  // the X itself
}
+
+
/**
 * Trim trailing commas and spaces off <INPUT type="email" multiple> fields
 * before submitting the form.
 */
function TKR_trimCommas() {
  var fieldIDs = ['memberccedit', 'memberenter'];
  for (var i = 0; i < fieldIDs.length; i++) {
    var field = $(fieldIDs[i]);
    if (field) {
      field.value = field.value.replace(/,\s*$/, '');
    }
  }
}
diff --git a/appengine/monorail/static/js/tracker/tracker-fields.js b/appengine/monorail/static/js/tracker/tracker-fields.js
new file mode 100644
index 0000000..cf91e6e
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-fields.js
@@ -0,0 +1,86 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
/**
 * This file contains JS code for editing fields and field definitions.
 */

// XMLHttpRequest object for the in-flight field-name check request;
// inspected by TKR_fieldNameCallback when its state changes.
var TKR_fieldNameXmlHttp;
+
+
/**
 * Function that communicates with the server to validate a proposed
 * custom field name for the current project.
 * @param {string} projectName Current project name.
 * @param {string} fieldName The proposed field name.
 * @param {string} token security token.
 */
function TKR_checkFieldNameOnServer(projectName, fieldName, token) {
  var url = ['/p/', projectName, '/fields/checkName',
             '?field=', encodeURIComponent(fieldName),
             '&token=', token].join('');
  TKR_fieldNameXmlHttp = XH_XmlHttpCreate();
  XH_XmlHttpGET(TKR_fieldNameXmlHttp, url, TKR_fieldNameCallback);
}
+
/**
 * The communication with the server has made some progress. If it is
 * done (readyState 4) and succeeded (HTTP 200), process the response.
 */
function TKR_fieldNameCallback() {
  var xhr = TKR_fieldNameXmlHttp;
  if (xhr.readyState == 4 && xhr.status == 200) {
    TKR_gotFieldNameFeed(xhr);
  }
}
+
+
/**
 * Function that evaluates the server response, sets the error message,
 * and — when the name matches a predefined enum — fills in the choices
 * and locks the field type to enum.
 * @param {object} xhr AJAX response with JSON text.
 */
function TKR_gotFieldNameFeed(xhr) {
  var json_data;
  try {
    json_data = CS_parseJSON(xhr);
  } catch (e) {
    return;  // Malformed response: leave the page untouched.
  }
  var errorMessage = json_data['error_message'];
  $('fieldnamefeedback').innerText = errorMessage;

  var choices = json_data['choices'];
  if (choices.length > 0) {
    var choicesLines = choices.map(function(choice) {
      return choice['name'] + ' = ' + choice['doc'];
    });
    $('choices').innerText = choicesLines.join('\n');
    $('field_type').value = 'enum_type';
    $('choices_row').style.display = '';
    enableOtherTypeOptions(true);
  } else {
    enableOtherTypeOptions(false);
  }

  $('submit_btn').disabled = errorMessage ? 'disabled' : '';
}
+
+
/**
 * Enable or disable every field-type <OPTION> other than 'enum_type'.
 * Note the inverted sense: passing true DISABLES the other options (used
 * when the field name matches a predefined enum), passing false re-enables
 * them.  The previous parameter name 'disabled' vs. the function name
 * 'enable...' made this easy to misread.
 * @param {boolean} disableOthers True to disable non-enum type options.
 */
function enableOtherTypeOptions(disableOthers) {
  var type_option_el = $('field_type').firstChild;
  while (type_option_el) {
    if (type_option_el.tagName == 'OPTION' &&
        type_option_el.value != 'enum_type') {
      type_option_el.disabled = disableOthers ? 'disabled' : '';
    }
    type_option_el = type_option_el.nextSibling;
  }
}
diff --git a/appengine/monorail/static/js/tracker/tracker-keystrokes.js b/appengine/monorail/static/js/tracker/tracker-keystrokes.js
new file mode 100644
index 0000000..c12a908
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-keystrokes.js
@@ -0,0 +1,405 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS functions that implement keystroke accelerators
+ * for Monorail.
+ */
+
+/**
+ * Array of HTML elements where the kibbles cursor can be. E.g.,
+ * the TR elements of an issue list, or the TR's for comments on an issue.
+ * Populated by TKR_setupKibblesCursorStops().
+ */
+var TKR_cursorStops;
+
+/**
+ * Integer index into TKR_cursorStops of the currently selected cursor
+ * stop, or undefined if nothing has been selected yet.
+ * Maintained by TKR_updateCursor().
+ */
+var TKR_selected = undefined;
+
+
+/**
+ * Scroll to the issue search field, set keyboard focus there, and
+ * select all of its text contents. We use <span id="qq"> around
+ * that form field because IE has a broken getElementById that
+ * confuses id with form element names. We do this in a setTimeout()
+ * so that the keystroke that triggers it ('/') will not be typed into
+ * the search box itself.
+ */
+function TKR_focusArtifactSearchField() {
+ var el = TKR_getArtifactSearchField();
+ el.focus(); // forces browser to scroll to make field visible.
+ el.select();
+}
+
+
+/**
+ * Always hide the keystroke help overlay, if it has been loaded.
+ */
+function TKR_closeKeystrokeHelp() {
+ var dialog = document.getElementById('keys_help');
+ if (dialog) {
+ dialog.style.display = 'none';
+ }
+}
+
+/**
+ * Show or hide the keystroke help overlay. If it has not been loaded
+ * yet, make the request to load it.
+ */
+function TKR_toggleKeystrokeHelp() {
+ var dialog = document.getElementById('keys_help');
+ if (dialog) {
+ dialog.style.display = dialog.style.display ? '' : 'none';
+ } else {
+ TKR_buildKeystrokeHelp();
+ }
+}
+
+function TKR_createChild(parentEl, tag, optClassName, optID, optText, optStyle) {
+ var el = document.createElement(tag);
+ if (optClassName) el.classList.add(optClassName);
+ if (optID) el.id = optID;
+ if (optText) el.innerText = optText;
+ if (optStyle) el.setAttribute('style', optStyle);
+ parentEl.appendChild(el);
+ return el;
+}
+
+function TKR_createKeysHelpHeader(row, text) {
+ TKR_createChild(row, 'td');
+ TKR_createChild(row, 'th', null, null, text);
+ return row;
+}
+
+function TKR_createKeysHelpItem(row, key1, key2, doc) {
+ var keyCell = TKR_createChild(row, 'td', 'shortcut');
+ TKR_createChild(keyCell, 'span', 'keystroke', null, key1);
+ if (key2) {
+ keyCell.appendChild(document.createTextNode(' / '));
+ TKR_createChild(keyCell, 'span', 'keystroke', null, key2);
+ }
+ TKR_createChild(keyCell, 'b', null, null, ' :');
+
+ TKR_createChild(row, 'td', null, null, doc);
+ return keyCell;
+}
+
+/**
+ * Build the keystroke help dialog. It is not part of the template because it
+ * not used on the vast majority of pages viewed.
+ */
+function TKR_buildKeystrokeHelp() {
+ var helpArea = document.getElementById('helparea');
+ var dialog = TKR_createChild(
+ helpArea, 'div', 'fullscreen-popup', 'keys_help');
+ var closeX = TKR_createChild(
+ dialog, 'a', null, null, 'Close', 'float:right; font-size:140%');
+ closeX.href = '#';
+ closeX.addEventListener('click', function () {
+ $('keys_help').style.display = 'none';
+ });
+ TKR_createChild(
+ dialog, 'div', null, null, 'Issue tracker keyboard shortcuts',
+ 'font-size: 140%');
+ TKR_createChild(dialog, 'hr');
+
+ var keysTable = TKR_createChild(
+ dialog, 'table', null, null, null, 'width: 100%');
+ var headerRow = TKR_createChild(keysTable, 'tr');
+ TKR_createKeysHelpHeader(headerRow, 'Issue list');
+ TKR_createKeysHelpHeader(headerRow, 'Issue details');
+ TKR_createKeysHelpHeader(headerRow, 'Anywhere')
+ var row1 = TKR_createChild(keysTable, 'tr');
+ TKR_createKeysHelpItem(row1, 'k', 'j', 'up/down in the list');
+ TKR_createKeysHelpItem(row1, 'k', 'j', 'prev/next issue in list');
+ TKR_createKeysHelpItem(row1, '/', null, 'focus on the issue search field');
+ var row2 = TKR_createChild(keysTable, 'tr');
+ TKR_createKeysHelpItem(row2, 'o', '<Enter>', 'open the current issue');
+ TKR_createKeysHelpItem(row2, 'u', null, 'up to issue list');
+ TKR_createKeysHelpItem(row2, 'c', null, 'compose a new issue');
+ var row3 = TKR_createChild(keysTable, 'tr');
+ TKR_createKeysHelpItem(row3, 'Shift-O', null, 'open issue in new tab');
+ TKR_createKeysHelpItem(row3, 'p', 'n', 'prev/next comment');
+ TKR_createKeysHelpItem(row3, 's', null, 'star the current issue');
+ var row4 = TKR_createChild(keysTable, 'tr');
+ TKR_createKeysHelpItem(row4, 'x', null, 'select the current issue');
+ TKR_createKeysHelpItem(row4, 'r', null, 'add comment & make changes');
+ TKR_createKeysHelpItem(row4, '?', null, 'show this help dialog');
+
+ var footer = TKR_createChild(
+ dialog, 'div', null, null, null,
+ 'font-weight:normal; margin-top: 3em;');
+ TKR_createChild(footer, 'hr');
+ TKR_createChild(footer, 'div', null, null,
+ ('Note: Only signed in users can star issues or add comments, ' +
+ 'and only project members can select issues for bulk edits.'));
+}
+
+
+/**
+ * Register keystrokes that apply to all pages in the current component.
+ * E.g., keystrokes that should work on every page under the "Issues" tab.
+ * @param {string} listUrl Rooted URL of the artifact list.
+ * @param {string} entryUrl Rooted URL of the artifact entry page.
+ * @param {string} currentPageType One of 'list', 'entry', or 'detail'.
+ */
+function TKR_setupKibblesComponentKeys(listUrl, entryUrl, currentPageType) {
+  // '/' focuses the search field; deferred so the keystroke itself is not
+  // typed into the search box.
+  kibbles.keys.addKeyPressListener(
+      '/',
+      function() {
+        window.setTimeout(TKR_focusArtifactSearchField, 10);
+      });
+  // 'c' (compose new issue) is pointless on the entry page itself.
+  if (currentPageType != 'entry') {
+    kibbles.keys.addKeyPressListener(
+        'c', function() { TKR_go(entryUrl); });
+  }
+  // 'u' (up to issue list) is pointless on the list page itself.
+  if (currentPageType != 'list') {
+    kibbles.keys.addKeyPressListener(
+        'u', function() { TKR_go(listUrl); });
+  }
+  kibbles.keys.addKeyPressListener('?', TKR_toggleKeystrokeHelp);
+
+  kibbles.keys.addKeyPressListener('ESC', TKR_closeKeystrokeHelp);
+}
+
+
+/**
+ * On the artifact list page, go to the artifact at the kibbles cursor.
+ * Does nothing if no cursor stop is currently selected.
+ * @param {number} linkCellIndex Row child that is expected to hold a link.
+ * @param {boolean} newWindow If true, open the artifact in a new tab/window.
+ */
+function TKR_openArtifactAtCursor(linkCellIndex, newWindow) {
+  if (TKR_selected >= 0 && TKR_selected < TKR_cursorStops.length) {
+    var cell = TKR_cursorStops[TKR_selected].children[linkCellIndex];
+    var anchor = cell.children[0];
+    if (anchor) {
+      TKR_go(anchor.getAttribute('href'), newWindow);
+    }
+  }
+}
+
+
+/**
+ * On the artifact list page, toggle the checkbox for the artifact at
+ * the kibbles cursor.  Does nothing if no cursor stop is selected.
+ * @param {number} cbCellIndex row child that is expected to hold a checkbox.
+ */
+function TKR_selectArtifactAtCursor(cbCellIndex) {
+  if (TKR_selected >= 0 && TKR_selected < TKR_cursorStops.length) {
+    var cell = TKR_cursorStops[TKR_selected].children[cbCellIndex];
+    // The checkbox may not be the first node in the cell, so scan siblings
+    // until an INPUT element is found.
+    var cb = cell.firstChild;
+    while (cb && cb.tagName != 'INPUT') {
+      cb = cb.nextSibling;
+    }
+    if (cb) {
+      cb.checked = cb.checked ? '' : 'checked';
+      TKR_highlightRow(cb);
+    }
+  }
+}
+
+/**
+ * On the artifact list page, toggle the star for the artifact at
+ * the kibbles cursor.  Does nothing if no cursor stop is selected.
+ * @param {number} cbCellIndex row child that is expected to hold a checkbox
+ *     and star widget.
+ * @param {string} set_star_token The security token.
+ */
+function TKR_toggleStarArtifactAtCursor(cbCellIndex, set_star_token) {
+  if (TKR_selected >= 0 && TKR_selected < TKR_cursorStops.length) {
+    var cell = TKR_cursorStops[TKR_selected].children[cbCellIndex];
+    // The star widget may not be the first node in the cell, so scan
+    // siblings until an anchor element is found.
+    var starIcon = cell.firstChild;
+    while (starIcon && starIcon.tagName != 'A') {
+      starIcon = starIcon.nextSibling;
+    }
+    if (starIcon) {
+      // NOTE(review): assumes issueRefs is parallel to TKR_cursorStops so
+      // that TKR_selected indexes the same issue in both — TODO confirm.
+      _TKR_toggleStar(
+          starIcon, issueRefs[TKR_selected]['project_name'],
+          issueRefs[TKR_selected]['id'], set_star_token);
+    }
+  }
+}
+
+/**
+ * Move the visual cursor highlight from one stop to another, and keep the
+ * TKR_selected index in sync with the newly selected stop (or undefined
+ * when nothing valid is selected).
+ * @param {Object} newStop the cursor stop that the user is selecting now.
+ * @param {Object} formerStop the old cursor stop, if any.
+ */
+function TKR_updateCursor(newStop, formerStop) {
+  TKR_selected = undefined;
+  if (formerStop) {
+    var formerEl = formerStop.element;
+    formerEl.classList.remove('cursor_on');
+    formerEl.classList.add('cursor_off');
+  }
+  if (newStop && newStop.element) {
+    var newEl = newStop.element;
+    newEl.classList.remove('cursor_off');
+    newEl.classList.add('cursor_on');
+    TKR_selected = newStop.index;
+  }
+}
+
+
+/**
+ * Walk part of the page DOM to find elements that should be kibbles
+ * cursor stops. E.g., the rows of the issue list results table.
+ * @return {Array} an array of html elements.
+ */
+function TKR_findCursorRows() {
+ var rows = [];
+ var cursorarea = document.getElementById('cursorarea');
+ TKR_accumulateCursorRows(cursorarea, rows);
+ return rows;
+}
+
+
+/**
+ * Recusrively walk part of the page DOM to find elements that should
+ * be kibbles cursor stops. E.g., the rows of the issue list results
+ * table. The cursor stops are appended to the given rows array.
+ * @param {Element} parent html element to start on.
+ * @param {Array} rows array of html TR or DIV elements, each cursor stop will
+ * be added to this array.
+ */
+function TKR_accumulateCursorRows(parent, rows) {
+ for (var i = 0; i < parent.childNodes.length; i++) {
+ var elem = parent.childNodes[i];
+ var name = elem.tagName;
+ if (name && (name == 'TR' || name == 'DIV')) {
+ if (elem.className.indexOf('cursor') >= 0) {
+ elem.cursorIndex = rows.length;
+ rows.push(elem);
+ }
+ }
+ TKR_accumulateCursorRows(elem, rows);
+ }
+}
+
+
+/**
+ * Initialize kibbles cursors stops for the current page.
+ * @param {boolean} selectFirstStop True if the first stop should be
+ *     selected before the user presses any keys.
+ *     NOTE(review): this parameter is currently unused in this function.
+ */
+function TKR_setupKibblesCursorStops(selectFirstStop) {
+  kibbles.skipper.addStopListener(
+      kibbles.skipper.LISTENER_TYPE.PRE, TKR_updateCursor);
+
+  // Presumably keeps ~50px of padding above the selected stop when
+  // scrolling (an option can be a static value, or a callback) —
+  // confirm against the kibbles.skipper documentation.
+  kibbles.skipper.setOption('padding_top', 50);
+
+  // Likewise ~50px of padding below the selected stop.
+  kibbles.skipper.setOption('padding_bottom', 50);
+
+  // register our stops with skipper
+  TKR_cursorStops = TKR_findCursorRows();
+  for (var i = 0; i < TKR_cursorStops.length; i++) {
+    var element = TKR_cursorStops[i];
+    kibbles.skipper.append(element);
+
+    // A row that arrives pre-marked cursor_on becomes the current stop.
+    if (element.className.indexOf('cursor_on') >= 0) {
+      kibbles.skipper.setCurrentStop(i);
+    }
+  }
+}
+
+
+/**
+ * Initialize kibbles keystrokes for an artifact entry page.  Only the
+ * component-wide keys apply there; no cursor stops are registered.
+ * @param {string} listUrl Rooted URL of the artifact list.
+ * @param {string} entryUrl Rooted URL of the artifact entry page.
+ */
+function TKR_setupKibblesOnEntryPage(listUrl, entryUrl) {
+  TKR_setupKibblesComponentKeys(listUrl, entryUrl, 'entry');
+}
+
+
+/**
+ * Initialize kibbles keystrokes for an artifact list page.
+ * @param {string} listUrl Rooted URL of the artifact list.
+ * @param {string} entryUrl Rooted URL of the artifact entry page.
+ * @param {string} projectName Name of the current project.
+ *     NOTE(review): currently unused in this function.
+ * @param {number} linkCellIndex table column that is expected to
+ *     link to individual artifacts.
+ * @param {number} opt_checkboxCellIndex table column that is expected
+ *     to contain a selection checkbox.
+ * @param {string} set_star_token The security token.
+ */
+function TKR_setupKibblesOnListPage(
+    listUrl, entryUrl, projectName, linkCellIndex,
+    opt_checkboxCellIndex, set_star_token) {
+  TKR_setupKibblesCursorStops(true);
+
+  kibbles.skipper.addFwdKey('j');
+  kibbles.skipper.addRevKey('k');
+
+  // Selection and starring keys only make sense when the table has a
+  // checkbox/star column.
+  if (opt_checkboxCellIndex != undefined) {
+    var cbCellIndex = opt_checkboxCellIndex;
+    kibbles.keys.addKeyPressListener(
+        'x', function() { TKR_selectArtifactAtCursor(cbCellIndex); });
+    kibbles.keys.addKeyPressListener(
+        's',
+        function() {
+          TKR_toggleStarArtifactAtCursor(cbCellIndex, set_star_token);
+        });
+  }
+  kibbles.keys.addKeyPressListener(
+      'o', function() { TKR_openArtifactAtCursor(linkCellIndex, false); });
+  kibbles.keys.addKeyPressListener(
+      'O', function() { TKR_openArtifactAtCursor(linkCellIndex, true); });
+  // newWindow is omitted here, so <Enter> opens in the current window.
+  kibbles.keys.addKeyPressListener(
+      'enter', function() { TKR_openArtifactAtCursor(linkCellIndex); });
+
+  TKR_setupKibblesComponentKeys(listUrl, entryUrl, 'list');
+}
+
+
+/**
+ * Initialize kibbles keystrokes for an artifact detail page.
+ * @param {string} listUrl Rooted URL of the artifact list.
+ * @param {string} entryUrl Rooted URL of the artifact entry page.
+ * @param {string} prevUrl Rooted URL of previous artifact in list.
+ * @param {string} nextUrl Rooted URL of next artifact in list.
+ * @param {string} projectName name of the current project.
+ * @param {number} localId Issue id within the project; passed through to
+ *     the star-toggle call.
+ * @param {boolean} userCanComment True if the user may add a comment.
+ * @param {boolean} userCanStar True if the user may add a star.
+ * @param {string} set_star_token The security token.
+ */
+function TKR_setupKibblesOnDetailPage(
+    listUrl, entryUrl, prevUrl, nextUrl, projectName, localId,
+    userCanComment, userCanStar, set_star_token) {
+  TKR_setupKibblesCursorStops(false);
+  // n/p skip between cursor stops (comments); k/j jump between issues.
+  kibbles.skipper.addFwdKey('n');
+  kibbles.skipper.addRevKey('p');
+  if (prevUrl) {
+    kibbles.keys.addKeyPressListener(
+        'k', function() { TKR_go(prevUrl); });
+  }
+  if (nextUrl) {
+    kibbles.keys.addKeyPressListener(
+        'j', function() { TKR_go(nextUrl); });
+  }
+  if (userCanComment) {
+    // Deferred so the 'r' keystroke is not typed into the comment box.
+    kibbles.keys.addKeyPressListener(
+        'r',
+        function() {
+          window.setTimeout(TKR_openIssueUpdateForm, 10);
+        });
+  }
+  if (userCanStar) {
+    kibbles.keys.addKeyPressListener(
+        's',
+        function() {
+          var star = document.getElementById('star');
+          TKR_toggleStar(star, projectName, localId, set_star_token);
+          TKR_syncStarIcons(star, 'star2');
+        });
+  }
+  TKR_setupKibblesComponentKeys(listUrl, entryUrl, 'detail');
+}
diff --git a/appengine/monorail/static/js/tracker/tracker-nav.js b/appengine/monorail/static/js/tracker/tracker-nav.js
new file mode 100644
index 0000000..5b55f05
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-nav.js
@@ -0,0 +1,181 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS functions that implement various navigation
+ * features of Monorail.
+ */
+
+
+/**
+ * Navigate the browser to the given URL.
+ * @param {string} url The URL of the page to browse.
+ * @param {boolean} newWindow Open a new tab or window.
+ */
+function TKR_go(url, newWindow) {
+ if (newWindow)
+ window.open(url, '_blank');
+ else
+ document.location = url;
+}
+
+
+/**
+ * Tell the browser to scroll to the given anchor on the current page
+ * by setting the URL fragment.
+ * @param {string} anchor Name of the <a name="xxx"> anchor on the page.
+ */
+function TKR_goToAnchor(anchor) {
+  document.location.hash = anchor;
+}
+
+
+/**
+ * Get the user-editable colspec form field. This text field is normally
+ * display:none, but it is shown when the user chooses "Edit columns...".
+ * We need a function to get this element because there are multiple form
+ * fields on the page with name="colspec", and an IE misfeature sets their
+ * id attributes as well, which makes document.getElementById() fail.
+ * @return {Element} user editable colspec form field.
+ */
+function TKR_getColspecElement() {
+ return document.getElementById('colspec_field').firstChild;
+}
+
+
+/**
+ * Get the hidden form field for colspec. This is a type="hidden" input field
+ * that is submitted as part of the artfact search query. We need a
+ * function to get this element because there are multiple form fields on the
+ * page with name="colspec", and an IE misfeature sets their id attributes
+ * as well, which makes document.getElementById() fail.
+ * @return {Element} colspec hidden form field.
+ */
+function TKR_getSearchColspecElement() {
+ return document.getElementById('search_colspec').firstChild;
+}
+
+
+/**
+ * Get the artifact search form field. This is a visible text field where
+ * the user enters a query for issues. This function
+ * is needed because there is also the project search field on the each page,
+ * and it has name="q". An IE misfeature confuses name="..." with id="...".
+ * @return {Element} artifact query form field, or undefined.
+ */
+function TKR_getArtifactSearchField() {
+ var qq = document.getElementById('qq');
+ return qq ? qq.firstChild : undefined;
+}
+
+
+/**
+ * Resize the artifact search box to be bigger when the user has a long
+ * query.  Runs periodically via the setInterval() below; the field only
+ * ever grows, up to MAX_ARTIFACT_SEARCH_FIELD_SIZE characters.
+ */
+var MAX_ARTIFACT_SEARCH_FIELD_SIZE = 75;
+var AUTOSIZE_STEP = 3;
+
+// NOTE(review): "Serch" is a typo, but the name is kept as-is because it
+// is registered with setInterval below and may be referenced elsewhere.
+function TKR_autosizeArtifactSerchField() {
+  var qq = TKR_getArtifactSearchField();
+  if (qq) {
+    var new_size = qq.value.length + AUTOSIZE_STEP;
+    if (new_size > MAX_ARTIFACT_SEARCH_FIELD_SIZE) {
+      new_size = MAX_ARTIFACT_SEARCH_FIELD_SIZE;
+    }
+    // Grow only: never shrink the field while the user is editing.
+    if (new_size > qq.size) {
+      qq.size = new_size;
+    }
+  }
+}
+
+window.setInterval(TKR_autosizeArtifactSerchField, 700);
+
+
+/**
+ * Build a query string fragment for all the common contextual values,
+ * omitting any that still have their default value.  The result starts
+ * with '&' (or is empty), so it can be appended to an existing URL.
+ */
+function TKR_formatContextQueryArgs() {
+  var args = "";
+  var colspec = TKR_getColspecElement().value;
+  if (_ctxCan != 2) args += "&can=" + _ctxCan;
+  if (_ctxQuery != "") args += "&q=" + encodeURIComponent(_ctxQuery);
+  // NOTE(review): unlike the query, sort/groupby/colspec are not
+  // URI-encoded here — confirm their values can never contain '&' or '#'.
+  if (_ctxSortspec != "") args += "&sort=" + _ctxSortspec;
+  if (_ctxGroupBy != "") args += "&groupby=" + _ctxGroupBy;
+  if (colspec != _ctxDefaultColspec) args += "&colspec=" + colspec;
+  if (_ctxStart != 0) args += "&start=" + _ctxStart;
+  if (_ctxNum != _ctxResultsPerPage) args += "&num=" + _ctxNum;
+  return args;
+}
+
+// Fields that are indexed as structured terms, so filters on them use the
+// ':' operator rather than the exact-match '=' operator.
+var _PRETOKENIZED_FIELDS = [
+  'owner', 'reporter', 'cc', 'commentby', 'component'];
+
+/**
+ * The user wants to narrow his/her search results by adding a search term
+ * for the given prefix and value. Reload the issue list page with that
+ * additional search term.
+ * @param {string} prefix Field or label prefix, e.g., "Priority".
+ * @param {string} suffix Field or label value, e.g., "High".
+ */
+function TKR_filterTo(prefix, suffix) {
+ var newQuery = TKR_getArtifactSearchField().value;
+ if (newQuery != '') newQuery += ' ';
+
+ var op = '=';
+ for (var i = 0; i < _PRETOKENIZED_FIELDS.length; i++) {
+ if (prefix == _PRETOKENIZED_FIELDS[i]) {
+ op = ':';
+ break;
+ }
+ }
+
+ newQuery += prefix + op + suffix;
+ var url = 'list?can=' + $('can').value + '&q=' + newQuery;
+ if ($('sort') && $('sort').value) url += '&sort=' + $('sort').value;
+ url += '&colspec=' + TKR_getColspecElement().value;
+ TKR_go(url);
+}
+
+
+/**
+ * The user wants to sort his/her search results by adding a sort spec
+ * for the given column. Reload the issue list page with that
+ * additional sort spec.
+ * @param {string} colname Field or label prefix, e.g., "Priority".
+ * @param {boolean} descending True if the values should be reversed.
+ */
+function TKR_addSort(colname, descending) {
+ var existingSortSpec = '';
+ if ($('sort')) { existingSortSpec = $('sort').value; }
+ var oldSpecs = existingSortSpec.split(/ +/);
+ var sortDirective = colname;
+ if (descending) sortDirective = '-' + colname;
+ var specs = [sortDirective];
+ for (var i = 0; i < oldSpecs.length; i++) {
+ if (oldSpecs[i] != "" && oldSpecs[i] != colname &&
+ oldSpecs[i] != '-' + colname) {
+ specs.push(oldSpecs[i])
+ }
+ }
+
+ var url = ('list?can='+ $('can').value + '&q=' +
+ TKR_getArtifactSearchField().value);
+ url += '&sort=' + specs.join('+');
+ url += '&colspec=' + TKR_getColspecElement().value;
+ TKR_go(url)
+}
+
+/** Convenience function for sorting in ascending order.
+ * @param {string} colname Column to make the primary sort key. */
+function TKR_sortUp(colname) {
+  TKR_addSort(colname, false);
+}
+
+/** Convenience function for sorting in descending order.
+ * @param {string} colname Column to make the primary sort key. */
+function TKR_sortDown(colname) {
+  TKR_addSort(colname, true);
+}
diff --git a/appengine/monorail/static/js/tracker/tracker-onload.js b/appengine/monorail/static/js/tracker/tracker-onload.js
new file mode 100644
index 0000000..608e2b4
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-onload.js
@@ -0,0 +1,174 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains the Monorail onload() function that is called
+ * when each page loads.
+ */
+
+
+
+/**
+ * This code is run on every DIT page load. It registers a handler
+ * for autocomplete on several kinds of text fields, chosen by the
+ * id, name, or class of each text field.
+ */
+function TKR_onload() {
+  _ac_install();
+
+  // Choose an autocomplete store for the focused input.  Checks run in
+  // order and the first match wins; inputs matching nothing get no store
+  // (the callback implicitly returns undefined for them).
+  _ac_register(function (input, event) {
+    if (input.id.startsWith('search')) return TKR_searchStore;
+    if (input.id.startsWith('query_') || input.id.startsWith('predicate_'))
+      return TKR_projectQueryStore;
+    if (input.id.startsWith('cmd')) return TKR_quickEditStore;
+    if (input.id.startsWith('label')) return TKR_labelStore;
+    if (input.id.startsWith('component')) return TKR_componentListStore;
+    if (input.id.startsWith('status')) return TKR_statusStore;
+    if (input.id.startsWith('member')) return TKR_memberListStore;
+    if (input.id == 'admin_names_editor') return TKR_memberListStore;
+    if (input.id.startsWith('owner')) return TKR_ownerStore;
+    if (input.name == 'needs_perm' || input.name == 'grants_perm') {
+      return TKR_customPermissionsStore;
+    }
+    if (input.id == 'owner_editor') return TKR_ownerStore;
+    if (input.className.indexOf('userautocomplete') != -1) {
+      // Custom user fields get a per-field store if one was registered,
+      // otherwise fall back to the generic owner store.
+      var customFieldIDStr = input.name;
+      var uac = TKR_userAutocompleteStores[customFieldIDStr];
+      if (uac) return uac;
+      return TKR_ownerStore;
+    }
+    if (input.className.indexOf('autocomplete') != -1) {
+      return TKR_autoCompleteStore;
+    }
+    if (input.id.startsWith('copy_to') || input.id.startsWith('move_to') ||
+        input.id.startsWith('new_savedquery_projects') ||
+        input.id.startsWith('savedquery_projects')) {
+      return TKR_projectStore;
+    }
+  });
+
+  _PC_Install();
+  // Pick up values that the HTML page assigned to the underscore globals.
+  TKR_allColumnNames = _allColumnNames;
+  TKR_labelFieldIDPrefix = _lfidprefix;
+  TKR_allOrigLabels = _allOrigLabels;
+}
+
+
+// External names for functions that are called directly from HTML.
+// JSCompiler does not rename functions that begin with an underscore.
+// They are not defined with "var" because we want them to be global.
+
+// TODO(jrobbins): the underscore names could be shortened by a
+// cross-file search-and-replace script in our build process.
+
+_selectAllIssues = TKR_selectAllIssues;
+_selectNoneIssues = TKR_selectNoneIssues;
+
+_toggleRows = TKR_toggleRows;
+_toggleColumn = TKR_toggleColumn;
+_toggleColumnUpdate = TKR_toggleColumnUpdate;
+_addGroupBy = TKR_addGroupBy;
+_addcol = TKR_addColumn;
+_checkRangeSelect = TKR_checkRangeSelect;
+_makeIssueLink = TKR_makeIssueLink;
+
+_onload = TKR_onload;
+
+_handleListActions = TKR_handleListActions;
+_handleDetailActions = TKR_handleDetailActions;
+
+_fetchOptions = TKR_fetchOptions;
+_fetchUserProjects = TKR_fetchUserProjects;
+_setACOptions = TKR_setUpAutoCompleteStore;
+_openIssueUpdateForm = TKR_openIssueUpdateForm;
+_addAttachmentFields = TKR_addAttachmentFields;
+
+_acstore = _AC_SimpleStore;
+_accomp = _AC_Completion;
+_acreg = _ac_register;
+
+_formatContextQueryArgs = TKR_formatContextQueryArgs;
+// Context (issue-list) parameters; presumably assigned real values by the
+// page templates, like the HTML-assigned variables below — TODO confirm.
+_ctxArgs = "";
+_ctxCan = undefined;
+_ctxQuery = undefined;
+_ctxSortspec = undefined;
+_ctxGroupBy = undefined;
+_ctxDefaultColspec = undefined;
+_ctxStart = undefined;
+_ctxNum = undefined;
+_ctxResultsPerPage = undefined;
+
+_filterTo = TKR_filterTo;
+_sortUp = TKR_sortUp;
+_sortDown = TKR_sortDown;
+
+_closeAllPopups = TKR_closeAllPopups;
+_closeSubmenus = TKR_closeSubmenus;
+_showRight = TKR_showRight;
+_showBelow = TKR_showBelow;
+_highlightRow = TKR_highlightRow;
+_floatMetadata = TKR_floatMetadata;
+_floatVertically = TKR_floatVertically;
+
+_setFieldIDs = TKR_setFieldIDs;
+_selectTemplate = TKR_selectTemplate;
+_saveTemplate = TKR_saveTemplate;
+_newTemplate = TKR_newTemplate;
+_deleteTemplate = TKR_deleteTemplate;
+_switchTemplate = TKR_switchTemplate;
+_templateNames = TKR_templateNames;
+
+_confirmNovelStatus = TKR_confirmNovelStatus;
+_confirmNovelLabel = TKR_confirmNovelLabel;
+_vallab = TKR_validateLabel;
+_dirty = TKR_dirty;
+_exposeExistingLabelFields = TKR_exposeExistingLabelFields;
+_confirmDiscardEntry = TKR_confirmDiscardEntry;
+_confirmDiscardUpdate = TKR_confirmDiscardUpdate;
+_lfidprefix = undefined;
+_allOrigLabels = undefined;
+_checkPlusOne = TKR_checkPlusOne;
+_checkUnrestrict = TKR_checkUnrestrict;
+
+_clearOnFirstEvent = TKR_clearOnFirstEvent;
+_forceProperTableWidth = TKR_forceProperTableWidth;
+
+_acof = _ac_onfocus;
+_acmo = _ac_mouseover;
+_acse = _ac_select;
+_acrob = _ac_real_onblur;
+
+// Variables that are given values in the HTML file.
+_allColumnNames = [];
+
+/**
+ * Click handler used to dismiss popups when the user clicks elsewhere.
+ * NOTE(review): despite the name, only the autocomplete fake-onblur is
+ * invoked here; nothing issue-preview-specific happens in this function.
+ * @param {Event} e The click event.
+ */
+function closeAutocompleteAndIssuePreview(e) {
+  _ac_fake_onblur(e);
+}
+
+if (BR_hasExcessBlurEvents()) {
+ document.addEventListener('click', closeAutocompleteAndIssuePreview, false);
+}
+// Make the document actually listen for click events, otherwise the
+// event handlers above would never get called.
+if (document.captureEvents) document.captureEvents(Event.CLICK);
+
+_setPeoplePrefs = TKR_setPeoplePrefs
+
+_setupKibblesOnEntryPage = TKR_setupKibblesOnEntryPage;
+_setupKibblesOnListPage = TKR_setupKibblesOnListPage;
+_setupKibblesOnDetailPage = TKR_setupKibblesOnDetailPage;
+
+_checkFieldNameOnServer = TKR_checkFieldNameOnServer;
+_checkLeafName = TKR_checkLeafName;
+
+_addMultiFieldValueWidget = TKR_addMultiFieldValueWidget;
+_removeMultiFieldValueWidget = TKR_removeMultiFieldValueWidget;
+_trimCommas = TKR_trimCommas;
diff --git a/appengine/monorail/static/js/tracker/tracker-util.js b/appengine/monorail/static/js/tracker/tracker-util.js
new file mode 100644
index 0000000..2e2859c
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/tracker-util.js
@@ -0,0 +1,43 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * This file contains JS utilities used by other JS files in Monorail.
+ */
+
+
+/**
+ * Add an indexOf method to all arrays, if this browser's JS implementation
+ * does not already have it.  Matches the ES5 signature, including the
+ * optional starting index, and uses strict equality like the native
+ * method so behavior is consistent across browsers.
+ * @param {Object} item The item to find.
+ * @param {number=} optFromIndex Index to start searching from (default 0;
+ *     negative values count back from the end).
+ * @returns {number} The index of the given item, or -1 if not found.
+ */
+if (Array.prototype.indexOf == undefined) {
+  Array.prototype.indexOf = function(item, optFromIndex) {
+    var start = optFromIndex || 0;
+    if (start < 0) start = Math.max(0, this.length + start);
+    for (var i = start; i < this.length; ++i) {
+      // Strict equality, matching the native implementation.
+      if (this[i] === item) return i;
+    }
+    return -1;
+  };
+}
+
+
+/**
+ * This function works around a FF HTML layout problem. The table
+ * width is somehow rendered at 100% when the table contains a
+ * display:none element; later, when that element is displayed, the
+ * table renders at the correct width. The work-around is to have the
+ * element initially displayed so that the table renders properly,
+ * but then immediately hide the element until it is needed.
+ *
+ * TODO(jrobbins): Find HTML markup that FF can render more
+ * consistently. After that, I can remove this hack.
+ */
+function TKR_forceProperTableWidth() {
+  var e = $('confirmarea');
+  if (e) e.style.display='none';
+}
diff --git a/appengine/monorail/static/js/tracker/trackerac_test.js b/appengine/monorail/static/js/tracker/trackerac_test.js
new file mode 100644
index 0000000..716c04c
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/trackerac_test.js
@@ -0,0 +1,132 @@
+/* Copyright 2016 The Chromium Authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+/**
+ * Canned issueOptions feed used as fixture data by the tests in this file:
+ * open and closed statuses, labels, project members, no exclusive label
+ * prefixes, and non-strict label checking.
+ */
+var feedData = {
+  'open': [{name: 'New', doc: 'Newly reported'},
+           {name: 'Started', doc: 'Work has begun'}],
+  'closed': [{name: 'Fixed', doc: 'Problem was fixed'},
+             {name: 'Invalid', doc: 'Bad issue report'}],
+  'labels': [{name: 'Type-Defect', doc: 'Something is broken'},
+             {name: 'Type-Enhancement', doc: 'It could be better'},
+             {name: 'Priority-High', doc: 'Urgent'},
+             {name: 'Priority-Low', doc: 'Not so urgent'},
+             {name: 'Hot', doc: ''},
+             {name: 'Cold', doc: ''}],
+  'members': [{name: 'jrobbins', doc: ''},
+              {name: 'jrobbins@chromium.org', doc: ''}],
+  'excl_prefixes': [],
+  'strict': false
+};
+
+/** Per-test setup: select the issueOptions autocomplete feed name. */
+function setUp() {
+  TKR_autoCompleteFeedName = 'issueOptions';
+}
+
+/**
+ * The assertEquals method cannot do element-by-element comparisons.
+ * A search of how other teams write JS unit tests turned up this
+ * way to compare arrays: join both with an unlikely separator and
+ * compare the resulting strings.
+ */
+function assertElementsEqual(arrayA, arrayB) {
+  assertEquals(arrayA.join(' ;; '), arrayB.join(' ;; '));
+}
+
+function completionsEqual(strings, completions) {
+ if (strings.length != completions.length) {
+ return false;
+ }
+ for (var i = 0; i < strings.length; i++) {
+ if (strings[i] != completions[i].value) {
+ return false;
+ }
+ }
+ return true;
+}
+
+function assertHasCompletion(s, acStore) {
+ var ch = s.charAt(0).toLowerCase();
+ var firstCharMapArray = acStore.firstCharMap_[ch];
+ assertNotNull(!firstCharMapArray);
+ for (var i = 0; i < firstCharMapArray.length; i++) {
+ if (s == firstCharMapArray[i].value) return;
+ }
+ fail('completion ' + s + ' not found in acStore[' +
+ acStoreToString(acStore) + ']');
+}
+
+function assertHasAllCompletions(stringArray, acStore) {
+ for (var i = 0; i < stringArray.length; i++) {
+ assertHasCompletion(stringArray[i], acStore);
+ }
+}
+
+function acStoreToString(acStore) {
+ var allCompletions = [];
+ for (var ch in acStore.firstCharMap_) {
+ if (acStore.firstCharMap_.hasOwnProperty(ch)) {
+ var firstCharArray = acStore.firstCharMap_[ch];
+ for (var i = 0; i < firstCharArray.length; i++) {
+ allCompletions[firstCharArray[i].value] = true;
+ }
+ }
+ }
+ var parts = [];
+ for (var comp in allCompletions) {
+ if (allCompletions.hasOwnProperty(comp)) {
+ parts.push(comp);
+ }
+ }
+ return parts.join(', ');
+}
+
+/** Exercises TKR_setUpStatusStore with both open and closed statuses. */
+function testSetUpStatusStore() {
+  TKR_setUpStatusStore(feedData.open, feedData.closed);
+  assertElementsEqual(
+      ['New', 'Started', 'Fixed', 'Invalid'],
+      TKR_statusWords);
+  assertHasAllCompletions(
+      ['New', 'Started', 'Fixed', 'Invalid'],
+      TKR_statusStore);
+}
+
+/** Exercises TKR_setUpSearchStore: search operators for statuses, labels,
+ * and members, including their negated ('-' prefixed) forms. */
+function testSetUpSearchStore() {
+  TKR_setUpSearchStore(
+      feedData.labels, feedData.members, feedData.open, feedData.closed);
+  assertHasAllCompletions(
+      ['status:New', 'status:Started', 'status:Fixed', 'status:Invalid',
+       '-status:New', '-status:Started', '-status:Fixed', '-status:Invalid',
+       'Type=Defect', '-Type=Defect', 'Type=Enhancement', '-Type=Enhancement',
+       'label:Hot', 'label:Cold', '-label:Hot', '-label:Cold',
+       'owner:jrobbins', 'cc:jrobbins', '-owner:jrobbins', '-cc:jrobbins',
+       'summary:', 'opened-after:today-1', 'commentby:me', 'reporter:me'],
+      TKR_searchStore);
+}
+
+/** Exercises TKR_setUpQuickEditStore: quick-edit command completions. */
+function testSetUpQuickEditStore() {
+  TKR_setUpQuickEditStore(
+      feedData.labels, feedData.members, feedData.open, feedData.closed);
+  assertHasAllCompletions(
+      ['status=New', 'status=Started', 'status=Fixed', 'status=Invalid',
+       'Type=Defect', 'Type=Enhancement', 'Hot', 'Cold', '-Hot', '-Cold',
+       'owner=jrobbins', 'owner=me', 'cc=jrobbins', 'cc=me', 'cc=-jrobbins',
+       'cc=-me', 'summary=""', 'owner=----'],
+      TKR_quickEditStore);
+}
+
+/** Exercises TKR_setUpLabelStore with the fixture labels. */
+function testSetUpLabelStore() {
+  TKR_setUpLabelStore(feedData.labels);
+  assertHasAllCompletions(
+      ['Type-Defect', 'Type-Enhancement', 'Hot', 'Cold'],
+      TKR_labelStore);
+}
+
+/** Exercises TKR_setUpMemberStore with the fixture member list. */
+function testSetUpMembersStore() {
+  TKR_setUpMemberStore(feedData.members);
+  assertHasAllCompletions(
+      ['jrobbins', 'jrobbins@chromium.org'],
+      TKR_memberListStore);
+}
diff --git a/appengine/monorail/static/js/tracker/trackerediting_test.js b/appengine/monorail/static/js/tracker/trackerediting_test.js
new file mode 100644
index 0000000..27d45bf
--- /dev/null
+++ b/appengine/monorail/static/js/tracker/trackerediting_test.js
@@ -0,0 +1,69 @@
+/* Copyright 2016 The Chromium Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style
+ * license that can be found in the LICENSE file or at
+ * https://developers.google.com/open-source/licenses/bsd
+ */
+
+
+function testKeepJustSummaryPrefixes_NoPrefixes() {
+ assertEquals(
+ '',
+ TKR_keepJustSummaryPrefixes(''));
+
+ assertEquals(
+ '',
+ TKR_keepJustSummaryPrefixes('Enter one line summary'));
+
+ assertEquals(
+ '',
+ TKR_keepJustSummaryPrefixes('Translation problem [en]'));
+
+ assertEquals(
+ '',
+ TKR_keepJustSummaryPrefixes('Crash at HH:MM'));
+}
+
+function testKeepJustSummaryPrefixes_WithColons() {
+ assertEquals(
+ 'Security: ',
+ TKR_keepJustSummaryPrefixes('Security:'));
+
+ assertEquals(
+ 'Exploit: ',
+ TKR_keepJustSummaryPrefixes('Exploit: remote exploit'));
+
+ assertEquals(
+ 'XSS:Security: ',
+ TKR_keepJustSummaryPrefixes('XSS:Security: rest of summary'));
+
+ assertEquals(
+ 'XSS: Security: ',
+ TKR_keepJustSummaryPrefixes('XSS: Security: rest of summary'));
+
+ assertEquals(
+ 'XSS-Security: ',
+ TKR_keepJustSummaryPrefixes('XSS-Security: rest of summary'));
+
+ assertEquals(
+ 'XSS: Security: ',
+ TKR_keepJustSummaryPrefixes('XSS: Security: rest [of] su:mmary'));
+
+ assertEquals(
+ 'XSS-Security: ',
+ TKR_keepJustSummaryPrefixes('XSS-Security: rest [of] su:mmary'));
+}
+
+function testKeepJustSummaryPrefixes_WithBrackets() {
+ assertEquals(
+ '[Printing] ',
+ TKR_keepJustSummaryPrefixes('[Printing] problem with page'));
+
+ assertEquals(
+ '[Printing] ',
+ TKR_keepJustSummaryPrefixes('[Printing] problem with page'));
+
+ assertEquals(
+ '[l10n][en] ',
+ TKR_keepJustSummaryPrefixes('[l10n][en]Translation problem'));
+}
diff --git a/appengine/monorail/static/robots.txt b/appengine/monorail/static/robots.txt
new file mode 100644
index 0000000..3682c86
--- /dev/null
+++ b/appengine/monorail/static/robots.txt
@@ -0,0 +1,13 @@
+User-agent: *
+# Start by disallowing everything.
+Disallow: /
+# Some specific things are okay, though.
+Allow: /$
+Allow: /hosting
+Allow: /p/*/adminIntro
+# Query strings are hard. We only allow ?id=N, no other parameters.
+Allow: /p/*/issues/detail?id=*
+Disallow: /p/*/issues/detail?id=*&*
+Disallow: /p/*/issues/detail?*&id=*
+# 10 second crawl delay for bots that honor it.
+Crawl-delay: 10
diff --git a/appengine/monorail/static/third_party/js/kibbles-1.3.3.comp.js b/appengine/monorail/static/third_party/js/kibbles-1.3.3.comp.js
new file mode 100644
index 0000000..fd84c63
--- /dev/null
+++ b/appengine/monorail/static/third_party/js/kibbles-1.3.3.comp.js
@@ -0,0 +1 @@
+(function(){var E=false;var F={before:[],after:[]};var G={ESC:27,ENTER:13};function H(){if(E){return }var L=document;if(L.addEventListener){L.addEventListener("keypress",D,false);L.addEventListener("keydown",I,false)}else{if(L.attachEvent){L.documentElement.attachEvent("onkeypress",D);L.documentElement.attachEvent("onkeydown",I)}}E=true}function J(L,M){var N=L.toLowerCase();if(N=="before"||N=="after"){F[N].push(M);return }N=G[L.toUpperCase()];if(!N){N=L.charCodeAt(0)}if(!F[N]){F[N]=[]}F[N].push(M)}function D(Q){if(!Q){Q=window.event}var O=A(Q);if(B(O)){return }if(C(Q)){return }var N=K(Q);if(N==undefined){return }var P={code:N};for(var L=0;L<F.before.length;L++){F.before[L](P)}var M=F[N];if(M){for(var L=0;L<M.length;L++){M[L]({code:N})}}for(var L=0;L<F.after.length;L++){F.after[L](P)}}function I(M){if(!M){M=window.event}var L=K(M);if(L==G.ESC||L==G.ENTER){D(M)}}function K(L){return L.keyCode?L.keyCode:L.which}function C(L){return L.altKey||L.ctrlKey||L.metaKey}function A(M){var L=M.target;if(!L){L=M.srcElement}if(L.nodeType==3){L=L.parentNode}return L}function B(L){return L.tagName=="INPUT"||L.tagName=="TEXTAREA"}if(!window.kibbles){window.kibbles={}}window.kibbles.keys={listen:H,addKeyPressListener:J}})();(function(){var X=new Array();var L;var S={padding_top:0,padding_bottom:0,scroll_window:true};var T={PRE:"pre",POST:"post"};var F={pre:[],post:[]};function K(){X=new Array()}function A(b){return X[b]}function V(c,b){X[c]=b}function J(c,b){if(c<0||c>X.length-1){throw"Index out of bounds."}X.splice(c,0,b);if(c<=L){L++}}function E(b){X.push(b)}function N(b){if(b<0||b>X.length-1){throw"Index out of bounds."}X.splice(b,1);if(L>=b){L--}}function O(){return X.length}function P(b,c){S[b]=c}function M(b){kibbles.keys.addKeyPressListener(b,I)}function R(b){kibbles.keys.addKeyPressListener(b,C)}function D(c,b){if(c==T.PRE){F.pre.push(b)}else{if(c==T.POST){F.post.push(b)}}}function I(){Z(a())}function C(){Z(Q())}function Z(b){if(b>=0){var e=L;L=b;var c=new Y(b);var 
d=(e>=0)?new Y(e):undefined;U(c,d,F.pre);if(!c.y){c.y=H(c.element)}if(!c.y&&!(c.y>=0)){throw"Next stop does not y coords. Aborting."}U(c,d,F.post)}}function W(g,h){if(!G("scroll_window")){return }if(g&&g.element){var c=B();var e=c+document.documentElement.clientHeight;var b=G("padding_top");var d=e-b;if(g.y>d){window.scrollTo(0,g.y-b);return }var f=G("padding_bottom");var i=c+f;if(g.y<i){window.scrollTo(0,(g.y-document.documentElement.clientHeight)+f);return }}}function B(){if(window.document.body.scrollTop){return window.document.body.scrollTop}else{if(window.document.documentElement.scrollTop){return window.document.documentElement.scrollTop}else{if(window.pageYOffset){return window.pageYOffset}}}return 0}function G(b){var c=S[b];if(typeof c=="function"){return c()}return c}function U(c,f,d){if(c&&d){try{for(var b=0;b<d.length;b++){d[b](c,f)}}catch(e){}}}function a(){var b=0;if(L>=0){b=L+1}if(b>X.length-1){return }return b}function Q(){var b=X.length-1;if(L>=0){b=L-1}if(b<0){return }return b}function Y(b,c){this.index=b;this.element=X[b];this.y=H(this.element)}function H(c){if(c){var b=0;if(c.offsetParent){while(c.offsetParent){b+=c.offsetTop;c=c.offsetParent}}else{if(c.y){b+=c.y}}return b}return null}if(!window.kibbles.keys){throw"Kibbles.Skipper requires Kibbles.Keys which is not loaded. Can't continue."}window.kibbles.skipper={setOption:P,addFwdKey:M,addRevKey:R,LISTENER_TYPE:T,addStopListener:D,setCurrentStop:Z,get:A,set:V,append:E,insert:J,del:N,length:O,reset:K};D(kibbles.skipper.LISTENER_TYPE.POST,W);kibbles.keys.listen()})();
\ No newline at end of file
diff --git a/appengine/monorail/templates/features/activity-body.ezt b/appengine/monorail/templates/features/activity-body.ezt
new file mode 100644
index 0000000..640b1d3
--- /dev/null
+++ b/appengine/monorail/templates/features/activity-body.ezt
@@ -0,0 +1,9 @@
+[# This template is used to pre-render the body of an activity so that it can
+ later be accessed as activity.escaped_body.
+]
+
+[is activity_type "ProjectIssueUpdate"]
+ [include "updates-issueupdate-body.ezt"]
+[else]
+ Body?
+[end]
diff --git a/appengine/monorail/templates/features/activity-title.ezt b/appengine/monorail/templates/features/activity-title.ezt
new file mode 100644
index 0000000..c7e2f2f
--- /dev/null
+++ b/appengine/monorail/templates/features/activity-title.ezt
@@ -0,0 +1,9 @@
+[# This template is used to pre-render the title of an activity so that it can
+ later be accessed as activity.escaped_title.
+]
+
+[is activity_type "ProjectIssueUpdate"]
+ [include "updates-issueupdate-title.ezt"]
+[else]
+ title?
+[end]
diff --git a/appengine/monorail/templates/features/cues.ezt b/appengine/monorail/templates/features/cues.ezt
new file mode 100644
index 0000000..2101d37
--- /dev/null
+++ b/appengine/monorail/templates/features/cues.ezt
@@ -0,0 +1,125 @@
+[if-any cue]
+
+[# Do not show cue if there is an alert shown on the page.]
+[if-any alerts.show][else]
+
+
+[# Dialog box for privacy settings.]
+[is cue "privacy_click_through"]
+ <div id="cue" class="scrim">
+ <div id="privacy_dialog">
+ <h2>Email display settings</h2>
+
+ <p>There is a <a href="/hosting/settings" title="Settings"
+ class="dismiss_cue">setting</a> to control how your email
+ address appears on comments and issues that you post.
+
+ [if-any is_privileged_domain_user]
+ Since you are an integral part of this community, that setting
+ defaults to showing your full email address.</p>
+
+ <p>Also, you are being trusted to view email addresses of
+ non-members who post comments in your projects. Please use
+ those addresses only to request additional information about
+ the posted comments, and do not share other users' email
+ addresses beyond the site.</p>
+ [else]
+ Project members will always see your full email address. By
+ default, other users who visit the site will see an
+ abbreviated version of your email address.</p>
+
+ <p>If you do not wish your email address to be shared, there
+ are other ways to <a
+ href="http://www.chromium.org/getting-involved">get
+ involved</a> in the community. To report a problem when using
+ the Chrome browser, you may use the "Report an issue..." item
+ on the "Help" menu.</p>
+ [end]
+
+ <div class="actions">
+ <a href="#" title="Got it" class="dismiss_cue">GOT IT</a>
+ </div>
+ </div>
+ </div>
+
+[else]
+
+ <table align="center" border="0" cellspacing="0" cellpadding="0" id="cue">
+ <tr><td><span>
+ [# Cue cards to teach users how to join a project.]
+ [is cue "how_to_join_project"]
+ <b>How-to:</b>
+ Join this project by contacting the project owners.
+ [end]
+
+ [# Cue card to teach users how to search for numbers in the issue tracker.]
+ [is cue "search_for_numbers"]
+ [if-any jump_local_id]
+ <b>Tip:</b>
+ To find issues containing "[jump_local_id]", use quotes.
+ [end]
+ [end]
+
+ [# Cue card to teach users how to search for numbers in the issue tracker.]
+ [is cue "dit_keystrokes"]
+ <b>Tip:</b>
+ Type <b style="font-size:130%"><tt>?</tt></b> for keyboard shortcuts.
+ [end]
+
+ [# Cue card to teach users that italics mean derived values in the issue tracker.]
+ [is cue "italics_mean_derived"]
+ <b>Note:</b>
+ <i>Italics</i> mean that a value was derived by a filter rule.
+ <a href="http://code.google.com/p/monorail/wiki/FilterRules">Learn more</a>
+ [end]
+
+ [# Cue card to teach users that full-text indexing takes time.]
+ [is cue "stale_fulltext"]
+ <b>Note:</b>
+ Searching for text in issues may show results that are a few minutes out of date.
+ [end]
+
+ [# Cue cards to improve discoverability of people roles.]
+ [is cue "document_team_duties"]
+ [if-any read_only][else]
+ <b>Tip:</b>
+ Document <a href="people/list">each teammate's project duties</a>.
+ [end]
+ [end]
+
+ [# Cue cards to explain grid mode.]
+ [is cue "showing_ids_instead_of_tiles"]
+ <b>Note:</b>
+ Grid mode automatically switches to displaying IDs when there are many results.
+ [end]
+
+ </span>
+ [# Link to dismiss the cue card.]
+ [if-any logged_in_user]
+ [if-any read_only][else]
+ <a href="#" title="Don't show this message again" style="margin-left:3em;vertical-align:middle;" class="dismiss_cue"
+ ><img src="/static/images/close_icon.png" width="13" height="13"></a>
+ [end]
+ [end]
+ </td></tr>
+ </table>
+[end]
+
+
+
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var dismissLinks = document.getElementsByClassName("dismiss_cue");
+ for (var i = 0; i < dismissLinks.length; ++i) {
+ var dismissCue = dismissLinks[[]i];
+ dismissCue.addEventListener("click", function(event) {
+ _CS_dismissCue("[format "js"][cue][end]");
+ if (dismissCue.href == "#")
+ event.preventDefault();
+ });
+ }
+});
+ </script>
+
+[end]
+[end]
diff --git a/appengine/monorail/templates/features/inboundemail-banned.ezt b/appengine/monorail/templates/features/inboundemail-banned.ezt
new file mode 100644
index 0000000..f241fef
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-banned.ezt
@@ -0,0 +1,7 @@
+Subject: You are banned from using this issue tracker
+
+The email message you sent to [project_addr]
+was not processed because your account, [sender_addr],
+has been banned from using this issue tracker.
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-body-too-long.ezt b/appengine/monorail/templates/features/inboundemail-body-too-long.ezt
new file mode 100644
index 0000000..515e157
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-body-too-long.ezt
@@ -0,0 +1,6 @@
+Subject: Email body too long
+
+The email message you sent to [project_addr]
+was not processed because it was too large.
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-error-footer-part.ezt b/appengine/monorail/templates/features/inboundemail-error-footer-part.ezt
new file mode 100644
index 0000000..d615853
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-error-footer-part.ezt
@@ -0,0 +1,2 @@
+To learn more, please visit:
+https://chromium.googlesource.com/infra/infra/+/master/doc/users/index.md
diff --git a/appengine/monorail/templates/features/inboundemail-no-account.ezt b/appengine/monorail/templates/features/inboundemail-no-account.ezt
new file mode 100644
index 0000000..0b7392d
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-no-account.ezt
@@ -0,0 +1,9 @@
+Subject: Could not determine account of sender
+
+The email message you sent to [project_addr]
+was not processed because your address, [sender_addr],
+does not correspond to an account known to the server.
+You must send from an email address that has already
+been used to interact with the issue tracker web UI.
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-no-artifact.ezt b/appengine/monorail/templates/features/inboundemail-no-artifact.ezt
new file mode 100644
index 0000000..bbc93fc
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-no-artifact.ezt
@@ -0,0 +1,7 @@
+Subject: Could not find [artifact_phrase] in project [project_name]
+
+The email message you sent to [project_addr]
+was not processed because [artifact_phrase] does
+not exist in project [project_name].
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-no-perms.ezt b/appengine/monorail/templates/features/inboundemail-no-perms.ezt
new file mode 100644
index 0000000..5fe57dc
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-no-perms.ezt
@@ -0,0 +1,8 @@
+Subject: User does not have permission to add a comment
+
+The email message you sent to [project_addr]
+was not processed because user [sender_addr]
+does not have permission to add a comment to
+[artifact_phrase] in [project_name].
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-not-a-reply.ezt b/appengine/monorail/templates/features/inboundemail-not-a-reply.ezt
new file mode 100644
index 0000000..9861fd7
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-not-a-reply.ezt
@@ -0,0 +1,7 @@
+Subject: Your message is not a reply to a notification email
+
+The email message you sent to [project_addr]
+was not processed because it was not a reply to a notification
+email that we sent specifically to [sender_addr].
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-project-not-found.ezt b/appengine/monorail/templates/features/inboundemail-project-not-found.ezt
new file mode 100644
index 0000000..d82bac1
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-project-not-found.ezt
@@ -0,0 +1,6 @@
+Subject: Project not found
+
+The email message you sent to [project_addr]
+was not processed because there is no project at that address.
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/inboundemail-replies-disabled.ezt b/appengine/monorail/templates/features/inboundemail-replies-disabled.ezt
new file mode 100644
index 0000000..c95839b
--- /dev/null
+++ b/appengine/monorail/templates/features/inboundemail-replies-disabled.ezt
@@ -0,0 +1,7 @@
+Subject: Email replies are not enabled in project [project_name]
+
+The email message you sent to [project_addr]
+was not processed because project [project_name]
+has not enabled email replies.
+
+[include "inboundemail-error-footer-part.ezt"]
diff --git a/appengine/monorail/templates/features/saved-queries-page.ezt b/appengine/monorail/templates/features/saved-queries-page.ezt
new file mode 100644
index 0000000..dcc51d5
--- /dev/null
+++ b/appengine/monorail/templates/features/saved-queries-page.ezt
@@ -0,0 +1,42 @@
+[define title][if-any viewing_self]My[else][viewed_username][end] saved queries[end]
+[define category_css]css/ph_detail.css[end]
+
+[include "../framework/master-header.ezt" "showusertabs" "t4"]
+
+
+<h3>Saved queries</h3>
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+<div class="section">
+
+ <div class="closed">
+ <div>Saved queries allow you to quickly view issue lists that you use frequently.
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin:0 1em">Learn more</a>
+ </div>
+
+ <div id="filterhelp" class="ifOpened help">
+ Personal saved queries allow you to keep track of the issues that matter most to you.<br/>
+ When you are in a project, you can choose one of your saved queries from the
+ bottom section of the search dropdown menu that is next to the issue search box.<br/>
+ You can also subscribe to any query to get email notifications when issues that
+ satisfy that query are modified.
+ </div>
+ <br>
+
+ <form action="queries.do" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+ [include "../framework/saved-queries-admin-part.ezt" "user"]
+
+ <input type="submit" id="savechanges" name="btn" value="Save changes"
+ class="submit">
+
+ </form>
+
+ </div>
+</div>
+
+[end][# if not read-only]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/features/updates-bulkedit-body.ezt b/appengine/monorail/templates/features/updates-bulkedit-body.ezt
new file mode 100644
index 0000000..5fc10e7
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-bulkedit-body.ezt
@@ -0,0 +1 @@
+[is num_issues "1"]Issue [else]Issues [end][for local_ids]<a class="ot-issue-link" href="/p/[project.project_name]/issues/detail?id=[local_ids]">[local_ids]</a>[if-index local_ids last][else], [end][end]
diff --git a/appengine/monorail/templates/features/updates-ending.ezt b/appengine/monorail/templates/features/updates-ending.ezt
new file mode 100644
index 0000000..0985311
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-ending.ezt
@@ -0,0 +1,7 @@
+[is ending_type "in_project"]
+ in project [include "updates-project-link.ezt" "2"]
+[else][is ending_type "by_user"]
+ [define user_profile_url][user.profile_url][end]
+ [define user_display_name][user.display_name][end]
+ by [include "updates-profile-link.ezt" "2"]
+[end][end]
diff --git a/appengine/monorail/templates/features/updates-entry-part.ezt b/appengine/monorail/templates/features/updates-entry-part.ezt
new file mode 100644
index 0000000..a1fde4e
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-entry-part.ezt
@@ -0,0 +1,57 @@
+[# Show one activity. arg0 is the activity.]
+
+[is arg0.highlight ""]
+ [define column_width]160[end]
+[else]
+ [define column_width]300[end]
+[end]
+
+<li [is even "Yes"]class="even"[end]>
+ <div class="g-section g-tpl-[column_width]">
+ <div class="g-unit g-first">
+ <div class="g-c">
+ [if-any arg0.highlight]
+ <div class="g-section g-tpl-160">
+ <div class="g-unit g-first">
+ <div class="g-c">
+ <span class="date [if-any arg0.escaped_body]below-more[else][end] activity" title="[arg0.date_tooltip]">[arg0.date_relative]</span>
+ </div>
+ </div>
+ <div class="g-unit">
+ <div class="g-c" style="padding-right:1em">
+ <span class="highlight-column">
+ [is arg0.highlight "project"]
+ <a href="/p/[arg0.project_name]/" title="[arg0.project_name]">[arg0.project_name]</a>
+ [else][is arg0.highlight "user"]
+ <a href="[arg0.user.profile_url]" title="[arg0.user.display_name]">[arg0.user.display_name]</a>
+ [end][end]
+ </span>
+ </div>
+ </div>
+ </div>
+ [else]
+ <span class="date [if-any arg0.escaped_body]below-more[end] activity" title="[arg0.date_tooltip]">[arg0.date_relative]</span>
+ [end]
+ </div>
+ </div>
+ <div class="g-unit">
+ <div class="g-c">
+ <span class="content">
+ [# SECURITY: OK to use "raw" here because escaped_title was preprocessed through the template engine.]
+ <span class="title">[format "raw"][arg0.escaped_title][end]</span>
+ [if-any arg0.escaped_body]
+ <span class="details-inline" style="margin-left:.5em">
+ [# SECURITY: OK to use "raw" here because escaped_body was preprocessed through the template engine.]
+ - [format "raw"][arg0.escaped_body][end]
+ </span>
+ <div class="details-wrapper">
+ [# SECURITY: OK to use "raw" here because escaped_body was preprocessed through the template engine.]
+ <div class="details">[format "raw"][arg0.escaped_body][end]</div>
+ </div>
+ [end]
+ </span>
+ </div>
+ </div>
+ </div>
+</li>
+[define even][is even "Yes"]No[else]Yes[end][end]
diff --git a/appengine/monorail/templates/features/updates-issue-link.ezt b/appengine/monorail/templates/features/updates-issue-link.ezt
new file mode 100644
index 0000000..cd17f6e
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-issue-link.ezt
@@ -0,0 +1,3 @@
+<a class="ot-issue-link"
+ href="/p/[issue.project_name]/issues/detail?id=[issue.local_id][if-any issue_change_id]#c_id[issue_change_id][end]"
+ >issue [issue.local_id]</a>
diff --git a/appengine/monorail/templates/features/updates-issueupdate-body.ezt b/appengine/monorail/templates/features/updates-issueupdate-body.ezt
new file mode 100644
index 0000000..5d0f683
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-issueupdate-body.ezt
@@ -0,0 +1,16 @@
+[# Format the body of one issue update in the activities list.]
+
+<span class="ot-issue-comment">
+ [for comment.text_runs][include "../tracker/render-rich-text.ezt" comment.text_runs][end]
+</span>
+
+[if-any comment.amendments]
+ <div class="ot-issue-fields">
+ [for comment.amendments]
+ <div class="ot-issue-field-wrapper">
+ <span class="ot-issue-field-name">[comment.amendments.field_name]: </span>
+ <span class="ot-issue-field-value">[comment.amendments.newvalue]</span>
+ </div>
+ [end]
+ </div>
+[end]
diff --git a/appengine/monorail/templates/features/updates-issueupdate-title.ezt b/appengine/monorail/templates/features/updates-issueupdate-title.ezt
new file mode 100644
index 0000000..1475df9
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-issueupdate-title.ezt
@@ -0,0 +1,28 @@
+[# Pre-render the title of an activity for an issue update.]
+
+[include "updates-issue-link.ezt"]
+([issue.short_summary])
+
+[define field_changed][end]
+[define multiple_fields_changed][end]
+[for comment.amendments]
+ [if-any field_changed]
+ [define multiple_fields_changed]True[end]
+ [else]
+ [define field_changed][comment.amendments.field_name][end]
+ [end]
+[end]
+
+[if-any issue_change_id]
+ [if-any multiple_fields_changed]
+ changed
+ [else][if-any field_changed]
+ [field_changed] changed
+ [else]
+ commented on
+ [end][end]
+[else]
+ reported
+[end]
+
+[include "updates-ending.ezt"]
diff --git a/appengine/monorail/templates/features/updates-newproject-body.ezt b/appengine/monorail/templates/features/updates-newproject-body.ezt
new file mode 100644
index 0000000..2d0feb0
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-newproject-body.ezt
@@ -0,0 +1 @@
+<span class="ot-project-summary">[project_summary]</span>
diff --git a/appengine/monorail/templates/features/updates-page.ezt b/appengine/monorail/templates/features/updates-page.ezt
new file mode 100644
index 0000000..e5306f3
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-page.ezt
@@ -0,0 +1,177 @@
+[define title]Updates[end]
+[if-any updates_data]
+
+[define even]Yes[end]
+
+<div id="colcontrol">
+<div class="list">
+ <table style="width: 100%;" cellspacing="0" cellpadding="0">
+ <tbody><tr>
+ <td style="text-align: left;">
+ Details:
+ <a id="detailsshow" href="#" class="showAll">Show all</a>
+ <a id="detailshide" href="#" class="hideAll">Hide all</a></td>
+ <td>
+ [include "../framework/artifact-list-pagination-part.ezt"]
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+
+ <table cellspacing="0" cellpadding="0" border="0" width="100%" id="resultstable" class="results" style="table-layout:fixed; width:100%">
+ <tbody>
+ <tr>
+ <td style="padding:0px" width="100%">
+
+ <div id='activity-streams-list' class='activity-stream-list'>
+ [if-any updates_data.today]
+ <h4>Today</h4>
+ <ul class='activity-stream'>
+ [for updates_data.today]
+ [include "updates-entry-part.ezt" updates_data.today]
+ [end]
+ </ul>
+ [end]
+
+ [if-any updates_data.yesterday]
+ <h4>Yesterday</h4>
+ <ul class='activity-stream'>
+ [for updates_data.yesterday]
+ [include "updates-entry-part.ezt" updates_data.yesterday]
+ [end]
+ </ul>
+ [end]
+
+ [if-any updates_data.pastweek]
+ <h4>Last 7 days</h4>
+ <ul class='activity-stream'>
+ [for updates_data.pastweek]
+ [include "updates-entry-part.ezt" updates_data.pastweek]
+ [end]
+ </ul>
+ [end]
+
+ [if-any updates_data.pastmonth]
+ <h4>Last 30 days</h4>
+ <ul class='activity-stream'>
+ [for updates_data.pastmonth]
+ [include "updates-entry-part.ezt" updates_data.pastmonth]
+ [end]
+ </ul>
+ [end]
+
+ [if-any updates_data.thisyear]
+ <h4>Earlier this year</h4>
+ <ul class='activity-stream'>
+ [for updates_data.thisyear]
+ [include "updates-entry-part.ezt" updates_data.thisyear]
+ [end]
+ </ul>
+ [end]
+
+ [if-any updates_data.older]
+ <h4>Older</h4>
+ <ul class='activity-stream'>
+ [for updates_data.older]
+ [include "updates-entry-part.ezt" updates_data.older]
+ [end]
+ </ul>
+ [end]
+ </div>
+
+ </td></tr></tbody></table>
+
+ <div class="list-foot">
+ [include "../framework/artifact-list-pagination-part.ezt"]
+ </div>
+</div>
+
+[else]
+
+ [if-any no_stars]
+ [is user_updates_tab_mode "st2"]
+ <div class="display-error">There are no starred projects.</div>
+ [else][is user_updates_tab_mode "st3"]
+ <div class="display-error">There are no starred developers.</div>
+ [end][end]
+ [else][if-any no_activities]
+ <div class="display-error">There are no updates yet.</div>
+ [end][end]
+
+[end]
+
+[if-any updates_data]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+
+ /**
+ * Shows the activity detail for the particular activity selected.
+ */
+ function handleActivityLinkClick(e) {
+ var targetEl;
+
+ if (!e) {
+ var e = window.event;
+ }
+ if (e.target) {
+ targetEl = e.target;
+
+ } else if (e.srcElement) {
+ targetEl = e.srcElement;
+ }
+ if (targetEl.nodeType == 3) {
+ targetEl = targetEl.parentNode;
+ }
+
+ while (targetEl.tagName.toLowerCase() != 'li') {
+ targetEl = targetEl.parentNode;
+ }
+ if (targetEl.className.indexOf('click') != -1) {
+ targetEl.className = targetEl.className.replace(/click/, '');
+ } else {
+ targetEl.className += ' click';
+ }
+
+ e.preventDefault();
+ }
+
+ /**
+ * Array of <li> elements for activity streams
+ */
+ var _CS_asElemList = document.getElementById('activity-streams-list').
+ getElementsByTagName('li');
+
+ /**
+ * Shows all activity details
+ */
+ function expandAll(event) {
+ for (var i=0; i < _CS_asElemList.length; i++) {
+ _CS_asElemList[[]i].className = 'click';
+ }
+ event.preventDefault();
+ }
+
+ /**
+ * Hides all activity details
+ */
+ function closeAll(event) {
+ for (var i=0; i < _CS_asElemList.length; i++) {
+ _CS_asElemList[[]i].className = '';
+ }
+ event.preventDefault();
+ }
+
+ if ($("detailsshow"))
+ $("detailsshow").addEventListener("click", expandAll);
+ if ($("detailshide"))
+ $("detailshide").addEventListener("click", closeAll);
+
+ var activityLinks = document.getElementsByClassName("activity");
+ for (var i = 0; i < activityLinks.length; ++i) {
+ var link = activityLinks[[]i];
+ link.addEventListener("click", handleActivityLinkClick);
+ }
+});
+</script>
+[end]
diff --git a/appengine/monorail/templates/features/updates-profile-link.ezt b/appengine/monorail/templates/features/updates-profile-link.ezt
new file mode 100644
index 0000000..722dbae
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-profile-link.ezt
@@ -0,0 +1 @@
+<a class="ot-profile-link-[arg0]" href="[user_profile_url]">[user_display_name]</a>
diff --git a/appengine/monorail/templates/features/updates-project-link.ezt b/appengine/monorail/templates/features/updates-project-link.ezt
new file mode 100644
index 0000000..b1a0ec2
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-project-link.ezt
@@ -0,0 +1 @@
+<a class="ot-project-link-[arg0]" href="/p/[project.project_name]/">[project.project_name]</a>
diff --git a/appengine/monorail/templates/features/updates-staractivity-body.ezt b/appengine/monorail/templates/features/updates-staractivity-body.ezt
new file mode 100644
index 0000000..a016c4d
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-staractivity-body.ezt
@@ -0,0 +1 @@
+[# Placeholder for star activity]
diff --git a/appengine/monorail/templates/features/updates-staractivity-title.ezt b/appengine/monorail/templates/features/updates-staractivity-title.ezt
new file mode 100644
index 0000000..93673ad
--- /dev/null
+++ b/appengine/monorail/templates/features/updates-staractivity-title.ezt
@@ -0,0 +1,5 @@
+[is scope "projects"]
+[is starred "yes"]Starred[else]Unstarred[end] project [include "updates-project-link.ezt" "1"]
+[else][is scope "users"]
+[is starred "yes"]Starred[else]Unstarred[end] <a class="ot-profile-link-1" href="[starred_user_profile_url]">[starred_user_display_name]</a>
+[end][end]
diff --git a/appengine/monorail/templates/framework/admin-email-sender-part.ezt b/appengine/monorail/templates/framework/admin-email-sender-part.ezt
new file mode 100644
index 0000000..783a87f
--- /dev/null
+++ b/appengine/monorail/templates/framework/admin-email-sender-part.ezt
@@ -0,0 +1,15 @@
+[if-any project_is_restricted]
+<p style="width:35em; border: 1px solid #933; padding: 3px">
+ <b style="color:#933">Important</b>: Access to this project is restricted, so
+ please do not specify a public mailing list address for all notifications.
+ Use only private mailing lists to avoid unwanted disclosures. If you make
+ your project public later, choose a new mailing list at that time.
+</p>
+[end]
+
+<p>
+ Notifications will be sent from:
+ <tt>[email_from_addr]</tt><br>
+ You may need to add this address as an allowed poster to your mailing list.<br>
+ If using Google Groups, add the address directly with no email delivery.
+</p>
diff --git a/appengine/monorail/templates/framework/alert.ezt b/appengine/monorail/templates/framework/alert.ezt
new file mode 100644
index 0000000..f2e4377
--- /dev/null
+++ b/appengine/monorail/templates/framework/alert.ezt
@@ -0,0 +1,37 @@
+[if-any alerts.show]
+ <table align="center" border="0" cellspacing="0" cellpadding="0" style="margin-bottom: 6px">
+ <tr><td class="notice">
+ [if-any alerts.updated]
+ <a href="[project_home_url]/issues/detail?id=[alerts.updated]">Issue [alerts.updated]</a>
+ has been updated.
+ [end]
+
+ [if-any alerts.moved]
+ Issue has been moved to
+ <a href="/p/[alerts.moved_to_project]/issues/detail?id=[alerts.moved_to_id]">
+ [alerts.moved_to_project]:[alerts.moved_to_id]
+ </a>
+ [end]
+
+ [if-any alerts.copied]
+ <a href="[project_home_url]/issues/detail?id=[alerts.copied_from_id]">Issue [alerts.copied_from_id]</a>
+ has been copied to
+ <a href="/p/[alerts.copied_to_project]/issues/detail?id=[alerts.copied_to_id]">
+ [alerts.copied_to_project]:[alerts.copied_to_id]
+ </a>
+ [end]
+
+ [if-any alerts.saved]
+ Changes have been saved
+ [end]
+
+ [if-any alerts.deleted]
+ [is alerts.deleted "1"]
+ Item deleted
+ [else]
+ [alerts.deleted] items deleted
+ [end]
+ [end]
+ </td></tr>
+ </table>
+[end]
diff --git a/appengine/monorail/templates/framework/artifact-collision-page.ezt b/appengine/monorail/templates/framework/artifact-collision-page.ezt
new file mode 100644
index 0000000..d3cc5b5
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-collision-page.ezt
@@ -0,0 +1,30 @@
+[include "../framework/master-header.ezt" "showtabs"]
+
+[# Note: No need for UI element permission checking here. ]
+
+<h3>Update Collision</h3>
+
+<h4>What happened?</h4>
+
+<p>While you were viewing or updating [artifact_name], another user
+submitted an update to it. That user's update has already
+taken effect. Your update cannot be saved because your changes could
+overwrite the other user's changes.</p>
+
+<p>Note: if you have been viewing and updating [artifact_name] in multiple
+browser windows or tabs, it is possible that the "other user" is
+actually yourself.</p>
+
+
+<div style="margin:2em" class="help">
+ <b style="margin:0.5em">Your options:</b>
+
+ <ul>
+ <li>Start over: view the up-to-date
+ <a href="[artifact_detail_url]">[artifact_name]</a>
+ and consider making your changes again.</li>
+ </ul>
+
+</div>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/framework/artifact-list-admin-part.ezt b/appengine/monorail/templates/framework/artifact-list-admin-part.ezt
new file mode 100644
index 0000000..1d3facf3
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-list-admin-part.ezt
@@ -0,0 +1,106 @@
+[# If any value is supplied for arg0, the user will also be able
+ to edit grid preferences.]
+<h4>[if-any arg0]List and grid preferences[else]List preferences[end]</h4>
+<div class="section">
+
+ <div class="closed">
+ <div>Default columns shown in list view:
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin-left:.5em">Learn more</a>
+ </div>
+
+ <div id="colhelp" class="ifOpened help">
+ <div>
+ You may enter a series of column names separated by spaces. The
+ columns will be displayed in order on the list view page.
+ </div>
+ <br>
+ <div>
+ Columns may be the names of built-in attributes, e.g., "Summary"
+ or "Stars". Columns may also be prefixes of the labels on items.
+ To experiment with label prefixes, label some items with
+ Key-Value labels, then click the "..." menu in the far upper right
+ heading of the list view.
+ </div>
+ </div>
+ <br>
+ </div>
+
+ <input type="text" size="75" name="default_col_spec" value="[config.default_col_spec]"
+ [if-any perms.EditProject][else]readonly="readonly"[end]
+ class="acob" style="margin-left:.7em">
+ <br>
+ <br>
+ <br>
+
+ <div class="closed">
+ <div>Default sorting order:
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin-left:.7em">Learn more</a>
+ </div>
+
+ <div class="ifOpened help">
+ <div>
+ You may enter a series of column names separated by spaces. Items
+ will be sorted by the first column specified. If two items have
+ the same value in the first column, the items' values in the second
+ column will be used to break the tie, and so on. Use a leading
+ minus-sign to reverse the sort order within a column.
+ </div>
+ <br>
+ <div>
+ To experiment with column sorting, click the list view header cells and
+ choose "Sort up" or "Sort down". The sorting specification used becomes
+ part of the page URL.
+ </div>
+ </div>
+ <br>
+ </div>
+
+ <input type="text" size="75" name="default_sort_spec" value="[config.default_sort_spec]"
+ [if-any perms.EditProject][else]readonly="readonly"[end]
+ class="acob" style="margin-left:.7em">
+
+
+ [if-any arg0]
+ <br>
+ <br>
+ <br>
+
+ <div class="closed">
+ <div>Default grid axes:
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin-left:.7em">Learn more</a>
+ </div>
+
+ <div class="ifOpened help">
+ <div>
+ You may enter one attribute name for the default grid rows and one for
+ the default grid columns. For example, "milestone" and "priority". Or,
+ you may leave each field blank.
+ </div>
+ <br>
+ <div>
+ To experiment with grid axes, click the "grid" link in the list view and
+ use the drop-down menus to select row and column attributes.
+ </div>
+ </div>
+ <br>
+ </div>
+
+ <span style="margin-left:.7em">
+ Rows: <input type="text" size="10" name="default_y_attr" value="[config.default_y_attr]"
+ [if-any perms.EditProject][else]readonly="readonly"[end]
+ class="acob">
+ </span>
+
+ <span style="margin-left:.7em">
+ Columns: <input type="text" size="10" name="default_x_attr" value="[config.default_x_attr]"
+ [if-any perms.EditProject][else]readonly="readonly"[end]
+ class="acob">
+ </span>
+
+ [end]
+
+</div>
+
diff --git a/appengine/monorail/templates/framework/artifact-list-cell-values.ezt b/appengine/monorail/templates/framework/artifact-list-cell-values.ezt
new file mode 100644
index 0000000..d84e439
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-list-cell-values.ezt
@@ -0,0 +1,3 @@
+[if-any table_data.cells.values][for table_data.cells.values]
+[if-any table_data.cells.values.is_derived]<i>[end][table_data.cells.values.item][if-any table_data.cells.values.is_derived]</i>[end][if-index table_data.cells.values last][else], [end]
+[end][else]----[end]
diff --git a/appengine/monorail/templates/framework/artifact-list-group-row.ezt b/appengine/monorail/templates/framework/artifact-list-group-row.ezt
new file mode 100644
index 0000000..09b36ef
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-list-group-row.ezt
@@ -0,0 +1,33 @@
+[# Display a group header row, if this is the start of a new group of rows.
+
+Args:
+ arg0: singular form of artifact type name.
+ arg1: plural form of artifact type name.
+]
+
+[if-any table_data.group][if-any table_data.group.cells]
+ <tbody class="opened"> [# The next tbody implicitly closes this one]
+ <tr class="group_row">
+ <td colspan="100" class="toggleHidden">
+ <img class="ifClosed" src="/static/images/plus.gif">
+ <img class="ifOpened" src="/static/images/minus.gif">
+ [table_data.group.rows_in_group]
+ [is table_data.group.rows_in_group "1"][arg0][else][arg1][end]:
+ [for table_data.group.cells]
+ [define any_group_value]No[end]
+ [for table_data.group.cells.values]
+ [if-any table_data.group.cells.values.item]
+ [define any_group_value]Yes[end]
+ [end]
+ [end]
+ [is any_group_value "Yes"]
+ [for table_data.group.cells.values]
+ [table_data.group.cells.group_name]=[table_data.group.cells.values.item]
+ [end]
+ [else]
+ -has:[table_data.group.cells.group_name]
+ [end]
+ [end]
+ </td>
+ </tr>
+[end][end]
diff --git a/appengine/monorail/templates/framework/artifact-list-non-column-labels.ezt b/appengine/monorail/templates/framework/artifact-list-non-column-labels.ezt
new file mode 100644
index 0000000..780caf8
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-list-non-column-labels.ezt
@@ -0,0 +1,4 @@
+[for table_data.cells.non_column_labels]<a
+ class="label" href="list?q=label:[table_data.cells.non_column_labels.value]"
+ >[if-any table_data.cells.non_column_labels.is_derived]<i>[end][table_data.cells.non_column_labels.value][if-any table_data.cells.non_column_labels.is_derived]</i>[end]</a>
+[end]
diff --git a/appengine/monorail/templates/framework/artifact-list-pagination-part.ezt b/appengine/monorail/templates/framework/artifact-list-pagination-part.ezt
new file mode 100644
index 0000000..8896c14
--- /dev/null
+++ b/appengine/monorail/templates/framework/artifact-list-pagination-part.ezt
@@ -0,0 +1,18 @@
+[if-any pagination]
+ [if-any pagination.visible]
+ <div class="pagination">
+ [if-any pagination.prev_url]
+ <a href="[pagination.prev_url]"><b>‹</b> Prev</a>
+ [end]
+ [if-any pagination.start]
+ [pagination.start] - [pagination.last]
+ [end]
+ [if-any pagination.total_count]
+ of [pagination.total_count][if-any pagination.limit_reached]+[end]
+ [end]
+ [if-any pagination.next_url]
+ <a href="[pagination.next_url]">Next <b>›</b></a>
+ [end]
+ </div>
+ [end]
+[end]
diff --git a/appengine/monorail/templates/framework/banned-page.ezt b/appengine/monorail/templates/framework/banned-page.ezt
new file mode 100644
index 0000000..d9bf6bc
--- /dev/null
+++ b/appengine/monorail/templates/framework/banned-page.ezt
@@ -0,0 +1,25 @@
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<h3>Access Not Allowed</h3>
+
+<h4>What happened?</h4>
+
+<p>You are not allowed to access this service.</p>
+
+<p>Please <a href="mailto:[feedback_email]">contact us</a> if you believe that you should be able to access this service. (This is a Google Group; what you write will be visible on the Internet.)</p>
+
+[# Note: we do not show the reason for being banned. ]
+
+
+<div style="margin:2em" class="help">
+ <b style="margin:0.5em">Your options:</b>
+
+ <ul>
+ <li>Participate in the open source community through other websites.</li>
+ <li><a href="[logout_url_goto_home]">Sign out</a> and access this site as
+ an anonymous user.</li>
+ <li><a href="mailto:[feedback_email]">Contact us</a> for further assistance.</li>
+ </ul>
+</div>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/framework/banner_message.ezt b/appengine/monorail/templates/framework/banner_message.ezt
new file mode 100644
index 0000000..a8a4097
--- /dev/null
+++ b/appengine/monorail/templates/framework/banner_message.ezt
@@ -0,0 +1,8 @@
+[if-any site_banner_message]
+ <div style="font-weight:bold; color:#a03; padding:5px; margin-top:10px; text-align:center; background:#ffeac0;">
+ [site_banner_message]
+ [if-any banner_time]
+ <a href="http://www.timeanddate.com/worldclock/fixedtime.html?month=[banner_time.month]&day=[banner_time.day]&year=[banner_time.year]&hour=[banner_time.hour]&min=[banner_time.minute]&sec=[banner_time.second]&p1=137">[banner_time.weekday], [banner_time.hour_min] PDT</a>
+ [end]
+ </div>
+[end]
diff --git a/appengine/monorail/templates/framework/captcha-field.ezt b/appengine/monorail/templates/framework/captcha-field.ezt
new file mode 100644
index 0000000..ed36e4b
--- /dev/null
+++ b/appengine/monorail/templates/framework/captcha-field.ezt
@@ -0,0 +1,5 @@
+[if-any errors.captcha]
+ <div class="fielderror">[errors.captcha]</div>
+[end]
+<script src="https://www.google.com/recaptcha/api.js" async defer nonce="[nonce]"></script>
+<div class="g-recaptcha" data-sitekey="[recaptcha_public_key]"></div>
diff --git a/appengine/monorail/templates/framework/comment-pagination-part.ezt b/appengine/monorail/templates/framework/comment-pagination-part.ezt
new file mode 100644
index 0000000..6867c57
--- /dev/null
+++ b/appengine/monorail/templates/framework/comment-pagination-part.ezt
@@ -0,0 +1,8 @@
+[if-any cmnt_pagination.prev_url]
+ <a href="[cmnt_pagination.prev_url]" style="margin-right:.7em"><b>‹</b> Newer</a>
+[end]
+Showing comments [cmnt_pagination.last] - [cmnt_pagination.start]
+[if-any cmnt_pagination.total_count]of [cmnt_pagination.total_count][end]
+[if-any cmnt_pagination.next_url]
+ <a href="[cmnt_pagination.next_url]" style="margin-left:.7em">Older <b>›</b></a>
+[end]
diff --git a/appengine/monorail/templates/framework/component-validation-row.ezt b/appengine/monorail/templates/framework/component-validation-row.ezt
new file mode 100644
index 0000000..5d37b84
--- /dev/null
+++ b/appengine/monorail/templates/framework/component-validation-row.ezt
@@ -0,0 +1,5 @@
+<tr>
+ <td colspan="3">
+ <div id="component_blocksubmitarea" class="blockingsubmit"><span id="component_blocksubmitmsg"></span></div>
+ </td>
+</tr>
diff --git a/appengine/monorail/templates/framework/debug.ezt b/appengine/monorail/templates/framework/debug.ezt
new file mode 100644
index 0000000..7903de5
--- /dev/null
+++ b/appengine/monorail/templates/framework/debug.ezt
@@ -0,0 +1,50 @@
+[is dbg "off"]
+ [if-any perms._ViewDebug]
+ <div class="debug">
+ - <a href="[debug_uri]">Reload w/ debug info</a>
+ </div>
+ [end]
+[else]
+ [# Note that this only handles the top two levels of (sub)phases.
+ # If you nest phases further than that (which we haven't wanted/needed to
+ # do so far), you'll have to modify this code in order to render it.]
+ <style type="text/css">
+ .debug, .debug a { color: #444; font-size: x-small}
+ .debug td, .debug th { background: #ddf}
+ .debug th { text-align: left; font-family: courier; font-size: small}
+ </style>
+
+ <div class="debug">Profile Data
+ <table class="ifOpened" cellpadding="2" cellspacing="2" border="0" style="padding-left: 1em">
+ [for profiler.top_phase.subphases]
+ <tr>
+ <th style="white-space:nowrap">[profiler.top_phase.subphases.name]:</th>
+ <td align="right">[profiler.top_phase.subphases.ms][is profiler.top_phase.subphases.ms "in_progress"][else] ms[end]</td>
+ <td><table cellspacing="1" cellpadding="0"><tr>
+ [for profiler.top_phase.subphases.subphases]
+ <td title="[profiler.top_phase.subphases.subphases.name]: [profiler.top_phase.subphases.subphases.ms]ms"
+ width="[is profiler.top_phase.subphases.subphases.ms "in_progress"]100%[else][profiler.top_phase.subphases.subphases.ms][end]"
+ style="padding:2px;color:#fff;background:#[profiler.top_phase.subphases.subphases.color]">[profiler.top_phase.subphases.subphases.ms]</td>
+ [end]
+
+ [if-any profiler.top_phase.subphases.uncategorized_ms]
+ <td title="uncategorized: [profiler.top_phase.subphases.uncategorized_ms]ms"
+ width="[profiler.top_phase.subphases.uncategorized_ms]"
+ style="padding:1px">[profiler.top_phase.subphases.uncategorized_ms]</td>
+ [end]
+ </tr></table>
+ </td>
+ </tr>
+ [end]
+ </table>
+ </div><br>
+ [for debug]
+ <div class="debug">[debug.title]
+ <table cellpadding="2" cellspacing="2" border="0" style="padding-left: 1em">
+ [for debug.collection]
+ <tr><th>[debug.collection.key]</th><td>[debug.collection.val]</td></tr>
+ [end]
+ </table>
+ </div><br>
+ [end]
+[end]
diff --git a/appengine/monorail/templates/framework/display-project-logo.ezt b/appengine/monorail/templates/framework/display-project-logo.ezt
new file mode 100644
index 0000000..fd787c6
--- /dev/null
+++ b/appengine/monorail/templates/framework/display-project-logo.ezt
@@ -0,0 +1,29 @@
+[# This template displays the project logo with the file name and a View link.
+
+ arg0: Whether to display a checkbox to delete the logo.
+]
+
+<table cellspacing="5" cellpadding="2" border="0">
+ <tr>
+ <td>
+ <b>[logo_view.filename]</b>
+ </td>
+ <td>
+ <a href="[logo_view.viewurl]" target="_blank" style="margin-left:.2em">View</a>
+ </td>
+ </tr>
+ <tr>
+ <td colspan=2>
+ <a href="[logo_view.viewurl]" target="_blank">
+ <img src="[logo_view.thumbnail_url]" class="preview">
+ </a>
+ </td>
+ </tr>
+ [if-any arg0]
+ <tr>
+ <td colspan=2>
+ <input type="checkbox" name="delete_logo" id="delete_logo"> Delete this logo
+ </td>
+ </tr>
+ [end]
+</table>
diff --git a/appengine/monorail/templates/framework/excessive-activity-page.ezt b/appengine/monorail/templates/framework/excessive-activity-page.ezt
new file mode 100644
index 0000000..dc1fe50
--- /dev/null
+++ b/appengine/monorail/templates/framework/excessive-activity-page.ezt
@@ -0,0 +1,31 @@
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<h3>Action Limit Exceeded</h3>
+
+<h4>What happened?</h4>
+
+<div style="width:60em">
+
+<p>You have performed the requested action too many times in a 24-hour
+time period. Or, you have performed the requested action too many
+times since the creation of your account.</p>
+
+<p>We place limits on the number of actions that can be performed by
+each user in order to reduce the potential for abuse. We feel that we have set
+these limits high enough that legitimate use will very rarely
+reach them. Without these limits, a few abusive users could degrade
+the quality of this site for everyone.</p>
+
+
+<div style="margin:2em" class="help">
+ <b style="margin:0.5em">Your options:</b>
+
+ <ul>
+ <li>Wait 24 hours and then try this action again.</li>
+ <li>Ask another member of your project to perform the action for you.</li>
+ <li><a href="mailto:[feedback_email]">Contact us</a> for further assistance.</li>
+ </ul>
+</div>
+
+</div>
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/framework/file-content-js.ezt b/appengine/monorail/templates/framework/file-content-js.ezt
new file mode 100644
index 0000000..72e882e
--- /dev/null
+++ b/appengine/monorail/templates/framework/file-content-js.ezt
@@ -0,0 +1,89 @@
+[# TODO(jrobbins): move this into compiled javascript. ]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var numsGenState = {table_base_id: 'nums_table_'};
+ var srcGenState = {table_base_id: 'src_table_'};
+ var alignerRunning = false;
+ var startOver = false;
+
+ function setLineNumberHeights() {
+ if (alignerRunning) {
+ startOver = true;
+ return;
+ }
+ numsGenState.chunk_id = 0;
+ numsGenState.table = document.getElementById('nums_table_0');
+ numsGenState.row_num = 0;
+
+ if (!numsGenState.table) {
+ return; // Silently exit if no file is present.
+ }
+
+ srcGenState.chunk_id = 0;
+ srcGenState.table = document.getElementById('src_table_0');
+ srcGenState.row_num = 0;
+
+ alignerRunning = true;
+ continueToSetLineNumberHeights();
+ }
+
+ function rowGenerator(genState) {
+ if (genState.row_num < genState.table.rows.length) {
+ var currentRow = genState.table.rows[[]genState.row_num];
+ genState.row_num++;
+ return currentRow;
+ }
+
+ var newTable = document.getElementById(
+ genState.table_base_id + (genState.chunk_id + 1));
+ if (newTable) {
+ genState.chunk_id++;
+ genState.row_num = 0;
+ genState.table = newTable;
+ return genState.table.rows[[]0];
+ }
+
+ return null;
+ }
+
+ var MAX_ROWS_PER_PASS = 1000;
+
+ function continueToSetLineNumberHeights() {
+ var rowsInThisPass = 0;
+ var numRow = 1;
+ var srcRow = 1;
+
+ while (numRow && srcRow && rowsInThisPass < MAX_ROWS_PER_PASS) {
+ numRow = rowGenerator(numsGenState);
+ srcRow = rowGenerator(srcGenState);
+ rowsInThisPass++;
+
+ if (numRow && srcRow) {
+ if (numRow.offsetHeight != srcRow.offsetHeight) {
+ numRow.firstChild.style.height = srcRow.offsetHeight + 'px';
+ }
+ }
+ }
+
+ if (rowsInThisPass >= MAX_ROWS_PER_PASS) {
+ setTimeout(continueToSetLineNumberHeights, 10);
+ } else {
+ alignerRunning = false;
+ if (startOver) {
+ startOver = false;
+ setTimeout(setLineNumberHeights, 500);
+ }
+ }
+
+ }
+
+ function initLineNumberHeights() {
+ // Do 2 complete passes, because there can be races
+ // between this code and prettify.
+ startOver = true;
+ setTimeout(setLineNumberHeights, 250);
+ window.addEventListener('resize', setLineNumberHeights);
+ }
+ initLineNumberHeights();
+});
+</script>
diff --git a/appengine/monorail/templates/framework/file-content-part.ezt b/appengine/monorail/templates/framework/file-content-part.ezt
new file mode 100644
index 0000000..e3223d2
--- /dev/null
+++ b/appengine/monorail/templates/framework/file-content-part.ezt
@@ -0,0 +1,57 @@
+[# Safely display user-content text, such as program source code, with
+ line numbers and the ability to comment (if enabled).
+
+Other EZT variables used:
+ revision: revision ID string.
+ file_lines: List of lines in the file. Each line
+ has a line number, line content, and list of published and unpublished
+ comments.
+ should_prettify: whether the text should be syntax highlighted.
+ prettify_class: additional CSS class used to tell prettify.js how to
+ best syntax highlight this source file.
+]
+
+[# Display the line numbers and source lines in separate columns.
+ See corresponding comments L1, L2, L3 and S1, S2, S3 below.
+ This is messy because the pre tags have significant whitespace, so we
+ break lines inside the tags themselves to make our templates readable.
+ The table has class "opened" or "closed" and the published comment cells
+ and their corresponding gutter cells are conditionally displayed by the
+ CSS rules for the "ifOpened" class. The "hide published comments" link
+ toggles the table's open/closed state.]
+<table class="opened" id="review_comment_area"><tr>
+<td id="nums">
+[# L1. Start with a nocursor row at the top to space the line numbers down the
+ same amount as the source code lines w/ their initial cursor_hidden row.]
+<pre><table width="100%"><tr class="nocursor"><td></td></tr></table></pre>
+
+[# L2. Display each line number in a row that we can refer
+ to by ID, and make each line number a self-link w/ anchor.
+ If that source line has any published comments, add a nocursor row
+ for spacing to match the corresponding cursor_hidden row, and a gutter
+ cell. The height of the gutter cell is set by JS. Same for unpublished
+ comments.]
+<pre><table width="100%" id="nums_table_0">[for file_lines]<tr id="gr_svn[revision]_[file_lines.num]"
+><td id="[file_lines.num]"><a href="#[file_lines.num]">[file_lines.num]</a></td></tr
+>[end]</table></pre>
+
+[# L3. Finish the line numbers column with another nocursor row to match
+ the spacing of the source code column's final cursor_hidden row.]
+<pre><table width="100%"><tr class="nocursor"><td></td></tr></table></pre>
+</td>
+<td id="lines">
+
+[# S1. Start the source code column with a cursor row. ]
+<pre><table width="100%"><tr class="cursor_stop cursor_hidden"><td></td></tr></table></pre>
+
+[# S2. Display each source code line in a table row and cell
+ that we can identify by id.]
+<pre [if-any should_prettify]class="prettyprint [prettify_class]"[end]><table id="src_table_0">[for file_lines]<tr
+id=sl_svn[revision]_[file_lines.num]
+><td class="source">[file_lines.line]<br></td></tr
+>[end]</table></pre>
+
+[# S3. Finish the source code column with another cursor stop.]
+<pre><table width="100%"><tr class="cursor_stop cursor_hidden"><td></td></tr></table></pre>
+</td>
+</tr></table>
diff --git a/appengine/monorail/templates/framework/filter-rule-admin-part.ezt b/appengine/monorail/templates/framework/filter-rule-admin-part.ezt
new file mode 100644
index 0000000..a0b6ae0
--- /dev/null
+++ b/appengine/monorail/templates/framework/filter-rule-admin-part.ezt
@@ -0,0 +1,153 @@
+<style>
+ #rules th, #rules td { padding-bottom: 1em }
+</style>
+
+[# If any value is supplied for arg0, the user will be able to set actions
+ that set default owner, set default status, and add CC users.]
+<h4 id="filters">Filter rules</h4>
+<div class="section">
+
+ <div class="closed">
+ <div>Filter rules can help you fill in defaults and stay organized.
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin-left:.5em">Learn more</a>
+ </div>
+
+ <div id="filterhelp" class="ifOpened help">
+ Filter rules can help your team triage issues by automatically
+ filling in default values based on other values. They can be used
+ in the same way that you might use message filters in an email client.
+ Filter rules are evaluated after each edit, not just on new items. And,
+ filter rules only add values or set default values, they never override
+ values that were explicitly set by a user.<br>
+ <br>
+ Note that exclusive prefixes still apply. So, if a user has set a label
+ with one of the exclusive prefixes, a rule that adds another label with
+ the same prefix will have no effect.
+ </div>
+ <br>
+
+ <table border="0" id="rules">
+ <tr>
+ <th></th>
+ <th style="text-align:left">If the issue matches this query:</th>
+ <th colspan="2" style="text-align:left">Then, [if-any arg0]do the following[else]add these labels[end]:</th>
+ <th></th>
+ </tr>
+
+ [for rules]
+ <tr>
+ <td style="text-align:right" width="20">[rules.idx].</td>
+ <td><input type="text" name="predicate[rules.idx]" size="60" value="[rules.predicate]"
+ autocomplete="off" id="predicate_existing_[rules.idx]" class="acob"></td>
+ <td>
+ [if-any arg0]
+ <select name="action_type[rules.idx]">
+ <option value="" disabled="disabled" [is rules.action_type ""]selected="selected"[end]>Choose...</option>
+ <option value="default_status" [is rules.action_type "default_status"]selected="selected"[end]>Set default status:</option>
+ <option value="default_owner" [is rules.action_type "default_owner"]selected="selected"[end]>Set default owner:</option>
+ <option value="add_ccs" [is rules.action_type "add_ccs"]selected="selected"[end]>Add Cc:</option>
+ <option value="add_labels" [is rules.action_type "add_labels"]selected="selected"[end]>Add labels:</option>
+ <option value="also_notify" [is rules.action_type "also_notify"]selected="selected"[end]>Also notify email:</option>
+ </select>
+ [end]
+ </td>
+ <td>
+ <input type="text" name="action_value[rules.idx]" size="70" value="[rules.action_value]" class="acob">
+ </td>
+ <td></td>
+ </tr>
+ [end]
+
+ [for new_rule_indexes]
+ <tr id="newrow[new_rule_indexes]" [if-index new_rule_indexes first][else]style="display:none"[end]>
+ <td style="text-align:right" width="20">[new_rule_indexes].</td>
+ <td><input type="text" name="new_predicate[new_rule_indexes]" size="60" value=""
+ class="showNextRuleRow acob" data-index="[new_rule_indexes]"
+ autocomplete="off" id="predicate_new_[new_rule_indexes]"></td>
+ <td>
+ [if-any arg0]
+ <select name="new_action_type[new_rule_indexes]">
+ <option value="" disabled="disabled" selected="selected">Choose...</option>
+ <option value="default_status">Set default status:</option>
+ <option value="default_owner">Set default owner:</option>
+ <option value="add_ccs">Add Cc:</option>
+ <option value="add_labels">Add labels:</option>
+ <option value="also_notify">Also notify email:</option>
+ </select>
+ [end]
+ </td>
+ <td>
+ <input type="text" name="new_action_value[new_rule_indexes]" size="70" value="" class="acob">
+ [# TODO(jrobbins): figure out a way to display error messages on each rule. ]
+ </td>
+ <td width="40px">
+ [if-index new_rule_indexes last][else]
+          <span id="addrow[new_rule_indexes]" class="fakelink" data-index="[new_rule_indexes]">Add a row</span
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ </table>
+ </div>
+
+ [if-any errors.rules]
+ [for errors.rules]
+ <div class="fielderror">[errors.rules]</div>
+ [end]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ document.location.hash = 'filters';
+});
+ </script>
+ [end]
+
+</div>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function showNextRuleRow(i) {
+ if (i < [max_rules]) {
+ _showID('newrow' + (i + 1));
+ _hideID('addrow' + i);
+ }
+ }
+
+ var addARowLinks = document.getElementsByClassName("fakelink");
+ for (var i = 0; i < addARowLinks.length; ++i) {
+ var link = addARowLinks[[]i];
+ link.addEventListener("click", function(event) {
+ var index = Number(event.target.getAttribute("data-index"));
+ showNextRuleRow(index);
+ });
+ }
+
+ var typeToAddARow = document.getElementsByClassName("showNextRuleRow");
+ for (var i = 0; i < typeToAddARow.length; ++i) {
+ var el = typeToAddARow[[]i];
+ el.addEventListener("keydown", function(event) {
+ var index = Number(event.target.getAttribute("data-index"));
+ showNextRuleRow(index);
+ });
+ }
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/framework/footer-script.ezt b/appengine/monorail/templates/framework/footer-script.ezt
new file mode 100644
index 0000000..2e05d91
--- /dev/null
+++ b/appengine/monorail/templates/framework/footer-script.ezt
@@ -0,0 +1,23 @@
+<script type="text/javascript" defer src="/static/js/graveyard/common.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/graveyard/listen.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/graveyard/xmlhttp.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/graveyard/shapes.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/graveyard/geom.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/graveyard/popup_controller.js?version=[app_version]" nonce="[nonce]"></script>
+
+[# Note that this file will be requested twice on some pages, but chrome is smart enough
+ to not even request it the second time.]
+<script type="text/javascript" defer src="/static/js/framework/framework-ajax.js?version=[app_version]" nonce="[nonce]"></script>
+
+<script type="text/javascript" defer src="/static/js/tracker/externs.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/ac.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-ac.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-components.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-display.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-editing.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-fields.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-keystrokes.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-nav.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-util.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" defer src="/static/js/tracker/tracker-onload.js?version=[app_version]" nonce="[nonce]"></script>
+
diff --git a/appengine/monorail/templates/framework/group-setting-fields.ezt b/appengine/monorail/templates/framework/group-setting-fields.ezt
new file mode 100644
index 0000000..96c3b55
--- /dev/null
+++ b/appengine/monorail/templates/framework/group-setting-fields.ezt
@@ -0,0 +1,95 @@
+[# Display a widget to choose group visibility level, or read-only text showing
+ the visibility level. Read-only text is used when the user does not have
+ permission to edit, or if there is only one available choice.
+]
+
+[define vis_menu_was_shown]False[end]
+
+[if-any read_only][else]
+ <select name="visibility" id="visibility" [if-any import_group]disabled="disabled"[end]>
+ <option value="" disabled="disabled" [if-any initial_visibility][else]selected="selected"[end]>
+ Select a visibility level...
+ </option>
+ [for visibility_levels]
+ <option value="[visibility_levels.key]"
+ [if-any initial_visibility]
+ [is initial_visibility.key visibility_levels.key]selected="selected"[end]
+ [end]>
+ [visibility_levels.name]
+ </option>
+ [end]
+ </select>
+ [define vis_menu_was_shown]True[end]
+
+ <br><br>
+ Friend projects: <br>
+ <input size="60" type="text" id="friendprojects" name="friendprojects" value="[initial_friendprojects]">
+ <div class="fielderror">
+ <span id="friendprojectsfeedback"></span>
+ [if-any errors.friendprojects][errors.friendprojects][end]
+ </div>
+
+ <br><br>
+ <input type="checkbox" name="import_group" id="import_group"
+ [if-any import_group]checked="checked"[end]
+ [if-any groupadmin]disabled="disabled"[end] >
+ <label for="import_group">Import from external group</label>
+
+ <div class="fielderror">
+ <span id="groupimportfeedback"></span>
+ [if-any errors.groupimport][errors.groupimport][end]
+ </div>
+
+ <br>
+ External group type:
+ <select name="group_type" id="group_type"
+ [if-any import_group][else]disabled="disabled"[end]
+ [if-any groupadmin]disabled="disabled"[end] >
+ <option value="" disabled="disabled" [if-any initial_group_type][else]selected="selected"[end]>
+ Select a group type...
+ </option>
+ [for group_types]
+ <option value="[group_types.key]"
+ [if-any initial_group_type]
+ [is initial_group_type.key group_types.key]selected="selected"[end]
+ [end]>
+ [group_types.name]
+ </option>
+ [end]
+ </select>
+ <br><br>
+
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ cur_vis_value = $("visibility").value;
+
+ function _updateSettings() {
+ if ($("import_group").checked) {
+ $("group_type").disabled = false;
+ cur_vis_value = $("visibility").value;
+ $("visibility").value = 0;
+ $("visibility").disabled = true;
+ $("friendprojects").disabled = true;
+ } else {
+ $("group_type").disabled = true;
+ $("visibility").value = cur_vis_value;
+ $("visibility").disabled = false;
+ $("friendprojects").disabled = false;
+ }
+ }
+
+ $("import_group").addEventListener("click", _updateSettings);
+});
+ </script>
+[end]
+
+[is vis_menu_was_shown "False"]
+ [initial_visibility.name]
+ <input type="hidden" name="visibility" value="[initial_visibility.key]">
+[end]
+
+<div class="formerror">
+ [if-any errors.access]
+ <div class="emphasis">[errors.access]</div>
+ [end]
+</div>
\ No newline at end of file
diff --git a/appengine/monorail/templates/framework/js-placeholders.ezt b/appengine/monorail/templates/framework/js-placeholders.ezt
new file mode 100644
index 0000000..dd6b32a
--- /dev/null
+++ b/appengine/monorail/templates/framework/js-placeholders.ezt
@@ -0,0 +1,13 @@
+[# Empty function definitions because we load the JS at bottom of page.
+ Without this, some rollovers or on-click handlers might give errors for
+ the first 200ms or so after the page loads. With them, they simply are
+ no-ops. ]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+function _showBelow(){}
+function _toggleStar(){}
+function _goIssue(){}
+function _goFile(){}
+});
+</script>
+
diff --git a/appengine/monorail/templates/framework/label-validation-row.ezt b/appengine/monorail/templates/framework/label-validation-row.ezt
new file mode 100644
index 0000000..4c2d1a3
--- /dev/null
+++ b/appengine/monorail/templates/framework/label-validation-row.ezt
@@ -0,0 +1,6 @@
+<tr>
+ <td colspan="3">
+ <div id="confirmarea" class="novel"><span id="confirmmsg"></span></div>
+ <div id="blocksubmitarea" class="blockingsubmit"><span id="blocksubmitmsg"></span></div>
+ </td>
+</tr>
diff --git a/appengine/monorail/templates/framework/maintabs.ezt b/appengine/monorail/templates/framework/maintabs.ezt
new file mode 100644
index 0000000..527d47a
--- /dev/null
+++ b/appengine/monorail/templates/framework/maintabs.ezt
@@ -0,0 +1,81 @@
+[# Show top-level tabs.
+
+ Args:
+ arg0: Can be "showtabs", or "showusertabs" to select which
+ top-level tabs are shown.
+ arg1: String like "t1", "t2", "t3" to identify the currently active tab.
+]
+<table id="monobar" width="100%" cellpadding="0" cellspacing="0">
+ <tr>
+ [if-any project_thumbnail_url]
+ <th id="thumbnail_box">
+ <a href="[project_home_page]" target="_blank">
+ <img src="[project_thumbnail_url]">
+ </a>
+ </th>
+ [else]
+ <th class="padded">
+ <a href="/" id="wordmark">[site_name]</a>
+ </th>
+ [end]
+ [if-any projectname]
+ <th class="toptabs padded">
+ <a href="[project_home_url]/" title="[project_summary]"
+ id="projects-dropdown">Project: [projectname] <small>▼</small></a>
+ <a href="[project_home_url]/issues/list" class="[is main_tab_mode "t2"]active[end]">Issues</a>
+ <a href="[project_home_url]/people/list" class="[is main_tab_mode "t3"]active[end]">People</a>
+ <a href="[project_home_url]/adminIntro" class="[is main_tab_mode "t4"]active[end]">Development process</a>
+ <a href="[project_home_url]/updates/list" class="[is main_tab_mode "t5"]active[end]">History</a>
+ [if-any perms.EditProject]
+ <a href="[project_home_url]/admin"
+ class="[is main_tab_mode "t6"]active[else]inactive[end]">Administer</a>
+ [end]
+
+ </th>
+ [end]
+
+ [if-any viewed_user]
+ <th class="padded">
+ User: [viewed_user.display_name]
+ [if-any viewed_user_pb.is_site_admin_bool]<i>(Administrator)</i>[end]
+ </th>
+ [end]
+
+ <td width="100%" id="userbar">
+ [include "user-bar.ezt"]
+ </td>
+ </tr>
+</table>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("projects-dropdown"))
+ $("projects-dropdown").addEventListener("click", function(event) {
+ event.preventDefault();
+ });
+});
+</script>
+
+[is arg0 "showtabs"]
+ <div class="subt">
+ [include "projecttabs.ezt"]
+ </div>
+[else][is arg0 "showusertabs"]
+ <div class="subt">
+ [include "../sitewide/usertabs.ezt" arg1]
+ </div>
+[else][is arg0 "showusergrouptabs"]
+ <div class="subt">
+ [include "../sitewide/usergrouptabs.ezt" arg1]
+ </div>
+[end][end][end]
+
+[if-any site_read_only][else]
+ [if-any project_alert]
+ <div style="font-weight: bold; color: #c00; margin-top: 5px; display: block;">
+ [project_alert]
+ </div>
+ [end]
+[end]
+
+[include "../features/cues.ezt"]
diff --git a/appengine/monorail/templates/framework/master-footer.ezt b/appengine/monorail/templates/framework/master-footer.ezt
new file mode 100644
index 0000000..e073e09
--- /dev/null
+++ b/appengine/monorail/templates/framework/master-footer.ezt
@@ -0,0 +1,71 @@
+ <script type="text/javascript" defer src="/static/js/framework/externs.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/env.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-ajax.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-cues.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-display.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-menu.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-myprojects.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-accountmenu.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/framework-stars.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/framework/project-name-check.js?version=[app_version]" nonce="[nonce]"></script>
+ <script type="text/javascript" defer src="/static/js/graveyard/xmlhttp.js?version=[app_version]" nonce="[nonce]"></script>
+
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var toggles = document.getElementsByClassName("toggleHidden");
+ for (var i = 0; i < toggles.length; ++i) {
+ var toggle = toggles[[]i];
+ toggle.addEventListener("click", function (event) {
+ _toggleHidden(event.target);
+ event.preventDefault();
+ });
+ }
+
+ toggles = document.getElementsByClassName("toggleCollapse");
+ for (var i = 0; i < toggles.length; ++i) {
+ var toggle = toggles[[]i];
+ toggle.addEventListener("click", function (event) {
+ _toggleCollapse(event.target);
+ event.preventDefault();
+ });
+ }
+
+ [if-any form_token]
+ var tokenFields = document.querySelectorAll("input[[]name=token]");
+ for (var i = 0; i < tokenFields.length; ++i) {
+ var field = tokenFields[[]i];
+ field.form.addEventListener("submit", function(event) {
+ refreshTokens(
+ event, "[form_token]", "[form_token_path]", [token_expires_sec]);
+ });
+ }
+ [end]
+});
+ </script>
+
+</div> [# End <div id="maincol"> from master-header.ezt]
+
+<div id="footer">
+ <a href="https://bugs.chromium.org/p/monorail/adminIntro" title="Monorail [app_version]">About Monorail</a>
+ <a href="https://bugs.chromium.org/p/monorail/issues/entry" target="_blank">Feedback on Monorail</a>
+ <a href="http://www.google.com/policies/terms/">Terms</a>
+ <a href="http://www.google.com/policies/privacy/">Privacy</a>
+</div>
+
+ [include "debug.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ (function(i,s,o,g,r,a,m){i[[]'GoogleAnalyticsObject']=r;i[[]r]=i[[]r]||function(){
+ (i[[]r].q=i[[]r].q||[[]]).push(arguments)},i[[]r].l=1*new Date();a=s.createElement(o),
+ m=s.getElementsByTagName(o)[[]0];a.async=1;a.setAttribute('nonce','[nonce]');
+ a.src=g;m.parentNode.insertBefore(a,m)
+ })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+ ga('create', '[analytics_id]', {'siteSpeedSampleRate': 100});
+ ga('send', 'pageview');
+});
+</script>
+
+ </body>
+</html>
diff --git a/appengine/monorail/templates/framework/master-header.ezt b/appengine/monorail/templates/framework/master-header.ezt
new file mode 100644
index 0000000..c0ae57e
--- /dev/null
+++ b/appengine/monorail/templates/framework/master-header.ezt
@@ -0,0 +1,101 @@
+<!DOCTYPE html>
+[# This is the master header file that is included in all Monorail servlets that render a page.
+
+ Args:
+   arg0: Can be "showtabs", "showusertabs" or "showusergrouptabs" to select which top-level tabs are shown.
+ arg1: String like "t1", "t2", "t3" to identify the currently active tab.
+]
+<html lang="en">
+<head>
+ <link rel="icon" type="image/vnd.microsoft.icon" href="/static/images/monorail.ico">
+ [if-any link_rel_canonical]
+ <link rel="canonical" href="[link_rel_canonical]">
+ [end]
+ <script type="text/javascript" nonce="[nonce]">
+ [# Javascript object containing basic page data. ]
+ var CS_env = {
+ 'absoluteBaseUrl': '[format "js"][absolute_base_url][end]',
+ 'app_version': '[format "js"][app_version][end]',
+ 'token': '[format "js"][xhr_token][end]',
+ [if-any logged_in_user]
+ 'tokenExpiresSec': [format "js"][token_expires_sec][end],
+ [end]
+ 'loggedInUserEmail':
+ [if-any logged_in_user]
+ '[format "js"][logged_in_user.email][end]'
+ [else]
+ null
+ [end],
+ 'login_url': '[format "js"][login_url][end]',
+ 'logout_url': '[format "js"][logout_url][end]',
+ 'profileUrl':
+ [if-any logged_in_user]
+ '[format "js"][logged_in_user.profile_url][end]'
+ [else]
+ null
+ [end],
+ 'projectName': '[format "js"][projectname][end]',
+ };
+ </script>
+
+ [# Improve the snippet that appears in search]
+ [if-any show_search_metadata]
+    <meta name="Description" content="Monorail is a simple, reliable, and flexible issue tracking tool.">
+ <meta name="robots" content="NOODP">
+ [end]
+
+ <title>
+ [if-any title][title] - [end]
+ [if-any projectname]
+ [projectname] -
+ [else]
+ [if-any viewing_user_page][viewed_user.display_name] - [end]
+ [end]
+ [if-any title_summary][title_summary] - [else]
+ [if-any project_summary][project_summary] - [end]
+ [end]
+ [site_name]
+ </title>
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
+ <meta name="referrer" content="no-referrer">
+ [if-any robots_no_index]
+ <meta name="ROBOTS" content="NOINDEX,NOARCHIVE">
+ [else]
+ <meta name="ROBOTS" content="NOARCHIVE">
+ [end]
+ <meta name="viewport" content="width=device-width">
+ <link type="text/css" rel="stylesheet" href="/static/css/ph_core.css?version=[app_version]">
+ <link type="text/css" rel="stylesheet" href="/static/css/ph_mobile.css?version=[app_version]">
+ [if-any category_css]
+ <link type="text/css" rel="stylesheet" href="/static/[category_css]?version=[app_version]">
+ [end]
+ [if-any page_css]
+ <link type="text/css" rel="stylesheet" href="/static/[page_css]?version=[app_version]">
+ [end]
+ [# NO MORE SCRIPTS IN HEAD, it makes page loading too slow.]
+</head>
+
+<body class="[main_tab_mode] [if-any perms.EditIssue]perms_EditIssue[end]">
+
+[# Tiny script used sitewide. ]
+<script type="text/javascript" nonce="[nonce]">
+ function _go(url) { document.location = url; }
+ function $(id) { return document.getElementById(id); }
+
+ var loadQueue = [];
+ function runOnLoad(fn) { loadQueue.push(fn); }
+
+ window.onload = function() {
+ for (var fn of loadQueue)
+ fn();
+ delete loadQueue;
+ };
+</script>
+
+[include "maintabs.ezt" arg0 arg1]
+
+[include "banner_message.ezt"]
+
+<div id="maincol">
+[include "alert.ezt"]
diff --git a/appengine/monorail/templates/framework/project-access-part.ezt b/appengine/monorail/templates/framework/project-access-part.ezt
new file mode 100644
index 0000000..2400dfe
--- /dev/null
+++ b/appengine/monorail/templates/framework/project-access-part.ezt
@@ -0,0 +1,36 @@
+[# Display a widget to choose project access level, or read-only text showing
+ the access level. Read-only text is used when the user does not have
+ permission to edit, or if there is only one available choice.
+]
+
+[define access_menu_was_shown]False[end]
+
+[if-any read_only][else]
+ [if-any offer_access_level]
+ <select name="access" id="access">
+ <option value="" disabled="disabled" [if-any initial_access][else]selected="selected"[end]>
+ Select an access level...
+ </option>
+ [for available_access_levels]
+ <option value="[available_access_levels.key]"
+ [if-any initial_access]
+ [is initial_access.key available_access_levels.key]selected="selected"[end]
+ [end]>
+ [available_access_levels.name]
+ </option>
+ [end]
+ </select>
+ [define access_menu_was_shown]True[end]
+ [end]
+[end]
+
+[is access_menu_was_shown "False"]
+ [initial_access.name]
+ <input type="hidden" name="access" value="[initial_access.key]">
+[end]
+
+<div class="formerror">
+ [if-any errors.access]
+ <div class="emphasis">[errors.access]</div>
+ [end]
+</div>
diff --git a/appengine/monorail/templates/framework/project-descriptive-fields.ezt b/appengine/monorail/templates/framework/project-descriptive-fields.ezt
new file mode 100644
index 0000000..ef6347e
--- /dev/null
+++ b/appengine/monorail/templates/framework/project-descriptive-fields.ezt
@@ -0,0 +1,35 @@
+Summary:<br>
+<input type="text" id="summary" name="summary" size="75" value="[initial_summary]"><br>
+<div class="fielderror">
+ <span id="summaryfeedback">[if-any errors.summary][errors.summary][end]</span>
+</div>
+
+Description:<br>
+<textarea id="description" name="description" rows="20" cols="90" wrap="soft"
+ >[initial_description]</textarea><br>
+<div class="fielderror">
+ <span id="descriptionfeedback">[if-any errors.description][errors.description][end]</span>
+</div>
+
+Project home page (optional):<br/>
+<input type="text" id="project_home" name="project_home" size="75" value="[initial_project_home]"><br>
+<div class="fielderror">
+ <span id="project_homefeedback">[if-any errors.project_home][errors.project_home][end]</span>
+</div>
+
+Project documentation page (optional):<br/>
+<input type="text" id="docs_url" name="docs_url" size="75" value="[initial_docs_url]"><br>
+<div class="fielderror">
+ <span id="docs_urlfeedback">[if-any errors.docs_url][errors.docs_url][end]</span>
+</div>
+
+[if-any logo_view.viewurl]
+ Project logo:<br>
+ [include "display-project-logo.ezt" True]
+[else]
+ Upload project logo (optional, will be resized to 110x30):<br/>
+ <input type="file" name="logo" id="logo">
+ <div class="fielderror">
+ <span id="logofeedback">[if-any errors.logo][errors.logo][end]</span>
+ </div>
+[end]
diff --git a/appengine/monorail/templates/framework/projecttabs.ezt b/appengine/monorail/templates/framework/projecttabs.ezt
new file mode 100644
index 0000000..11b72ff
--- /dev/null
+++ b/appengine/monorail/templates/framework/projecttabs.ezt
@@ -0,0 +1,30 @@
+[is main_tab_mode "t2"]
+ [include "../tracker/issue-search-form.ezt"]
+[end]
+
+
+[is main_tab_mode "t4"]
+ <div class="[admin_tab_mode]">
+ <div class="at isf">
+ <span class="inst1"><a href="/p/[projectname]/adminIntro">Introduction</a></span>
+ <span class="inst3"><a href="/p/[projectname]/adminStatuses">Statuses</a></span>
+ <span class="inst4"><a href="/p/[projectname]/adminLabels">Labels and fields</a></span>
+ [if-any perms.EditProject][# Rule might be too sensitive for non-members to view.]
+ <span class="inst5"><a href="/p/[projectname]/adminRules">Rules</a></span>
+ [end]
+ <span class="inst6"><a href="/p/[projectname]/adminTemplates">Templates</a></span>
+ <span class="inst7"><a href="/p/[projectname]/adminComponents">Components</a></span>
+ <span class="inst8"><a href="/p/[projectname]/adminViews">Views</a></span>
+ </div>
+ </div>
+[end]
+
+
+[is main_tab_mode "t6"]
+ <div class="[admin_tab_mode]">
+ <div class="at isf">
+ <span class="inst1"><a href="/p/[projectname]/admin">General</a></span>
+ <span class="inst9"><a href="/p/[projectname]/adminAdvanced">Advanced</a></span>
+ </div>
+ </div>
+[end]
diff --git a/appengine/monorail/templates/framework/read-only-rejection.ezt b/appengine/monorail/templates/framework/read-only-rejection.ezt
new file mode 100644
index 0000000..3147d9a
--- /dev/null
+++ b/appengine/monorail/templates/framework/read-only-rejection.ezt
@@ -0,0 +1,10 @@
+<span style="color:#a30">
+ [if-any site_read_only]
+ This operation cannot be done when the site is read-only.
+ Please come back later.
+ [else]
+ [if-any project_read_only]
+ READ-ONLY
+ [end]
+ [end]
+</span>
diff --git a/appengine/monorail/templates/framework/saved-queries-admin-part.ezt b/appengine/monorail/templates/framework/saved-queries-admin-part.ezt
new file mode 100644
index 0000000..d0e038c
--- /dev/null
+++ b/appengine/monorail/templates/framework/saved-queries-admin-part.ezt
@@ -0,0 +1,133 @@
+[# arg0 is either "user" for user saved queries or "project" for canned queries]
+<style>
+ #queries th, #queries td { padding-bottom: 1em }
+</style>
+
+<table border="0" id="queries">
+ <tr>
+ <th></th>
+ <th style="text-align:left">Saved query name:</th>
+ [is arg0 "user"]
+ <th style="text-align:left">Project(s):</th>
+ [end]
+ <th colspan="2" style="text-align:left">Query:</th>
+ [is arg0 "user"]
+      <th style="text-align:left">Subscription options:</th>
+ [end]
+ <th></th>
+ </tr>
+
+ [for canned_queries]
+ <tr>
+ <td style="text-align:right" width="20">[canned_queries.idx].
+ <input type="hidden" name="savedquery_id_[canned_queries.idx]" value="[canned_queries.query_id]">
+ </td>
+ <td><input type="text" name="savedquery_name_[canned_queries.idx]" size="35" value="[canned_queries.name]" class="acob"></td>
+ [is arg0 "user"]
+ <td><input type="text" name="savedquery_projects_[canned_queries.idx]" size="35" value="[canned_queries.projects]"
+ class="acob" autocomplete="off" id="savedquery_projects_[canned_queries.idx]"></td>
+ [end]
+
+ <td>
+ <select name="savedquery_base_[canned_queries.idx]">
+ [define can][canned_queries.base_query_id][end]
+ [include "../tracker/issue-can-widget.ezt" "admin"]
+ </select>
+ </td>
+ <td>
+ <input type="text" name="savedquery_query_[canned_queries.idx]" size="50" value="[canned_queries.query]" autocomplete="off" id="query_existing_[canned_queries.idx]" class="acob">
+ </td>
+ [is arg0 "user"]
+ <td>
+ <select id="savedquery_sub_mode_[canned_queries.idx]" name="savedquery_sub_mode_[canned_queries.idx]">
+          <option [is canned_queries.subscription_mode "noemail"]selected="selected"[end] value="noemail"
+ >No emails</option>
+          <option [is canned_queries.subscription_mode "immediate"]selected="selected"[end] value="immediate">Notify Immediately</option>
+ [# TODO(jrobbins): <option disabled="disabled">Notify Daily</option>]
+ [# TODO(jrobbins): <option disabled="disabled">Notify Weekly on Monday</option>]
+ </select>
+ </td>
+ [end]
+ <td></td>
+ </tr>
+ [end]
+
+ [define can]2[end]
+ [for new_query_indexes]
+ <tr id="newquery[new_query_indexes]" [if-index new_query_indexes first][else]style="display:none"[end]>
+ <td style="text-align:right" width="20">[new_query_indexes].</td>
+ <td><input type="text" name="new_savedquery_name_[new_query_indexes]"
+ class="showNextQueryRow acob" data-index="[new_query_indexes]"
+ size="35" value="" placeholder="Required"></td>
+ [is arg0 "user"]
+ <td><input type="text" name="new_savedquery_projects_[new_query_indexes]" size="35" value="" class="acob"
+ autocomplete="off" id="new_savedquery_projects_[new_query_indexes]" placeholder="Optional"></td>
+ [end]
+ <td>
+ <select name="new_savedquery_base_[new_query_indexes]">
+ [include "../tracker/issue-can-widget.ezt" "admin"]
+ </select>
+ </td>
+ <td>
+        <input type="text" name="new_savedquery_query_[new_query_indexes]" size="50" value="" autocomplete="off" id="query_new_[new_query_indexes]" class="acob" placeholder="Optional. Example- 'label:Security owner:me'">
+ </td>
+ [is arg0 "user"]
+ <td>
+ <select id="new_savedquery_sub_mode_[new_query_indexes]" name="new_savedquery_sub_mode_[new_query_indexes]">
+ <option selected="selected" value="noemail">No emails</option>
+ <option value="immediate">Notify Immediately</option>
+ [# TODO(jrobbins): <option disabled="disabled">Notify Daily</option>]
+ [# TODO(jrobbins): <option disabled="disabled">Notify Weekly</option>]
+ </select>
+ </td>
+ [end]
+ <td width="40px">
+ [if-index new_query_indexes last][else]
+          <span id="addquery[new_query_indexes]" class="fakelink" data-index="[new_query_indexes]">Add a row</span>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+</table>
+
+[include "../framework/footer-script.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function showNextQueryRow(i) {
+ if (i < [max_queries]) {
+ _showID('newquery' + (i + 1));
+ _hideID('addquery' + i);
+ }
+ }
+ _fetchUserProjects(true);
+ _onload();
+
+ var addARowLinks = document.getElementsByClassName("fakelink");
+ for (var i = 0; i < addARowLinks.length; ++i) {
+ var link = addARowLinks[[]i];
+ link.addEventListener("click", function(event) {
+ var index = Number(event.target.getAttribute("data-index"));
+ showNextQueryRow(index);
+ });
+ }
+
+ var typeToAddARow = document.getElementsByClassName("showNextQueryRow");
+ for (var i = 0; i < typeToAddARow.length; ++i) {
+ var el = typeToAddARow[[]i];
+ el.addEventListener("keydown", function(event) {
+ var index = Number(event.target.getAttribute("data-index"));
+ showNextQueryRow(index);
+ });
+ }
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/framework/user-bar.ezt b/appengine/monorail/templates/framework/user-bar.ezt
new file mode 100644
index 0000000..29a6c20
--- /dev/null
+++ b/appengine/monorail/templates/framework/user-bar.ezt
@@ -0,0 +1,14 @@
+<span style="padding: 0 1em">
+ [if-any logged_in_user]
+ <span id="account-menu" style="color:#0000cc">
+ <b><u>[logged_in_user.email]</u></b>
+ <small>▼</small>
+ </span>
+ | <a href="[logged_in_user.profile_url]"
+ title="Profile, Updates, and Settings"
+ ><u>Profile</u></a>
+ | <a href="[logout_url]"><u>Sign out</u></a>
+ [else]
+ <a href="[login_url]"><u>Sign in</u></a>
+ [end]
+</span>
diff --git a/appengine/monorail/templates/framework/user-link.ezt b/appengine/monorail/templates/framework/user-link.ezt
new file mode 100644
index 0000000..18d7e6c
--- /dev/null
+++ b/appengine/monorail/templates/framework/user-link.ezt
@@ -0,0 +1 @@
+[if-any arg0.profile_url]<a style="white-space: nowrap" href="[arg0.profile_url]" title="[arg0.display_name]">[arg0.display_name]</a>[else][arg0.display_name][end]
\ No newline at end of file
diff --git a/appengine/monorail/templates/project/people-add-members-form.ezt b/appengine/monorail/templates/project/people-add-members-form.ezt
new file mode 100644
index 0000000..d6ec30e
--- /dev/null
+++ b/appengine/monorail/templates/project/people-add-members-form.ezt
@@ -0,0 +1,94 @@
+
+[if-any offer_membership_editing]
+<br>
+<div class="h4" style="margin-bottom:4px" id="addmembers">Add Members</div>
+
+<div id="makechanges" class="closed">
+
+ <div class="ifClosed">
+ <textarea id="tempt" rows="4" style="color:#666; width:500px; margin-left:4px"
+ >Enter new member email addresses</textarea>
+ </div>
+
+
+<input type="hidden" name="token" value="[form_token]">
+<table class="ifOpened vt" cellspacing="2" cellpadding="2" style="margin-top:0">
+ <tr>
+ <td colspan="2">
+ <textarea name="addmembers" style="width:500px" rows="4"
+ id="addMembersTextArea">[initial_add_members]</textarea>
+ [if-any errors.addmembers]
+ <div class="fielderror">[errors.addmembers]</div>
+ [end]<br>
+ </td>
+ <td rowspan="3">
+ <div class="tip" style="margin-top:0; margin-left:4px">
+ Enter the email addresses of users that you would like to
+ add to this [is arg0 "project"]project[else]group[end].<br><br>
+ Each email address must correspond to an existing Google Account.
+ </div>
+ </td>
+ </tr>
+
+ <tr>
+ <th width="30" align="left">Role:</th>
+
+ <td width="470" align="left">
+ [is arg0 "project"]
+ <input type="radio" name="role" value="owner" id="owner">
+ <label for="owner">Owner: may make any change to this
+ project.</label><br>
+
+ <input type="radio" name="role" value="committer" id="committer"
+ checked="checked">
+ <label for="committer">Committer: may work in the project, but may
+ not reconfigure it.</label><br>
+
+ <input type="radio" name="role" value="contributor" id="contributor">
+ <label for="contributor">Contributor: starts with the same permissions
+ as non-members.</label><br>
+ [# TODO(jrobbins): custom roles]
+ [else]
+ <input type="radio" name="role" value="owner" id="owner">
+ <label for="owner">Owner: may make any change to this
+ group.</label><br>
+
+ <input type="radio" name="role" value="member" id="member"
+ checked="checked">
+ <label for="member">Member: member of this user group.</label><br>
+ [end]
+ </td>
+
+ </tr>
+ <tr>
+ <td colspan="2">
+ <input type="submit" name="addbtn" id="addbtn"
+ value="Save changes" style="margin-top:1em">
+ </td>
+ </tr>
+</table>
+
+</div>
+
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ window._openAddMembersForm = function _openAddMembersForm() {
+ document.location.hash='addmembers';
+ document.getElementById('makechanges').className = "opened";
+ window.setTimeout(
+ function () { document.getElementById('addMembersTextArea').focus(); },
+ 100);
+ }
+
+ [if-any initially_expand_form]
+ _openAddMembersForm();
+ [end]
+
+ if ($("tempt"))
+ $("tempt").addEventListener("mousedown", _openAddMembersForm);
+
+});
+</script>
+
+[end]
diff --git a/appengine/monorail/templates/project/people-detail-page.ezt b/appengine/monorail/templates/project/people-detail-page.ezt
new file mode 100644
index 0000000..ceaff17
--- /dev/null
+++ b/appengine/monorail/templates/project/people-detail-page.ezt
@@ -0,0 +1,135 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<a href="list">‹ Back to people list</a>
+
+<form action="detail.do" method="POST" id="peopledetail">
+<input type="hidden" name="token" value="[form_token]">
+<input type="hidden" name="u" value="[member.user.user_id]">
+<table cellspacing="8" class="rowmajor vt">
+<tr>
+ <th width="1%">User:</th>
+ <td>[include "../framework/user-link.ezt" member.user]</td>
+</tr>
+
+ <tr class="[if-any expand_perms]opened[else]closed[end]">
+ <th>Role:</th>
+ <td>
+ [# Show a widget if the current user is allowed to edit roles.]
+ [if-any perms.EditProject]
+ [define offer_role_select]Yes[end]
+ [else]
+ [define offer_role_select]No[end]
+ [end]
+ [# But, don't offer it if the user could remove himself as the last owner.]
+ [is total_num_owners "1"][if-any warn_abandonment]
+ [define offer_role_select]No[end]
+ [end][end]
+
+ [is offer_role_select "Yes"]
+ <select name="role">
+ <option [is member.role "Owner"]selected="selected"[end]
+ value="owner">Owner</option>
+ <option [is member.role "Committer"]selected="selected"[end]
+ value="committer">Committer</option>
+ <option [is member.role "Contributor"]selected="selected"[end]
+ value="contributor">Contributor</option>
+ </select>
+ [else]
+ [member.role]
+ [end]
+ <a class="ifClosed toggleHidden" href="#" id="show_permissions"
+ style="font-size:90%; margin-left:1em">Show permissions</a>
+ <a class="ifOpened toggleHidden" href="#" id="hide_permissions"
+ style="font-size:90%; margin-left:1em">Hide permissions</a>
+ [include "people-detail-perms-part.ezt"]
+ </td>
+ <td>
+ <div class="ifOpened tip" style="width:17em">
+ <b>Permissions</b> enable members to perform specific actions in
+ a project. Appropriate permissions are already defined for each
+ role: Owner, Committer, and Contributor. Additional permissions can
+ be granted to individual members, if needed.
+
+ <p>Most project owners will never need to grant any individual
+ member permissions. It is usually more important to describe
+ each member's duties in the notes.</p>
+
+ <div style="margin-top:.5em">
+ <a href="http://code.google.com/p/monorail/wiki/Permissions" target="new">Learn more</a>
+ <a href="http://code.google.com/p/monorail/wiki/Permissions" target="new"><img src="/static/images/tearoff_icon.gif" width="16" height="16"></a>
+ </div>
+ </div>
+ </td>
+</tr>
+
+
+<tr>
+ <th>Notes:</th>
+ <td>
+ [if-any offer_edit_member_notes]
+ <div style="width:40em">
+ <textarea style="width:100%" rows="4" class="ifExpand" name="notes"
+ >[member.notes]</textarea>
+ </div>
+ [else]
+ [if-any member.notes][member.notes][else]----[end]
+ [end]
+
+ </td>
+</tr>
+
+[if-any offer_edit_perms offer_edit_member_notes]
+ <tr>
+ <th></th>
+ <td>
+ <input type="submit" name="submit" value="Save changes">
+ [if-any offer_remove_role]
+ <input type="submit" name="remove" value="Remove member"
+ style="margin-left:3em" id="remove_member">
+ [end]
+ </td>
+ </tr>
+[end]
+
+</table>
+</form>
+
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function _confirmRemove() {
+ [if-any warn_abandonment]
+ [is total_num_owners "1"]
+ alert('You cannot remove the last project owner.');
+ return false;
+ [else]
+ return confirm('Remove yourself?\nYou will be locked out of making further changes.');
+ [end]
+ [else]
+ return confirm('Remove member [format "js"][member.user.email][end]?');
+ [end]
+ }
+
+ if ($("remove_member"))
+ $("remove_member").addEventListener("click", function(event) {
+ if (!_confirmRemove())
+ event.preventDefault();
+ });
+
+ [if-any read_only][else]
+ if ($("show_permissions"))
+ $("show_permissions").addEventListener("click", function() {
+ _setPeoplePrefs("[projectname]", 1, "[xhr_token]");
+ });
+ if ($("hide_permissions"))
+ $("hide_permissions").addEventListener("click", function() {
+ _setPeoplePrefs("[projectname]", 0, "[xhr_token]");
+ });
+ [end]
+
+});
+</script>
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/people-detail-perms-part.ezt b/appengine/monorail/templates/project/people-detail-perms-part.ezt
new file mode 100644
index 0000000..3a30417
--- /dev/null
+++ b/appengine/monorail/templates/project/people-detail-perms-part.ezt
@@ -0,0 +1,82 @@
+<table id="perm_defs" class="ifOpened">
+ [if-any offer_edit_perms displayed_extra_perms]
+ <tr><th colspan="2">Standard permissions</th></tr>
+ [end]
+
+ [include "people-detail-row-part.ezt" role_perms.View member_perms.View "View" "View issues"]
+ [include "people-detail-row-part.ezt" role_perms.Commit member_perms.Commit "Commit" "Full project member"]
+
+ [include "people-detail-row-part.ezt" role_perms.CreateIssue member_perms.CreateIssue "CreateIssue" "Enter a new issue"]
+ [include "people-detail-row-part.ezt" role_perms.AddIssueComment member_perms.AddIssueComment "AddIssueComment" "Add a comment to an issue"]
+ [include "people-detail-row-part.ezt" role_perms.EditIssue member_perms.EditIssue "EditIssue" "Edit any attribute of an issue"]
+ [include "people-detail-row-part.ezt" role_perms.EditIssueOwner member_perms.EditIssueOwner "EditIssueOwner" "- Edit the owner of an issue"]
+ [include "people-detail-row-part.ezt" role_perms.EditIssueSummary member_perms.EditIssueSummary "EditIssueSummary" "- Edit the summary of an issue"]
+ [include "people-detail-row-part.ezt" role_perms.EditIssueStatus member_perms.EditIssueStatus "EditIssueStatus" "- Edit the status of an issue"]
+ [include "people-detail-row-part.ezt" role_perms.EditIssueCc member_perms.EditIssueCc "EditIssueCc" "- Edit the CC list of an issue"]
+ [include "people-detail-row-part.ezt" role_perms.DeleteIssue member_perms.DeleteIssue "DeleteIssue" "Delete/undelete an issue"]
+
+ [include "people-detail-row-part.ezt" role_perms.DeleteAny member_perms.DeleteAny "DeleteAny" "Delete comments by anyone"]
+ [include "people-detail-row-part.ezt" role_perms.EditAnyMemberNotes member_perms.EditAnyMemberNotes "EditAnyMemberNotes" "Edit anyone's member notes"]
+ [include "people-detail-row-part.ezt" role_perms.ModerateSpam member_perms.ModerateSpam "ModerateSpam" "Mark or un-mark issues and comments as spam"]
+
+
+
+ [if-any offer_edit_perms displayed_extra_perms]
+ <tr><th colspan="2">Custom permissions</th></tr>
+ [end]
+
+ [if-any offer_edit_perms]
+ <tr>
+ <td id="displayed_extra_perms" colspan="2">
+ <div style="width:12em">
+ [for displayed_extra_perms]
+ <input style="width:100%" name="extra_perms"
+ value="[displayed_extra_perms]">
+ [end]
+ <input style="width:100%" name="extra_perms"
+ id="first_extra_perms"
+ value="" autocomplete="off">
+ </div>
+ </td>
+ </tr>
+ [else]
+ [for displayed_extra_perms]
+ <tr>
+ <td>
+ <input type="checkbox" checked="checked" disabled="disabled">
+ [displayed_extra_perms]
+ </td>
+ <td></td>
+ </tr>
+ [end]
+ [end]
+
+</table>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function _addInput(event) {
+ if (event.target.value == "") {
+ return;
+ }
+ var area = event.target.parentNode;
+ var newInput = document.createElement("input");
+ newInput.style.width = "100%";
+ newInput.name = event.target.name;
+ newInput.onfocus = function(e) {
+ _acrob(null);
+ _acof(e);
+ };
+ newInput.setAttribute("autocomplete", "off");
+ newInput.addEventListener("keyup", _addInput);
+ area.appendChild(newInput);
+ area.appendChild(document.createElement("br"));
+
+ // Make it only fire once.
+ event.target.removeEventListener("keyup", _addInput);
+ }
+
+ if ($("first_extra_perms"))
+ $("first_extra_perms").addEventListener("keyup", _addInput);
+});
+</script>
diff --git a/appengine/monorail/templates/project/people-detail-row-part.ezt b/appengine/monorail/templates/project/people-detail-row-part.ezt
new file mode 100644
index 0000000..785005f
--- /dev/null
+++ b/appengine/monorail/templates/project/people-detail-row-part.ezt
@@ -0,0 +1,29 @@
+[# Display one row in the permissions table.
+
+Args:
+ arg0: True if the permission is native to the role. So, it cannot be removed.
+ arg1: True if the user has this permission. So, it will be shown when not in editing mode.
+ arg2: Permission name.
+ arg3: Permission description.
+
+References globals:
+ offer_edit_perms: True if the user can edit permissions on this page.
+]
+
+<tr>
+ <td>
+ <input type="checkbox" [if-any arg1]checked="checked"[end] id="[arg2]"
+ [if-any offer_edit_perms]
+ [if-any arg0]
+ disabled="disabled"
+ [else]
+ name="extra_perms" value="[arg2]"
+ [end]
+ [else]
+ disabled="disabled"
+ [end]
+ >
+ <label for="[arg2]">[arg2]</label>
+ </td>
+ <td>[arg3]</td>
+</tr>
diff --git a/appengine/monorail/templates/project/people-list-page.ezt b/appengine/monorail/templates/project/people-list-page.ezt
new file mode 100644
index 0000000..4860f27
--- /dev/null
+++ b/appengine/monorail/templates/project/people-list-page.ezt
@@ -0,0 +1,143 @@
+[define title]People[end]
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+[include "../framework/js-placeholders.ezt"]
+
+<form method="POST" action="list.do" id="membership_form">
+<div id="colcontrol">
+ <div class="list">
+ [if-any pagination.visible]
+ <div class="pagination">
+ [if-any pagination.prev_url]<a href="[pagination.prev_url]"><b>‹</b> Prev</a>[end]
+ Members [pagination.start] - [pagination.last] of [pagination.total_count]
+ [if-any pagination.next_url]<a href="[pagination.next_url]">Next <b>›</b></a>[end]
+ </div>
+ [end]
+ <b style="margin-right:1em">Project People</b>
+
+ [if-any offer_membership_editing]
+ <input type="button" value="Add members" style="font-size:80%"
+ id="add_members_button">
+ <input type="submit" value="Remove members" style="font-size:80%; margin-left:1em"
+ id="removebtn" name="removebtn" disabled="disabled">
+      [# TODO(jrobbins): extra confirmation when removing yourself as owner.]
+ [end]
+ </div>
+
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped vt" id="resultstable" width="100%">
+ <tbody>
+ [if-any pagination.visible_results]
+
+ <tr id="headingrow">
+ [if-any offer_membership_editing]
+ <th style="border-right:0; padding-right:2px"> </th>
+ [end]
+
+ <th style="white-space:nowrap">Name</th>
+ <th style="white-space:nowrap">Role</th>
+ <th style="white-space:nowrap">Notes</th>
+ </tr>
+
+ [for pagination.visible_results]
+ [include "people-list-row-part.ezt"]
+ [end]
+
+ [else]
+ <tr>
+ <td colspan="40" class="id">
+ <div style="padding: 3em; text-align: center">
+ This project does not have any members.
+ </div>
+ </td>
+ </tr>
+ [end]
+
+
+ </tbody>
+ </table>
+ <div class="list-foot">
+ <div class="pagination">
+ [if-any pagination.prev_url]<a href="[pagination.prev_url]"><b>‹</b> Prev</a>[end]
+ [pagination.start] - [pagination.last] of [pagination.total_count]
+ [if-any pagination.next_url]<a href="[pagination.next_url]">Next <b>›</b></a>[end]
+ </div>
+ </div>
+</div>
+
+[if-any untrusted_user_groups]
+ <div style="width:45em">
+ [include "untrusted-user-groups-part.ezt"]
+ </div>
+[end]
+
+[include "people-add-members-form.ezt" "project"]
+</form>
+
+[if-any offer_membership_editing]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ $("add_members_button").addEventListener("click", _openAddMembersForm);
+
+ function _countChecked(opt_className) {
+ var numChecked = 0;
+ var inputs = document.getElementsByTagName('input');
+ for (var i = 0; i < inputs.length; i++) {
+ var el = inputs[[]i];
+ if (el.type == 'checkbox' && el.name == 'remove' && el.checked &&
+ (!opt_className || opt_className == el.className)) {
+ numChecked++;
+ }
+ }
+ return numChecked;
+ }
+
+ function _enableRemoveButton() {
+ var removeButton = document.getElementById('removebtn');
+ if (_countChecked() > 0) {
+ removeButton.disabled = false;
+ } else {
+ removeButton.disabled = true;
+ }
+ }
+
+ setInterval(_enableRemoveButton, 700);
+
+ function _preventAbandonment(event) {
+ var meCheckbox = document.getElementById("me_checkbox");
+ if (meCheckbox && meCheckbox.checked) {
+ numOwnersChecked = _countChecked("owner");
+ if (numOwnersChecked == [total_num_owners]) {
+ alert("You cannot remove all project owners.");
+ event.preventDefault();
+ } else {
+ if (!confirm("Remove yourself as project owner?\n" +
+ "You will be locked out of making further changes.")) {
+ event.preventDefault();
+ }
+ }
+ }
+ return true;
+ }
+ [if-any check_abandonment]
+ $("membership_form").addEventListener("submit", _preventAbandonment);
+ [end]
+
+ function _handleResultsClick(event) {
+ var target = event.target;
+ if (target.tagName == "A")
+ return;
+ if (target.classList.contains("rowwidgets") || target.parentNode.classList.contains("rowwidgets"))
+ return;
+ if (target.tagName != "TR") target = target.parentNode;
+ _go(target.attributes[[]"data-url"].value,
+ (event.metaKey || event.ctrlKey || event.button == 1));
+ };
+ $("resultstable").addEventListener("click", _handleResultsClick);
+
+});
+ </script>
+[end]
+
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/people-list-row-part.ezt b/appengine/monorail/templates/project/people-list-row-part.ezt
new file mode 100644
index 0000000..ee45f7d
--- /dev/null
+++ b/appengine/monorail/templates/project/people-list-row-part.ezt
@@ -0,0 +1,37 @@
+[define detail_url][pagination.visible_results.detail_url][end]
+<tr data-url="[detail_url]">
+
+ [if-any offer_membership_editing]
+ <td style="padding-right:2px" class="rowwidgets">
+ <input type="checkbox" name="remove"
+ [is pagination.visible_results.role "Owner"]class="owner"[end]
+ value="[pagination.visible_results.user.email]"
+ [if-any pagination.visible_results.viewing_self]
+ id="me_checkbox"
+ [end]
+ >
+ </td>
+ [end]
+
+ <td style="white-space:nowrap">
+ <a href="[detail_url]"
+ >[pagination.visible_results.user.display_name]</a>
+ [if-any pagination.visible_results.viewing_self]
+ <b>- me</b>
+ [end]
+ </td>
+
+ <td>
+ <a href="[detail_url]" style="white-space:nowrap">
+ [pagination.visible_results.role]<br>
+ [for pagination.visible_results.extra_perms]
+ <div style="font-size:90%">+ [pagination.visible_results.extra_perms]</div>
+ [end]
+ </a>
+ </td>
+
+ <td width="90%">
+ <a href="[detail_url]">[pagination.visible_results.notes]</a>
+ </td>
+
+</tr>
diff --git a/appengine/monorail/templates/project/project-admin-advanced-page.ezt b/appengine/monorail/templates/project/project-admin-advanced-page.ezt
new file mode 100644
index 0000000..dbd6289
--- /dev/null
+++ b/appengine/monorail/templates/project/project-admin-advanced-page.ezt
@@ -0,0 +1,11 @@
+[include "../framework/master-header.ezt" "showtabs"]
+
+ <form action="adminAdvanced.do" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ [include "project-admin-publishing-part.ezt"]
+ [include "project-admin-quota-part.ezt"]
+
+ </form>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/project-admin-page.ezt b/appengine/monorail/templates/project/project-admin-page.ezt
new file mode 100644
index 0000000..bc9689c
--- /dev/null
+++ b/appengine/monorail/templates/project/project-admin-page.ezt
@@ -0,0 +1,129 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+ <form action="admin.do" method="POST" autocomplete="off" enctype="multipart/form-data">
+ <input type="hidden" name="token" value="[form_token]">
+
+<h4>Project metadata</h4>
+
+<div class="section">
+ [include "../framework/project-descriptive-fields.ezt"]
+</div>
+
+
+<h4>Project access</h4>
+
+<div class="section">
+ [if-any offer_access_level initial_access]
+ <br>This project may be viewed by:
+ [include "../framework/project-access-part.ezt" "dontchecksubmit"]<br>
+ [end]
+
+<div class="section">
+ <div class="closed">
+ <p>Restriction labels allow project members to restrict access to individual
+ issues.
+ <a class="ifClosed toggleHidden" href="#" style="font-size:90%; margin-left:.5em">Learn more</a></p>
+ <div class="ifOpened help">
+ Normally, if a project member may edit the labels, then he or she may also
+ edit restriction labels. That allows project committers to adjust access
+ controls for the items that they are working on. However, some project
+ owners may prefer that once a restriction label is in place, only a project
+ owner may remove it.
+ </div>
+ </div>
+ <input type="checkbox" name="only_owners_remove_restrictions"
+ id="only_owners_remove_restrictions"
+ [if-any only_owners_remove_restrictions]checked="checked"[end] >
+ <label for="only_owners_remove_restrictions">Only project owners
+ may remove <tt>Restrict-*</tt> labels</label>
+</div>
+
+<div class="section">
+ <div class="closed">
+ <p>Collaboration style
+ <a class="ifClosed toggleHidden" href="#" style="font-size:90%; margin-left:.5em">Learn more</a></p>
+ <div class="ifOpened help">
+ Project workspaces are usually intended to promote collaboration among
+ all project members. However, sometimes a compartmentalized collaboration
+ style is more appropriate. For example, one company might want to work
+ with several partners, but not let each partner know about the others.
+ Note: In such a project, all artifacts should have restriction labels.
+ </div>
+ </div>
+ <input type="checkbox" name="only_owners_see_contributors" id="only_owners_see_contributors"
+ [if-any only_owners_see_contributors]checked="checked"[end] >
+ <label for="only_owners_see_contributors">Only project owners may see the list of contributors.</label>
+</div>
+
+</div>
+
+
+<h4>Activity notifications</h4>
+
+<div class="section">
+ <p>Email notifications of issue tracker activity will automatically be sent to
+ the following email address.</p>
+
+ <table cellpadding="2">
+ <tr><th>All issue changes:</th>
+ <td><input type="email" name="issue_notify" size="35" value="[issue_notify]"><br>
+ [if-any errors.issue_notify]
+ <div class="fielderror">[errors.issue_notify]</div>
+ [end]
+ </td>
+ </tr>
+ </table>
+  [# TODO: validate as the address is entered ]
+
+ [include "../framework/admin-email-sender-part.ezt"]
+</div>
+
+
+<h4>Email reply processing</h4>
+
+<div class="section">
+ <div class="closed">
+ <p>Users may add comments and make updates by replying to
+ certain notification emails.
+      <a class="ifClosed toggleHidden" href="#" style="font-size:90%; margin-left:.5em">Learn more</a></p>
+ <div class="ifOpened help">
+ Users may add comments to an issue
+ by replying to a notification email:
+
+ <ul>
+ <li>Look for a note in the footer of the email indicating that
+ a reply will be processed by the server.</li>
+ <li>Comments must be in replies to notification emails sent directly
+ to the member, not through a mailing list.</li>
+ <li>The reply must be <tt>From:</tt> the same email address to which
+ the notification was sent.</li>
+ <li>Project members who have permission to edit issues may make
+ changes via email replies.</li>
+ </ul>
+ </div>
+ </div>
+ <input type="checkbox" name="process_inbound_email" id="process_inbound_email"
+ [if-any process_inbound_email]checked="checked"[end] >
+ <label for="process_inbound_email">Process email replies</label>
+</div>
+
+<br>
+
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+ </form>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _onload();
+});
+</script>
+
+[end]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/project-admin-publishing-part.ezt b/appengine/monorail/templates/project/project-admin-publishing-part.ezt
new file mode 100644
index 0000000..bef52df
--- /dev/null
+++ b/appengine/monorail/templates/project/project-admin-publishing-part.ezt
@@ -0,0 +1,110 @@
+[# This is the "Project publishing options" on the "Advanced" subtab. ]
+
+<h4>Project state</h4>
+
+<div class="section">
+<table class="vt" cellspacing="20" style="width:60em">
+ [if-any offer_archive]
+ <tr>
+ <td>
+ <input type="submit" name="archivebtn" style="width:6em"
+ value="Archive">
+ </td>
+ <td>
+ Archive this project. It will only be visible read-only to
+ project members. Once it is archived, you may unarchive it, or go ahead
+ and fully delete it.
+ <br><br>
+ </td>
+ </tr>
+ [end]
+
+ [if-any offer_delete]
+ <tr>
+ <td>
+ <input type="submit" name="deletebtn" style="width:6em"
+ value="Delete" id="delbtn">
+ </td>
+ <td>
+ Completely delete this project now.
+ <br><br>
+ </td>
+ </tr>
+ [end]
+
+ [if-any offer_publish]
+ <tr>
+ <td>
+ <input type="submit" name="publishbtn" style="width:6em"
+ value="Unarchive">
+ </td>
+ <td>
+ Make this project active again.
+ All project contents will become visible and editable to users as normal.
+ <br><br>
+ </td>
+ </tr>
+ [end]
+
+ [if-any offer_move]
+ <tr>
+ <td>
+ <input type="submit" name="movedbtn" style="width:6em"
+ value="Move">
+ </td>
+ <td>
+ If you have moved your project to a different location, enter it here and
+ users will be directed to that location. If the destination is another
+ project on this site, enter just the new project name. If the destination
+ is another site, enter the new project home page URL.
+ <br><br>
+ <b>Location:</b>
+ <input type="text" name="moved_to" size="50" value="[moved_to]">
+ </td>
+ </tr>
+ [end]
+
+ [if-any offer_doom]
+ <tr>
+ <td>
+ <input type="submit" name="doombtn" style="width:6em"
+ value="Doom">
+ </td>
+ <td>
+ Immediately archive this project and schedule it for deletion in
+ 90 days. Only a site admin can un-archive the project, not a
+ project owner. In the meantime, the project will be read-only for
+ project members only, and the reason for deletion will be displayed at the top
+ of each page.
+ <br><br>
+ <b>Reason:</b>
+ <input type="text" name="reason" size="50" value="[default_doom_reason]">
+ </td>
+ </tr>
+ [end]
+
+ [if-any offer_archive offer_delete offer_publish offer_doom offer_move][else]
+ <tr>
+ <td>
+ </td>
+ <td>
+ You are not authorized to change the project state.
+ </td>
+ </tr>
+ [end]
+
+</table>
+
+</div>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("delbtn")) {
+ $("delbtn").addEventListener("click", function(event) {
+ var msg = "Really delete the whole project?\nThis operation cannot be undone.";
+ if (!confirm(msg))
+ event.preventDefault();
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/project/project-admin-quota-part.ezt b/appengine/monorail/templates/project/project-admin-quota-part.ezt
new file mode 100644
index 0000000..1649eab
--- /dev/null
+++ b/appengine/monorail/templates/project/project-admin-quota-part.ezt
@@ -0,0 +1,30 @@
+<h4>Storage quota</h4>
+
+<div class="section">
+
+ <table cellspacing="6" style="padding:6px">
+ <tr>
+ <td>Issue attachments:</td>
+ <td>[include "quota-bar.ezt" attachment_quota]</td>
+ </tr>
+ <tr>
+ <td style="padding:15px 0">
+ [if-any offer_quota_editing]
+ <input type="submit" name="savechanges" value="Update Quota">
+ [end]
+ </td>
+ <td style="padding:15px 0">
+ [if-any offer_quota_editing]
+ <input type="number" name="[attachment_quota.field_name]" value="[attachment_quota.quota_mb]"
+ size="5" min="1" style="font-size:90%; padding:0">
+ [if-any errors.attachment_quota]
+ <div class="fielderror">[errors.attachment_quota]</div>
+ [end]
+ [else]
+ [attachment_quota.quota_mb]
+ [end]
+ MB
+ </td>
+ </tr>
+ </table>
+</div>
diff --git a/appengine/monorail/templates/project/project-export-page.ezt b/appengine/monorail/templates/project/project-export-page.ezt
new file mode 100644
index 0000000..1909437
--- /dev/null
+++ b/appengine/monorail/templates/project/project-export-page.ezt
@@ -0,0 +1,24 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<h3>Project export</h3>
+
+<form action="projectExport/json" method="GET">
+ [# We use xhr_token here because we are doing a GET on a JSON servlet.]
+ <input type="hidden" name="token" value="[xhr_token]">
+ <table cellpadding="3" class="rowmajor vt">
+ <tr>
+ <th>Format</th>
+ <td style="width:90%">JSON</td>
+ </tr>
+ <tr>
+ <th></th>
+ <td><input type="submit" name="btn" value="Submit"></td>
+ </tr>
+ </table>
+</form>
+
+
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/project-summary-page.ezt b/appengine/monorail/templates/project/project-summary-page.ezt
new file mode 100644
index 0000000..66e9aaa
--- /dev/null
+++ b/appengine/monorail/templates/project/project-summary-page.ezt
@@ -0,0 +1,89 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+[# TODO: add UI element permissions when I add editing elements to this page. ]
+[define show_star][if-any project_stars_enabled][if-any logged_in_user][if-any read_only][else]yes[end][end][end][end]
+
+<div style="font-size:140%; margin-top:1em">Project: <b>[projectname]</b></div>
+<div class="section">
+ <div><i>[project_summary]</i></div>
+
+ [if-any show_star]
+ <div>
+ <a class="star" id="star"
+ style="color:[if-any is_project_starred]cornflowerblue[else]gray[end];"
+ title="[if-any is_project_starred]Un-s[else]S[end]tar this project">
+ [if-any is_project_starred]★[else]☆[end]
+ </a>
+ Starred by [num_stars] user[plural]
+ </div>
+ [end]
+</div>
+
+
+<h4>Project description</h4>
+<div class="section">
+ [format "raw"][formatted_project_description][end]
+</div>
+
+<h4>Project access</h4>
+<div class="section">
+ [access_level.name]
+</div>
+
+
+[if-any home_page]
+ <h4>Project home page</h4>
+ <div class="section">
+ <a href="[home_page]">[home_page]</a>
+ </div>
+[end]
+
+
+<!-- TODO(jrobbins): expose this later when it is more fully baked.
+
+<h4>Issue tracking process</h4>
+<div class="section">
+ Brief paragraph about how you intend this issue tracker to be used.
+
+</div>
+
+
+<h4>Ground rules</h4>
+ <ul>
+ <li>Non-members may enter new issues, but they will be moderated...</li>
+ <li>Please keep to the facts of the issue, don't try to advocate.</li>
+ <li>We are not currently looking for feature requests from non-members.</li>
+ </ul>
+
+
+
+<h4>Guidelines</h4>
+ <ul>
+ <li>Make sure the defect is verified with the latest build</li>
+ <li>Another bullet item describing how to collaborate in this project</li>
+ <li>A few more</li>
+ <li>And going into a little detail</li>
+ <li>But not too much... also need good defaults and examples</li>
+ </ul>
+
+
+<h4>For more information</h4>
+ <ul>
+ <li>Link to external docs</li>
+ <li>And discussion forums</li>
+ </ul>
+
+-->
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("star")) {
+ [# The user viewing this page wants to star the project *on* this page]
+ $("star").addEventListener("click", function () {
+ _TKR_toggleStar($("star"), "[projectname]");
+ });
+ }
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/project-updates-page.ezt b/appengine/monorail/templates/project/project-updates-page.ezt
new file mode 100644
index 0000000..300cef4
--- /dev/null
+++ b/appengine/monorail/templates/project/project-updates-page.ezt
@@ -0,0 +1,7 @@
+[define page_css]css/d_updates_page.css[end]
+
+[include "../framework/master-header.ezt" "showtabs"]
+
+[include "../features/updates-page.ezt"]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/project/quota-bar.ezt b/appengine/monorail/templates/project/quota-bar.ezt
new file mode 100644
index 0000000..e89a6e2
--- /dev/null
+++ b/appengine/monorail/templates/project/quota-bar.ezt
@@ -0,0 +1,22 @@
+[# Display a little HTML bar and labels. This is not really a bar chart.
+ For comparison, see the bars in the top half of
+ https://www.google.com/accounts/ManageStorage
+
+arg0: an EZTItem with quota info for one component.
+]
+
+<table border="0" cellpadding="0" cellspacing="0">
+ <tr>
+ <td style="width:200px">
+ <table border="0" cellpadding="0" cellspacing="0" style="width:100%; border:1px solid #345BA6">
+ <tr>
+ <td style="background:#345BA6; width:[arg0.used_percent]%"> </td>
+ <td style="background:#EBF0FA; width:[arg0.avail_percent]%"> </td>
+ </tr>
+ </table>
+ </td>
+ <td style="padding-left:.7em">
+ [arg0.used] ([arg0.used_percent]%) in use
+ </td>
+ </tr>
+</table>
diff --git a/appengine/monorail/templates/project/untrusted-user-groups-part.ezt b/appengine/monorail/templates/project/untrusted-user-groups-part.ezt
new file mode 100644
index 0000000..474cfe7
--- /dev/null
+++ b/appengine/monorail/templates/project/untrusted-user-groups-part.ezt
@@ -0,0 +1,14 @@
+[if-any perms.EditProject]
+ <div class="help" style="background: #ddf8cc;">
+ <b>Important:</b> Users could be given indirect
+ roles in this project without your knowledge.
+ The following user groups either have group managers
+ who are not project owners in this project, or they allow anyone to
+ join the group:
+ <ul style="list-style-type: none">
+ [for untrusted_user_groups]
+ <li>[untrusted_user_groups.email]</li> [# TODO(jrobbins): hyperlink]
+ [end]
+ </ul>
+ </div>
+[end]
diff --git a/appengine/monorail/templates/sitewide/403-page.ezt b/appengine/monorail/templates/sitewide/403-page.ezt
new file mode 100644
index 0000000..ca667bb
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/403-page.ezt
@@ -0,0 +1,10 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<center style="margin-top: 4em;">
+ You do not have permission to view the requested page.
+ <br/><br/>
+ [if-any reason]Reason: [reason][end]
+</center>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/group-admin-page.ezt b/appengine/monorail/templates/sitewide/group-admin-page.ezt
new file mode 100644
index 0000000..057b9be
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/group-admin-page.ezt
@@ -0,0 +1,20 @@
+[define title]User Group: [groupname][end]
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "showusergrouptabs"]
+
+
+<form action="groupadmin.do" method="POST" autocomplete="off">
+ <input type="hidden" name="token" value="[form_token]">
+
+ <h4>Group membership visibility</h4>
+
+ The group members may be viewed by:
+ [include "../framework/group-setting-fields.ezt"]
+ <br>
+
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+</form>
+
+[include "../framework/footer-script.ezt"]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/group-create-page.ezt b/appengine/monorail/templates/sitewide/group-create-page.ezt
new file mode 100644
index 0000000..9d54b02
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/group-create-page.ezt
@@ -0,0 +1,43 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<h2>Create a new user group</h2>
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="createGroup.do" method="POST" id="create_group_form"
+ style="margin:1em">
+ <input type="hidden" name="token" value="[form_token]">
+
+ Group email address:<br>
+ <input size="30" type="text" id="groupname" name="groupname" value="[initial_name]">
+ <span class="graytext">Example: group-name@example.com</span>
+ <div class="fielderror">
+ <span id="groupnamefeedback"></span>
+ [if-any errors.groupname][errors.groupname][end]
+ </div>
+ <br>
+
+ Members viewable by:
+ [include "../framework/group-setting-fields.ezt"]
+ <br>
+
+ <input type="submit" id="submit_btn" name="btn" value="Create group">
+</form>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ $("create_group_form").addEventListener("submit", function() {
+ $("submit_btn").value = "Creating group...";
+ $("submit_btn").disabled = "disabled";
+ });
+});
+</script>
+
+[include "../framework/footer-script.ezt"]
+
+
+[end][# not read-only]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/group-detail-page.ezt b/appengine/monorail/templates/sitewide/group-detail-page.ezt
new file mode 100644
index 0000000..e497d66
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/group-detail-page.ezt
@@ -0,0 +1,103 @@
+[define title]User Group: [groupname][end]
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "showusergrouptabs"]
+[include "../framework/js-placeholders.ezt"]
+
+<form method="POST" action="edit.do">
+<div id="colcontrol">
+ <div class="list">
+ [if-any pagination.visible]
+ <div class="pagination">
+ [if-any pagination.prev_url]<a href="[pagination.prev_url]"><b>‹</b> Prev</a>[end]
+ Members [pagination.start] - [pagination.last] of [pagination.total_count]
+ [if-any pagination.next_url]<a href="[pagination.next_url]">Next <b>›</b></a>[end]
+ </div>
+ [end]
+ <b>User Group: [groupname]</b>
+ [if-any offer_membership_editing]
+ <input type="button" value="Add members" style="font-size:80%; margin-left:1em"
+ id="add_members_button">
+ <input type="submit" value="Remove members" style="font-size:80%; margin-left:1em"
+ id="removebtn" name="removebtn" disabled="disabled">
+ [# TODO(jrobbins): extra confirmation when removing yourself as group owner.]
+ [end]
+ </div>
+
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped vt" id="resultstable" width="100%">
+ <tbody>
+ <tr id="headingrow">
+ [if-any offer_membership_editing]
+ <th style="border-right:0; padding-right:2px"> </th>
+ [end]
+ <th style="white-space:nowrap">Member</th>
+ <th style="white-space:nowrap">Role</th>
+ </tr>
+
+ [if-any pagination.visible_results]
+ [for pagination.visible_results]
+ <tr>
+ [if-any offer_membership_editing]
+ <td style="padding-right:2px">
+ <input type="checkbox" name="remove"
+ value="[pagination.visible_results.email]">
+ </td>
+ [end]
+ <td class="id" style="text-align:left">
+ [include "../framework/user-link.ezt" pagination.visible_results]
+ </td>
+ <td style="text-align:left" width="90%">
+ <a href="[pagination.visible_results.profile_url]">[pagination.visible_results.role]</a>
+ </td>
+ </tr>
+ [end]
+ [else]
+ <tr><td colspan="40">
+ This user group has no members.
+ </td></tr>
+ [end]
+
+
+ </tbody>
+ </table>
+</div>
+
+[include "../project/people-add-members-form.ezt" "group"]
+
+</form>
+
+
+[if-any offer_membership_editing]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function _countChecked(opt_className) {
+ var numChecked = 0;
+ var inputs = document.getElementsByTagName('input');
+ for (var i = 0; i < inputs.length; i++) {
+ var el = inputs[[]i];
+ if (el.type == 'checkbox' && el.name == 'remove' && el.checked &&
+ (!opt_className || opt_className == el.className)) {
+ numChecked++;
+ }
+ }
+ return numChecked;
+ }
+
+ function _enableRemoveButton() {
+ var removeButton = document.getElementById('removebtn');
+ if (_countChecked() > 0) {
+ removeButton.disabled = false;
+ } else {
+ removeButton.disabled = true;
+ }
+ }
+
+ setInterval(_enableRemoveButton, 700);
+
+ $("add_members_button").addEventListener("click", _openAddMembersForm);
+});
+ </script>
+[end]
+
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/group-list-page.ezt b/appengine/monorail/templates/sitewide/group-list-page.ezt
new file mode 100644
index 0000000..51266aa
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/group-list-page.ezt
@@ -0,0 +1,96 @@
+[define title]User Groups[end]
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "hidetabs"]
+[include "../framework/js-placeholders.ezt"]
+
+<form method="POST" action='/hosting/deleteGroup.do'>
+<input type="hidden" name="token" value="[form_token]">
+<div id="colcontrol">
+ <div class="list">
+ <b>User Groups</b>
+ [if-any offer_group_deletion]
+ <input type="submit" value="Delete Groups" style="margin-left:1em"
+ id="removebtn" name="removebtn" disabled="disabled">
+ [end]
+ [if-any offer_group_creation]
+ <a href="/hosting/createGroup" class="buttonify" style="margin-left:1em">Create Group</a>
+ [end]
+ </div>
+
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped" id="resultstable" width="100%">
+ <tbody>
+ [if-any groups]
+
+ <tr id="headingrow">
+ [if-any offer_group_deletion]
+ <th style="border-right:0; padding-right:2px" width="2%"> </th>
+ [end]
+ <th style="white-space:nowrap">Name</th>
+ <th style="white-space:nowrap">Size</th>
+ <th style="white-space:nowrap">Member list visibility</th>
+ </tr>
+
+ [for groups]
+ <tr>
+ [if-any offer_group_deletion]
+ <td style="padding-right:2px" width="2%">
+ <input type="checkbox" name="remove"
+ value="[groups.group_id]">
+ </td>
+ [end]
+ <td class="id" style="text-align:left"><a href="[groups.detail_url]">[groups.name]</a></td>
+ <td><a href="[groups.detail_url]">[groups.num_members]</a></td>
+ <td><a href="[groups.detail_url]">[groups.who_can_view_members]</a></td>
+ </tr>
+ [end]
+
+ [else]
+ <tr>
+ <td colspan="40" class="id">
+ <div style="padding: 3em; text-align: center">
+ No user groups have been defined.
+ </div>
+ </td>
+ </tr>
+ [end]
+
+
+ </tbody>
+ </table>
+</div>
+
+</form>
+
+[if-any offer_group_deletion]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function _countChecked(opt_className) {
+ var numChecked = 0;
+ var inputs = document.getElementsByTagName('input');
+ for (var i = 0; i < inputs.length; i++) {
+ var el = inputs[[]i];
+ if (el.type == 'checkbox' && el.name == 'remove' && el.checked &&
+ (!opt_className || opt_className == el.className)) {
+ numChecked++;
+ }
+ }
+ return numChecked;
+ }
+
+ function _enableRemoveButton() {
+ var removeButton = document.getElementById('removebtn');
+ if (_countChecked() > 0) {
+ removeButton.disabled = false;
+ } else {
+ removeButton.disabled = true;
+ }
+ }
+
+ setInterval(_enableRemoveButton, 700);
+
+});
+ </script>
+[end]
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/hosting-home-page.ezt b/appengine/monorail/templates/sitewide/hosting-home-page.ezt
new file mode 100644
index 0000000..d28c93c
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/hosting-home-page.ezt
@@ -0,0 +1,65 @@
+[define show_search_metadata]True[end]
+[define robots_no_index]true[end]
+[define category_css]css/ph_list.css[end]
+
+[include "../framework/master-header.ezt" "hidesearch"]
+
+[define prod_hosting_base_url]/hosting/[end]
+
+<div style="padding-top:30px;">
+<div style="margin-top:3em; text-align:center;">
+[# TODO: Provide an text alternative to the logo.]
+<p style="font-size:large;">Monorail, Monorail, Monorail!</p>
+
+<div style="text-align:center;margin:1em">
+ [if-any read_only][else]
+ [if-any can_create_project]
+ <img src="/static/images/new-24.gif" style="vertical-align:top; width:16px;">
+ <a href="/hosting/createProject">Create a new project</a>
+ [end]
+ [end]
+
+ [if-any learn_more_link]
+ <img src="/static/images/question-16.gif" style="vertical-align:top;padding-left:30px">
+ <a href="[learn_more_link]">Learn more about [site_name]</a>
+ [end]
+</div>
+
+ <p style="text-align:center;padding:0; margin:2em">
+ <div id="controls">
+ [include "../sitewide/project-list-controls.ezt" arg1]
+ </div>
+
+ <div style="margin:2em">
+ [if-any projects]
+ <table class="resultstable striped" width="100%" border="0" cellspacing="0" cellpadding="10px" style="margin:auto;">
+ [for projects]
+ <tr>
+ [include "project-list-row.ezt"]
+ </tr>
+ [end]
+ </table>
+ [else]
+ <p>There were no visible projects found.</p>
+ [end]
+ </div>
+
+ </p>
+</div>
+
+</div>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var stars = document.getElementsByClassName("star");
+ for (var i = 0; i < stars.length; ++i) {
+ var star = stars[[]i];
+ star.addEventListener("click", function (event) {
+ var projectName = event.target.getAttribute("data-project-name");
+ _TKR_toggleStar(event.target, projectName);
+ });
+ }
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/moved-page.ezt b/appengine/monorail/templates/sitewide/moved-page.ezt
new file mode 100644
index 0000000..c05d556
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/moved-page.ezt
@@ -0,0 +1,24 @@
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<h3>Project has moved</h3>
+
+<h4>What happened?</h4>
+
+<p>Project "[project_name]" has moved to another location on the Internet.</p>
+
+<div style="margin:2em" class="help">
+ <b style="margin:0.5em">Your options:</b>
+
+ <ul>
+ [if-any moved_to_url]
+ <li>View the project at:
+ <a href="[moved_to_url]">[moved_to_url]</a></li>
+ [end]
+ <li><a href="http://www.google.com/search?q=[project_name]">Search the web</a>
+ for pages about "[project_name]".
+ </li>
+
+ </ul>
+</div>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/project-404-page.ezt b/appengine/monorail/templates/sitewide/project-404-page.ezt
new file mode 100644
index 0000000..d0f7a75
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/project-404-page.ezt
@@ -0,0 +1,6 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<center style="margin-top: 4em;">The page you asked for does not exist.</center>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/project-create-page.ezt b/appengine/monorail/templates/sitewide/project-create-page.ezt
new file mode 100644
index 0000000..6b81e24
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/project-create-page.ezt
@@ -0,0 +1,134 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "hidetabs"]
+
+<h2>Create a new project</h2>
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="createProject.do" method="POST" id="create_project_form"
+ style="margin:1em" enctype="multipart/form-data">
+ <input type="hidden" name="token" value="[form_token]">
+
+ [if-any cue_remaining_projects]
+ <table align="center" border="0" cellspacing="0" cellpadding="0" style="margin-bottom: 6px">
+ <tr><td class="notice">
+ <b>Note:</b> You can create at most [cue_remaining_projects] more projects.
+ </td></tr>
+ </table>
+ [end]
+
+
+ Project name:<br>
+ <input size="30" type="text" id="projectname" name="projectname" autocomplete="off"
+ value="[initial_name]">
+ <span class="graytext">Example: my-project-name</span>
+ <div class="fielderror">
+ <span id="projectnamefeedback">
+ [if-any errors.projectname][errors.projectname][end]
+ </span>
+ </div>
+
+ [include "../framework/project-descriptive-fields.ezt"]
+ <br>
+
+ Viewable by:
+ [include "../framework/project-access-part.ezt" "checksubmit"]
+ <br>
+
+ [if-any show_captcha]
+ <div>Word verification</div>
+ <div>[include "../framework/captcha-field.ezt"]</div>
+ [end]
+ <br>
+ <input type="submit" id="submit_btn" name="btn" value="Create project">
+</form>
+
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _onload();
+
+ [# TODO(jrobbins): move this to compiled Javascript. ]
+ var submit = document.getElementById('submit_btn');
+ submit.disabled = 'disabled';
+ var projectname = document.getElementById('projectname');
+ var access = document.getElementById('access');
+ var summary = document.getElementById('summary');
+ var description = document.getElementById('description');
+ var cg = document.getElementById('cg');
+ var oldName = '';
+ projectname.focus();
+ var solelyDigits = /^[[]-0-9]+$/
+ var hasUppercase = /[[]A-Z]/
+ var projectRE = /^[[]a-z0-9][[]-a-z0-9]*$/
+
+ function checkprojectname() {
+ name = projectname.value;
+ if (name != oldName) {
+ oldName = name;
+ feedback = document.getElementById('projectnamefeedback');
+ submit.disabled='disabled';
+ if (name == '') {
+ feedback.innerText = '';
+ } else if (hasUppercase.test(name)) {
+ feedback.innerText = 'Must be all lowercase';
+ } else if (solelyDigits.test(name)) {
+ feedback.innerText = 'Must include a lowercase letter';
+ } else if (!projectRE.test(name)) {
+ feedback.innerText = 'Invalid project name';
+ } else if (name.length > [max_project_name_length]) {
+ feedback.innerText = 'Project name is too long';
+ } else if(name[[]name.length - 1] == '-') {
+ feedback.innerText = "Project name cannot end with a '-'";
+ } else {
+ feedback.innerText = '';
+ checkname()
+ checksubmit()
+ }
+ }
+ }
+
+ function checkname() {
+ _CP_checkProjectName(projectname.value);
+ }
+
+ function checkempty(elemId) {
+ var elem = document.getElementById(elemId);
+ feedback = document.getElementById(elemId + 'feedback');
+ if (elem.value.length == 0) {
+ feedback.innerText = 'Please enter a ' + elemId;
+ } else {
+ feedback.innerText = ' ';
+ }
+ checksubmit();
+ }
+
+ function checksubmit() {
+ feedback = document.getElementById('projectnamefeedback');
+ submit.disabled='disabled';
+ if (projectname.value.length > 0 &&
+ summary.value.length > 0 &&
+ description.value.length > 0 &&
+ (cg == undefined || cg.value.length > 1) &&
+ feedback.innerText == '') {
+ submit.disabled='';
+ }
+ }
+ setInterval(checkprojectname, 700); [# catch changes that were not keystrokes.]
+ $("projectname").addEventListener("keyup", checkprojectname);
+ $("summary").addEventListener("keyup", function() { checkempty("summary"); });
+ $("description").addEventListener("keyup", function() { checkempty("description"); });
+ $("create_project_form").addEventListener("submit", function () {
+ $("submit_btn").value = "Creating project...";
+ $("submit_btn").disabled = "disabled";
+ });
+
+});
+</script>
+
+[end][# not read-only]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/project-list-controls.ezt b/appengine/monorail/templates/sitewide/project-list-controls.ezt
new file mode 100644
index 0000000..d1a8821
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/project-list-controls.ezt
@@ -0,0 +1,14 @@
+<table class="isf" width="100%" border="0" cellspacing="0" cellpadding="0">
+ <tr>
+ [if-any projects]
+ <td style="padding:3px 1em; font-weight:normal; white-space:nowrap;">
+ <p style="font-size:1.2em">List of Projects</p>
+ [if-any pagination.prev_url]<a href="[pagination.prev_url]"><b>‹</b> Prev</a>[end]
+ [pagination.start] - [pagination.last] of [pagination.total_count]
+ [if-any pagination.next_url]<a href="[pagination.next_url]">Next <b>›</b></a>[end]
+ </td>
+ [else]
+ <td style="padding:3px 1em"> </td>
+ [end]
+ </tr>
+</table>
diff --git a/appengine/monorail/templates/sitewide/project-list-row.ezt b/appengine/monorail/templates/sitewide/project-list-row.ezt
new file mode 100644
index 0000000..23fa8b5
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/project-list-row.ezt
@@ -0,0 +1,52 @@
+[# This displays one list row of the project search results.
+
+No parameters are used, but it expects the "projects" loop variable to
+hold the current project.]
+
+[# Project name link to this project]
+<td style="width:10%">
+ <a href="[projects.relative_home_url]/" style="font-size:medium">
+ [projects.project_name]
+ </a>
+</td>
+
+[# Display membership and star only if user is logged in]
+[if-any logged_in_user]
+ [# User's membership status of this project]
+ <td style="width:10%">
+ [if-any projects.membership_desc][projects.membership_desc][end]
+ </td>
+
+ [# Display star for logged in user to star this project]
+ <td style="width:5%">
+ [if-any logged_in_user]
+ <a class="star"
+ style="color:[if-any projects.starred]cornflowerblue[else]gray[end]"
+ title="[if-any projects.starred]Un-s[else]S[end]tar this project" data-project-name="[projects.project_name]">
+ [if-any projects.starred]★[else]☆[end]
+ </a>
+ [end]
+ </td>
+[end]
+
+[# Display how many have starred this project]
+<td style="width:10%">
+ [is projects.num_stars "0"]
+ [else]
+ Stars: <span id="star_count-[projects.project_name]">[projects.num_stars]</span>
+ [end]
+</td>
+
+[# When project was last updated]
+<td style="width:20%">
+ [if-any projects.last_updated_exists]
+ Updated: [projects.recent_activity]
+ [end]
+</td>
+
+[# The short summary of this project]
+<td style="width:45%">
+ [is projects.limited_summary ""][else]
+ [projects.limited_summary]<br>
+ [end]
+</td>
diff --git a/appengine/monorail/templates/sitewide/unified-settings.ezt b/appengine/monorail/templates/sitewide/unified-settings.ezt
new file mode 100644
index 0000000..5e55681
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/unified-settings.ezt
@@ -0,0 +1,186 @@
+[# common form fields for changing user settings ]
+<input type="hidden" name="token" value="[form_token]">
+
+
+<h4>Privacy</h4>
+<div style="margin:0 0 2em 2em">
+ <input type="checkbox" name="obscure_email" id="obscure_email" value="1"
+ [if-any read_only]disabled="disabled"[end]
+ [if-any settings_user_pb.obscure_email_bool]checked="checked"[end] >
+ <label for="obscure_email">
+ When [if-any self]I participate[else]this user participates[end]
+ in projects, show non-members [if-any self]my[else]this user's[end] email address as
+ "[settings_user.obscured_username]...@[settings_user.domain]", instead of
+ showing the full address.
+ </label>
+
+ <br><br>
+</div>
+
+<h4>Issue tracker</h4>
+<div style="margin:0 0 2em 2em">
+ [# TODO(jrobbins): re-implement issue preview on hover in polymer.]
+
+ <p>
+ Whenever an issue is changed by another user, send
+ [if-any self]me[else]this user[end] an email:
+ </p>
+ <input type="checkbox" name="notify" id="notify" value="1"
+ [if-any read_only]disabled="disabled"[end]
+ [if-any settings_user_pb.notify_issue_change_bool]checked="checked"[end] >
+ <label for="notify">If [if-any self]I am[else]this user is[end] in the issue's
+ <b>owner</b> or <b>CC</b> fields.</label><br>
+ <input type="checkbox" name="notify_starred" id="notify_starred" value="1"
+ [if-any read_only]disabled="disabled"[end]
+ [if-any settings_user_pb.notify_starred_issue_change_bool]checked="checked"[end] >
+ <label for="notify_starred">
+ If [if-any self]I[else]this user[end] <b>starred</b> the issue.
+ </label>
+ <br><br>
+</div>
+
+[if-any perms._EditOtherUsers]
+ <h4>Banned for abuse</h4>
+ <div style="margin:0 0 2em 2em">
+ <input type="checkbox" name="banned" id="banned" value="1"
+ [if-any settings_user_is_banned]checked="checked"[end] >
+ <label for="banned">This user is banned because:</label>
+ <input type="text" size="50" name="banned_reason" id="banned_reason" value="[settings_user_pb.banned]">
+ </div>
+
+ <h4>Action limits</h4>
+ <div style="margin:0 0 2em 2em; width:50em">
+ <input type="checkbox" name="ignore_action_limits" id="ignore_action_limits" value="1"
+ [if-any settings_user_ignore_action_limits]checked="checked"[end] >
+ <label for="ignore_action_limits">
+ [if-any self]I am[else]This user is[end]
+ trusted to not spam, so ignore action limits</label>
+ <br><br>
+
+ <div class="help" style="width:75em">
+ <table cellpadding="6" cellspacing="0" style="padding:4px" width="100%">
+ <tr>
+ <th align="left">Project creation:</th>
+ <td>
+ [is settings_user_pb.project_creation_limit.recent_count "0"]
+ None
+ [else]
+ <label for="reset_project_creation">Reset</label>
+ <input type="checkbox" name="reset_project_creation" id="reset_project_creation" value="1">
+ [settings_user_pb.project_creation_limit.recent_count] since [project_creation_reset]
+ [end]
+ </td>
+ <td>
+ Period soft limit: <input name="project_creation_soft_limit" value="[project_creation_soft_limit]" size="4">
+ hard limit: <input name="project_creation_hard_limit" value="[project_creation_hard_limit]" size="4">
+ </td>
+ <td>
+ [settings_user_pb.project_creation_limit.lifetime_count]
+ out of <input name="project_creation_lifetime_limit" value="[project_creation_lifetime_limit]" size="4">
+ in lifetime.
+ </td>
+ </tr>
+
+ <tr>
+ <th align="left">Issue comment:</th>
+ <td>
+ [is settings_user_pb.issue_comment_limit.recent_count "0"]
+ None
+ [else]
+            <label for="reset_issue_comment">Reset</label>
+ <input type="checkbox" name="reset_issue_comment" id="reset_issue_comment" value="1">
+ [settings_user_pb.issue_comment_limit.recent_count] since [issue_comment_reset]
+ [end]
+ </td>
+ <td>
+ Period soft limit: <input name="issue_comment_soft_limit" value="[issue_comment_soft_limit]" size="4">
+ hard limit: <input name="issue_comment_hard_limit" value="[issue_comment_hard_limit]" size="4">
+ </td>
+ <td>
+ [settings_user_pb.issue_comment_limit.lifetime_count]
+ out of <input name="issue_comment_lifetime_limit" value="[issue_comment_lifetime_limit]" size="4">
+ in lifetime.
+ </td>
+ </tr>
+
+ <tr>
+ <th align="left">Issue attachment:</th>
+ <td>
+ [is settings_user_pb.issue_attachment_limit.recent_count "0"]
+ None
+ [else]
+ <label for="reset_issue_attachment">Reset</label>
+ <input type="checkbox" name="reset_issue_attachment" id="reset_issue_attachment" value="1">
+ [settings_user_pb.issue_attachment_limit.recent_count] since [issue_attachment_reset].
+ [end]
+ </td>
+ <td>
+ Period soft limit: <input name="issue_attachment_soft_limit" value="[issue_attachment_soft_limit]" size="4">
+ hard limit: <input name="issue_attachment_hard_limit" value="[issue_attachment_hard_limit]" size="4">
+ </td>
+ <td>
+ [settings_user_pb.issue_attachment_limit.lifetime_count]
+ out of <input name="issue_attachment_lifetime_limit" value="[issue_attachment_lifetime_limit]" size="4">
+ in lifetime.
+ </td>
+ </tr>
+
+ <tr>
+ <th align="left">Issue bulk edit:</th>
+ <td>
+ [is settings_user_pb.issue_bulk_edit_limit.recent_count "0"]
+ None
+ [else]
+ <label for="reset_issue_bulk_edit">Reset</label>
+            <input type="checkbox" name="reset_issue_bulk_edit" id="reset_issue_bulk_edit" value="1">
+ [settings_user_pb.issue_bulk_edit_limit.recent_count] since [issue_bulk_edit_reset]
+ [end]
+ </td>
+ <td>
+ Period soft limit: <input name="issue_bulk_edit_soft_limit" value="[issue_bulk_edit_soft_limit]" size="4">
+ hard limit: <input name="issue_bulk_edit_hard_limit" value="[issue_bulk_edit_hard_limit]" size="4">
+ </td>
+ <td>
+ [settings_user_pb.issue_bulk_edit_limit.lifetime_count]
+ out of <input name="issue_bulk_edit_lifetime_limit" value="[issue_bulk_edit_lifetime_limit]" size="4">
+ in lifetime.
+ </td>
+ </tr>
+
+ <tr>
+ <th align="left">API request:</th>
+ <td>
+ [is settings_user_pb.api_request_limit.recent_count "0"]
+ None
+ [else]
+ <label for="reset_api_request">Reset</label>
+ <input type="checkbox" name="reset_api_request" id="reset_api_request" value="1">
+ [settings_user_pb.api_request_limit.recent_count] since [api_request_reset].
+ [end]
+ </td>
+ <td>
+ Period soft limit: <input name="api_request_soft_limit" value="[api_request_soft_limit]" size="4">
+ hard limit: <input name="api_request_hard_limit" value="[api_request_hard_limit]" size="4">
+ </td>
+ <td>
+ [settings_user_pb.api_request_limit.lifetime_count]
+ out of <input name="api_request_lifetime_limit" value="[api_request_lifetime_limit]" size="6">
+ in lifetime.
+ </td>
+ </tr>
+
+ </table>
+ </div>
+ </div>
+
+[end]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ $("banned_reason").addEventListener("keyup", function() {
+ $("banned").checked = $("banned_reason").value != "";
+ });
+});
+</script>
+
+[include "../framework/footer-script.ezt"]
diff --git a/appengine/monorail/templates/sitewide/user-profile-page.ezt b/appengine/monorail/templates/sitewide/user-profile-page.ezt
new file mode 100644
index 0000000..378db08
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/user-profile-page.ezt
@@ -0,0 +1,284 @@
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "showusertabs" "t1"]
+[include "../framework/js-placeholders.ezt"]
+<div id="colcontrol">
+
+<h2>
+ [if-any viewing_self][else]
+ [if-any user_stars_enabled]
+ [if-any logged_in_user]
+ [if-any read_only][else]
+ [if-any user_stars_enabled]
+ [if-any logged_in_user]
+ [if-any read_only][else]
+ <a id="user_star"
+ style="color:[if-any is_user_starred]cornflowerblue[else]gray[end]"
+         title="[if-any is_user_starred]Un-s[else]S[end]tar this user">
+ [if-any is_user_starred]★[else]☆[end]
+ </a>
+ [end]
+ [end]
+ [end]
+ [end]
+ [end]
+ [end]
+ [end]
+
+ [viewed_user_display_name]
+</h2>
+
+[if-any user_stars_enabled]
+<div>
+<b>Starred developers:</b>
+[if-any starred_users]
+[for starred_users]
+ [include "../framework/user-link.ezt" starred_users][if-index starred_users last][else], [end]
+[end]
+[else]<i>None</i>[end]
+</div>
+[end]
+<br>
+
+<div class="list">
+ <table style="width: 100%;" cellspacing="0" cellpadding="0">
+ <tbody><tr>
+ <th style="text-align: left;">Projects
+ </th>
+ </tr></tbody>
+ </table>
+</div>
+
+<table cellspacing="0" cellpadding="2" border="0" class="results striped" id="projecttable" width="100%">
+ <tbody>
+ <tr id="headingrow">
+ [if-any logged_in_user]
+ <th style="white-space:nowrap; width:3%;"></th>
+ [end]
+ <th style="white-space:nowrap; width:15%;">Role</th>
+ <th style="white-space:nowrap; width:25%;">Project</th>
+ <th style="white-space:nowrap; width:57%;">Summary</th>
+ </tr>
+ [if-any owner_of_projects committer_of_projects contributor_to_projects]
+ [if-any owner_of_projects]
+ [for owner_of_projects]
+ <tr data-url="[owner_of_projects.relative_home_url]" data-project-name="[owner_of_projects.project_name]">
+ [if-any logged_in_user]
+ <td class="rowwidgets">
+ <a class="star"
+ style="color:[if-any owner_of_projects.starred]cornflowerblue[else]gray[end]"
+ title="[if-any owner_of_projects.starred]Un-s[else]S[end]tar this project"
+ data-project-name="[owner_of_projects.project_name]">
+ [if-any owner_of_projects.starred]★[else]☆[end]
+ </a>
+ </td>
+ [end]
+ <td>Owner</td>
+ <td class="id" name="owner">
+ <a href="[owner_of_projects.relative_home_url]/">[owner_of_projects.project_name]</a>
+ [is owner_of_projects.state_name "HIDDEN"]<span style="color:red"> - hidden</span>[end]
+ </td>
+ <td>[owner_of_projects.summary]</td>
+ </tr>
+ [end]
+ [end]
+ [if-any committer_of_projects]
+ [for committer_of_projects]
+ <tr data-url="[committer_of_projects.relative_home_url]" data-project-name="[committer_of_projects.project_name]">
+ [if-any logged_in_user]
+ <td class="rowwidgets">
+ <a class="star"
+ style="color:[if-any committer_of_projects.starred]cornflowerblue[else]gray[end]"
+ title="[if-any committer_of_projects.starred]Un-s[else]S[end]tar this project"
+ data-project-name="[committer_of_projects.project_name]">
+ [if-any committer_of_projects.starred]★[else]☆[end]
+ </a>
+ </td>
+ [end]
+ <td>Committer</td>
+ <td class="id" name="committer">
+ <a href="[committer_of_projects.relative_home_url]/">[committer_of_projects.project_name]
+ </a>
+ </td>
+ <td>
+ [committer_of_projects.summary]
+ </td>
+ </tr>
+ [end]
+ [end]
+
+ [if-any contributor_to_projects]
+ [for contributor_to_projects]
+ <tr data-url="[contributor_to_projects.relative_home_url]" data-project-name="[contributor_to_projects.project_name]">
+ [if-any logged_in_user]
+ <td class="rowwidgets">
+ <a class="star"
+ style="color:[if-any contributor_to_projects.starred]cornflowerblue[else]gray[end]"
+ title="[if-any contributor_to_projects.starred]Un-s[else]S[end]tar this project"
+ data-project-name="[contributor_to_projects.project_name]">
+ [if-any contributor_to_projects.starred]★[else]☆[end]
+ </a>
+ </td>
+ [end]
+ <td>Contributor</td>
+ <td class="id" name="contributor">
+ <a href="[contributor_to_projects.relative_home_url]/">[contributor_to_projects.project_name]
+ </a>
+ [is contributor_to_projects.state_name "HIDDEN"]<span style="color:red"> - hidden</span>[end]</td>
+ <td>
+ [contributor_to_projects.summary]
+ </td>
+ </tr>
+ [end]
+ [end]
+
+ [else]
+ <tr>
+ <td colspan="4"><i>No projects.</i></td>
+      </tr>
+ [end]
+ </tbody>
+</table>
+
+
+[if-any starred_projects]
+<br>
+<div class="list">
+ <table style="width: 100%;" cellspacing="0" cellpadding="0">
+ <tbody><tr>
+ <th style="text-align: left;">
+ Starred by [if-any viewing_self]me[else]
+ [viewed_user_display_name]
+ [end]
+ </th>
+ </tr></tbody>
+ </table>
+</div>
+<table cellspacing="0" cellpadding="2" border="0" class="results striped" id="starredtable" width="100%">
+ <tbody>
+ <tr id="headingrow">
+ [if-any logged_in_user]
+ <th style="white-space:nowrap; width:3%;"></th>
+ [end]
+ <th style="white-space:nowrap; width:25%;">Name</th>
+ <th style="white-space:nowrap; width:57%;">Summary</th>
+ </tr>
+
+ [for starred_projects]
+ <tr data-url="[starred_projects.relative_home_url]" data-project-name="[starred_projects.project_name]">
+ [if-any logged_in_user]
+ <td class="rowwidgets">
+ <a class="star"
+ style="color:[if-any starred_projects.starred]cornflowerblue[else]gray[end]"
+ title="[if-any starred_projects.starred]Un-s[else]S[end]tar this project"
+ data-project-name="[starred_projects.project_name]">
+ [if-any starred_projects.starred]★[else]☆[end]
+ </a>
+ </td>
+ [end]
+ <td class="id" name="starred_project">
+ <a href="[starred_projects.relative_home_url]/">[starred_projects.project_name]</a>
+ [is starred_projects.state_name "HIDDEN"]<span style="color:red"> - hidden</span>[end]
+ </td>
+ <td>
+ [starred_projects.summary]
+ </td>
+ </tr>
+ [end]
+
+</table>
+[end]
+
+[if-any owner_of_archived_projects]
+<br>
+<div class="list">
+ <table style="width: 100%;" cellspacing="0" cellpadding="0">
+ <tbody><tr>
+ <th style="text-align: left;">Archived projects
+ </th>
+ </tr></tbody>
+ </table>
+</div>
+<table cellspacing="0" cellpadding="2" border="0" class="results striped" id="archivedtable" width="100%">
+ <tbody>
+ <tr id="headingrow">
+ <th style="white-space:nowrap; width:25%;">Name</th>
+ <th style="white-space:nowrap; width:60%;">Summary</th>
+ </tr>
+ [for owner_of_archived_projects]
+ <tr data-url="[owner_of_archived_projects.relative_home_url]/adminAdvanced">
+ <td class="id" name="deleted_project">[owner_of_archived_projects.project_name] -
+ <a href="[owner_of_archived_projects.relative_home_url]/adminAdvanced">Unarchive or delete</a>
+ </td>
+ <td>
+ [owner_of_archived_projects.summary]
+ </td>
+ </tr>
+ [end]
+</table>
+[end]
+
+</div>
+</div>
+
+[if-any perms._EditOtherUsers]
+<h3 style="clear:both">Edit user</h3>
+ <form action="edit.do" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+ <h4>Site administration</h4>
+ <div style="margin:0 0 2em 2em">
+ <input type="checkbox" name="site_admin" id="site_admin" value="1" [if-any viewed_user_pb.is_site_admin_bool]checked="checked"[end] >
+ <label for="site_admin">This user is a site administrator (a super user)</label>
+ </div>
+
+ [include "unified-settings.ezt"]
+
+ <div style="margin:0 0 2em 2em">
+ <input id="submit_btn" type="submit" name="btn"
+ value="Save changes">
+ </div>
+
+ </form>
+[end]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("user_star")) {
+ [# The user viewing this page wants to star the user *on* this page]
+ $("user_star").addEventListener("click", function () {
+ _TKR_toggleStar($("user_star"), null, null, "[viewed_user_id]");
+ });
+ }
+
+ var stars = document.getElementsByClassName("star");
+ for (var i = 0; i < stars.length; ++i) {
+ var star = stars[[]i];
+ star.addEventListener("click", function (event) {
+ var projectName = event.target.getAttribute("data-project-name");
+ _TKR_toggleStar(event.target, projectName);
+ });
+ }
+
+ function _handleProjectClick(event) {
+ var target = event.target;
+ if (target.tagName == "A")
+ return;
+
+ if (target.classList.contains("rowwidgets") || target.parentNode.classList.contains("rowwidgets"))
+ return;
+ if (target.tagName != "TR") target = target.parentNode;
+ _go(target.attributes[[]"data-url"].value,
+ (event.metaKey || event.ctrlKey || event.button == 1));
+ };
+ $("projecttable").addEventListener("click", _handleProjectClick);
+ if ($("starredtable")) {
+ $("starredtable").addEventListener("click", _handleProjectClick);
+ }
+ if ($("archivedtable")) {
+ $("archivedtable").addEventListener("click", _handleProjectClick);
+ }
+
+});
+</script>
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/user-settings-page.ezt b/appengine/monorail/templates/sitewide/user-settings-page.ezt
new file mode 100644
index 0000000..33c8510
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/user-settings-page.ezt
@@ -0,0 +1,17 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showusertabs" "t1"]
+
+<div style="max-width:50em">
+
+<h3>User Preferences</h3>
+
+<form action="settings.do" method="POST">
+ [include "unified-settings.ezt"]
+ [if-any read_only][else]
+ <input id="submit_btn" type="submit" name="btn" value="Save preferences">
+ [end]
+</form>
+
+</div>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/user-updates-page.ezt b/appengine/monorail/templates/sitewide/user-updates-page.ezt
new file mode 100644
index 0000000..31b24fb
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/user-updates-page.ezt
@@ -0,0 +1,7 @@
+[define page_css]css/d_updates_page.css[end]
+
+[include "../framework/master-header.ezt" "showusertabs" "t3"]
+
+[include "../features/updates-page.ezt"]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/sitewide/usergrouptabs.ezt b/appengine/monorail/templates/sitewide/usergrouptabs.ezt
new file mode 100644
index 0000000..1f29b43
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/usergrouptabs.ezt
@@ -0,0 +1,15 @@
+[# Display a row of tabs for servlets with URLs starting with /g/groupid.
+
+ Args:
+ arg0: String like "t1", "t2", "t3" to identify the currently active tab.
+]
+
+<div class="[admin_tab_mode]">
+ <div class="at isf">
+ <span class="inst1"><a href="/g/[groupid]/">People</a></span>
+ [if-any offer_membership_editing]
+ <span class="inst2"><a href="/g/[groupid]/groupadmin">Administer</a></span>
+ [end]
+ </div>
+</div>
+
diff --git a/appengine/monorail/templates/sitewide/usertabs.ezt b/appengine/monorail/templates/sitewide/usertabs.ezt
new file mode 100644
index 0000000..527a330
--- /dev/null
+++ b/appengine/monorail/templates/sitewide/usertabs.ezt
@@ -0,0 +1,43 @@
+[# Display a row of tabs for servlets with URLs starting with /u/username.
+
+ Args:
+ arg0: String like "t1", "t2", "t3" to identify the currently active tab.
+]
+
+<div class="at isf [user_tab_mode]">
+ <span class="inst2">
+ <a href="[viewed_user.profile_url]">[if-any viewing_self]My Profile[else]User Profile[end]</a>
+ </span>
+
+ <span class="inst5">
+ <a href="[viewed_user.profile_url][if-any viewing_self]updates/projects[else]updates[end]">Updates</a>
+ </span>
+
+ [if-any viewing_self]
+ <span class="inst3">
+ <a href="/hosting/settings">Settings</a>
+ </span>
+ [end]
+
+ [if-any offer_saved_queries_subtab]
+ <span class="inst4">
+ <a href="[viewed_user.profile_url]queries">Saved Queries</a>
+ </span>
+ [end]
+
+</div>
+
+
+[is arg0 "t3"]
+ <div class="at [user_updates_tab_mode]" style="margin-left: 2em">
+ <span class="inst1">
+ <a href="[viewed_user.profile_url]updates">From [viewed_user.display_name]</a>
+ </span>
+ <span class="inst2">
+ <a href="[viewed_user.profile_url]updates/projects">Starred Projects</a>
+ </span>
+ <span class="inst3">
+ <a href="[viewed_user.profile_url]updates/developers">Starred Developers</a>
+ </span>
+ </div>
+[end]
diff --git a/appengine/monorail/templates/tracker/admin-components-page.ezt b/appengine/monorail/templates/tracker/admin-components-page.ezt
new file mode 100644
index 0000000..86219c4
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-components-page.ezt
@@ -0,0 +1,196 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminComponents.do" id="adminComponents" method="POST">
+  <input type="hidden" name="token" value="[form_token]">
+
+ <h4>Issue components</h4>
+ [if-any perms.EditProject]
+ <span style="margin:0 .7em">Show:
+ <select id="rowfilter">
+ <option label="All components" value="all">
+ <option label="Active components" value="active" selected=true>
+ <option label="Top-level components" value="toplevel">
+ <option label="Components I administer" value="myadmin">
+ <option label="Components I am CC'd on" value="mycc">
+ <option label="Deprecated components" value="deprecated">
+ </select>
+ </span>
+ <span style="margin:0 .7em">Select:
+ <a id="selectall" href="#">All</a>
+ <a id="selectnone" href="#">None</a>
+ </span>
+ [end]
+
+ <div class="list-foot"></div>
+ [if-any perms.EditProject]
+ <form action="adminComponents.do" method="POST">
+ <a href="/p/[projectname]/components/create" class="buttonify">Create component</a>
+ <input type="hidden" name="delete_components">
+ <input type="hidden" name="token" value="[form_token]">
+ <input type="submit" name="deletebtn" value="Delete Component(s)" style="margin-left:2em" disabled>
+ </form>
+ <div id="deletebtnsfeedback" class="fielderror" style="margin-left:1em">
+ [if-any failed_perm]
+ You do not have permission to delete the components:
+ [failed_perm]<br/>
+ [end]
+ [if-any failed_subcomp]
+ Can not delete the following components because they have subcomponents:
+ [failed_subcomp]<br/>
+ [end]
+ [if-any failed_templ]
+ Can not delete the following components because they are listed in templates:
+ [failed_templ]<br/>
+ [end]
+ </div>
+ [end]
+
+ <div class="section">
+ <table cellspacing="0" cellpadding="2" border="0" class="comptable results striped vt active" id="resultstable" width="100%">
+ <tbody>
+ <tr>
+ [if-any perms.EditProject]<th></th>[end]
+ <th>Name</th>
+ <th>Administrators</th>
+ <th>Auto Cc</th>
+ <th>Description</th>
+ </tr>
+ [if-any component_defs][else]
+ <tr>
+ <td colspan="5">
+ <div style="padding: 3em; text-align: center">
+ This project has not defined any components.
+ </div>
+ </td>
+ </tr>
+ [end]
+ [for component_defs]
+ [define detail_url]/p/[projectname]/components/detail?component=[format "url"][component_defs.path][end][end]
+ <tr data-url="[detail_url]" class="comprow [component_defs.classes]">
+ [if-any perms.EditProject]
+ <td class="cb">
+ <input type="checkbox" data-path="[component_defs.path]" class="checkRangeSelect">
+ </td>
+ [end]
+ <td class="id">
+ <a style="white-space:nowrap" href="[detail_url]">[component_defs.path]</a>
+ </td>
+ <td>
+ [for component_defs.admins]
+ [include "../framework/user-link.ezt" component_defs.admins][if-index component_defs.admins last][else],[end]
+ [end]
+ </td>
+ <td>
+ [for component_defs.cc]
+ [include "../framework/user-link.ezt" component_defs.cc][if-index component_defs.cc last][else],[end]
+ [end]
+ </td>
+ <td>
+ [component_defs.docstring_short]
+ </td>
+ </tr>
+ [end]
+ </tbody>
+ </table>
+ </div>[# section]
+
+ <div class="list-foot"></div>
+ [if-any perms.EditProject]
+ <form action="adminComponents.do" method="POST">
+ <a href="/p/[projectname]/components/create" class="buttonify">Create component</a>
+ <input type="hidden" name="delete_components">
+ <input type="hidden" name="token" value="[form_token]">
+ <input type="submit" name="deletebtn" value="Delete Component(s)" style="margin-left:2em" disabled>
+ </form>
+ [end]
+
+</form>
+
+[end]
+
+[include "../framework/footer-script.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _onload();
+
+ if ($("selectall")) {
+ $("selectall").addEventListener("click", function() {
+ _selectAllIssues();
+ setDisabled(false);
+ });
+ }
+ if ($("selectnone")) {
+ $("selectnone").addEventListener("click", function() {
+ _selectNoneIssues();
+ setDisabled(true);
+ });
+ }
+
+ var checkboxNodes = document.getElementsByClassName("checkRangeSelect");
+ var checkboxes = Array();
+ for (var i = 0; i < checkboxNodes.length; ++i) {
+ var checkbox = checkboxNodes.item(i);
+ checkboxes.push(checkbox);
+ checkbox.addEventListener("click", function (event) {
+ _checkRangeSelect(event, event.target);
+ _highlightRow(event.target);
+ updateEnabled();
+ });
+ }
+
+ function updateEnabled() {
+ var anySelected = checkboxes.some(function(checkbox) {
+ return checkbox.checked;
+ });
+ setDisabled(!anySelected);
+ }
+
+ var deleteButtons = document.getElementsByName("deletebtn");
+ function setDisabled(disabled) {
+ for (var i = 0; i < deleteButtons.length; ++i) {
+ deleteButtons.item(i).disabled = disabled;
+ }
+ }
+
+ for (var i = 0; i < deleteButtons.length; ++i) {
+ deleteButtons.item(i).addEventListener("click", function(event) {
+ var componentsToDelete = [];
+ for (var i = 0; i< checkboxes.length; ++i) {
+ var checkbox = checkboxes[[]i];
+ if (checkbox.checked)
+ componentsToDelete.push(checkbox.getAttribute("data-path"));
+ }
+ var fields = document.getElementsByName("delete_components");
+ for (var i = 0; i< fields.length; ++i) {
+ fields.item(i).value = componentsToDelete.join();
+ }
+ if (!confirm("Are you sure you want to delete the selected components ?\nThis operation cannot be undone."))
+ event.preventDefault();
+ });
+ }
+
+ function _handleResultsClick(event) {
+ var target = event.target;
+ if (target.tagName == "A" || target.type == "checkbox" || target.className == "cb")
+ return;
+ while (target && target.tagName != "TR") target = target.parentNode;
+ _go(target.attributes[[]"data-url"].value,
+ (event.metaKey || event.ctrlKey || event.button == 1));
+ };
+ $("resultstable").addEventListener("click", _handleResultsClick);
+
+
+ function _handleRowFilterChange(event) {
+ $("resultstable").classList.remove('all', 'active', 'toplevel', 'myadmin', 'mycc', 'deprecated');
+ $("resultstable").classList.add(event.target.value);
+ };
+ $("rowfilter").addEventListener("change", _handleRowFilterChange);
+});
+</script>
+
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/admin-labels-page.ezt b/appengine/monorail/templates/tracker/admin-labels-page.ezt
new file mode 100644
index 0000000..5bcf681
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-labels-page.ezt
@@ -0,0 +1,132 @@
+[define category_css]css/ph_list.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+[include "../framework/js-placeholders.ezt"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminLabels.do" id="adminLabels" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ <h4>Predefined issue labels</h4>
+ <div class="section">
+ [if-any perms.EditProject]
+ <table class="vt">
+ <tr><td>
+ <textarea name="predefinedlabels" rows="12" cols="75" style="tab-size:18">[for config.issue_labels]
+[config.issue_labels.commented][config.issue_labels.name_padded][if-any config.issue_labels.docstring]	= [config.issue_labels.docstring][end][end]
+</textarea><br><br>
+
+ Each issue may have <b>at most one</b> label with each of these prefixes:<br>
+ <input type="text" size="75" name="excl_prefixes"
+ value="[for config.excl_prefixes][config.excl_prefixes][if-index config.excl_prefixes last][else], [end][end]">
+ </td>
+ <td style="padding-left:.7em">
+ <div class="tip">
+ <b>Instructions:</b><br> List one label per line in desired sort-order.<br><br>
+ Optionally, use an equals-sign to document the meaning of each label.
+ </div>
+ </td>
+ </tr>
+ </table>
+ [else]
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped" width="100%">
+ <tr>
+ <th style="min-width:14em">Label</th>
+ <th width="100%">Meaning</th>
+ </tr>
+ [for config.issue_labels]
+ <tr>
+ <td style="white-space:nowrap; padding-right:2em; color:#363">[config.issue_labels.name]</td>
+ <td>[config.issue_labels.docstring]</td>
+ </tr>
+ [end]
+ </table>
+ [end]
+ </div>
+
+ [if-any perms.EditProject]
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+ [end]
+
+ <br>
+ <br>
+
+ <h4>Custom fields</h4>
+ <div class="section">
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped vt" id="resultstable" width="100%">
+ <tbody>
+ <tr>
+ <th>Name</th>
+ <th>Type</th>
+ <th>Required</th>
+ <th>Multivalued</th>
+ <th>Applicable to</th>
+ <th>Description</th>
+ </tr>
+ [if-any field_defs][else]
+ <tr>
+ <td colspan="40">
+ <div style="padding: 3em; text-align: center">
+ This project has not defined any custom fields.
+ </div>
+ </td>
+ </tr>
+ [end]
+ [for field_defs]
+ [define detail_url]/p/[projectname]/fields/detail?field=[field_defs.field_name][end]
+ [is field_defs.type_name "INT_TYPE"][define pretty_type_name]Integer[end][end]
+ [is field_defs.type_name "ENUM_TYPE"][define pretty_type_name]Enum[end][end]
+ [is field_defs.type_name "USER_TYPE"][define pretty_type_name]User[end][end]
+ [is field_defs.type_name "STR_TYPE"][define pretty_type_name]String[end][end]
+ <tr data-url="[detail_url]">
+ <td class="id" style="white-space:nowrap">
+ <a href="[detail_url]">[field_defs.field_name]</a></td>
+ <td style="white-space:nowrap">
+ [pretty_type_name]
+ </td>
+ <td style="white-space:nowrap">
+ [if-any field_defs.is_required_bool]Required[else]Optional[end]
+ </td>
+ <td style="white-space:nowrap">
+ [if-any field_defs.is_multivalued_bool]Multiple[else]Single[end]
+ </td>
+ <td style="white-space:nowrap">
+ [if-any field_defs.applicable_type][field_defs.applicable_type][else]Any issue[end]
+ </td>
+ <td>
+ [field_defs.docstring_short]
+ </td>
+ </tr>
+ [end]
+ </tbody>
+ </table>
+ <div class="list-foot"></div>
+ [if-any perms.EditProject]
+ <p><a href="/p/[projectname]/fields/create" class="buttonify">Add field</a></p>
+ [end]
+ </div>
+
+</form>
+
+[end]
+
+
+[include "../framework/footer-script.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _onload();
+
+ function _handleResultsClick(event) {
+ var target = event.target;
+ if (target.tagName == "A")
+ return;
+ while (target && target.tagName != "TR") target = target.parentNode;
+ _go(target.attributes[[]"data-url"].value,
+ (event.metaKey || event.ctrlKey || event.button == 1));
+ };
+ $("resultstable").addEventListener("click", _handleResultsClick);
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/admin-rules-page.ezt b/appengine/monorail/templates/tracker/admin-rules-page.ezt
new file mode 100644
index 0000000..7ff5819
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-rules-page.ezt
@@ -0,0 +1,28 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminRules.do" id="adminRules" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ [include "../framework/filter-rule-admin-part.ezt" "with_tracking_actions"]
+
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+</form>
+
+[end]
+
+[include "../framework/footer-script.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+ _onload();
+
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/admin-statuses-page.ezt b/appengine/monorail/templates/tracker/admin-statuses-page.ezt
new file mode 100644
index 0000000..b073ff8
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-statuses-page.ezt
@@ -0,0 +1,81 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminStatuses.do" id="adminStatuses" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ [if-any perms.EditProject]
+ <table class="vt">
+ <tr><td>
+ <h4>Open Issue Status Values</h4>
+ <div class="section">
+ <textarea name="predefinedopen" rows="6" cols="75" style="tab-size:18">[for config.open_statuses]
+[config.open_statuses.commented][config.open_statuses.name_padded][if-any config.open_statuses.docstring]	= [config.open_statuses.docstring][end][end]
+</textarea>
+ </div>
+ <h4>Closed Issue Status Values</h4>
+ <div class="section">
+ <textarea name="predefinedclosed" rows="6" cols="75" style="tab-size:18">[for config.closed_statuses]
+[config.closed_statuses.commented][config.closed_statuses.name_padded][if-any config.closed_statuses.docstring]	= [config.closed_statuses.docstring][end][end]
+</textarea><br><br>
+
+ If an issue's status is being set to one of these values, offer to merge issues:<br>
+ <input type="text" size="75" name="statuses_offer_merge"
+ value="[for config.statuses_offer_merge][config.statuses_offer_merge][if-index config.statuses_offer_merge last][else], [end][end]">
+ </div>
+ </td>
+ <td style="padding-left:.7em">
+ <div class="tip">
+ <b>Instructions:</b><br> List one status value per line in desired sort-order.<br><br>
+ Optionally, use an equals-sign to document the meaning of each status value.
+ </div>
+ </td>
+ </tr>
+ </table>
+ [else]
+ <h4>Open Issue Status Values</h4>
+ <div class="section">
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped" width="100%">
+ <tr>
+ <th style="min-width:14em">Status</th>
+ <th width="100%">Meaning</th>
+ </tr>
+ [for config.open_statuses]
+ <tr>
+ <td style="white-space:nowrap; padding-right:2em;">[config.open_statuses.name]</td>
+ <td>[config.open_statuses.docstring]</td>
+ </tr>
+ [end]
+ </table>
+ </div>
+
+ <h4>Closed Issue Status Values</h4>
+ <div class="section">
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped" width="100%">
+ <tr>
+ <th style="min-width:14em">Status</th>
+ <th width="100%">Meaning</th>
+ </tr>
+ [for config.closed_statuses]
+ <tr>
+ <td style="white-space:nowrap; padding-right:2em;">[config.closed_statuses.name]</td>
+ <td>[config.closed_statuses.docstring]</td>
+ </tr>
+ [end]
+ </table>
+ </div>
+ [end]
+
+
+ [if-any perms.EditProject]
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+ [end]
+</form>
+
+[end]
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/admin-templates-editor-part.ezt b/appengine/monorail/templates/tracker/admin-templates-editor-part.ezt
new file mode 100644
index 0000000..7e451b8
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-templates-editor-part.ezt
@@ -0,0 +1,88 @@
+[# Display all the form fields needed to edit an issue template.
+ Values are copied to and from these editor fields into per-template hidden fields by JS code.]
+
+<table cellspacing="0" cellpadding="3" class="rowmajor vt">
+ <tr>
+ <th>Members only:</th>
+ <td>
+ <input type="checkbox" id="members_only_checkbox" class="saveTemplate">
+ <label for="members_only_checkbox">Only offer this template to project members</label>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Summary:</th>
+ <td>
+ <input type="text" id="summary_editor" size="60" class="saveTemplate acob" value=""><br>
+ <input type="checkbox" id="summary_must_be_edited_checkbox" class="saveTemplate">
+ <label for="summary_must_be_edited_checkbox">Users must edit issue summary before submitting</label>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Description:</th>
+ <td>
+      <textarea id="content_editor" rows="12" cols="75" class="undef saveTemplate"></textarea>
+ [# Note: wrap="hard" has no effect on content_editor because we copy to a hidden field before submission.]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Status:</th>
+ <td>
+ <input type="text" id="status_editor" size="12" class="saveTemplate acob" value=""
+ autocomplete="off">
+ </td>
+ </tr>
+
+ <tr>
+ <th>Owner:</th>
+ <td>
+ <input type="text" id="owner_editor" size="25" class="saveTemplate acob" value=""
+ autocomplete="off">
+ <span id="owner_defaults_to_member_area">
+ <input type="checkbox" id="owner_defaults_to_member_checkbox" class="saveTemplate" style="margin-left:2em">
+ <label for="owner_defaults_to_member_checkbox">Default to member who is entering the issue</label>
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Components:</th>
+ <td>
+ <input type="text" id="components_editor" size="75" class="saveTemplate acob" value=""
+ autocomplete="off">
+ <br/>
+ <span id="component_required_area">
+ <input type="checkbox" id="component_required_checkbox" class="saveTemplate">
+ <label for="component_required_checkbox">Require at least one component</label>
+ </span>
+ </td>
+ </tr>
+
+ [for fields]
+ [# TODO(jrobbins): determine applicability dynamically and update fields in JS]
+ <tr>
+ <th>[fields.field_name]:</th>
+ <td colspan="2">
+ [include "field-value-widgets.ezt" False]
+ </td>
+  </tr>
+ [end]
+
+ <tr>
+ <th>Labels:</th>
+ <td>
+ [include "label-fields.ezt" "all"]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Template admins:</th>
+ <td>
+ <input type="text" id="admin_names_editor" size="75" class="saveTemplate acob" value=""
+ autocomplete="off">
+ </td>
+ </tr>
+
+</table>
diff --git a/appengine/monorail/templates/tracker/admin-templates-page.ezt b/appengine/monorail/templates/tracker/admin-templates-page.ezt
new file mode 100644
index 0000000..22e36ce
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-templates-page.ezt
@@ -0,0 +1,238 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminTemplates.do" id="adminTemplates" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ <input type="hidden" name="current_template_index" id="current_template_index" value="">
+ <h4>Issue templates</h4>
+ <div class="section">
+ <table>
+ <tr><td>Choose Template:<br>
+ <select size="35" id="template_menu" style="width:14em">
+ [for config.templates]
+ [if-any config.templates.can_view perms.EditProject]
+ <option value="[config.templates.index]">[config.templates.name]</option>
+ [end]
+ [end]
+ </select>
+ [if-any perms.EditProject]
+ <div style="padding-top:4px; text-align:center">
+ <input type="button" name="newbtn" id="newbtn" value="New...">
+ <input type="button" name="delbtn" id="delbtn" value="Delete">
+ </div>
+ [end]
+ </td>
+ <td style="color:#666; font-weight:bold;">
+ → <br><br>
+ → <br><br>
+ → <br><br>
+ </td>
+ <td id="edit_panel" class="closed">
+ <div style="border:2px solid #c3d9ff; padding: 4px 6px;">
+ <div class="ifClosed" style="width:52em; height:46em;">Select a template from the list.</div>
+ <div class="ifOpened">
+ [include "admin-templates-editor-part.ezt"]
+ </div>
+ </div>
+ </td>
+ </tr>
+ </table><br>
+
+ [if-any perms.EditProject]
+ Default template for project members:
+ <select name="default_template_for_developers" id="default_template_for_developers">
+ [for config.templates]
+ <option value="[config.templates.name]" [is config.templates.template_id config.default_template_for_developers]selected[end]>[config.templates.name]</option>
+ [end]
+ </select><br><br>
+
+ Default template for non-members:
+ <select name="default_template_for_users" id="default_template_for_users">
+ [for config.templates]
+ <option value="[config.templates.name]" [is config.templates.template_id config.default_template_for_users]selected[end]>[config.templates.name]</option>
+ [end]
+ </select>
+ [end]
+
+
+ [for config.templates]
+ [if-any config.templates.can_view perms.EditProject]
+ <input type="hidden" name="name_[config.templates.index]" id="name_[config.templates.index]"
+ value="[config.templates.name]">
+ <input type="hidden" name="template_id_[config.templates.index]" id="template_id_[config.templates.index]"
+ value="[config.templates.template_id]">
+ <input type="hidden" name="members_only_[config.templates.index]" id="members_only_[config.templates.index]"
+ value="[config.templates.members_only]">
+ <input type="hidden" name="summary_[config.templates.index]" id="summary_[config.templates.index]"
+ value="[config.templates.summary]">
+ <input type="hidden" name="summary_must_be_edited_[config.templates.index]" id="summary_must_be_edited_[config.templates.index]"
+ value="[config.templates.summary_must_be_edited]">
+ <input type="hidden" name="content_[config.templates.index]" id="content_[config.templates.index]"
+ value="[config.templates.content]">
+ <input type="hidden" name="status_[config.templates.index]" id="status_[config.templates.index]"
+ value="[config.templates.status]">
+ <input type="hidden" name="owner_[config.templates.index]" id="owner_[config.templates.index]"
+ value="[config.templates.ownername]">
+ <input type="hidden" name="owner_defaults_to_member_[config.templates.index]" id="owner_defaults_to_member_[config.templates.index]"
+ value="[config.templates.owner_defaults_to_member]">
+ <input type="hidden" name="component_required_[config.templates.index]" id="component_required_[config.templates.index]"
+ value="[config.templates.component_required]">
+ <input type="hidden" name="components_[config.templates.index]" id="components_[config.templates.index]"
+ value="[config.templates.components]">
+
+ [for config.templates.complete_field_values]
+ [# TODO(jrobbins): support specifying multiple values of multivalued fields]
+ <input type="hidden"
+ name="field_value_[config.templates.index]_[config.templates.complete_field_values.field_id]"
+ id="field_value_[config.templates.index]_[config.templates.complete_field_values.field_id]"
+ value="[config.templates.complete_field_values.val]">
+ [end]
+
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_0"
+ value="[config.templates.label0]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_1"
+ value="[config.templates.label1]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_2"
+ value="[config.templates.label2]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_3"
+ value="[config.templates.label3]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_4"
+ value="[config.templates.label4]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_5"
+ value="[config.templates.label5]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_6"
+ value="[config.templates.label6]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_7"
+ value="[config.templates.label7]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_8"
+ value="[config.templates.label8]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_9"
+ value="[config.templates.label9]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_10"
+ value="[config.templates.label10]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_11"
+ value="[config.templates.label11]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_12"
+ value="[config.templates.label12]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_13"
+ value="[config.templates.label13]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_14"
+ value="[config.templates.label14]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_15"
+ value="[config.templates.label15]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_16"
+ value="[config.templates.label16]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_17"
+ value="[config.templates.label17]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_18"
+ value="[config.templates.label18]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_19"
+ value="[config.templates.label19]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_20"
+ value="[config.templates.label20]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_21"
+ value="[config.templates.label21]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_22"
+ value="[config.templates.label22]">
+ <input type="hidden" name="label_[config.templates.index]" id="label_[config.templates.index]_23"
+ value="[config.templates.label23]">
+ <input type="hidden" name="admin_names_[config.templates.index]" id="admin_names_[config.templates.index]"
+ value="[config.templates.admin_names]">
+ <input type="hidden" name="can_edit_[config.templates.index]" id="can_edit_[config.templates.index]"
+ value="[config.templates.can_edit]">
+ [end]
+ [end]
+
+ <br><br>
+
+ [define can_edit_any]No[end]
+ [for config.templates]
+ [if-any config.templates.can_view][if-any config.templates.can_edit]
+ [define can_edit_any]Yes[end]
+ [end][end]
+ [end]
+ [if-any perms.EditProject]
+ [define can_edit_any]Yes[end]
+ [end]
+
+ [is can_edit_any "Yes"]
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+ [end]
+
+ </div>[# section]
+
+</form>
+[end]
+
+[include "../framework/footer-script.ezt"]
+[include "field-value-widgets-js.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function getParam(name, defaultVal){
+ if(name=(new RegExp('[?&]'+encodeURIComponent(name)+'=([^&]*)')).exec(location.search)) {
+ return decodeURIComponent(name[1]);
+ }
+ return defaultVal;
+ }
+
+ _setFieldIDs([[]
+ [for fields][fields.field_id][if-index fields last][else], [end][end]
+ ]);
+
+ [if-any perms.EditProject]
+ _templateNames.push(
+ [for config.templates]"[format "js"][config.templates.name][end]"[if-index config.templates last][else],[end][end]
+ )
+ [end]
+ if ($("template_menu") && $("template_menu").options.length > 0) {
+ var tindex = getParam('tindex', 0);
+ if (tindex >= $("template_menu").length) {
+ // If the specified template no longer exists default to the first one.
+ $("template_menu").options[[]0].selected = true;
+ } else {
+ $("template_menu").options[[]tindex].selected = true;
+ }
+ _selectTemplate(document.getElementById('template_menu'));
+ }
+
+ [# Catch changes that were not keystrokes, e.g., paste menu item.]
+ setInterval(_saveTemplate, 700);
+
+ _fetchOptions(
+ '[projectname]', 'issueOptions', CS_env.token, [project.cached_content_timestamp]);
+ _onload();
+
+ _dirty = _saveTemplate;
+
+ if ($("template_menu"))
+ $("template_menu").addEventListener("change", function() {
+ _selectTemplate($("template_menu"));
+ });
+ if ($("newbtn"))
+ $("newbtn").addEventListener("click", _newTemplate);
+ if ($("delbtn"))
+ $("delbtn").addEventListener("click", _deleteTemplate);
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+
+ var saveTemplateElements = document.getElementsByClassName("saveTemplate");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("keyup", _saveTemplate);
+ el.addEventListener("change", _saveTemplate);
+ }
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/admin-views-page.ezt b/appengine/monorail/templates/tracker/admin-views-page.ezt
new file mode 100644
index 0000000..b0c9e89
--- /dev/null
+++ b/appengine/monorail/templates/tracker/admin-views-page.ezt
@@ -0,0 +1,81 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="adminViews.do" id="adminViews" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+
+ [include "../framework/artifact-list-admin-part.ezt" "with_grid"]
+
+<h4 id="queries">Saved queries</h4>
+<div class="section">
+
+ <div class="closed">
+ <div>Saved queries help project visitors easily view relevant issue lists.
+ <a class="ifClosed toggleHidden" href="#"
+ style="font-size:90%; margin-left:.5em">Learn more</a>
+ </div>
+
+ <div id="filterhelp" class="ifOpened help">
+ Project owners can set up saved queries to make it easier for team members to
+ quickly run common queries. More importantly, project owners can use saved
+ queries to focus the team's attention on the issue lists that are most important
+ for the project's success. The project's saved queries are shown in the middle
+ section of the search dropdown menu that is next to the issue search box.
+ </div>
+ <br>
+
+ [if-any perms.EditProject]
+ [include "../framework/saved-queries-admin-part.ezt" "project"]
+ [else]
+ <table cellspacing="0" cellpadding="2" border="0" class="results striped">
+ <tr>
+ <th align="left">Saved query name</th>
+ <th align="left">Search in</th>
+ <th align="left">Query</th>
+ </tr>
+ [for canned_queries]
+ <tr>
+ <td>[canned_queries.name]</td>
+ <td>
+ [define can][canned_queries.base_query_id][end]
+ [is can "1"]All issues[end]
+ [is can "2"]Open issues[end]
+ [is can "3"]Open and owned by me[end]
+ [is can "4"]Open and reported by me[end]
+ [is can "5"]Open and starred by me[end]
+ [is can "6"]New issues[end]
+ [is can "7"]Issues to verify[end]
+ [is can "8"]Open with comment by me[end]
+ </td>
+ <td>
+ [canned_queries.query]
+ </td>
+ </tr>
+ [end]
+ </table>
+ [end]
+ </div>
+</div>
+
+ [if-any perms.EditProject]
+ <input type="submit" id="savechanges" name="btn" value="Save changes" class="submit">
+ [end]
+</form>
+
+[end]
+
+[include "../framework/footer-script.ezt"]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+ _onload();
+
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/component-create-page.ezt b/appengine/monorail/templates/tracker/component-create-page.ezt
new file mode 100644
index 0000000..273c168
--- /dev/null
+++ b/appengine/monorail/templates/tracker/component-create-page.ezt
@@ -0,0 +1,125 @@
+[define title]Add a Component[end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<a href="/p/[projectname]/adminComponents">‹ Back to component list</a><br><br>
+
+
+<h4>Add a component</h4>
+
+<form action="create.do" method="POST">
+<input type="hidden" name="token" value="[form_token]">
+
+<table cellspacing="8" class="rowmajor vt">
+
+ <tr>
+ <th width="1%">Parent:</th>
+ <td>
+ <select name="parent_path" id="parent_path">
+ <option value="">Top level</option>
+ [for component_defs]
+ <option value="[component_defs.path]" [if-any component_defs.selected]selected=true[end]>[component_defs.path]</option>
+ [end]
+ </select>
+ </td>
+ <td rowspan="10">
+ <div class="tip">
+ <p>Components should describe the structure of the software being
+ built so that issues can be related to the correct parts.</p>
+
+ <p>Please use labels instead for releases,
+ milestones, task forces, types of issues, etc.</p>
+
+ <p>Deprecated components won't be shown in autocomplete.</p>
+ </div>
+ </td>
+ </tr>
+
+ <tr>
+ <th width="1%">Name:</th>
+ <td>
+ <input id="leaf_name" name="leaf_name" size="30" value="[initial_leaf_name]"
+ class="acob">
+ <span id="leafnamefeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.leaf_name][errors.leaf_name][end]
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Description:</th>
+ <td>
+ <textarea name="docstring" rows="4" cols="75">[initial_docstring]</textarea>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Admins:</th>
+ <td>
+ <textarea id="member_admins" name="admins" rows="3" cols="75">[for initial_admins][initial_admins], [end]</textarea>
+ <span id="memberadminsfeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.member_admins][errors.member_admins][end]
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Auto Cc:</th>
+ <td>
+ <textarea id="member_cc" name="cc" rows="3" cols="75">[for initial_cc][initial_cc], [end]</textarea>
+ <span id="memberccfeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.member_cc][errors.member_cc][end]
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Deprecated:</th>
+ <td>
+ <input type="checkbox" id="deprecated" name="deprecated">
+ </td>
+ </tr>
+
+ <tr>
+ <td></td>
+ <td>
+ <input id="submit_btn" type="submit" name="submit" value="Create component">
+ </td>
+ </tr>
+
+</table>
+</form>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions('[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+
+ _onload();
+ document.getElementById('submit_btn').disabled = 'disabled';
+ document.getElementById('leaf_name').focus();
+
+ function checkSubmit() {
+ _checkLeafName(
+ '[projectname]',
+ document.getElementById('parent_path').value,
+ '', CS_env.token);
+ }
+ setInterval(checkSubmit, 700);
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+});
+</script>
+
+
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/component-detail-page.ezt b/appengine/monorail/templates/tracker/component-detail-page.ezt
new file mode 100644
index 0000000..87b52fe
--- /dev/null
+++ b/appengine/monorail/templates/tracker/component-detail-page.ezt
@@ -0,0 +1,158 @@
+[define title]Component [component_def.path][end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<a href="/p/[projectname]/adminComponents">‹ Back to component list</a><br><br>
+
+
+<h4>Component</h4>
+[if-any creator]
+ Created by <a href="[creator.profile_url]">[creator.display_name]</a> [created]<br/>
+[end]
+[if-any modifier]
+ Last modified by <a href="[modifier.profile_url]">[modifier.display_name]</a> [modified]<br/>
+[end]
+
+<br/>
+<form action="detail.do" method="POST">
+<input type="hidden" name="token" value="[form_token]">
+<input type="hidden" name="component" value="[component_def.path]">
+<table cellspacing="8" class="rowmajor vt">
+ <tr>
+ <th width="1%">Name:</th>
+ <td>
+ [if-any allow_edit]
+ [if-any component_def.parent_path][component_def.parent_path]>[end]
+ <input id="leaf_name" name="leaf_name" value="[initial_leaf_name]" size="30" class="acob">
+ <span id="leafnamefeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.leaf_name][errors.leaf_name][end]
+ </span>
+ [else]
+ [component_def.path]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Description:</th>
+ <td>
+ [if-any allow_edit]
+ <textarea name="docstring" rows="4" cols="75">[initial_docstring]</textarea>
+ [else]
+ [component_def.docstring]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Admins:</th>
+ <td>
+ [if-any allow_edit]
+ <textarea id="member_admins" name="admins" rows="3" cols="75">[for initial_admins][initial_admins], [end]</textarea>
+ <span id="memberadminsfeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.member_admins][errors.member_admins][end]
+ </span>
+ [else]
+ [for component_def.admins]
+ <div>[include "../framework/user-link.ezt" component_def.admins]</div>
+ [end]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Auto Cc:</th>
+ <td>
+ [if-any allow_edit]
+ <textarea id="member_cc" name="cc" rows="3" cols="75">[for initial_cc][initial_cc], [end]</textarea>
+ <span id="memberccfeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.member_cc][errors.member_cc][end]
+ </span>
+ [else]
+ [for component_def.cc]
+ <div>[include "../framework/user-link.ezt" component_def.cc]</div>
+ [end]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Deprecated:</th>
+ <td>
+ <input type="checkbox" id="deprecated" name="deprecated" [if-any initial_deprecated]checked="checked"[end]
+ [if-any allow_edit][else]disabled[end]>
+ </td>
+ </tr>
+
+ <tr>
+ <td></td>
+ <td>
+ [if-any allow_edit]
+ <div>
+ <span style="float:left;">
+ <input type="submit" name="submit" id="submit_btn" value="Submit changes">
+ <input type="submit" name="deletecomponent" value="Delete component"
+ [if-any allow_delete][else]disabled[end]
+ style="margin-left:2em" id="deletecomponent">
+ </span>
+ <span style="float:right;">
+ <a href="/p/[projectname]/components/create?component=[component_def.path]">Create new subcomponent</a>
+ </span>
+ <div style="clear:both;"></div>
+ </div>
+ [if-any allow_delete][else]
+ <br/><br/>
+ <b>Note:</b>
+ [if-any subcomponents]
+ <br/>
+ Can not delete this component because it has the following subcomponents:<br/>
+ [for subcomponents]<div style="margin-left:1em">[subcomponents.path]</div>[end]
+ [end]
+ [if-any templates]
+ <br/>
+ Can not delete this component because it is listed in the following templates:<br/>
+ [for templates]<div style="margin-left:1em">[templates.name]</div>[end]
+ [end]
+ [end]
+ [end]
+ </td>
+ </tr>
+
+</table>
+</form>
+
+[include "../framework/footer-script.ezt"]
+
+[if-any allow_edit]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions('[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+
+ _onload();
+ function checkSubmit() {
+ _checkLeafName('[projectname]', '[component_def.parent_path]', '[component_def.leaf_name]', CS_env.token);
+ }
+ setInterval(checkSubmit, 700);
+
+ if ($("deletecomponent")) {
+ $("deletecomponent").addEventListener("click", function(event) {
+ if (!confirm("Are you sure you want to delete [component_def.path]?\nThis operation cannot be undone."))
+ event.preventDefault();
+ });
+ }
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+});
+</script>
+[end]
+
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/field-create-page.ezt b/appengine/monorail/templates/tracker/field-create-page.ezt
new file mode 100644
index 0000000..1234e02
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-create-page.ezt
@@ -0,0 +1,237 @@
+[define title]Add a Field[end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<a href="/p/[projectname]/adminLabels">‹ Back to field list</a><br><br>
+
+
+<h4>Add a custom field</h4>
+
+<form action="create.do" method="POST">
+<input type="hidden" name="token" value="[form_token]">
+
+<table cellspacing="8" class="rowmajor vt">
+ <tr>
+ <th width="1%">Name:</th>
+ <td>
+ <input id="fieldname" name="name" size="30" value="[initial_field_name]" class="acob">
+ <span id="fieldnamefeedback" class="fielderror" style="margin-left:1em">
+ [if-any errors.field_name][errors.field_name][end]
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Description:</th>
+ <td>
+ <textarea name="docstring" rows="4" cols="75">[initial_field_docstring]</textarea>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Type:</th>
+ <td>
+ <select id="field_type" name="field_type">
+ <option value="enum_type" [is initial_type "enum_type"]selected="selected"[end]>Enum</option>
+ <option value="int_type" [is initial_type "int_type"]selected="selected"[end]>Integer</option>
+ <option value="str_type" [is initial_type "str_type"]selected="selected"[end]>String</option>
+ <option value="user_type" [is initial_type "user_type"]selected="selected"[end]>User</option>
+ </select>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Applicable:</th>
+ <td>When issue type is:
+ <select id="applicable_type" name="applicable_type">
+ <option value="" [is initial_applicable_type ""]selected="selected"[end]>Anything</option>
+ <option disabled="disabled">----</option>
+ [for well_known_issue_types]
+ <option value="[well_known_issue_types]" [is initial_applicable_type well_known_issue_types]selected="selected"[end]>[well_known_issue_types]</option>
+ [end]
+ </select>
+ [# TODO(jrobbins): AND with free-form applicability predicate.]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Required:</th>
+ <td>
+ <input type="checkbox" id="is_required" name="is_required" class="acob"
+ [if-any initial_is_required]checked="checked"[end]>
+ <label for="is_required">Required when applicable</label>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Multivalued:</th>
+ <td>
+ <input type="checkbox" name="is_multivalued" class="acob"
+ [if-any initial_is_multivalued]checked="checked"[end]>
+ </td>
+ </tr>
+
+ <tr id="choices_row" style="display:none">
+ <th>Choices:</th>
+ <td>
+ <textarea id="choices" name="choices" rows="10" cols="75" style="tab-size:12"
+ >[initial_choices]</textarea>
+ </td>
+ </tr>
+
+ <tr id="int_row" style="display:none">
+ <th>Validation:</th>
+ <td>
+ Min value: <input type="number" name="min_value" style="text-align:right; width: 4em">
+ Max value: <input type="number" name="max_value" style="text-align:right; width: 4em"><br>
+ </td>
+ </tr>
+
+ <tr id="str_row" style="display:none">
+ <th>Validation:</th>
+ <td>
+ Regex: <input type="text" name="regex" size="30"><br>
+ </td>
+ </tr>
+
+ <tr id="user_row" style="display:none">
+ <th>Validation:</th>
+ <td>
+ <input type="checkbox" name="needs_member" id="needs_member" class="acob"
+ [if-any initial_needs_member]checked[end]>
+ <label for="needs_member">User must be a project member</label><br>
+ <span id="needs_perm_span" style="margin-left:1em">
+ Required permission:
+ <input type="text" name="needs_perm" id="needs_perm" size="20"
+ value="[initial_needs_perm]" class="acob">
+ </span><br>
+ </td>
+ </tr>
+ <tr id="user_row2" style="display:none">
+ <th>Permissions:</th>
+ <td>
+      The users named in this field are granted this permission on this issue:<br>
+ [# TODO(jrobbins): one-click way to specify View vs. EditIssue vs. any custom perm.]
+ <input type="text" name="grants_perm" id="grants_perm" class="acob"
+ size="20" value="[initial_grants_perm]" autocomplete="off">
+ </td>
+ </tr>
+ <tr id="user_row3" style="display:none">
+ <th>Notification:</th>
+ <td>
+ The users named in this field will be notified via email whenever:<br>
+ <select name="notify_on">
+ <option value="never" [is initial_notify_on "0"]selected="selected"[end]
+ >No notifications</option>
+ <option value="any_comment" [is initial_notify_on "1"]selected="selected"[end]
+ >Any change or comment is added</option>
+ </select>
+ </td>
+ </tr>
+
+  <tr><th>Admins:</th>
+ <td>
+ <input id="member_admins" name="admin_names" size="75" value="[initial_admins]"
+ autocomplete="off" class="acob">
+ <span class="fielderror" style="margin-left:1em">
+ [if-any errors.field_admins][errors.field_admins][end]
+ </span>
+ </td>
+ </tr>
+
+ <tr>
+ <td></td>
+ <td>
+ <input id="submit_btn" type="submit" name="submit" value="Create field">
+ </td>
+ </tr>
+
+</table>
+</form>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions('[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+
+ _onload();
+ var submit = document.getElementById('submit_btn');
+ submit.disabled = 'disabled';
+ var fieldname = document.getElementById('fieldname');
+ var oldName = '';
+ fieldname.focus();
+
+ var fieldNameRE = /^[[]a-zA-Z]([[]-_]?[[]a-zA-Z0-9])*$/;
+
+ function checkFieldName() {
+    var name = fieldname.value;
+ if (name != oldName) {
+ oldName = name;
+      var feedback = document.getElementById('fieldnamefeedback');
+ submit.disabled = 'disabled';
+ if (name == '') {
+ feedback.innerText = 'Please choose a field name';
+ } else if (!fieldNameRE.test(name)) {
+ feedback.innerText = 'Invalid field name';
+ } else if (name.length > 30) {
+ feedback.innerText = 'Field name is too long';
+ } else {
+ _checkFieldNameOnServer('[projectname]', name, CS_env.token);
+ }
+ }
+ }
+
+ setInterval(checkFieldName, 700);
+
+ function updateForm(new_type) {
+ var choices_row = document.getElementById('choices_row');
+ choices_row.style.display = (new_type == 'enum_type') ? '' : 'none';
+
+ var int_row = document.getElementById('int_row');
+ int_row.style.display = (new_type == 'int_type') ? '' : 'none';
+
+ var str_row = document.getElementById('str_row');
+ str_row.style.display = (new_type == 'str_type') ? '' : 'none';
+
+ var user_row_display = (new_type == 'user_type') ? '' : 'none';
+ document.getElementById('user_row').style.display = user_row_display;
+ document.getElementById('user_row2').style.display = user_row_display;
+ document.getElementById('user_row3').style.display = user_row_display;
+ }
+
+ var type_select = document.getElementById('field_type');
+ updateForm(type_select.value);
+ type_select.addEventListener("change", function() {
+ updateForm(type_select.value);
+ });
+
+ var needs_perm_span = document.getElementById('needs_perm_span');
+ var needs_perm = document.getElementById('needs_perm');
+ function enableNeedsPerm(enable) {
+ needs_perm_span.style.color = enable ? 'inherit' : '#999';
+ needs_perm.disabled = enable ? '' : 'disabled';
+ if (!enable) needs_perm.value = '';
+ }
+ enableNeedsPerm(false);
+
+ var needs_member = document.getElementById("needs_member");
+ if (needs_member)
+ needs_member.addEventListener("change", function() {
+ enableNeedsPerm(needs_member.checked);
+ });
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+});
+</script>
+
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/field-detail-page.ezt b/appengine/monorail/templates/tracker/field-detail-page.ezt
new file mode 100644
index 0000000..c922b20
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-detail-page.ezt
@@ -0,0 +1,260 @@
+[define title]Field [field_def.field_name][end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<a href="/p/[projectname]/adminLabels">‹ Back to field list</a><br><br>
+
+
+<h4>Custom field</h4>
+
+<form action="detail.do" method="POST">
+<input type="hidden" name="token" value="[form_token]">
+<input type="hidden" name="field" value="[field_def.field_name]">
+
+<table cellspacing="8" class="rowmajor vt">
+ <tr>
+ <th width="1%">Name:</th>
+ <td>
+ [if-any uneditable_name]
+ <input type="hidden" name="name" value="[field_def.field_name]">
+ [field_def.field_name]
+ [else][if-any allow_edit]
+ <input name="name" value="[field_def.field_name]" size="30" class="acob">
+ [else]
+ [field_def.field_name]
+ [end][end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Description:</th>
+ <td>
+ [if-any allow_edit]
+ <textarea name="docstring" rows="4" cols="75">[field_def.docstring]</textarea>
+ [else]
+ [field_def.docstring]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Type:</th>
+ [# TODO(jrobbins): make field types editable someday.]
+ <td>[field_def.type_name]</td>
+ </tr>
+
+ [is field_def.field_name "Type"][else]
+ <tr>
+ <th>Applicable:</th>
+ <td>When issue type is:
+ [if-any allow_edit]
+ <select id="applicable_type" name="applicable_type">
+ <option value="" [is initial_applicable_type ""]selected="selected"[end]>Anything</option>
+ <option disabled="disabled">----</option>
+ [for well_known_issue_types]
+ <option value="[well_known_issue_types]" [is initial_applicable_type well_known_issue_types]selected="selected"[end]>[well_known_issue_types]</option>
+ [end]
+ </select>
+ [else]
+ [initial_applicable_type]
+ [end]
+ [# TODO(jrobbins): editable applicable_predicate.]
+ </td>
+ </tr>
+ [end]
+
+ <tr>
+ <th>Required:</th>
+ <td>
+ [if-any allow_edit]
+ <input type="checkbox" id="is_required" name="is_required" class="acob"
+ [if-any field_def.is_required_bool]checked="checked"[end]>
+ <label for="is_required">Required when applicable</label>
+ [else]
+ [if-any field_def.is_required_bool]Yes[else]No[end]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Multivalued:</th>
+ <td>
+ [if-any allow_edit]
+ <input type="checkbox" name="is_multivalued" class="acob"
+ [if-any field_def.is_multivalued_bool]checked="checked"[end]>
+ [else]
+ [if-any field_def.is_multivalued_bool]Yes[else]No[end]
+ [end]
+ </td>
+ </tr>
+
+ [# TODO(jrobbins): dynamically display validation info as field type is edited.]
+ [is field_def.type_name "ENUM_TYPE"]
+ <tr>
+ <th>Choices:</th>
+ <td>
+ [if-any allow_edit]
+ <textarea name="choices" rows="10" cols="75" style="tab-size:18">
+[for field_def.choices][field_def.choices.name_padded][if-any field_def.choices.docstring]	= [end][field_def.choices.docstring]
+[end]
+ </textarea>
+ [else]
+ <table cellspacing="4" cellpadding="0" style="padding: 2px; border:2px solid #eee">
+ [for field_def.choices]
+ <tr>
+ <td>[field_def.choices.name]</td>
+ <td>[if-any field_def.choices.docstring]= [end][field_def.choices.docstring]</td>
+ </tr>
+ [end]
+ </table>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [is field_def.type_name "INT_TYPE"]
+ <tr id="int_row">
+ <th>Validation:</th>
+ <td>
+ Min value:
+ <input type="number" name="min_value" style="text-align:right; width: 4em"
+ value="[field_def.min_value]" class="acob"
+ [if-any allow_edit][else]disabled="disabled"[end]>
+ Max value:
+ <input type="number" name="max_value" style="text-align:right; width: 4em"
+ value="[field_def.max_value]" class="acob"
+ [if-any allow_edit][else]disabled="disabled"[end]><br>
+ </td>
+ </tr>
+ [end]
+
+ [is field_def.type_name "STR_TYPE"]
+ <tr id="str_row">
+ <th>Validation:</th>
+ <td>
+ Regex: <input type="text" name="regex" size="30" value="[field_def.regex]" class="acob"><br>
+ </td>
+ </tr>
+ [end]
+
+ [is field_def.type_name "USER_TYPE"]
+ <tr id="user_row">
+ <th>Validation:</th>
+ <td>
+ <input type="checkbox" name="needs_member" id="needs_member" class="acob"
+ [if-any allow_edit][else]disabled="disabled"[end]
+ [if-any field_def.needs_member_bool]checked="checked"[end]>
+ <label for="needs_member">User must be a project member</label><br>
+ <span id="needs_perm_span" style="margin-left:1em">Required permission:
+ <input type="text" name="needs_perm" id="needs_perm" size="20"
+ value="[field_def.needs_perm]" autocomplete="off" class="acob"
+ [if-any allow_edit][else]disabled="disabled"[end]></span><br>
+ </td>
+ </tr>
+ <tr id="user_row2">
+ <th>Permissions:</th>
+ <td>
+      The users named in this field are granted this permission on this issue:<br>
+ [# TODO(jrobbins): one-click way to specify View vs. EditIssue vs. any custom perm.]
+ <input type="text" name="grants_perm" id="grants_perm" class="acob"
+ size="20" value="[field_def.grants_perm]" autocomplete="off"
+ [if-any allow_edit][else]disabled[end]>
+ </td>
+ </tr>
+ <tr id="user_row3">
+ <th>Notification:</th>
+ <td>
+ The users named in this field will be notified via email whenever:<br>
+ <select name="notify_on" [if-any allow_edit][else]disabled[end]
+ class="acrob">
+ <option value="never" [is field_def.notify_on "0"]selected="selected"[end]
+ >No notifications</option>
+ <option value="any_comment" [is field_def.notify_on "1"]selected="selected"[end]
+ >Any change or comment is added</option>
+ </select>
+ </td>
+ </tr>
+ [end]
+
+  <tr><th>Admins:</th>
+ <td>
+ [if-any allow_edit]
+ <input id="member_admins" name="admin_names" size="75" value="[initial_admins]"
+ autocomplete="off" class="acob">
+ <span class="fielderror" style="margin-left:1em">
+ [if-any errors.field_admins][errors.field_admins][end]
+ </span>
+ [else]
+ [for field_def.admins]
+ <div>[include "../framework/user-link.ezt" field_def.admins]</div>
+ [end]
+ [end]
+ </td>
+ </tr>
+
+
+ <tr>
+ <td></td>
+ <td>
+ [if-any allow_edit]
+ <input type="submit" name="submit" value="Submit changes">
+ <input type="submit" name="deletefield" value="Delete Field"
+ style="margin-left:2em" id="deletefield">
+ [end]
+ </td>
+ </tr>
+
+</table>
+</form>
+
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions('[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+
+ _onload();
+
+ var needs_perm_span = document.getElementById('needs_perm_span');
+ var needs_perm = document.getElementById('needs_perm');
+ var needs_member = document.getElementById('needs_member');
+ function enableNeedsPerm(enable) {
+ needs_perm_span.style.color = enable ? 'inherit' : '#999';
+ needs_perm.disabled = enable ? '' : 'disabled';
+ if (!enable) needs_perm.value = '';
+ }
+ [if-any allow_edit]
+ if (needs_perm)
+ enableNeedsPerm(needs_member.checked);
+ [end]
+
+ if ($("deletefield")) {
+ $("deletefield").addEventListener("click", function(event) {
+ var msg = ("Are you sure you want to delete [field_def.field_name]?\n" +
+ "This operation cannot be undone. " +
+ "[is field_def.type_name "ENUM_TYPE"]\nEnum values will be retained on issues as labels.[end]");
+ if (!confirm(msg))
+ event.preventDefault();
+ });
+ }
+
+ var acobElements = document.getElementsByClassName("acob");
+ for (var i = 0; i < acobElements.length; ++i) {
+ var el = acobElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ }
+
+ if ($("needs_member")) {
+ $("needs_member").addEventListener("change", function(event) {
+ enableNeedsPerm($("needs_member").checked);
+ });
+ }
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/field-value-multi-enum.ezt b/appengine/monorail/templates/tracker/field-value-multi-enum.ezt
new file mode 100644
index 0000000..d70cca2
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-multi-enum.ezt
@@ -0,0 +1,69 @@
+[for fields.field_def.choices]
+ [define checked]No[end]
+ [define derived]No[end]
+ [for fields.values]
+ [is fields.values.val fields.field_def.choices.name]
+ [define checked]Yes[end]
+ [end]
+ [end]
+ [for fields.derived_values]
+ [is fields.derived_values.val fields.field_def.choices.name]
+ [define checked]Yes[end]
+ [define derived]Yes[end]
+ [end]
+ [end]
+
+ <span style="white-space:nowrap; margin-right:1em"
+ title="[if-any derived]derived: [end][fields.field_def.choices.docstring]"
+ >
+ <input type="checkbox" name="custom_[fields.field_id]"
+ value="[fields.field_def.choices.name]"
+ id="custom_[fields.field_id]_[fields.field_def.choices.idx]"
+ [is checked "Yes"]checked="checked"[end] [is derived "Yes"]disabled="disabled"[end]>
+ <label for="custom_[fields.field_id]_[fields.field_def.choices.idx]"
+ [is derived "Yes"]style="font-style:italic"[end]>
+ [fields.field_def.choices.name]</label>
+ </span>
+
+[end]
+
+
+[# Also include any oddball values as plain text with an _X_ icon.]
+[for fields.values]
+ [define already_shown]No[end]
+ [for fields.field_def.choices]
+ [is fields.field_def.choices.name fields.values.val]
+ [define already_shown]Yes[end]
+ [end]
+ [end]
+ [is already_shown "No"]
+ <span style="white-space:nowrap; margin-right:1em"
+ title="This is not a defined choice for this field"
+ id="span_oddball_[fields.values.idx]">
+ <a id="oddball_[fields.values.idx]" class="remove_oddball">
+ <img src="/static/images/close_icon.png" width="13" height="13">
+ </a>[fields.values.val]
+ [# Below hidden input contains the value of the field for tracker_helpers._ParseIssueRequestFields ]
+ <input type="text" class="labelinput" id="input_oddball_[fields.values.idx]" size="20" name="label"
+ value="[fields.field_name]-[fields.values.val]" hidden>
+ </span>
+ [end]
+[end]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+  var oddballAnchors = document.getElementsByClassName("remove_oddball");
+ for (var i = 0; i < oddballAnchors.length; ++i) {
+ var oddballAnchor = oddballAnchors[[]i];
+
+ oddballAnchor.addEventListener("click", function(event) {
+ var oddballSpan = $("span_" + this.id);
+ oddballSpan.hidden = true;
+ var oddballInput = $("input_" + this.id);
+ oddballInput.value = "";
+ event.preventDefault();
+ });
+ }
+});
+</script>
+
diff --git a/appengine/monorail/templates/tracker/field-value-multi-int.ezt b/appengine/monorail/templates/tracker/field-value-multi-int.ezt
new file mode 100644
index 0000000..e9aeb8f
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-multi-int.ezt
@@ -0,0 +1,32 @@
+[if-any fields.values]
+ [for fields.values]
+ <input type="number" name="custom_[fields.field_id]" value="[fields.values.val]"
+ [if-any fields.field_def.min_value]min="[fields.field_def.min_value]"[end]
+ [if-any fields.field_def.max_value]max="[fields.field_def.max_value]"[end]
+ [if-index fields.values first]
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [end]
+ style="text-align:right; width:12em" class="multivalued customfield">
+ [if-index fields.values first][else]
+ <u class="removeMultiFieldValueWidget">X</u>
+ [end]
+ [if-index fields.values last]
+ <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="int"
+ data-validate-1="[fields.field_def.min_value]" data-validate-2="[fields.field_def.max_value]"
+ >Add a value</u>
+ [end]
+ [end]
+[else]
+ <input type="number" name="custom_[fields.field_id]" value=""
+ [if-any fields.field_def.min_value]min="[fields.field_def.min_value]"[end]
+ [if-any fields.field_def.max_value]max="[fields.field_def.max_value]"[end]
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ style="text-align:right; width:12em" class="multivalued customfield">
+  <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="int"
+ data-validate-1="[fields.field_def.min_value]" data-validate-2="[fields.field_def.max_value]">Add a value</u>
+[end]
+
+[for fields.derived_values]
+ <input type="number" disabled="disabled" value="[fields.derived_values.val]"
+ style="font-style:italic; text-align:right; width:12em" class="multivalued">
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-multi-str.ezt b/appengine/monorail/templates/tracker/field-value-multi-str.ezt
new file mode 100644
index 0000000..babffac
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-multi-str.ezt
@@ -0,0 +1,27 @@
+[if-any fields.values]
+ [for fields.values]
+ <input name="custom_[fields.field_id]" value="[fields.values.val]"
+ [# TODO(jrobbins): string validation]
+ [if-index fields.values first]
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [end]
+ size="90" class="multivalued customfield">
+ [if-index fields.values first][else]
+ <u class="removeMultiFieldValueWidget">X</u>
+ [end]
+ [if-index fields.values last]
+ <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="str">Add a value</u>
+ [end]
+ [end]
+[else]
+ <input name="custom_[fields.field_id]" value=""
+ [# TODO(jrobbins): string validation]
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ size="90" class="multivalued customfield">
+ <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="str">Add a value</u>
+[end]
+
+[for fields.derived_values]
+ <input disabled="disabled" value="[fields.derived_values.val]"
+ style="font-style:italic" size="90" class="multivalued">
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-multi-user.ezt b/appengine/monorail/templates/tracker/field-value-multi-user.ezt
new file mode 100644
index 0000000..b006af6
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-multi-user.ezt
@@ -0,0 +1,26 @@
+[if-any fields.values]
+ [for fields.values]
+ <input type="text" name="custom_[fields.field_id]" value="[fields.values.val]"
+ [# TODO(jrobbins): include fields.min_value and fields.max_value attrs]
+ [if-index fields.values first]
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [end]
+ style="width:12em" class="multivalued userautocomplete customfield" autocomplete="off">
+ [if-index fields.values first][else]
+ <u class="removeMultiFieldValueWidget">X</u>
+ [end]
+ [if-index fields.values last]
+ <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="user">Add a value</u>
+ [end]
+ [end]
+[else]
+ <input type="text" name="custom_[fields.field_id]" value=""
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ style="width:12em" class="multivalued userautocomplete customfield" autocomplete="off">
+ <u class="addMultiFieldValueWidget" data-field-id="[fields.field_id]" data-field-type="user">Add a value</u>
+[end]
+
+[for fields.derived_values]
+ <input type="text" disabled="disabled" value="[fields.derived_values.val]"
+ style="width:12em" class="multivalued">
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-single-enum.ezt b/appengine/monorail/templates/tracker/field-value-single-enum.ezt
new file mode 100644
index 0000000..13bd97b
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-single-enum.ezt
@@ -0,0 +1,81 @@
+[if-any fields.values fields.derived_values]
+
+ [# TODO(jrobbins): a better UX for undesired values would be to replace the current
+     --/value select widget with a plain-text display of the value followed by an _X_
+ link to delete it. There would be a hidden field with the value. Validation would
+ fail in JS and on the server if each such _X_ had not already been clicked.]
+
+ [# There could be more than one if this field used to be multi-valued.]
+ [for fields.values]
+ <select name="custom_[fields.field_id]" id="custom_[fields.field_id]"
+ class="custom_field_value_menu">
+ [define show_no_value_choice]No[end]
+ [# Non-required fields can have any value removed.]
+ [if-any fields.field_def.is_required_bool][else]
+ [define show_no_value_choice]Yes[end]
+ [end]
+    [# Formerly multi-valued fields need -- to narrow down to being single valued.]
+ [if-index fields.values first][else]
+ [define show_no_value_choice]Yes[end]
+ [end]
+ [is show_no_value_choice "Yes"]
+ <option value="--"
+ [is fields.values.val ""]selected="selected"[end]
+ title="No value">--</option>
+ [end]
+
+ [define value_is_shown]No[end]
+ [for fields.field_def.choices]
+ [define show_choice]No[end]
+ [# Always show the current value]
+ [is fields.values.val fields.field_def.choices.name]
+ [define value_is_shown]Yes[end]
+ [define show_choice]Yes[end]
+ [end]
+ [# Formerly multi-valued fields extra values can ONLY be removed.]
+ [if-index fields.values first]
+ [define show_choice]Yes[end]
+ [end]
+ [is show_choice "Yes"]
+ <option value="[fields.field_def.choices.name]"
+ [is fields.values.val fields.field_def.choices.name]selected="selected"[end]>
+ [fields.field_def.choices.name]
+ [if-any fields.field_def.choices.docstring]= [fields.field_def.choices.docstring][end]
+ </option>
+ [end]
+ [end]
+
+ [is value_is_shown "No"]
+ [# This is an oddball label, force the user to explicitly remove it.]
+ <option value="[fields.values.val]" selected="selected"
+ title="This value is not a defined choice for this field">
+ [fields.values.val]
+ </option>
+ [end]
+ </select><br>
+ [end]
+
+ [for fields.derived_values]
+ <div title="Derived: [fields.derived_values.docstring]" class="rolloverzone">
+ <i>[fields.derived_values.val]</i>
+ </div>
+ [end]
+
+[else][# No current values, just give all choices.]
+
+ <select name="custom_[fields.field_id]" id="custom_[fields.field_id]"
+ class="custom_field_value_menu">
+ [if-any fields.field_def.is_required_bool]
+ <option value="" disabled="disabled" selected="selected">Select value…</option>
+ [else]
+ <option value="--" selected="selected" title="No value">--</option>
+ [end]
+ [for fields.field_def.choices]
+ <option value="[fields.field_def.choices.name]">
+ [fields.field_def.choices.name]
+ [if-any fields.field_def.choices.docstring]= [fields.field_def.choices.docstring][end]
+ </option>
+ [end]
+ </select><br>
+
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-single-int.ezt b/appengine/monorail/templates/tracker/field-value-single-int.ezt
new file mode 100644
index 0000000..0e24866
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-single-int.ezt
@@ -0,0 +1,35 @@
+[# Even though this field definition says it is single-valued, the issue might have
+ multiple values if the field definition was previously multi-valued. In such a situation
+ values other than the first value are shown read-only and must be explicitly removed
+ before the comment can be submitted. ]
+
+[# If the field has no explicit values, then show an empty form element.]
+[if-any fields.values][else]
+ <input type="number" name="custom_[fields.field_id]" id="custom_[fields.field_id]" value=""
+ [if-any fields.field_def.is_required_bool] required="required"[end]
+ [if-any fields.field_def.min_value]min="[fields.field_def.min_value]"[end]
+ [if-any fields.field_def.max_value]max="[fields.field_def.max_value]"[end]
+ style="text-align:right; width:12em" class="multivalued customfield">
+[end]
+
+
+[for fields.values]
+ [if-index fields.values first]
+ <input type="number" name="custom_[fields.field_id]" value="[fields.values.val]"
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [if-any fields.field_def.min_value]min="[fields.field_def.min_value]"[end]
+ [if-any fields.field_def.max_value]max="[fields.field_def.max_value]"[end]
+ style="text-align:right; width:12em" class="multivalued customfield">
+ [else]
+ <span>
+ <input type="number" disabled="disabled" value="[fields.values.val]"
+ style="text-align:right; width: 12em" class="multivalued customfield">
+ <u class="removeMultiFieldValueWidget">X</u>
+ </span>
+ [end]
+[end]
+
+[for fields.derived_values]
+ <input type="number" disabled="disabled" value="[fields.derived_values.val]"
+ style="font-style:italic; text-align:right; width:12em" class="multivalued">
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-single-str.ezt b/appengine/monorail/templates/tracker/field-value-single-str.ezt
new file mode 100644
index 0000000..910cfb1
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-single-str.ezt
@@ -0,0 +1,34 @@
+[# Even though this field definition says it is single-valued, the issue might have
+ multiple values if the field definition was previously multi-valued. In such a situation
+ values other than the first value are shown read-only and must be explicitly removed
+ before the comment can be submitted. ]
+
+[# If the field has no explicit values, then show an empty form element.]
+[if-any fields.values][else]
+ <input name="custom_[fields.field_id]" id="custom_[fields.field_id]" value=""
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [# TODO(jrobbins): validation]
+ class="multivalued customfield" size="90">
+[end]
+
+
+[for fields.values]
+ [if-index fields.values first]
+ <input name="custom_[fields.field_id]" value="[fields.values.val]"
+ class="multivalued customfield"
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ [# TODO(jrobbins): validation]
+ size="90"><br>
+ [else]
+ <span>
+ <input disabled="disabled" value="[fields.values.val]"
+ class="multivalued" size="90">
+ <a href="#" class="removeMultiFieldValueWidget">X</a>
+ </span>
+ [end]
+[end]
+
+[for fields.derived_values]
+ <input disabled="disabled" value="[fields.derived_values.val]"
+ style="font-style:italic" class="multivalued" size="90"><br>
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-single-user.ezt b/appengine/monorail/templates/tracker/field-value-single-user.ezt
new file mode 100644
index 0000000..9630d88
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-single-user.ezt
@@ -0,0 +1,31 @@
+[# Even though this field definition says it is single-valued, the issue might have
+ multiple values if the field definition was previously multi-valued. In such a situation
+ values other than the first value are shown read-only and must be explicitly removed
+ before the comment can be submitted. ]
+
+[# If the field has no explicit values, then show an empty form element.]
+[if-any fields.values][else]
+ <input type="text" name="custom_[fields.field_id]" id="custom_[fields.field_id]" value=""
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ style="width:12em" class="multivalued userautocomplete customfield" autocomplete="off">
+[end]
+
+
+[for fields.values]
+ [if-index fields.values first]
+ <input type="text" name="custom_[fields.field_id]" value="[fields.values.val]"
+ [if-any fields.field_def.is_required_bool]required="required"[end]
+ style="width:12em" class="multivalued userautocomplete customfield" autocomplete="off">
+ [else]
+ <span>
+ <input type="text" disabled="disabled" value="[fields.values.val]"
+ style="width:12em" class="multivalued userautocomplete customfield" autocomplete="off">
+ <a href="#" class="removeMultiFieldValueWidget">X</a>
+ </span>
+ [end]
+[end]
+
+[for fields.derived_values]
+ <input type="text" disabled="disabled" value="[fields.derived_values.val]"
+ style="font-style:italic; width:12em" class="multivalued">
+[end]
diff --git a/appengine/monorail/templates/tracker/field-value-widgets-js.ezt b/appengine/monorail/templates/tracker/field-value-widgets-js.ezt
new file mode 100644
index 0000000..fd012a5
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-widgets-js.ezt
@@ -0,0 +1,35 @@
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var removeMFVElements = document.getElementsByClassName("removeMultiFieldValueWidget");
+ for (var i = 0; i < removeMFVElements.length; ++i) {
+ var el = removeMFVElements[[]i];
+ el.addEventListener("click", function(event) {
+ _removeMultiFieldValueWidget(event.target);
+ });
+ }
+
+ var addMFVElements = document.getElementsByClassName("addMultiFieldValueWidget");
+ for (var i = 0; i < addMFVElements.length; ++i) {
+ var el = addMFVElements[[]i];
+ el.addEventListener("click", function(event) {
+ var target = event.target;
+ var fieldID = target.getAttribute("data-field-id");
+      var fieldType = target.getAttribute("data-field-type");
+ var fieldValidate1 = target.getAttribute("data-validate-1");
+ var fieldValidate2 = target.getAttribute("data-validate-2");
+ _addMultiFieldValueWidget(
+ event.target, fieldID, fieldType, fieldValidate1, fieldValidate2);
+ });
+ }
+
+ var customFieldElements = document.getElementsByClassName("customfield");
+ for (var i = 0; i < customFieldElements.length; ++i) {
+ var el = customFieldElements[[]i];
+ el.addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ el.addEventListener("keyup", _dirty);
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/field-value-widgets.ezt b/appengine/monorail/templates/tracker/field-value-widgets.ezt
new file mode 100644
index 0000000..f48bd64
--- /dev/null
+++ b/appengine/monorail/templates/tracker/field-value-widgets.ezt
@@ -0,0 +1,37 @@
+[# Display widgets for editing one custom field.
+ The variable "fields" must already refer to a FieldValueView object.
+ arg0: True if the field is multi-valued.
+]
+[is fields.field_def.type_name "ENUM_TYPE"]
+ [if-any arg0]
+ [include "field-value-multi-enum.ezt"]
+ [else]
+ [include "field-value-single-enum.ezt"]
+ [end]
+[end]
+
+[is fields.field_def.type_name "INT_TYPE"]
+ [if-any arg0]
+ [include "field-value-multi-int.ezt"]
+ [else]
+ [include "field-value-single-int.ezt"]
+ [end]
+[end]
+
+[is fields.field_def.type_name "STR_TYPE"]
+ [if-any arg0]
+ [include "field-value-multi-str.ezt"]
+ [else]
+ [include "field-value-single-str.ezt"]
+ [end]
+[end]
+
+[is fields.field_def.type_name "USER_TYPE"]
+ [if-any arg0]
+ [include "field-value-multi-user.ezt"]
+ [else]
+ [include "field-value-single-user.ezt"]
+ [end]
+[end]
+
+[# TODO(jrobbins): more field types. ]
diff --git a/appengine/monorail/templates/tracker/issue-advsearch-page.ezt b/appengine/monorail/templates/tracker/issue-advsearch-page.ezt
new file mode 100644
index 0000000..d07d6dd
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-advsearch-page.ezt
@@ -0,0 +1,77 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<form action="advsearch.do" method="POST" style="margin:6px;margin-top:12px;" autocomplete="off">
+
+[# Note: No need for UI element permission checking here. ]
+
+<table cellspacing="0" cellpadding="4" border="0" class="advquery">
+ <tr class="focus"><td width="25%"><b> Find issues</b></td>
+ <td>Search within</td>
+ <td>
+ <select name="can" style="width:100%">
+ [include "issue-can-widget.ezt" "advsearch"]
+ </select>
+ </td>
+ <td width="25%" align="center" rowspan="3">
+ <input type="submit" name="btn" value="Search" style="font-weight:bold; font-size:120%">
+ </td>
+ </tr>
+ <tr class="focus"><td width="25%"></td>
+ <td>with <b>all</b> of the words</td><td><input type="text" size="25" name="words" value=""></td>
+ </tr>
+ <tr class="focus"><td></td>
+ <td><b>without</b> the words</td><td><input type="text" size="25" name="without" value=""></td>
+ </tr>
+ <tr><td> </td><td></td><td></td><td></td></tr>
+ [# TODO(jrobbins): add autocomplete to labels field, allow commas, disable browser autocomplete. ]
+ <tr><td><b>Restrict search to</b></td><td>Labels</td><td><input type="text" name="labels" size="25" value="" placeholder="All the labels"></td><td class="eg">e.g., FrontEnd Priority:High</td></tr>
+ <tr><td rowspan="5"><br>
+ <table cellspacing="0" cellpadding="0" border="0"><tr><td>
+ <div class="tip">
+ <b>Tip:</b> Search results can be<br>refined by clicking on
+ the<br>result table headings.<br> <a href="searchtips">More
+ Search Tips</a>
+ </div>
+ </td></tr></table>
+ </td>
+ [# TODO(jrobbins): add autocomplete to status field, allow commas, disable browser autocomplete. ]
+ <td>Statuses</td><td><input type="text" name="statuses" size="25" value="" placeholder="Any status"></td><td class="eg">e.g., Started</td></tr>
+ <tr><td>Components</td><td><input type="text" size="25" name="components" value="" placeholder="Any component"></td><td class="eg"></td></tr>
+ <tr><td>Reporters</td><td><input type="text" size="25" name="reporters" value="" placeholder="Any reporter"></td><td class="eg"></td></tr>
+ [# TODO(jrobbins): add autocomplete to owners and cc fields, allow commas, disable browser autocomplete. ]
+ <tr><td>Owners</td><td><input type="text" size="25" name="owners" value="" placeholder="Any owner"></td><td class="eg">e.g., user@example.com</td></tr>
+ <tr><td>Cc</td><td><input type="text" size="25" name="cc" value="" placeholder="Any cc"></td><td class="eg"></td></tr>
+ <tr><td></td><td>Comment by</td><td><input type="text" size="25" name="commentby" value="" placeholder="Any commenter"></td><td class="eg"></td></tr>
+ [# TODO(jrobbins): implement search by star counts
+ <tr><td></td><td>Starred by</td>
+ <td>
+ <select name="starcount" style="width:100%">
+ <option value="-1" selected="selected">Any number of users</option>
+ <option value="0">Exactly zero users</option>
+ <option value="1">1 or more users</option>
+ <option value="2">2 or more users</option>
+ <option value="3">3 or more users</option>
+ <option value="4">4 or more users</option>
+ <option value="5">5 or more users</option>
+ </select></td>
+ <td class="eg"></td>
+ </tr>
+ ]
+ [# TODO(jrobbins) search by dates? ]
+ <tr><td></td><td> </td><td></td><td class="eg"></td></tr>
+</table>
+</form>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+ _onload();
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-attachment-part.ezt b/appengine/monorail/templates/tracker/issue-attachment-part.ezt
new file mode 100644
index 0000000..2f394ce
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-attachment-part.ezt
@@ -0,0 +1,70 @@
+[# This template part renders a small table that describes one issue attachment
+ on an issue description or issue comment.
+
+ arg0: Comment sequence number.
+ arg1: AttachmentView.
+ arg2: can_delete_bool? This will be True if the logged in user is allowed
+ to delete the comment.
+ Also used: projectname, issue.local_id, attachment_form_token.
+]
+
+
+[define attachment_visible][end]
+[if-any arg2][define attachment_visible]Yes[end][end]
+[if-any arg1.deleted_bool][else][define attachment_visible]Yes[end][end]
+
+[if-any attachment_visible]
+ <table cellspacing="3" cellpadding="2" border="0">
+ <tr><td width="20">
+ <a href="[arg1.downloadurl]">
+ <img width="16" height="16" src="/static[arg1.iconurl]" border="0">
+ </a>
+ </td>
+
+ <td style="min-width:16em" valign="top">
+ [if-any arg1.deleted_bool]Deleted:[end]
+ <b [if-any arg1.deleted_bool]style="color:#666"[end]>[arg1.filename]</b>
+ <br>
+
+ [arg1.filesizestr]
+ [if-any arg1.deleted_bool][else]
+ [if-any arg1.url]
+ <a href="[arg1.url]" target="_blank" style="margin-left:.7em">View</a>
+ [end]
+ <a href="[arg1.downloadurl]" style="margin-left:.7em" download>Download</a>
+ [end]
+ </td>
+
+ [if-any arg2]
+ <td valign="top">
+ <form action="delAttachment.do" method="post" style="padding-left:1em">
+ <input type="hidden" name="token" value="[attachment_form_token]">
+ [# TODO(jrobbins): add hidden fields to maintain navigational state, e.g., colspec, q, sort, start, num]
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="sequence_num" value="[arg0]">
+ <input type="hidden" name="aid" value="[arg1.attachment_id]">
+ [if-any arg1.deleted_bool]
+ <input type="submit" name="undelete" value="Undelete" style="font-size:92%; width:7em">
+ [else]
+ <input type="submit" name="delete" value="Delete" style="font-size:92%; width:7em">
+ [end]
+ </form>
+ </td>
+ [end]
+ </tr>
+
+ [if-any arg1.thumbnail_url]
+ [if-any arg1.deleted_bool][else]
+ <tr>
+ <td colspan="3">
+ <a href="[arg1.url]" target="_blank">
+ <img src="[arg1.thumbnail_url]" class="preview">
+ </a>
+ </td>
+ </tr>
+ [end]
+ [end]
+
+
+ </table>
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-attachment-text.ezt b/appengine/monorail/templates/tracker/issue-attachment-text.ezt
new file mode 100644
index 0000000..d313669
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-attachment-text.ezt
@@ -0,0 +1,54 @@
+[define category_css]css/ph_detail.css[end]
+[define page_css]css/d_sb.css[end]
+[define title][filename] ([filesize])[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<link type="text/css" rel="stylesheet"
+ href="/static/css/prettify.css?version=[app_version]">
+
+<h3 style="margin-bottom: 0">Issue <a href="detail?id=[local_id][#TODO(jrobbins): comment number]">[local_id]</a> attachment: [filename] <small>([filesize])</small>
+</h3>
+
+
+
+<div class="fc">
+ [if-any too_large]
+ <p><em>This file is too large to display.</em></p>
+
+ [else][if-any is_binary]
+
+ <p><em>
+ This file is not plain text (only UTF-8 and Latin-1 text encodings are currently supported).
+ </em></p>
+ [else]
+
+ [define revision]None[end]
+ [include "../framework/file-content-part.ezt"]
+ [include "../framework/file-content-js.ezt"]
+
+ [end][end]
+
+</div>
+
+
+[if-any should_prettify]
+<script src="/static/js/prettify.js?version=[app_version]" nonce="[nonce]"></script>
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ prettyPrint();
+});
+</script>
+[end]
+
+[if-any perms.EditIssue]
+ [include "../framework/footer-script.ezt"]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+});
+ </script>
+[end]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-blocking-change-notification-email.ezt b/appengine/monorail/templates/tracker/issue-blocking-change-notification-email.ezt
new file mode 100644
index 0000000..9e45c69
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-blocking-change-notification-email.ezt
@@ -0,0 +1,7 @@
+Issue [issue.local_id]: [format "raw"][summary][end]
+[detail_url]
+
+[if-any is_blocking]This issue is now blocking issue [downstream_issue_ref].
+See [downstream_issue_url]
+[else]This issue is no longer blocking issue [downstream_issue_ref].
+See [downstream_issue_url][end]
diff --git a/appengine/monorail/templates/tracker/issue-bulk-change-notification-email.ezt b/appengine/monorail/templates/tracker/issue-bulk-change-notification-email.ezt
new file mode 100644
index 0000000..2051220
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-bulk-change-notification-email.ezt
@@ -0,0 +1,18 @@
+[if-any amendments]Updates:
+[amendments]
+[end]
+Comment[if-any commenter] by [commenter.display_name][end]:
+[if-any comment_text][format "raw"][comment_text][end][else](No comment was entered for this change.)[end]
+
+Affected issues:
+[for issues] issue [issues.local_id]: [format "raw"][issues.summary][end]
+ [format "raw"]http://[hostport][issues.detail_relative_url][end]
+
+[end]
+[is body_type "email"]
+--
+You received this message because you are listed in the owner
+or CC fields of these issues, or because you starred them.
+You may adjust your issue notification preferences at:
+http://[hostport]/hosting/settings
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-bulk-edit-page.ezt b/appengine/monorail/templates/tracker/issue-bulk-edit-page.ezt
new file mode 100644
index 0000000..97cc327
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-bulk-edit-page.ezt
@@ -0,0 +1,470 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[# Note: base permission for this page is EditIssue]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<div style="margin-top: 0; padding: 3px;" class="closed">
+ <form action="bulkedit.do" method="POST" style="margin: 0; padding: 0" enctype="multipart/form-data"
+ id="bulk_form">
+
+ <input type="hidden" name="can" value=[can] >
+ <input type="hidden" name="start" value=[start] >
+ <input type="hidden" name="num" value=[num] >
+ <input type="hidden" name="q" value="[query]">
+ <input type="hidden" id="sort" name="sort" value="[sortspec]">
+ <input type="hidden" name="groupby" value="[groupby]">
+ <input type="hidden" name="colspec" value="[colspec]">
+ <input type="hidden" name="x" value="[grid_x_attr]">
+ <input type="hidden" name="y" value="[grid_y_attr]">
+ <input type="hidden" name="mode" value="[if-any grid_mode]grid[end]">
+ <input type="hidden" name="cells" value="[grid_cell_mode]">
+
+ <input type="hidden" name="ids"
+ value="[for issues][issues.local_id][if-index issues last][else], [end][end]">
+ <input type="hidden" name="token" value="[form_token]">
+ <table cellpadding="0" cellspacing="0" border="0">
+ <tr><td>
+
+ <table cellspacing="0" cellpadding="3" border="0" class="rowmajor vt">
+ <tr><th>Issues:</th>
+ <td colspan="2">
+ [for issues]
+ <a href="detail?id=[issues.local_id]" title="[issues.summary]"
+ [if-any issues.closed]class=closed_ref[end]
+ >[if-any issues.closed] [end][issues.local_id][if-any issues.closed] [end]</a>[if-index issues last][else], [end]
+ [end]
+ </td>
+ </tr>
+
+ <tr>
+ <th>Comment:</th>
+ <td colspan="2">
+ <textarea cols="75" rows="6" name="comment" id="comment" class="issue_text">[initial_comment]</textarea>
+ [if-any errors.comment]
+ <div class="fielderror">[errors.comment]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr><th width="10%">Status:</th><td colspan="2">
+ [include "issue-bulk-operator-part.ezt" "statusenter" ""]
+ <input id="statusenter" autocomplete="off" style="width: 12em"
+ name="status" value="[initial_status]">
+ <span id="merge_area" style="margin-left:2em;">
+ Merge into issue:
+ <input type="text" id="merge_into" name="merge_into" style="width: 5em"
+ value="[is initial_merge_into "0"][else][initial_merge_into][end]">
+ </span>
+ [if-any errors.merge_into_id]
+ <div class="fielderror">[errors.merge_into_id]</div>
+ [end]
+ </td>
+ </tr>
+ <tr><th width="10%">Owner:</th><td colspan="2">
+ [include "issue-bulk-operator-part.ezt" "ownerenter" ""]
+ <input id="ownerenter" type="text" autocomplete="off" style="width: 12em"
+ name="owner" value="[initial_owner]">
+ [if-any errors.owner]
+ <div class="fielderror">[errors.owner]</div>
+ [end]
+ </td>
+ </tr>
+ <tr><th>Cc:</th><td colspan="2">
+ [include "issue-bulk-operator-part.ezt" "memberenter" "multi"]
+ <input type="text" multiple id="memberenter" autocomplete="off" style="width: 30em"
+ name="cc" value="[initial_cc]">
+ [if-any errors.cc]
+ <div class="fielderror">[errors.cc]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr><th>Components:</th><td colspan="2">
+ [include "issue-bulk-operator-part.ezt" "componententer" "multi"]
+ <input type="text" id="componententer" style="width:30em"
+ name="components" value="[initial_components]">
+ [if-any errors.components]
+ <div class="fielderror">[errors.components]</div>
+ [end]
+ </td></tr>
+
+ <tbody class="collapse">
+ [# Show some field editing elements immediately, others can be revealed.]
+ [define any_fields_to_reveal]No[end]
+ [for fields]
+ [if-any fields.applicable]
+ [# TODO(jrobbins): determine applicability dynamically and update fields in JS]
+ <tr [if-any fields.display][else]class="ifExpand"[define any_fields_to_reveal]Yes[end][end]>
+ <th>[fields.field_name]:</th>
+ <td colspan="2">
+ [define widget_id]custom_[fields.field_id][end]
+ [define multi][if-any fields.field_def.is_multivalued_bool]multi[end][end]
+ [include "issue-bulk-operator-part.ezt" widget_id multi]
+ [include "field-value-widgets.ezt" False]
+ <div class="fielderror" style="display:none" id="error_custom_[fields.field_id]"></div>
+ </td>
+ </tr>
+ [end]
+ [end]
+ [is any_fields_to_reveal "Yes"]
+ <tr class="ifCollapse">
+ <td colspan="2"><a href="#" class="toggleCollapse">Show all fields</a></td>
+ </tr>
+ [end]
+ </tbody>
+
+ <tr><th>Labels:</th>
+ <td colspan="2" class="labelediting">
+ <div id="enterrow1">
+ <input type="text" class="labelinput" id="label0" size="20" autocomplete="off"
+ name="label" value="[label0]">
+ <input type="text" class="labelinput" id="label1" size="20" autocomplete="off"
+ name="label" value="[label1]">
+ <input type="text" class="labelinput" id="label2" size="20" autocomplete="off"
+ data-show-id="enterrow2" data-hide-id="addrow1"
+ name="label" value="[label2]"> <span id="addrow1" class="fakelink" data-instead="enterrow2">Add a row</span>
+ </div>
+ <div id="enterrow2" style="display:none">
+ <input type="text" class="labelinput" id="label3" size="20" autocomplete="off"
+ name="label" value="[label3]">
+ <input type="text" class="labelinput" id="label4" size="20" autocomplete="off"
+ name="label" value="[label4]">
+ <input type="text" class="labelinput" id="label5" size="20" autocomplete="off"
+ data-show-id="enterrow3" data-hide-id="addrow2"
+ name="label" value="[label5]"> <span id="addrow2" class="fakelink" data-instead="enterrow3">Add a row</span>
+ </div>
+ <div id="enterrow3" style="display:none">
+ <input type="text" class="labelinput" id="label6" size="20" autocomplete="off"
+ name="label" value="[label6]">
+ <input type="text" class="labelinput" id="label7" size="20" autocomplete="off"
+ name="label" value="[label7]">
+ <input type="text" class="labelinput" id="label8" size="20" autocomplete="off"
+ data-show-id="enterrow4" data-hide-id="addrow3"
+ name="label" value="[label8]"> <span id="addrow3" class="fakelink" data-instead="enterrow4">Add a row</span>
+ </div>
+ <div id="enterrow4" style="display:none">
+ <input type="text" class="labelinput" id="label9" size="20" autocomplete="off"
+ name="label" value="[label9]">
+ <input type="text" class="labelinput" id="label10" size="20" autocomplete="off"
+ name="label" value="[label10]">
+ <input type="text" class="labelinput" id="label11" size="20" autocomplete="off"
+ data-show-id="enterrow5" data-hide-id="addrow4"
+ name="label" value="[label11]"> <span id="addrow4" class="fakelink" data-instead="enterrow5">Add a row</span>
+ </div>
+ <div id="enterrow5" style="display:none">
+ <input type="text" class="labelinput" id="label12" size="20" autocomplete="off"
+ name="label" value="[label12]">
+ <input type="text" class="labelinput" id="label13" size="20" autocomplete="off"
+ name="label" value="[label13]">
+ <input type="text" class="labelinput" id="label14" size="20" autocomplete="off"
+ data-show-id="enterrow6" data-hide-id="addrow5"
+ name="label" value="[label14]"> <span id="addrow5" class="fakelink" data-instead="enterrow6">Add a row</span>
+ </div>
+ <div id="enterrow6" style="display:none">
+ <input type="text" class="labelinput" id="label15" size="20" autocomplete="off"
+ name="label" value="[label15]">
+ <input type="text" class="labelinput" id="label16" size="20" autocomplete="off"
+ name="label" value="[label16]">
+ <input type="text" class="labelinput" id="label17" size="20" autocomplete="off"
+ data-show-id="enterrow7" data-hide-id="addrow6"
+ name="label" value="[label17]"> <span id="addrow6" class="fakelink" data-instead="enterrow7">Add a row</span>
+ </div>
+ <div id="enterrow7" style="display:none">
+ <input type="text" class="labelinput" id="label18" size="20" autocomplete="off"
+ name="label" value="[label18]">
+ <input type="text" class="labelinput" id="label19" size="20" autocomplete="off"
+ name="label" value="[label19]">
+ <input type="text" class="labelinput" id="label20" size="20" autocomplete="off"
+ data-show-id="enterrow8" data-hide-id="addrow7"
+ name="label" value="[label20]"> <span id="addrow7" class="fakelink" data-instead="enterrow8">Add a row</span>
+ </div>
+ <div id="enterrow8" style="display:none">
+ <input type="text" class="labelinput" id="label21" size="20" autocomplete="off"
+ name="label" value="[label21]">
+ <input type="text" class="labelinput" id="label22" size="20" autocomplete="off"
+ name="label" value="[label22]">
+ <input type="text" class="labelinput" id="label23" size="20" autocomplete="off"
+ name="label" value="[label23]">
+ </div>
+ </td>
+ </tr>
+
+ [if-any page_perms.DeleteIssue]
+ <tr><th width="10%">Move to project:</th><td colspan="2">
+ <input id="move_toenter" type="text" autocomplete="off" style="width: 12em"
+ name="move_to">
+ [if-any errors.move_to]
+ <div class="fielderror">[errors.move_to]</div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [if-any show_captcha]
+ <tr><th style="white-space:nowrap">Human Verification:</th>
+ <td colspan="2">
+ [include "../framework/captcha-field.ezt"]
+ </td>
+ </tr>
+ [end]
+
+ <tr>
+ <td colspan="3"><span id="confirmarea" class="novel" style="padding-top:5px; margin:0">
+ <span id="confirmmsg"></span>
+ [# TODO(jrobbins): <a href="TODO" target="_new">Learn more</a>]
+ </span>
+ </td>
+ </tr>
+ </table>
+
+
+
+[# TODO(jrobbins): <a class="ifClosed toggleHidden" href="#">More options</a>]
+[# <a class="ifOpened" href="#" class="toggleHidden" style="background:#ccc; padding: 4px;">Hide options</a>]
+[# <div class="ifOpened" style="background:#ccc; padding: 8px"><a href="#autmatically-generated">Bookmarkable link to these values</a></div>]
+[# <br><br>]
+
+
+
+
+ <div style="padding:6px">
+ <input type="submit" id="submit_btn" name="btn" value="Update [num_issues] Issue[is num_issues "1"][else]s[end]">
+ <input type="button" id="discard" name="nobtn" value="Discard">
+
+ <input type="checkbox" checked="checked" name="send_email" id="send_email" style="margin-left:1em">
+ <label for="send_email" title="Send issue change notifications to interested users">Send email</label>
+
+ </div>
+
+
+
+[if-any show_progress]
+ <div>Note: Updating [num_issues] issues will take approximately [num_seconds] seconds.</div>
+ <div id="progress">
+ </div>
+[end]
+
+ </td>
+ <td>
+ <div class="tip">
+ <b>Usage:</b> This form allows you to update several issues at one
+ time.<br><br>
+ The same comment will be applied to all issues.<br><br>
+
+ If specified, the status or owner you enter will be applied to all
+ issues.<br><br>
+
+ You may append or remove values in multi-valued fields by choosing the += or -= operators.
+ To remove labels, precede the label with a leading dash. (You may also use a leading dash
+ to remove individual items when using the += operator.)
+ </div>
+ </td>
+ </tr>
+ </table>
+
+
+ </form>
+</div>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ document.getElementById('comment').select();
+ _lfidprefix = 'label';
+ _fetchOptions("[projectname]", "issueOptions",
+ CS_env.token, [project.cached_content_timestamp]);
+ _fetchUserProjects(false);
+ setTimeout(_forceProperTableWidth, 100);
+
+ _exposeExistingLabelFields();
+ _onload();
+
+ checksubmit();
+ setInterval(checksubmit, 700); [# catch changes that were not keystrokes, e.g., paste menu item.]
+
+
+
+function checksubmit() {
+ var submit = document.getElementById('submit_btn');
+ var cg = document.getElementById('cg');
+ if (cg != undefined) { submit.disabled='disabled'; }
+
+ submit.disabled='disabled';
+ var restrict_to_known = [if-any restrict_to_known]true[else]false[end];
+ var confirmmsg = document.getElementById('confirmmsg');
+ var statusenter = $('statusenter');
+ var merge_area = $('merge_area');
+ var statuses_offer_merge = [[] [for statuses_offer_merge]"[statuses_offer_merge]"[if-index statuses_offer_merge last][else],[end][end] ];
+ if (restrict_to_known && confirmmsg && confirmmsg.innerText.length > 0) {
+ return;
+ }
+ if (cg == undefined || cg.value.length > 1) {
+ submit.disabled='';
+ }
+
+ if (statusenter) {
+ var offer_merge = 'none';
+ for (var i = 0; i < statuses_offer_merge.length; i++) {
+ if (statusenter.value == statuses_offer_merge[[]i]) offer_merge = '';
+ }
+ merge_area.style.display = offer_merge;
+ }
+}
+
+
+function disableFormElement(el) {
+ el.readOnly = 'yes';
+ el.style.background = '#eee';
+ [# TODO(jrobbins): disable auto-complete ]
+}
+
+
+function bulkOnSubmit() {
+ var inputs = document.getElementsByTagName('input');
+ for (var i = 0; i < inputs.length; i++) {
+ disableFormElement(inputs[[]i]);
+ }
+ disableFormElement(document.getElementById('comment'));
+ [if-any show_progress]
+ var progress = document.getElementById('progress');
+ progress.innerText = 'Processing...';
+ [end]
+}
+
+
+function _checkAutoClear(inputEl, selectID) {
+ var val = inputEl.value;
+ var sel = document.getElementById(selectID);
+ if (val.match(/^--+$/)) {
+ sel.value = 'clear';
+ inputEl.value = '';
+ } else if (val) {
+ sel.value = 'set';
+ }
+}
+
+
+function _ignoreWidgetIfOpIsClear(selectEl, inputID) {
+ if (selectEl.value == 'clear') {
+ document.getElementById(inputID).value = '';
+ }
+}
+
+$("bulk_form").addEventListener("submit", bulkOnSubmit);
+
+if ($("comment")) {
+ $("comment").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+}
+if ($("statusenter")) {
+ $("statusenter").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("statusenter").addEventListener("keyup", function(event) {
+ _dirty();
+ _checkAutoClear(event.target, "op_statusenter");
+ return _confirmNovelStatus(event.target);
+ });
+}
+if ($("ownerenter")) {
+ $("ownerenter").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("ownerenter").addEventListener("keyup", function(event) {
+ _dirty();
+ _checkAutoClear(event.target, "op_ownerenter");
+ return true;
+ });
+}
+if ($("memberenter")) {
+ $("memberenter").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("memberenter").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+}
+if ($("componententer")) {
+ $("componententer").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("componententer").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+}
+
+if ($("move_toenter")) {
+ $("move_toenter").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("move_toenter").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+}
+
+if ($("submit_btn")) {
+ $("submit_btn").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ $("submit_btn").addEventListener("mousedown", function(event) {
+ _acrob(null);
+ });
+ $("submit_btn").addEventListener("click", function(event) {
+ _trimCommas();
+ });
+}
+if ($("discard")) {
+ $("discard").addEventListener("click", function(event) {
+ _confirmDiscardEntry(this);
+ event.preventDefault();
+ });
+}
+
+var labelInputs = document.getElementsByClassName("labelinput");
+for (var i = 0; i < labelInputs.length; ++i) {
+ var labelInput = labelInputs[[]i];
+ labelInput.addEventListener("keyup", function (event) {
+ _dirty();
+ if (event.target.getAttribute("data-show-id") &&
+ event.target.getAttribute("data-hide-id") &&
+ event.target.value) {
+ _showID(event.target.getAttribute("data-show-id"));
+ _hideID(event.target.getAttribute("data-hide-id"));
+ }
+ return _vallab(event.target);
+ });
+ labelInput.addEventListener("blur", function (event) {
+ return _vallab(event.target);
+ });
+ labelInput.addEventListener("focus", function (event) {
+ return _acof(event);
+ });
+}
+
+var addRowLinks = document.getElementsByClassName("fakelink");
+for (var i = 0; i < addRowLinks.length; ++i) {
+ var rowLink = addRowLinks[[]i];
+ rowLink.addEventListener("click", function (event) {
+ _acrob(null);
+ var insteadID = event.target.getAttribute("data-instead");
+ if (insteadID)
+ _showInstead(insteadID, this);
+ });
+}
+
+});
+</script>
+
+[end]
+
+[include "field-value-widgets-js.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-bulk-operator-part.ezt b/appengine/monorail/templates/tracker/issue-bulk-operator-part.ezt
new file mode 100644
index 0000000..3af2c0b
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-bulk-operator-part.ezt
@@ -0,0 +1,28 @@
+[# Display a <select> widget with options to set/append/remove/clear the field.
+ Args:
+ arg0: element ID of widget to disable if Clear is selected. The form name and ID
+ of the <select> will be "op_" + arg0.
+ arg1: "multi" for multi-valued fields so that "Append" and "Remove" are offered.
+ ]
+<select name="op_[arg0]" id="op_[arg0]" style="width:9em" tabindex="-1">
+ [is arg1 "multi"]
+ <option value="append" selected="selected">Append +=</option>
+ <option value="remove">Remove -=</option>
+ [# TODO(jrobbins): <option value="setexact">Set exactly :=</option>]
+ [else]
+ <option value="set" selected="selected">Set =</option>
+ <option value="clear">Clear</option>
+ [end]
+</select>
+
+[is arg1 "multi"][else]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("op_[arg0]")) {
+ $("op_[arg0]").addEventListener("change", function(event) {
+ _ignoreWidgetIfOpIsClear(event.target, '[arg0]');
+ });
+ }
+});
+</script>
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-can-widget.ezt b/appengine/monorail/templates/tracker/issue-can-widget.ezt
new file mode 100644
index 0000000..abd8c66
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-can-widget.ezt
@@ -0,0 +1,70 @@
+[# This is used in the issue search form and issue advanced search page. We want to show the same options in both contexts.]
+
+<option disabled="disabled">Search within:</option>
+<option value="1" [is can "1"]selected=selected[end]
+ title="All issues in the project"> All issues</option>
+<option value="2" [is can "2"]selected=selected[end]
+ title="All issues except ones with a closed status"> Open issues</option>
+
+[if-any logged_in_user]
+ [define username][logged_in_user.email][end]
+ [is arg0 "admin"][define username]logged-in-user[end][end]
+ <option value="3" [is can "3"]selected=selected[end]
+ title="[[]Open issues] owner=[username]"> Open and owned by me</option>
+ <option value="4" [is can "4"]selected=selected[end]
+ title="[[]Open issues] reporter=[username]"> Open and reported by me</option>
+ <option value="5" [is can "5"]selected=selected[end]
+ title="[[]Open issues] starredby:[username]"> Open and starred by me</option>
+ <option value="8" [is can "8"]selected=selected[end]
+ title="[[]Open issues] commentby:[username]"> Open with comment by me</option>
+[end]
+
+[# TODO(jrobbins): deprecate these and tell projects to define canned queries instead.]
+<option value="6" [is can "6"]selected=selected[end]
+ title="[[]Open issues] status=New"> New issues</option>
+<option value="7" [is can "7"]selected=selected[end]
+ title="[[]All issues] status=fixed,done"> Issues to verify</option>
+
+[is arg0 "admin"][else]
+ [define first]Yes[end]
+ [for canned_queries]
+ [is first "Yes"]
+ [define first]No[end]
+ <option disabled="disabled">----</option>
+ [end]
+ [# TODO(jrobbins): canned query visibility conditions, e.g., members only. ]
+ <option value="[canned_queries.query_id]"
+ [is can canned_queries.query_id]selected=selected[end]
+ title="[canned_queries.docstring]"
+ > [canned_queries.name]</option>
+ [end]
+ [if-any perms.EditProject][if-any is_cross_project][else]
+ [is first "Yes"]
+ [define first]No[end]
+ <option disabled="disabled">----</option>
+ [end]
+ <option value="manageprojectqueries"
+ > Manage project queries...</option>
+ [end][end]
+
+ [if-any logged_in_user]
+ [define first]Yes[end]
+ [for saved_queries]
+ [is first "Yes"]
+ [define first]No[end]
+ <option disabled="disabled">----</option>
+ [end]
+ <option value="[saved_queries.query_id]"
+ [is can saved_queries.query_id]selected=selected[end]
+ title="[saved_queries.docstring]"
+ > [saved_queries.name]</option>
+ [end]
+ [is first "Yes"]
+ [define first]No[end]
+ <option disabled="disabled">----</option>
+ [end]
+ <option value="managemyqueries"
+ > Manage my saved queries...</option>
+ [end][# end if logged in]
+
+[end][# end not "admin"]
diff --git a/appengine/monorail/templates/tracker/issue-change-notification-email.ezt b/appengine/monorail/templates/tracker/issue-change-notification-email.ezt
new file mode 100644
index 0000000..2fa01eb
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-change-notification-email.ezt
@@ -0,0 +1,17 @@
+[is comment.sequence "0"]Status: [is issue.status.name ""]----[else][issue.status.name][end]
+Owner: [is issue.owner.username ""]----[else][issue.owner.display_name][end][if-any issue.cc]
+CC: [for issue.cc][issue.cc.display_name][if-index issue.cc last] [else], [end] [end][end][if-any issue.labels]
+Labels:[for issue.labels] [issue.labels.name][end][end][if-any issue.components]
+Components:[for issue.components] [issue.components.path][end][end][if-any issue.blocked_on]
+BlockedOn:[for issue.blocked_on] [if-any issue.blocked_on.visible][issue.blocked_on.display_name][end][end][end]
+[else][if-any comment.amendments]Updates:
+[for comment.amendments] [comment.amendments.field_name]: [format "raw"][comment.amendments.newvalue][end]
+[end][end][end]
+[is comment.sequence "0"]New issue [issue.local_id][else]Comment #[comment.sequence] on issue [issue.local_id][end] by [comment.creator.display_name]: [format "raw"][summary][end]
+[detail_url]
+
+[if-any comment.content][for comment.text_runs][include "render-plain-text.ezt" comment.text_runs][end][else](No comment was entered for this change.)[end]
+[if-any comment.attachments]
+Attachments:
+[for comment.attachments] [comment.attachments.filename] [comment.attachments.filesizestr]
+[end][end]
diff --git a/appengine/monorail/templates/tracker/issue-comment-amendment-part.ezt b/appengine/monorail/templates/tracker/issue-comment-amendment-part.ezt
new file mode 100644
index 0000000..9414e12
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-comment-amendment-part.ezt
@@ -0,0 +1 @@
+<b>[comments.amendments.field_name]:</b>[for comments.amendments.values] [if-any comments.amendments.values.url]<a href="[comments.amendments.values.url]">[end][comments.amendments.values.value][if-any comments.amendments.values.url]</a>[end][end]<br>
diff --git a/appengine/monorail/templates/tracker/issue-comment-normal-part.ezt b/appengine/monorail/templates/tracker/issue-comment-normal-part.ezt
new file mode 100644
index 0000000..9e780ee
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-comment-normal-part.ezt
@@ -0,0 +1,81 @@
+<div class="cursor_off vt issuecomment" id="hc[comments.sequence]">
+
+ <div class="issuecommentheader">
+ <span class="author">
+ [if-any comments.creator_role]<span class="role_label">Project Member</span>[end]
+ <a name="c[comments.sequence]"
+ href="[project_home_url]/issues/detail?id=[issue.local_id]#c[comments.sequence]">Comment [comments.sequence]</a>
+ by
+ [include "../framework/user-link.ezt" comments.creator]</span>,
+ <span class="date" title="[comments.date_tooltip] UTC"
+ >[comments.date_string][comments.date_relative]</span>
+ [if-any page_perms.ViewInboundMessages comments.can_delete_bool][if-any comments.inbound_message]
+ via <a target="_blank" href="original?id=[issue.local_id]&seq=[comments.sequence]">email</a>
+ [end][end]
+ </div>[#issuecommentheader]
+
+ <div class="issuecommentbody">
+ [if-any read_only][else]
+ <div style="float:right; margin-right:.3em; text-align:right" class="ichcommands">
+ <span id='comment-action-loading-[comments.sequence]'
+ class='loading' style='display:none;'>Processing</span>
+
+ [if-any comments.can_delete_bool][if-any arg0]
+ [if-any comments.is_deleted_bool]
+ <div style="font-size:90%">
+ This comment is not visible to normal users.
+ </div>
+ [if-any comments.is_spam_bool]
+ <a style="font-size: 90%" href="#"
+ class="flagComment" data-spam="[comments.is_spam]" data-id="[comments.id]"
+ data-sequence='[comments.sequence]'
+ >Unmark this comment as spam</a>
+ [else]
+ <a style="font-size: 90%" href="#"
+ class="delComment" data-sequence="[comments.sequence]" data-mode="0"
+ >Undelete comment</a>
+ [end]
+ [else]
+ <a style="font-size: 90%" href="#"
+ class="delComment" data-sequence="[comments.sequence]" data-mode="1"
+ >Delete comment</a>
+ [end]
+ [end][end]
+
+ [if-any page_perms.FlagSpam]
+ <a class="spamflag flagComment"
+ data-spam="[comments.flagged_spam]" data-id="[comments.id]" data-sequence="[comments.sequence]"
+ style="color:[if-any comments.flagged_spam_bool]red[else]gray[end]; text-decoration:none;"
+ title="[if-any comments.flagged_spam_bool]Un-f[else]F[end]lag as spam">
+ [if-any comments.flagged_spam_bool]⚑[else]⚐[end]
+ </a>
+ [end]
+
+ </div>
+ [end]
+
+ [if-any comments.amendments]
+ <table class="updates">
+ <tr><td class="box-inner">
+ [for comments.amendments]
+ [include "issue-comment-amendment-part.ezt" comments.amendments]
+ [end]
+ </td></tr>
+ </table>
+ [end]
+
+[if-any comments.content]<pre class="issue_text">
+[for comments.text_runs][include "render-rich-text.ezt" comments.text_runs][end]
+</pre>[end]
+
+ [if-any comments.attachments]
+ <div class="attachments">
+ [for comments.attachments]
+ [define offer_delete_attach][if-any arg0][comments.can_delete_bool][else][end][end]
+ [include "issue-attachment-part.ezt" comments.sequence comments.attachments offer_delete_attach]
+ [end]
+ </div>
+ [end]
+
+ </div>[#issuecommentbody]
+</div>[#issuecomment]
diff --git a/appengine/monorail/templates/tracker/issue-comment-tombstone-part.ezt b/appengine/monorail/templates/tracker/issue-comment-tombstone-part.ezt
new file mode 100644
index 0000000..39063fa
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-comment-tombstone-part.ezt
@@ -0,0 +1,10 @@
+<div class="cursor_off vt issuecomment" id="hc[comments.sequence]">
+ <div class="issuecommentheader" style="background:#e8e8e8">
+ <span class="author">
+ <a name="c[comments.sequence]"
+ href="[project_home_url]/issues/detail?id=[issue.local_id]#c[comments.sequence]">Comment [comments.sequence]</a>
+ <span style="margin-left: 1em">Deleted</span>
+ </span>
+ </div>
+</div>
+
diff --git a/appengine/monorail/templates/tracker/issue-comments-part.ezt b/appengine/monorail/templates/tracker/issue-comments-part.ezt
new file mode 100644
index 0000000..b0f1315
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-comments-part.ezt
@@ -0,0 +1,60 @@
+[# Show the list of issue comments. This is used on the issue detail page and also
+ the issue peek page.
+
+ arg0: If any value, then show the "Delete comment" links, if appropriate.
+]
+
+<div class="cursor_off vt issuedescription" id="hc0">
+
+<pre class="issue_text">
+[for description.text_runs][include "render-rich-text.ezt" description.text_runs][end]
+</pre>
+
+[if-any description.attachments]
+ <div class="attachments">
+ [for description.attachments]
+ [include "issue-attachment-part.ezt" "0" description.attachments description.can_delete_bool]
+ [end]
+ </div>
+[end]
+
+</div>[#issuedescription]
+
+
+[if-any cmnt_pagination]
+ [if-any cmnt_pagination.visible]
+ <div class="vt issuecomment" width="100%" style="background:#e5ecf9; padding:2px .7em; margin:0; border:0">
+ [include "../framework/comment-pagination-part.ezt"]
+ </div>
+ [end]
+[end]
+
+[for comments]
+ [if-any arg0]
+ [define show_comment][if-any comments.visible_bool]Yes[else]No[end][end]
+ [else]
+ [define show_comment][if-any comments.is_deleted_bool]No[else]Yes[end][end]
+ [end]
+
+ [is show_comment "Yes"]
+ <div class="[if-any comments.is_deleted_bool] delcom ifExpand[end]">
+ [include "issue-comment-normal-part.ezt" arg0]
+ </div>
+ [if-any comments.is_deleted_bool]
+ <div class="ifCollapse">
+ [include "issue-comment-tombstone-part.ezt" arg0]
+ </div>
+ [end]
+ [else]
+ [include "issue-comment-tombstone-part.ezt" arg0]
+ [end][# show_comment]
+[end][#for comments]
+
+
+[if-any cmnt_pagination]
+ [if-any cmnt_pagination.visible]
+ <div class="vt issuecomment" width="100%" style="background:#e5ecf9; padding:2px .7em; margin:0">
+ [include "../framework/comment-pagination-part.ezt"]
+ </div>
+ [end]
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-detail-page.ezt b/appengine/monorail/templates/tracker/issue-detail-page.ezt
new file mode 100644
index 0000000..71add46
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-detail-page.ezt
@@ -0,0 +1,259 @@
+[define title]Issue [issue.local_id][end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[define any_visible_deleted]false[end]
+[for comments]
+ [if-any comments.can_delete_bool]
+ [if-any comments.is_deleted_bool]
+ [define any_visible_deleted]true[end]
+ [end]
+ [end]
+[end]
+
+[if-any page_perms.EditIssue page_perms.EditIssueSummary page_perms.EditIssueStatus page_perms.EditIssueOwner page_perms.EditIssueCc]
+ [define offer_make_changes]yes[end]
+[else]
+ [define offer_make_changes][end]
+[end]
+
+<div id="color_control" class="[if-any issue.is_open][else]closed_colors[end]">
+
+<table width="100%" cellpadding="0" cellspacing="0" border="0" class="issuepage" id="meta-container">
+
+<tbody class="collapse"> [# collapse/expand controls undelete-able comments]
+ <tr>
+ <td nowrap="nowrap" class="vt h3 issuemetaheader" style="min-width:8em">
+ [define user_can_star]false[end]
+ [if-any page_perms.SetStar]
+ [if-any read_only][else]
+ [define user_can_star]true[end]
+ <a class="star" id="star"
+ style="color:[if-any starred]cornflowerblue[else]gray[end]"
+ title="[if-any starred]Un-s[else]S[end]tar this issue">
+ [if-any starred]★[else]☆[end]
+ </a>
+ [end]
+ [end]
+ <a href="detail?id=[issue.local_id]">Issue [issue.local_id]</a>
+
+ [if-any page_perms.FlagSpam]
+ <a class="spamflag" id="flag_spam"
+ style="float:right; color:[if-any issue.flagged_spam_bool]red[else]gray[end]"
+ title="[if-any issue.flagged_spam_bool]Un-f[else]F[end]lag as spam">
+ [if-any issue.flagged_spam_bool]⚑[else]⚐[end]
+ </a>
+ <form action="flagspam.do" method="post" id="spam_form" style="display:none">
+ <input type="hidden" name="token" value="[flag_spam_token]">
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="spam" value="[if-any issue.flagged_spam_bool]false[else]true[end]">
+ </form>
+ [end]
+ </td>
+ <td width="90%" class="vt issueheader" style="border-right: 2px solid white">
+ [if-any issue.flagged_spam_bool]<div style="border:1px solid red; background:#fdd; padding:1em">You have reported this issue as spam. To undo this, click the flag again.</div>[end]
+ <span class="h3" style="padding-left:4px">[issue.summary]</span>
+ </td>
+ <td class="issueheader">
+ [if-any flipper.show]
+ <div class="pagination">
+ [if-any flipper.prev_url]<a href="[flipper.prev_url]" title="Prev">‹ Prev</a>[else][end]
+ [flipper.current] of [flipper.total_count]
+ [if-any flipper.next_url]<a href="[flipper.next_url]" title="Next">Next ›</a>[else][end]
+ </div>[end]
+ </td>
+ </tr>
+ <tr>
+ <td nowrap="nowrap" class="issuemetaheader">
+ Starred by [issue.star_count]
+ [is issue.star_count "1"] user [else] users [end]
+ </td>
+ <td class="issueheader" style="padding-left:6px; border-right: 2px solid white">
+ [if-any description.creator_role]<span class="role_label">Project Member</span>[end]
+ Reported by
+ [include "../framework/user-link.ezt" description.creator],
+ <span class="date" title="[description.date_tooltip] UTC">[description.date_string][description.date_relative]</span>
+ [# Note: initial issue description cannot have come via email]
+ </td>
+ <td align="center" nowrap="nowrap" class="issueheader">
+ [if-any back_to_list_url]
+ <a href="[back_to_list_url]">Back to list</a>
+ [end]
+ </td>
+ </tr>
+
+
+ <tr>
+ <td id="issuemeta">
+ [if-any noisy]
+ <br>
+ Comments by non-members will not trigger notification emails to users who starred this issue.
+ [end]
+ <div id="meta-float">
+ [include "issue-meta-part.ezt" "show make changes below"]
+
+ [is any_visible_deleted "true"]
+ <br><br>
+ <div style="text-align:right; margin-right:.3em; font-size:90%">
+ <a class="ifCollapse toggleCollapse" href="#"
+ style="white-space:nowrap">Show deleted comments</a>
+ <a class="ifExpand toggleCollapse" href="#"
+ style="white-space:nowrap">Hide deleted comments</a>
+ </div>
+ [end]
+
+ </div>
+ </td>
+ <td class="vt issuedescription" width="100%" id="cursorarea" colspan="2">
+ [include "issue-comments-part.ezt" "show-delete-comment-link"]
+ </td>
+ </tr>
+
+[if-any read_only][else]
+ [if-any logged_in_user][else]
+ <tr>
+ <td colspan="2"></td>
+ <td class="vt issuecomment">
+ <span class="indicator">►</span> <a href="[login_url]"
+ >Sign in</a> to add a comment
+ </td>
+ </tr>
+ [end]
+[end]
+</tbody>
+</table>
+<br>
+
+[include "../framework/footer-script.ezt"]
+
+[define user_can_comment]false[end]
+[if-any read_only][else]
+ [if-any page_perms.AddIssueComment]
+ [include "issue-update-form.ezt"]
+ [define user_can_comment]true[end]
+ [end]
+[end]
+
+
+</div> [# end color_control]
+
+[# Form used to submit comment soft delete and undelete changes.]
+<form name="delcom" action="delComment.do?q=[query]&can=[can]&groupby=[format "url"][groupby][end]&sort=[format "url"][sortspec][end]&colspec=[format "url"][colspec][end]" method="POST">
+ <input type="hidden" name="sequence_num" value="">
+ <input type="hidden" name="mode" value="">
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="token" value="[delComment_form_token]">
+</form>
+
+[# Form used to flag/unflag comments as spam.]
+<form name="flagcom" action="flagspam.do" method="post">
+ <input type="hidden" name="comment_id" value="">
+ <input type="hidden" name="sequence_num" value="">
+ <input type="hidden" name="spam" value="">
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="token" value="[flag_spam_token]">
+</form>
+
+
+<div id="helparea"></div>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions('[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+ _fetchUserProjects(false);
+
+ _onload();
+
+ function delComment(sequence_num, delete_mode) {
+ var f = document.forms[[]"delcom"];
+ f.sequence_num.value = sequence_num;
+ f.mode.value = delete_mode;
+ [# TODO(jrobbins): submit in a subframe,XHR or ask user before losing comment in-progress ]
+ var loading = $('comment-action-loading-' + sequence_num);
+ loading.style.display = 'inline';
+ f.submit();
+ return false;
+ }
+
+ _floatMetadata();
+
+ if ($("star"))
+ $("star").addEventListener("click", function (event) {
+ _TKR_toggleStar($("star"), "[projectname]", [issue.local_id], null, "[set_star_token]");
+ _TKR_syncStarIcons($("star"), "star2");
+ });
+
+ var delCommentElements = document.getElementsByClassName("delComment");
+ for (var i = 0; i < delCommentElements.length; ++i) {
+ var el = delCommentElements[[]i];
+ el.addEventListener("click", function(event) {
+ var sequence = event.target.getAttribute("data-sequence");
+ var mode = event.target.getAttribute("data-mode");
+ delComment(sequence, mode);
+ event.preventDefault();
+ });
+ }
+
+ function flagComment(comment_id, sequence_num, isSpam) {
+ var f = document.forms[[]"flagcom"];
+ f.comment_id.value = comment_id;
+ f.sequence_num.value = sequence_num;
+ f.spam.value = isSpam.toLowerCase() == "false";
+
+ var loading = $('comment-action-loading-' + sequence_num);
+ loading.style.display = 'inline';
+
+ f.submit();
+ return false;
+ }
+
+ var flagCommentElements = document.getElementsByClassName("flagComment");
+ for (var i = 0; i < flagCommentElements.length; ++i) {
+ var el = flagCommentElements[[]i];
+ el.addEventListener("click", function(event) {
+ var id = event.target.getAttribute("data-id");
+ var sequence = event.target.getAttribute("data-sequence");
+ var isSpam = event.target.getAttribute("data-spam");
+ flagComment(id, sequence, isSpam);
+ event.preventDefault();
+ });
+ }
+
+ window.userMadeChanges = false;
+ var inputs = document.querySelectorAll('input[type~="text"], textarea');
+ for (var i = 0; i < inputs.length; i++) {
+ var el = inputs[[]i];
+ el.addEventListener("input", function(event) {
+ if (event.target.id != "searchq") {
+ userMadeChanges = true;
+ }
+ });
+ }
+
+ window.onbeforeunload = function() {
+ if (userMadeChanges || TKR_isDirty) {
+ return "You have unsaved changes. Leave this page and discard them?";
+ }
+ };
+
+
+
+});
+</script>
+
+<script type="text/javascript" defer src="/static/third_party/js/kibbles-1.3.3.comp.js" nonce="[nonce]"></script>
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _setupKibblesOnDetailPage(
+ [if-any back_to_list_url]'[format "js"][back_to_list_url][end]'[else]'[project_home_url]/issues/list'[end],
+ '[project_home_url]/issues/entry',
+ '[if-any flipper.show][if-any flipper.prev_url][format "js"][flipper.prev_url][end][end][end]',
+ '[if-any flipper.show][if-any flipper.next_url][format "js"][flipper.next_url][end][end][end]',
+ '[projectname]', [issue.local_id],
+ [user_can_comment], [user_can_star], '[set_star_token]');
+});
+</script>
+
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-entry-page.ezt b/appengine/monorail/templates/tracker/issue-entry-page.ezt
new file mode 100644
index 0000000..a172eae
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-entry-page.ezt
@@ -0,0 +1,401 @@
+[define title]New Issue[end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[# Note: base permission for this page is CreateIssue]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<div style="margin-top: 0; padding: 3px;" class="closed">
+ <form action="entry.do" method="POST" style="margin: 0; padding: 0" enctype="multipart/form-data" id="create_issue_form">
+ <input type="hidden" name="token" value="[form_token]">
+ <input type="hidden" name="template_name" value="[template_name]">
+ <input type="hidden" name="star" id="star_input" value="1">
+ <table cellpadding="0" cellspacing="0" border="0">
+ <tr><td>
+
+ <table cellspacing="0" cellpadding="3" border="0" class="rowmajor vt">
+ [if-any offer_templates]
+ <tr><th><label for="template_name">Template:</label></th>
+ <td><select name="template_name" id="template_name" data-project-name="[projectname]">
+ [for config.templates]
+ [if-any config.templates.can_view]
+ <option value="[config.templates.name]" [is config.templates.name template_name]selected=selected[end]>[config.templates.name]</option>
+ [end]
+ [end]
+          </select></td>
+ </tr>
+ [end]
+
+ <tr><th><label for="summary">Summary:</label></th>
+ <td colspan="2" class="inplace">
+ <input type="text" id="summary" name="summary" value="[initial_summary]" required data-clear-summary-on-click="[clear_summary_on_click]">
+ [if-any errors.summary]
+ <div class="fielderror">[errors.summary]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr><th rowspan="3"><label for="comment">Description:</label></th>
+ <td colspan="2">
+ <textarea style="width:100%" cols="80" rows="15" name="comment" id="comment" class="issue_text" required>[initial_description]
+</textarea> [# We want 1 final newline but 0 trailing spaces in the textarea]
+ [if-any errors.comment]
+ <div class="fielderror">[errors.comment]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr><td colspan="2">
+ <div id="attachmentareadeventry"></div>
+ </td></tr>
+
+ <tr>
+ <td style="width: 12em">
+ [if-any allow_attachments]
+ <span id="attachprompt"><img width="16" height="16" src="/static/images/paperclip.png" border="0" alt="A paperclip">
+ <a href="#" id="attachafile">Attach a file</a></span>
+ <div id="attachmaxsize" style="margin-left:1.2em; display:none">Max. attachments: [max_attach_size]</div>
+ [if-any errors.attachments]
+ <div class="fielderror">[errors.attachments]</div>
+ [end]
+ [else]
+ <div style="color:#666">Issue attachment storage quota exceeded.</div>
+ [end]
+ </td>
+ <td id="star_cell">
+ [# Note: if the user is permitted to enter an issue, they are permitted to star it.]
+ <a class="star" id="star" style="color:cornflowerblue;">★</a>
+ Notify me of issue changes, if enabled in <a id="settings" target="new" href="/hosting/settings">settings</a>
+ </td>
+ </tr>
+
+ <tr [if-any page_perms.EditIssue page_perms.EditIssueStatus][else]style="display:none;"[end]><th width="10%"><label for="statusenter">Status:</label></th>
+ <td class="inplace" style="width: 12em">
+ <input type="text" id="statusenter" autocomplete="off" name="status" value="[initial_status]">
+ </td>
+ </tr>
+ <tr [if-any page_perms.EditIssue page_perms.EditIssueOwner][else]style="display:none;"[end]><th width="10%"><label for="ownerenter">Owner:</label></th>
+ <td class="inplace" style="width: 12em">
+ <input type="text" id="ownerenter" autocomplete="off" name="owner" value="[initial_owner]">
+ [if-any errors.owner]
+ <div class="fielderror">[errors.owner]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr [if-any page_perms.EditIssue page_perms.EditIssueCc][else]style="display:none;"[end]><th><label for="memberenter">Cc:</label></th>
+ <td colspan="2" class="inplace">
+ <input type="text" multiple id="memberenter" autocomplete="off" name="cc" value="[initial_cc]">
+ [if-any errors.cc]
+ <div class="fielderror">[errors.cc]</div>
+ [end]
+ </td>
+ </tr>
+
+ [# TODO(jrobbins): page_perms.EditIssueComponent]
+ <tr [if-any page_perms.EditIssue][else]style="display:none;"[end]><th><label for="components">Components:</label></th>
+ <td colspan="2" class="inplace">
+ <input type="text" id="components" autocomplete="off" name="components" value="[initial_components]">
+ [if-any errors.components]
+ <div class="fielderror">[errors.components]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tbody [if-any page_perms.EditIssue][else]style="display:none;"[end] class="collapse">
+ [define any_fields_to_reveal]No[end]
+ [for fields]
+ [if-any fields.applicable]
+ [# TODO(jrobbins): determine applicability dynamically and update fields in JS]
+ <tr [if-any fields.display][else]class="ifExpand"[define any_fields_to_reveal]Yes[end][end]>
+ <th>[fields.field_name]:</th>
+ <td colspan="2">
+ [include "field-value-widgets.ezt" fields.field_def.is_multivalued_bool]
+ <div class="fielderror" style="display:none" id="error_custom_[fields.field_id]"></div>
+ </td>
+              </tr>
+ [end]
+ [end]
+ [is any_fields_to_reveal "Yes"]
+ <tr class="ifCollapse">
+          <td colspan="2"><a href="#" class="toggleCollapse">Show all fields</a></td>
+ </tr>
+ [end]
+ </tbody>
+
+ <tr [if-any page_perms.EditIssue][else]style="display:none;"[end]><th><label for="label0">Labels:</label></th>
+ <td colspan="2" class="labelediting">
+ [include "label-fields.ezt" "just-two"]
+ </td>
+ </tr>
+
+ <tr [if-any page_perms.EditIssue][else]style="display:none;"[end]><th style="white-space:nowrap"><label for="blocked_on">Blocked on:</label></th>
+ <td class="inplace" colspan="2">
+ <input type="text" name="blocked_on" id="blocked_on" value="[initial_blocked_on]">
+ [if-any errors.blocked_on]
+ <div class="fielderror">[errors.blocked_on]</div>
+ [end]
+ </td>
+ </tr>
+ <tr [if-any page_perms.EditIssue][else]style="display:none;"[end]><th><label for="blocking">Blocking:</label></th>
+ <td class="inplace" colspan="2">
+ <input type="text" name="blocking" id="blocking" value="[initial_blocking]" />
+ [if-any errors.blocking]
+ <div class="fielderror">[errors.blocking]</div>
+ [end]
+ </td>
+ </tr>
+
+ [if-any show_captcha]
+ <tr><th style="white-space:nowrap">Human Verification:</th>
+ <td colspan="2">
+ [include "../framework/captcha-field.ezt"]
+ </td>
+ </tr>
+ [end]
+
+ [include "../framework/label-validation-row.ezt"]
+ [include "../framework/component-validation-row.ezt"]
+ </table>
+
+ <div style="padding:6px">
+ <input type="submit" id="submit_btn" name="btn" value="Submit issue">
+ <input type="button" id="discard" name="nobtn" value="Discard">
+ </div>
+
+ </td>
+ </tr>
+ </table>
+ </form>
+</div>
+
+<div id="helparea"></div>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+
+ if ($("template_name")) {
+ $("template_name").addEventListener("change", function(event) {
+ _switchTemplate(event.target.getAttribute("data-project-name"),
+ event.target.value)
+ });
+ }
+
+ if ($("summary")) {
+ var clearSummaryOnClick = $("summary").getAttribute("data-clear-summary-on-click");
+ if (clearSummaryOnClick) {
+ $("summary").addEventListener("keydown", function(event) {
+ _clearOnFirstEvent();
+ });
+ }
+ $("summary").addEventListener("click", function(event) {
+ if (clearSummaryOnClick) {
+ _clearOnFirstEvent();
+ }
+ checksubmit();
+ });
+ $("summary").addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ $("summary").addEventListener("keyup", function(event) {
+ _dirty();
+ checksubmit();
+ return true;
+ });
+ }
+
+ if ($("comment")) {
+ $("comment").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ if ($("settings")) {
+ $("settings").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ }
+ if ($("statusenter")) {
+ $("statusenter").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("statusenter").addEventListener("keyup", function(event) {
+ _dirty();
+ return _confirmNovelStatus(event.target);
+ });
+ }
+
+ if ($("submit_btn")) {
+ $("submit_btn").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ $("submit_btn").addEventListener("click", function(event) {
+ _acrob(null);
+ _trimCommas();
+ userMadeChanges = false;
+ TKR_isDirty = false;
+ });
+ }
+ if ($("discard")) {
+ $("discard").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ $("discard").addEventListener("click", function(event) {
+ _acrob(null);
+ _confirmDiscardEntry(event.target);
+ event.preventDefault();
+ });
+ }
+
+ window.allowSubmit = true;
+  $("create_issue_form").addEventListener("submit", function(event) {
+ if (allowSubmit) {
+ allowSubmit = false;
+ $("submit_btn").value = "Creating issue...";
+ $("submit_btn").disabled = "disabled";
+ }
+ else {
+ event.preventDefault();
+ }
+ });
+
+ var _blockIdsToListeners = [[]"blocked_on", "blocking"];
+ for (var i = 0; i < _blockIdsToListeners.length; i++) {
+ var id = _blockIdsToListeners[[]i];
+ if ($(id)) {
+ $(id).addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ $(id).addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ }
+
+ var _idsToAddDefaultListeners = [[]"ownerenter", "memberenter", "components"];
+ for (var i = 0; i < _idsToAddDefaultListeners.length; i++) {
+ var id = _idsToAddDefaultListeners[[]i];
+ if ($(id)) {
+ $(id).addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $(id).addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ }
+
+ if ($("attachafile")) {
+ $("attachafile").addEventListener("click", function(event) {
+ _addAttachmentFields("attachmentareadeventry");
+ event.preventDefault();
+ });
+ }
+
+ window.userMadeChanges = false;
+ var inputs = document.querySelectorAll('input[type~="text"], textarea');
+ for (var i = 0; i < inputs.length; i++) {
+ var el = inputs[[]i];
+ el.addEventListener("input", function(event) {
+ if (event.target.id != "searchq") {
+ userMadeChanges = true;
+ }
+ });
+ }
+
+ window.onbeforeunload = function() {
+ if (userMadeChanges || TKR_isDirty) {
+ return "You have unsaved changes. Leave this page and discard them?";
+ }
+ };
+
+ _lfidprefix = 'labelenter';
+ _onload();
+ [if-any any_errors]
+ function _clearOnFirstEvent(){}
+ [else]
+ document.getElementById('summary').select();
+ [end]
+
+ _fetchOptions("[projectname]", "issueOptions",
+ CS_env.token, [project.cached_content_timestamp]);
+ [if-any page_perms.EditIssue page_perms.EditIssueStatus page_perms.EditIssueOwner page_perms.EditIssueCc]
+ setTimeout(_forceProperTableWidth, 100);
+ [end]
+
+ [if-any page_perms.EditIssue]
+ _exposeExistingLabelFields();
+ [end]
+
+ var field_error;
+ [if-any errors.custom_fields]
+ [for errors.custom_fields]
+ field_error = document.getElementById('error_custom_' + [errors.custom_fields.field_id]);
+ field_error.innerText = "[errors.custom_fields.message]";
+ field_error.style.display = "";
+ [end]
+ [end]
+
+
+
+function checksubmit() {
+ var restrict_to_known = [if-any restrict_to_known]true[else]false[end];
+ var confirmmsg = document.getElementById('confirmmsg');
+ var cg = document.getElementById('cg');
+ var label_blocksubmitmsg = document.getElementById('blocksubmitmsg');
+ var component_blocksubmitmsg = document.getElementById('component_blocksubmitmsg');
+
+ // Check for templates that require components.
+ var component_required = [if-any component_required]true[else]false[end];
+ var components = document.getElementById('components');
+ if (components && component_required && components.value == "") {
+ component_blocksubmitmsg.innerText = "You must specify a component for this template.";
+ } else {
+ component_blocksubmitmsg.innerText = "";
+ }
+
+ var submit = document.getElementById('submit_btn');
+ var summary = document.getElementById('summary');
+ if ((restrict_to_known && confirmmsg && confirmmsg.innerText) ||
+ (label_blocksubmitmsg && label_blocksubmitmsg.innerText) ||
+ (component_blocksubmitmsg && component_blocksubmitmsg.innerText) ||
+ (cg && cg.value == "") ||
+ (!allowSubmit) ||
+ (!summary.value [if-any must_edit_summary]|| summary.value == '[format "js"][template_summary][end]'[end])) {
+ submit.disabled='disabled';
+ } else {
+ submit.disabled='';
+ }
+}
+checksubmit();
+setInterval(checksubmit, 700); [# catch changes that were not keystrokes, e.g., paste menu item.]
+
+$("star").addEventListener("click", function (event) {
+ _TKR_toggleStarLocal($("star"), "star_input");
+});
+
+});
+</script>
+
+[# TODO(jrobbins): Re-enable keystrokes on issue entry after resolving issue 3039]
+<!--
+<script type="text/javascript" defer src="/static/third_party/js/kibbles-1.3.3.comp.js" nonce="[nonce]"></script>
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _setupKibblesOnEntryPage('[project_home_url]/issues/list');
+});
+</script>
+-->
+
+[end]
+
+[include "field-value-widgets-js.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-export-page.ezt b/appengine/monorail/templates/tracker/issue-export-page.ezt
new file mode 100644
index 0000000..f710fda
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-export-page.ezt
@@ -0,0 +1,32 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<h3>Issue export</h3>
+
+<form action="export/json" method="GET">
+ [# We use xhr_token here because we are doing a GET on a JSON servlet.]
+ <input type="hidden" name="token" value="[xhr_token]">
+ <table cellpadding="3" class="rowmajor vt">
+ <tr>
+ <th>Format</th>
+ <td style="width:90%">JSON</td>
+ </tr>
+ <tr>
+ <th>Start</th>
+ <td><input type="number" size="7" name="start" value="[initial_start]"></td>
+ </tr>
+ <tr>
+ <th>Num</th>
+ <td><input type="number" size="4" name="num" value="[initial_num]"></td>
+ </tr>
+ <tr>
+ <th></th>
+ <td><input type="submit" name="btn" value="Submit"></td>
+ </tr>
+ </table>
+</form>
+
+
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-grid-body.ezt b/appengine/monorail/templates/tracker/issue-grid-body.ezt
new file mode 100644
index 0000000..bef7fe8
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-grid-body.ezt
@@ -0,0 +1,68 @@
+[if-any results]
+
+ [is grid_x_attr "--"][else]
+ <tr>
+ [is grid_y_attr "--"][else]<th> </th>[end]
+ [for grid_x_headings]
+ <th>[grid_x_headings]</th>
+ [end]
+ </tr>
+ [end]
+
+ [for grid_data]
+ <tr class="grid">
+ [is grid_y_attr "--"][else]<th>[grid_data.grid_y_heading]</th>[end]
+
+ [for grid_data.cells_in_row]
+ <td class="vt hoverTarget [is grid_cell_mode "tiles"][else]idcount[end]">
+ [for grid_data.cells_in_row.tiles]
+ [is grid_cell_mode "tiles"]
+ [include "issue-grid-tile.ezt" grid_data.cells_in_row.tiles.starred grid_data.cells_in_row.tiles.local_id grid_data.cells_in_row.tiles.status grid_data.cells_in_row.tiles.summary]
+ [end]
+ [is grid_cell_mode "ids"]
+ <a title="[grid_data.cells_in_row.tiles.summary]"
+ href="detail?id=[grid_data.cells_in_row.tiles.local_id]">[grid_data.cells_in_row.tiles.local_id]</a>
+ [end]
+ [end]
+ [is grid_cell_mode "counts"]
+ [is grid_data.cells_in_row.count "0"]
+ [else]
+ [is grid_data.cells_in_row.count "1"]
+ <a href="detail?id=[for grid_data.cells_in_row.tiles][grid_data.cells_in_row.tiles.local_id][end]"
+ >[grid_data.cells_in_row.count] item</a>
+ [else]
+ <a href="list?can=[can]&q=[grid_data.cells_in_row.drill_down][query]">[grid_data.cells_in_row.count] items</a>
+ [end]
+ [end]
+
+ [end]
+ </td>
+ [end]
+ </tr>
+ [end]
+
+[else]
+
+ <tr>
+ <td colspan="40" class="id">
+ <div style="padding: 3em; text-align: center">
+ [if-any project_has_any_issues]
+ Your search did not generate any results. <br>
+ [is can "1"]
+ You may want to remove some terms from your query.<br>
+ [else]
+ You may want to try your search over <a href="list?can=1&q=[query]&x=[grid_x_attr]&y=[grid_y_attr]&mode=grid">all issues</a>.<br>
+ [end]
+ [else]
+ This project currently has no issues.<br>
+ [if-any page_perms.CreateIssue]
+ [if-any read_only][else]
+ You may want to enter a <a class="id" href="entry">new issue</a>.
+ [end]
+ [end]
+ [end]
+ </div>
+ </td>
+ </tr>
+
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-grid-controls-bottom.ezt b/appengine/monorail/templates/tracker/issue-grid-controls-bottom.ezt
new file mode 100644
index 0000000..cbbf957
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-grid-controls-bottom.ezt
@@ -0,0 +1 @@
+[# Nothing at bottom yet ]
diff --git a/appengine/monorail/templates/tracker/issue-grid-controls-top.ezt b/appengine/monorail/templates/tracker/issue-grid-controls-top.ezt
new file mode 100644
index 0000000..0887a43
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-grid-controls-top.ezt
@@ -0,0 +1,55 @@
+<div class="list">
+<div style="float:right; margin-left:2em; font-size:95%">
+ <a class="buttonify capsule_left" href="list?can=[can]&q=[query]&colspec=[format "url"][colspec][end]&groupby=[format "url"][groupby][end]&sort=[format "url"][sortspec][end]&x=[grid_x_attr]&y=[grid_y_attr]&cells=[grid_cell_mode]">List</a><span
+ class="buttonify capsule_right mode_button_active">Grid</span>
+</div>
+
+[if-any grid_data]
+<div class="pagination">
+ [is pagination.total_count "1"]
+ [pagination.total_count] issue shown
+ [else]
+ [if-any grid_limited][grid_shown] issues of [end]
+ [pagination.total_count] issues shown
+ [end]
+</div>
+[end]
+
+ <form id="colspecform" action="list" method="GET" style="display:inline">
+ <input type="hidden" name="can" value="[can]">
+ <input type="hidden" name="q" value="[query]">
+ <input type="hidden" name="colspec" id="colspec" value="[colspec]">
+ <input type="hidden" name="sort" value="[sortspec]">
+ <input type="hidden" name="groupby" value="[groupby]">
+ <input type="hidden" name="mode" value="grid">
+<span>Rows:</span>
+<select name="y" class="drop-down-bub">
+ <option value="--" [if-any grid_y_attr][else]selected=selected[end]>None</option>
+ [for grid_axis_choices]
+ <option value="[grid_axis_choices]"
+ [is grid_axis_choices grid_y_attr]selected=selected[end]
+ >[grid_axis_choices]</option>
+ [end]
+</select>
+
+<span style="margin-left:.7em">Cols:</span>
+<select name="x" class="drop-down-bub">
+ <option value="--" [if-any grid_x_attr][else]selected=selected[end]>None</option>
+ [for grid_axis_choices]
+ <option value="[grid_axis_choices]"
+ [is grid_axis_choices grid_x_attr]selected=selected[end]
+ >[grid_axis_choices]</option>
+ [end]
+</select>
+
+<span style="margin-left:.7em">Cells:</span>
+<select name="cells" class="drop-down-bub">
+ <option value="tiles" [is grid_cell_mode "tiles"]selected=selected[end]>Tiles</option>
+ <option value="ids" [is grid_cell_mode "ids"]selected=selected[end]>IDs</option>
+ <option value="counts" [is grid_cell_mode "counts"]selected=selected[end]>Counts</option>
+</select>
+
+<input type="submit" name="nobtn" style="font-size:90%; margin-left:.5em" value="Update">
+
+</form>
+</div>
diff --git a/appengine/monorail/templates/tracker/issue-grid-tile.ezt b/appengine/monorail/templates/tracker/issue-grid-tile.ezt
new file mode 100644
index 0000000..5ba3905
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-grid-tile.ezt
@@ -0,0 +1,27 @@
+<div class="gridtile">
+ <table cellspacing="0" cellpadding="0">
+ <tr>
+ <td class="id">
+ [if-any read_only][else]
+ [if-any page_perms.SetStar]
+ <a class="star"
+ style="color:[if-any arg0]cornflowerblue[else]gray[end]; text-decoration:none;"
+ title="[if-any arg0]Un-s[else]S[end]tar this issue"
+ data-project-name="[projectname]" data-local-id="[arg1]">
+ [if-any arg0]★[else]☆[end]
+ </a>
+ [end]
+ [end]
+ <a href="detail?id=[arg1]">[arg1]</a>
+ </td>
+ <td class="status">[arg2]</td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div>
+ <a href="detail?id=[arg1]">[arg3]</a>
+ </div>
+ </td>
+ </tr>
+ </table>
+</div>
diff --git a/appengine/monorail/templates/tracker/issue-hovercard.ezt b/appengine/monorail/templates/tracker/issue-hovercard.ezt
new file mode 100644
index 0000000..b70341b
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-hovercard.ezt
@@ -0,0 +1,6 @@
+[# Show a small dialog box that allows the user to quickly view one issue.]
+
+<div id="infobubble">
+ <div id="peekarea" style="width:72em; padding:0"
+ ><div class="loading">Loading...</div></div>
+</div>
diff --git a/appengine/monorail/templates/tracker/issue-import-page.ezt b/appengine/monorail/templates/tracker/issue-import-page.ezt
new file mode 100644
index 0000000..9e0f713d
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-import-page.ezt
@@ -0,0 +1,45 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<h3>Issue import</h3>
+
+[if-any import_errors]
+ [# This is actually used to show both errors and progress messages
+ after a successful import.]
+ <div class="error" style="margin-bottom:1em">
+ Import event log:
+ <ul>
+ [for import_errors]
+ <li>[import_errors]</li>
+ [end]
+ </ul>
+ </div>
+[end]
+
+
+<form action="import.do" enctype="multipart/form-data" method="POST">
+ <input type="hidden" name="token" value="[form_token]">
+ <table cellpadding="3" class="rowmajor vt">
+ <tr>
+ <th>Format</th>
+ <td style="width:90%">JSON</td>
+ </tr>
+ <tr>
+ <th>File</th>
+ <td><input type="file" name="jsonfile"></td>
+ </tr>
+ <tr>
+ <th>Pre-check only</th>
+ <td><input type="checkbox" name="pre_check_only"></td>
+ </tr>
+ <tr>
+ <th></th>
+ <td><input type="submit" name="btn" value="Submit"></td>
+ </tr>
+ </table>
+</form>
+
+
+
+[include "../framework/footer-script.ezt"]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-list-body.ezt b/appengine/monorail/templates/tracker/issue-list-body.ezt
new file mode 100644
index 0000000..b4beeb2
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-body.ezt
@@ -0,0 +1,88 @@
+[for panels][# There will always be exactly one panel.]
+ [include "issue-list-headings.ezt"]
+[end]
+
+[if-any table_data]
+ [for table_data]
+
+ [include "../framework/artifact-list-group-row.ezt" "issue" "issues"]
+
+ <tr class="ifOpened hoverTarget [is cursor table_data.issue_ref]cursor_on[else]cursor_off[end]" data-idx="[table_data.idx]">
+ <td class="rowwidgets nowrap">
+ [if-any read_only][else]
+ [if-any page_perms.EditIssue][if-any is_cross_project][else][# TODO(jrobbins): cross-project bulk edit]
+ <input type="checkbox" id="cb_[table_data.local_id]" class="checkRangeSelect">
+ [end][end]
+ [if-any page_perms.SetStar]
+ <a class="star" id="star-[table_data.project_name]-[table_data.local_id]"
+ style="color:[if-any table_data.starred]cornflowerblue[else]gray[end];"
+ title="[if-any table_data.starred]Un-s[else]S[end]tar this issue"
+ data-project-name="[table_data.project_name]" data-local-id="[table_data.local_id]">
+ [if-any table_data.starred]★[else]☆[end]
+ </a>
+ [else]
+
+ [end]
+ [end]
+ </td>
+
+ [for table_data.cells]
+ [is table_data.cells.type "ID"]
+ <td class="id col_[table_data.cells.col_index]"
+ [# TODO(jrobbins): re-implement preview_on_hover in polymer.]
+ ><a href="[table_data.issue_url]">
+ [if-any is_cross_project][table_data.project_name]:[end][table_data.local_id]</a></td>
+ [end]
+
+ [is table_data.cells.type "summary"]
+ <td class="col_[table_data.cells.col_index]"
+ align="right" valign="top" style="padding-right:0; padding-top: 0.15em">
+ [if-any table_data.owner_is_me]<small><b>›</b></small>[else] [end]
+ </td>
+ <td class="col_[table_data.cells.col_index]" width="100%"
+ >[include "../framework/artifact-list-cell-values.ezt"]
+ [include "../framework/artifact-list-non-column-labels.ezt"]
+ </td>
+ [end]
+
+ [is table_data.cells.type "attr"]
+ <td class="col_[table_data.cells.col_index][if-any table_data.cells.NOWRAP] nowrap[end]"
+ [if-any table_data.cells.align]align="[table_data.cells.align]"[end]
+ >[include "../framework/artifact-list-cell-values.ezt"]</td>
+ [end]
+
+ [is table_data.cells.type "unfilterable"]
+ <td class="col_[table_data.cells.col_index] [if-any table_data.cells.NOWRAP]nowrap[end]"
+ [if-any table_data.cells.align]align="[table_data.cells.align]"[end]
+ >[include "../framework/artifact-list-cell-values.ezt"]</td>
+ [end]
+ [end]
+ <td> </td>
+ </tr>
+ [end]
+
+[else]
+
+ <tr>
+ <td colspan="40" class="id">
+ <div style="padding: 3em; text-align: center">
+ [if-any project_has_any_issues]
+ Your search did not generate any results. <br>
+ [is can "1"]
+ You may want to remove some terms from your query.<br>
+ [else]
+ You may want to try your search over <a href="list?can=1&q=[format "url"][query][end]">all issues</a>.<br>
+ [end]
+ [else]
+ This project currently has no issues.<br>
+ [if-any page_perms.CreateIssue]
+ [if-any read_only][else]
+ You may want to enter a <a class="id" href="entry">new issue</a>.
+ [end]
+ [end]
+ [end]
+ </div>
+ </td>
+ </tr>
+
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-list-controls-bottom.ezt b/appengine/monorail/templates/tracker/issue-list-controls-bottom.ezt
new file mode 100644
index 0000000..bafb40a
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-controls-bottom.ezt
@@ -0,0 +1,7 @@
+<div class="list-foot" style="font-size: small">
+[if-any logged_in_user]
+ <a href="[csv_link]" style="float:right; margin-left: 1em">CSV</a>
+[end]
+
+[include "../framework/artifact-list-pagination-part.ezt"]
+</div>
diff --git a/appengine/monorail/templates/tracker/issue-list-controls-top.ezt b/appengine/monorail/templates/tracker/issue-list-controls-top.ezt
new file mode 100644
index 0000000..592f758
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-controls-top.ezt
@@ -0,0 +1,64 @@
+<div class="list">
+ <div style="float:right; margin-left:2em; font-size:95%">
+ <span class="buttonify mode_button_active capsule_left">List</span><a
+ class="buttonify capsule_right" href="list?can=[can]&q=[query]&colspec=[format "url"][colspec][end]&groupby=[format "url"][groupby][end]&sort=[format "url"][sortspec][end]&x=[grid_x_attr]&y=[grid_y_attr]&cells=[grid_cell_mode]&mode=grid">Grid</a>
+ </div>
+
+ [include "../framework/artifact-list-pagination-part.ezt"]
+
+ [if-any page_perms.EditIssue]
+ [if-any is_cross_project][else]
+ <span style="margin:0 .7em">Select:
+ <a id="selectall" href="#">All</a>
+ <a id="selectnone" href="#">None</a>
+ </span>
+ [end]
+ <select id="moreactions" class="drop-down-bub">
+ <option value="moreactions" disabled="disabled" selected="selected">Actions...</option>
+ <option value="colspec">Change columns...</option>
+ [if-any is_cross_project][else][# TODO(jrobbins): cross-project bulk edit]
+ <option value="bulk">Bulk edit...</option>
+ [end]
+ [if-any is_cross_project][else][# TODO(jrobbins): cross-project spam flagging]
+ <option value="flagspam">Flag as spam...</option>
+ <option value="unflagspam">Un-flag as spam...</option>
+ [end]
+ </select>
+ <span id='bulk-action-loading' class='loading' style='visibility:hidden'>Processing</span>
+ [end]
+ <form id="colspecform" action="list" method="GET" autocomplete="off"
+ style="display:inline; margin-left:1em">
+ <input type="hidden" name="can" value="[can]">
+ <input type="hidden" name="q" value="[query]">
+ <input type="hidden" name="sort" value="[sortspec]">
+ <input type="hidden" id="groupbyspec" name="groupby" value="[groupby]">
+ <span id="columnspec" style="display:none; font-size:90%"><span>Columns:
+ </span><span id="colspec_field"><input type="text" size="60" name="colspec"
+ value="[colspec]"></span>
+ <input type="submit" name="nobtn" value="Update">
+ [# TODO(jrobbins): <a href="TODO">Learn more</a> ]
+ </span>
+ </form>
+</div>
+
+<form id='bulkspam' method="post" action="flagspam.do">
+ <input type="hidden" name="token" value="[flag_spam_token]">
+ <input type="hidden" id="bulk_spam_ids" name="ids">
+ <input type="hidden" id="bulk_spam_value" name="spam" value="true">
+</form>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("selectall")) {
+ $("selectall").addEventListener("click", function() { _selectAllIssues(); });
+ }
+ if ($("selectnone")) {
+ $("selectnone").addEventListener("click", function() { _selectNoneIssues(); });
+ }
+ if ($("moreactions")) {
+ $("moreactions").addEventListener("change", function(event) {
+ _handleListActions(event.target);
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/issue-list-csv.ezt b/appengine/monorail/templates/tracker/issue-list-csv.ezt
new file mode 100644
index 0000000..3c943f6
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-csv.ezt
@@ -0,0 +1,16 @@
+[# Prefix response body with over 1024 bytes of static content to avoid content sniffing.]
+"-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-"
+This file contains the same information as the issue list web page, but in CSV format.
+You can adjust the columns of the CSV file by adjusting the columns shown on the web page
+before clicking the CSV link.
+"-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-"
+
+
+[for panels][# There will always be exactly one panel.][for panels.ordered_columns]"[panels.ordered_columns.name]"[if-index panels.ordered_columns last][else],[end][end][end]
+[for table_data][for table_data.cells][is table_data.cells.type "ID"]"[table_data.local_id]",[else]"[format "raw"][if-any table_data.cells.values][for table_data.cells.values][table_data.cells.values.item][if-index table_data.cells.values last][else], [end][end][end][end]"[if-index table_data.cells last][else],[end][end][end]
+[end]
+
+[if-any next_csv_link]
+This file is truncated to [item_count] out of [pagination.total_count] total results.
+See [next_csv_link] for the next set of results.
+[end]
diff --git a/appengine/monorail/templates/tracker/issue-list-headings.ezt b/appengine/monorail/templates/tracker/issue-list-headings.ezt
new file mode 100644
index 0000000..c45894b
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-headings.ezt
@@ -0,0 +1,26 @@
+[# arg0 is the ordered_columns argument that gives the name and index of each column.]
+
+<thead id="resultstablehead">
+<tr id="headingrow"><th style="border-left: 0"> </th>
+ [for panels.ordered_columns]
+ [is panels.ordered_columns.name "Summary"]
+ <th class="col_[panels.ordered_columns.col_index]" style="border-right:0"> </th>
+ <th class="col_[panels.ordered_columns.col_index]" nowrap="nowrap" id="summaryheading"
+ data-col-index="[panels.ordered_columns.col_index]" width="100%"
+ ><a href="#" style="text-decoration: none">Summary + Labels <span class="indicator">▼</span></a></th>
+ [else]
+ [is panels.ordered_columns.name "ID"]
+ <th class="col_[panels.ordered_columns.col_index]" nowrap="nowrap"
+ data-col-index="[panels.ordered_columns.col_index]"
+ ><a href="#" style="text-decoration: none">[panels.ordered_columns.name] <span class="indicator">▼</span></a></th>
+ [else]
+ <th class="col_[panels.ordered_columns.col_index]"
+ data-col-index="[panels.ordered_columns.col_index]"
+ ><a href="#" style="text-decoration: none">[panels.ordered_columns.name] <span class="indicator">▼</span></a></th>
+ [end]
+ [end]
+ [end]
+ <th data-col-index="dot" style="width:3ex"><a href="#columnprefs"
+ class="dotdotdot">...</a></th>
+</tr>
+</thead>
diff --git a/appengine/monorail/templates/tracker/issue-list-js.ezt b/appengine/monorail/templates/tracker/issue-list-js.ezt
new file mode 100644
index 0000000..d7e32f6
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-js.ezt
@@ -0,0 +1,61 @@
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ [# Pass the list of column names from HTML to JS ]
+ window._allColumnNames = [
+ [for column_values]'[column_values.column_name]'[if-index column_values last][else], [end][end]
+ ];
+
+ [# Update the issue link hrefs on-load and whenever the column-spec changes.]
+ _ctxCan = [can];
+ _ctxQuery = "[format "js"][query][end]";
+ _ctxSortspec = "[format "js"][sortspec][end]";
+ _ctxGroupBy = "[format "js"][groupby][end]";
+ _ctxDefaultColspec = "[format "js"][default_colspec][end]";
+ _ctxStart = [start];
+ _ctxNum = [num];
+ _ctxResultsPerPage = [default_results_per_page];
+ _ctxArgs = _formatContextQueryArgs();
+
+ function _goIssue(issueIndex, newWindow) {
+ var url = _makeIssueLink(issueRefs[[]issueIndex]);
+ _go(url, newWindow);
+ }
+
+ window.issueRefs = [[]
+ [for table_data]
+ {project_name: "[format "js"][table_data.project_name][end]",
+ id: [table_data.local_id]}[if-index table_data last][else],[end][end]
+ ];
+
+ function _handleResultsClick(event) {
+ var target = event.target;
+ if (target.classList.contains("label"))
+ return;
+ if (target.classList.contains("rowwidgets") || target.parentNode.classList.contains("rowwidgets"))
+ return;
+ while (target && target.tagName != "TR") target = target.parentNode;
+ event.preventDefault();
+ _goIssue(target.attributes[[]"data-idx"].value,
+ (event.metaKey || event.ctrlKey || event.button == 1));
+  }
+ $("resultstable").addEventListener("click", _handleResultsClick);
+
+ function _handleHeaderClick(event) {
+ var target = event.target;
+ while (target && target.tagName != "TH") target = target.parentNode;
+ var colIndex = target.getAttribute("data-col-index");
+ _showBelow("pop_" + colIndex, target);
+ }
+ $("resultstablehead").addEventListener("click", _handleHeaderClick);
+});
+</script>
+
+<script type="text/javascript" defer src="/static/third_party/js/kibbles-1.3.3.comp.js" nonce="[nonce]"></script>
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _setupKibblesOnListPage(
+ '[project_home_url]/issues/list',
+ '[project_home_url]/issues/entry',
+ '[projectname]', 1, 0, '[set_star_token]');
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/issue-list-menus.ezt b/appengine/monorail/templates/tracker/issue-list-menus.ezt
new file mode 100644
index 0000000..2af3f05
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-menus.ezt
@@ -0,0 +1,168 @@
+[# Table header popup menus ]
+
+[for column_values]
+ [is column_values.column_name "id"]
+ <div id="pop_[column_values.col_index]" class="popup">
+ <table cellspacing="0" cellpadding="0" border="0">
+ <tr id="pop_up_[column_values.col_index]"><td>Sort Up</td></tr>
+ <tr id="pop_down_[column_values.col_index]"><td>Sort Down</td></tr>
+ <tr id="pop_hide_[column_values.col_index]"><td>Hide Column</td></tr>
+ </table>
+ </div>
+ [else]
+ [is column_values.column_name "summary"]
+ <div id="pop_[column_values.col_index]" class="popup">
+ <table cellspacing="0" cellpadding="0" border="0">
+ <tr id="pop_up_[column_values.col_index]"><td>Sort Up</td></tr>
+ <tr id="pop_down_[column_values.col_index]"><td>Sort Down</td></tr>
+ [if-any column_values.filter_values]
+ <tr id="pop_show_only_[column_values.col_index]"><td>Show only
+ <span class="indicator">►</span></td></tr>
+ [end]
+ <tr id="pop_hide_[column_values.col_index]"><td>Hide Column</td></tr>
+ </table>
+ </div>
+ [else]
+ <div id="pop_[column_values.col_index]" class="popup">
+ <table cellspacing="0" cellpadding="0" border="0">
+ <tr id="pop_up_[column_values.col_index]"><td>Sort Up</td></tr>
+ <tr id="pop_down_[column_values.col_index]"><td>Sort Down</td></tr>
+ [if-any column_values.filter_values]
+ <tr id="pop_show_only_[column_values.col_index]"><td>Show only
+ <span class="indicator">►</span></td></tr>
+ [end]
+ <tr id="pop_hide_[column_values.col_index]"><td>Hide Column</td></tr>
+ <tr id="pop_groupby_[column_values.col_index]"><td>Group Rows</td></tr>
+ </table>
+ </div>
+ [end]
+ [end]
+[end]
+
+[# Table header popup submenus for autofiltering of values ]
+
+[for column_values]
+ <div id="filter_[column_values.col_index]" class="popup subpopup">
+ <table cellspacing="0" cellpadding="0" border="0">
+ [for column_values.filter_values]
+ <tr data-filter-column="[is column_values.column_name "Summary"]label[else][column_values.column_name][end]"
+ data-filter-value="[column_values.filter_values]">
+ <td>[column_values.filter_values]</td></tr>
+ [end]
+ </table>
+ </div>
+[end]
+
+[# Popup menu showing the list of available columns allowing show/hide ]
+
+<div id="pop_dot" class="popup">
+ <table cellspacing="0" cellpadding="0" border="0">
+ <tr><th>Show columns:</th></tr>
+ [for panels.ordered_columns]
+ <tr data-toggle-column-index="[panels.ordered_columns.col_index]"><td> <span
+ class="col_[panels.ordered_columns.col_index]">♦</span> [panels.ordered_columns.name]</td></tr>
+ [end]
+ [for unshown_columns]
+ <tr data-add-column-name="[unshown_columns]"
+ ><td> [unshown_columns]</td></tr>
+ [end]
+ <tr id="pop_dot_edit"
+ ><td> Edit column spec...</td></tr>
+ </table>
+</div>
+
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function registerPopHandlers(colIndex, colName) {
+ var sortUpEl = $("pop_up_" + colIndex);
+ if (sortUpEl) {
+ sortUpEl.addEventListener("click", function () {
+ _closeAllPopups(sortUpEl);
+ _sortUp(colName);
+ });
+ sortUpEl.addEventListener("mouseover", function () {
+ _closeSubmenus();
+ });
+ }
+
+ var sortDownEl = $("pop_down_" + colIndex);
+ if (sortDownEl) {
+ sortDownEl.addEventListener("click", function () {
+ _closeAllPopups(sortDownEl);
+ _sortDown(colName);
+ });
+ sortDownEl.addEventListener("mouseover", function () {
+ _closeSubmenus();
+ });
+ }
+
+ var hideEl = $("pop_hide_" + colIndex);
+ if (hideEl) {
+ hideEl.addEventListener("click", function () {
+ _closeAllPopups(hideEl);
+ _toggleColumnUpdate(colIndex);
+ });
+ hideEl.addEventListener("mouseover", function () {
+ _closeSubmenus();
+ });
+ }
+
+ var showOnlyEl = $("pop_show_only_" + colIndex);
+ if (showOnlyEl) {
+ showOnlyEl.addEventListener("mouseover", function () {
+ _showRight("filter_" + colIndex, showOnlyEl);
+ });
+ }
+
+ var groupByEl = $("pop_groupby_" + colIndex);
+ if (groupByEl) {
+ groupByEl.addEventListener("click", function () {
+ _closeAllPopups(groupByEl);
+ _addGroupBy(colIndex);
+ });
+ groupByEl.addEventListener("mouseover", function () {
+ _closeSubmenus();
+ });
+ }
+ }
+
+ [for column_values]
+ registerPopHandlers([column_values.col_index], "[column_values.column_name]");
+ [end]
+
+ function handleFilterValueClick(event) {
+ var target = event.target;
+ if (target.tagName != "TR") target = target.parentNode;
+ _closeAllPopups(target);
+ var filterColumn = target.getAttribute("data-filter-column");
+ var filterValue = target.getAttribute("data-filter-value");
+ _filterTo(filterColumn, filterValue);
+ }
+
+ [for column_values]
+ $("filter_" + [column_values.col_index]).addEventListener(
+ "click", handleFilterValueClick);
+ [end]
+
+ function handleDotDotDotClick(event) {
+ var target = event.target;
+ if (target.tagName != "TR") target = target.parentNode;
+ _closeAllPopups(target);
+ var colIndex = target.getAttribute("data-toggle-column-index");
+ if (colIndex != null)
+ _toggleColumnUpdate(colIndex);
+ var colName = target.getAttribute("data-add-column-name");
+ if (colName != null)
+ _addcol(colName);
+ }
+
+ $("pop_dot").addEventListener("click", handleDotDotDotClick);
+
+ $("pop_dot_edit").addEventListener("click", function() {
+ var target = $("pop_dot_edit");
+ _closeAllPopups(target);
+ $("columnspec").style.display = "";
+ });
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/issue-list-page.ezt b/appengine/monorail/templates/tracker/issue-list-page.ezt
new file mode 100644
index 0000000..5ec41f3
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-list-page.ezt
@@ -0,0 +1,88 @@
+[define title]Issues[end]
+[define category_css]css/ph_list.css[end]
+[define page_css]css/ph_detail.css[end][# needed for infopeek]
+
+[if-any projectname]
+ [include "../framework/master-header.ezt" "showtabs"]
+[else]
+ [include "../framework/master-header.ezt" "hidetabs"]
+[end]
+[include "../framework/js-placeholders.ezt" "showtabs"]
+
+<div id="colcontrol">
+ [if-any is_missing_shards]
+ <div class="error" style="text-align:center">[missing_shard_count] search backends did not respond or had errors. These results are probably incomplete.</div>
+ [end]
+
+ [if-any grid_mode]
+ [include "issue-grid-controls-top.ezt"]
+ [else]
+ [include "issue-list-controls-top.ezt"]
+ [end]
+
+ [include "issue-hovercard.ezt"]
+
+ <div id="cursorarea">
+ <table cellspacing="0" cellpadding="2" border="0" class="results [if-any grid_mode][else]striped[end]" id="resultstable" width="100%">
+ [if-any grid_mode]
+ [include "issue-grid-body.ezt"]
+ [else]
+ [include "issue-list-body.ezt"]
+ [end]
+ </table>
+ </div>
+
+ [if-any grid_mode]
+ [include "issue-grid-controls-bottom.ezt"]
+ [else]
+ [include "issue-list-controls-bottom.ezt"]
+ [for panels][# There will always be exactly one panel]
+ [include "issue-list-menus.ezt"]
+ [end]
+ [end]
+</div>
+
+<div id="helparea"></div>
+
+[include "../framework/footer-script.ezt"]
+[if-any grid_mode][else]
+ [include "issue-list-js.ezt"]
+[end]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _onload();
+});
+</script>
+
+[if-any projectname]
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+
+ var stars = document.getElementsByClassName("star");
+ for (var i = 0; i < stars.length; ++i) {
+ var star = stars[[]i];
+ star.addEventListener("click", function (event) {
+ var projectName = event.target.getAttribute("data-project-name");
+ var localID = event.target.getAttribute("data-local-id");
+ _TKR_toggleStar(event.target, projectName, localID, null, "[set_star_token]");
+ [# TODO: _TKR_syncStarIcons() when issue peek is re-implemented.]
+ });
+ }
+
+ var issueCheckboxes = document.getElementsByClassName("checkRangeSelect");
+ for (var i = 0; i < issueCheckboxes.length; ++i) {
+ var el = issueCheckboxes[[]i];
+ el.addEventListener("click", function (event) {
+ _checkRangeSelect(event, event.target);
+ _highlightRow(event.target);
+ });
+ }
+});
+</script>
+[end]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-meta-part.ezt b/appengine/monorail/templates/tracker/issue-meta-part.ezt
new file mode 100644
index 0000000..7ca7582
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-meta-part.ezt
@@ -0,0 +1,268 @@
+[# Show the issue detail page meta data column.
+
+ arg0: Any non-empty value means that the "make changes below" text
+ should be shown. It is shown on the issue details page, but not
+ on the issue peek page.
+]
+
+<table cellspacing="0" cellpadding="0">
+ <tr><th align="left" style="padding-right:.3em">Status:</th>
+ <td width="100%">
+ [is issue.status.name ""]
+ [is issue.derived_status.name ""]
+ ----
+ [else]
+ <i title="Derived[if-any issue.derived_status.tooltip issue.derived_status.docstring]:[end] [issue.derived_status.tooltip] [issue.derived_status.docstring]"
+ >[issue.derived_status.name]</i>
+ [end]
+ [else]
+ <span title="[issue.status.tooltip] [issue.status.docstring]">[issue.status.name]</span>
+ [end]
+ </td>
+ </tr>
+ [if-any issue.merged_into.visible]
+ <tr><th align="left">Merged:</th>
+ <td class="rel_issues">
+ <a href="[issue.merged_into.url]" title="[issue.merged_into.display_name]: [issue.merged_into.summary]"
+ [if-any issue.merged_into.is_open][else]class="closed_ref"[end]
+ >[issue.merged_into.display_name]</a>
+ </td>
+ </tr>
+ [end]
+
+ [if-any arg0]
+ <tr><th align="left">Owner:</th><td>
+ [is issue.owner.username ""]
+ [is issue.derived_owner.username ""]
+ ----
+ [else]
+ <i title="Derived">[include "../framework/user-link.ezt" issue.derived_owner]</i>
+ [end]
+ [else]
+ [include "../framework/user-link.ezt" issue.owner]
+ [end]
+ </td>
+ </tr>
+ [else]
+ <tr><td align="left" colspan="2" class="widemeta"><b>Owner:</b>
+ [is issue.owner.username ""]
+ [is issue.derived_owner.username ""]
+ ----
+ [else]
+ <div style="margin-left:1em">
+ <i title="Derived">[include "../framework/user-link.ezt" issue.derived_owner]</i>
+ </div>
+ [end]
+ [else]
+ <div style="margin-left:1em">
+ [include "../framework/user-link.ezt" issue.owner]
+ </div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [if-any issue.closed]
+ <tr><th align="left" valign="top" style="padding-right:.3em">Closed:</th>
+ <td align="left" valign="top" width="100%">
+ [issue.closed]
+ </td>
+ </tr>
+ [end]
+ [if-any issue.cc issue.derived_cc]
+ [if-any arg0]
+ <tr><th class="vt" align="left">Cc:</th><td>
+ [define join_char][end]
+ [if-any issue.cc][if-any issue.derived_cc][define join_char],[end][end][end]
+ [for issue.cc] [include "../framework/user-link.ezt" issue.cc][if-index issue.cc last][join_char][else],[end] [end]
+ [for issue.derived_cc] <i title="Derived">[include "../framework/user-link.ezt" issue.derived_cc]</i>[if-index issue.derived_cc last][else],[end] [end]
+ </td></tr>
+ [else]
+ <tr><td class="vt" align="left" colspan="2" class="widemeta"><b>Cc:</b>
+ <div style="margin-left:1em">
+ [define join_char][end]
+ [if-any issue.cc][if-any issue.derived_cc][define join_char],[end][end][end]
+ [for issue.cc] [include "../framework/user-link.ezt" issue.cc][if-index issue.cc last][join_char][else],[end] [end]
+ [for issue.derived_cc] <i title="Derived">[include "../framework/user-link.ezt" issue.derived_cc]</i>[if-index issue.derived_cc last][else],[end] [end]
+ </div>
+ </td></tr>
+ [end]
+ [end]
+
+ [if-any issue.components]
+ <tr>
+ <th align="left" valign="top" style="padding-right:.3em">Components:</th>
+ <td align="left" valign="top" width="100%">
+ [for issue.components]
+ <div><a href="list?q=component:[issue.components.path]" class="fieldvalue"
+ [if-any issue.components.derived]style="font-style:italic"[end]
+ title="[if-any issue.components.derived]Derived: [end][issue.components.docstring_short]"
+ >[issue.components.path]</a></div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [for issue.fields]
+ [if-any issue.fields.display]
+ <tr><th align="left" valign="top" title="[issue.fields.field_docstring]"
+ style="padding-right:.3em"
+ >[issue.fields.field_name]:</th>
+ <td align="left" valign="top" width="100%">
+ [if-any issue.fields.values issue.fields.derived_values]
+ [for issue.fields.values]
+ <a href="list?q=[issue.fields.field_name]="[format "url"][issue.fields.values.val][end]"" class="fieldvalue"
+ title="[issue.fields.values.val] [issue.fields.values.docstring]">[issue.fields.values.val]</a
+ >[if-index issue.fields.values last][if-any issue.fields.derived_values],[end][else],[end]
+ [end]
+ [for issue.fields.derived_values]
+ <a href="list?q=[issue.fields.field_name]="[format "url"][issue.fields.derived_values.val][end]""
+ title="derived: [issue.fields.derived_values.val] [issue.fields.derived_values.docstring]"><i>[issue.fields.derived_values.val]</i></a>[if-index issue.fields.derived_values last][else],[end]
+ [end]
+ [else]
+ ----
+ [end]
+ </td>
+ </tr>
+ [end]
+ [end]
+
+ <tr><td colspan="2" class="widemeta">
+ [for issue.labels]
+ <div style="padding-top:2px">
+ [is issue.labels.prefix ""]
+ <a href="list?q=label:[issue.labels.name]"
+ title="[if-any issue.labels.tooltip][issue.labels.tooltip] [end][issue.labels.docstring]"
+ class="label">[issue.labels.name]</a>
+ [else]
+ <a href="list?q=label:[issue.labels.name]"
+ title="[if-any issue.labels.tooltip][issue.labels.tooltip] [end][issue.labels.docstring]"
+ class="label"><b>[issue.labels.prefix]-</b>[issue.labels.value]</a>
+ [end]
+ </div>
+ [end]
+ </td></tr>
+
+ [for issue.derived_labels]
+ [is issue.derived_labels.prefix ""]
+ <tr>
+ <td colspan="2" class="widemeta">
+ <a href="list?q=label:[issue.derived_labels.name]"
+ title="Derived[if-any issue.derived_labels.tooltip issue.derived_labels.docstring]: [end][issue.derived_labels.tooltip] [issue.derived_labels.docstring]"
+ class="derived label">[issue.derived_labels.name]</a>
+ </td>
+ </tr>
+ [else]
+ <tr>
+ <td colspan="2" class="widemeta">
+ <a href="list?q=label:[issue.derived_labels.name]"
+ title="Derived[if-any issue.derived_labels.tooltip issue.derived_labels.docstring]: [end][issue.derived_labels.tooltip] [issue.derived_labels.docstring]"
+ class="derived label"><b>[issue.derived_labels.prefix]-</b>[issue.derived_labels.value]</a>
+ </td>
+ </tr>
+ [end]
+ [end]
+
+</table>
+
+<div class="rel_issues widemeta">
+ [if-any issue.blocked_on]
+ <br><b>Blocked on:</b><br>
+ [for issue.blocked_on][if-any issue.blocked_on.visible]
+ [if-any issue.blocked_on.is_open]
+ <a href="[issue.blocked_on.url]" title="[issue.blocked_on.display_name]: [issue.blocked_on.summary]"
+ >[issue.blocked_on.display_name]</a><br>
+ [end]
+ [end][end]
+ [for issue.blocked_on][if-any issue.blocked_on.visible]
+ [if-any issue.blocked_on.is_open][else]
+ <a href="[issue.blocked_on.url]" title="[issue.blocked_on.display_name]: [issue.blocked_on.summary]" class="closed_ref"
+ >[issue.blocked_on.display_name]</a><br>
+ [end]
+ [end][end]
+ [end]
+
+ [if-any issue.blocking]
+ <br><b>Blocking:</b><br>
+ [for issue.blocking][if-any issue.blocking.visible]
+ [if-any issue.blocking.is_open]
+ <a href="[issue.blocking.url]" title="[issue.blocking.display_name]: [issue.blocking.summary]"
+ >[issue.blocking.display_name]</a><br>
+ [end]
+ [end][end]
+ [for issue.blocking][if-any issue.blocking.visible]
+ [if-any issue.blocking.is_open][else]
+ <a href="[issue.blocking.url]" title="[issue.blocking.display_name]: [issue.blocking.summary]" class="closed_ref"
+ >[issue.blocking.display_name]</a><br>
+ [end]
+ [end][end]
+ [end]
+
+ [if-any issue.restrictions.has_restrictions]
+ <br>
+ <div class="issue_restrictions">
+ <div class="restrictions_header">
+ <div class="lock_grey goog-inline-block"></div>
+ Restricted
+ </div>
+ <ul>
+ [if-any issue.restrictions.view]
+ <li>Only users with [issue.restrictions.view] permission can see this issue.</li>
+ [end]
+ [if-any issue.restrictions.add_comment]
+ <li>Only users with [issue.restrictions.add_comment] permission may comment.</li>
+ [end]
+ [if-any issue.restrictions.edit]
+ <li>Only users with [issue.restrictions.edit] permission may make changes.</li>
+ [end]
+ [if-any issue.restrictions.other]
+ [for issue.restrictions.other]
+ <li>Other restriction:
+ <span class="other_restriction">[issue.restrictions.other]</span>
+ </li>
+ [end]
+ [end]
+ </ul>
+ </div>
+ [end]
+</div>
+
+
+[if-any previous_locations]
+ <div class="rel_issues">
+ <br><b>Previous locations:</b><br>
+ [for previous_locations]
+ <a>[previous_locations.project_name]:[previous_locations.local_id]</a>
+ [end]
+ </div>
+[end]
+
+
+[if-any arg0]
+ <br><br>
+ [if-any read_only][else]
+ [if-any logged_in_user]
+ [if-any page_perms.AddIssueComment]
+ [if-any offer_make_changes]
+ <a id="addacomment" href="#"><span style="white-space:nowrap">Add a comment and</span>
+ <span style="white-space:nowrap">make changes below</span></a>
+ [else]
+ <a id="addacomment" href="#" style="white-space:nowrap">Add a comment below</a>
+ [end]
+ [end]
+ [else]
+ <div style="white-space:nowrap"><a href="[login_url]">Sign in</a> to add a comment</div>
+ [end]
+ [end][# not read_only]
+[end]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("addacomment")) {
+ $("addacomment").addEventListener("click", function(event) {
+ _openIssueUpdateForm();
+ event.preventDefault();
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/issue-missing-page.ezt b/appengine/monorail/templates/tracker/issue-missing-page.ezt
new file mode 100644
index 0000000..d570e1d
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-missing-page.ezt
@@ -0,0 +1,39 @@
+[define title]Issue Not Found[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+<h3>Issue Not Found</h3>
+
+<h4>What happened?</h4>
+
+[if-any moved_to_project_name]
+<p>This issue was moved to [moved_to_project_name].
+<a href="/p/[moved_to_project_name]/issues/detail?id=[moved_to_id]">Go to the issue</a>.</p>
+[else]
+
+ [if-any issue_missing]
+ <p>This issue does not exist because its issue ID
+ was skipped during issue creation.</p>
+ [else]
+ [if-any issue_not_specified]
+ <p>No issue id has been specified.</p>
+ [else]
+ [if-any issue_not_created]
+ <p>The specified issue ID does not exist.</p>
+ [end]
+ [end]
+ [end]
+[end]
+
+[if-any issue_deleted]
+<p>Issue [local_id] has been deleted.</p>
+
+ [if-any page_perms.DeleteIssue]
+ <form action="delete.do" method="post">
+ <input type="hidden" name="token" value="[delete_form_token]">
+ <input type="hidden" name="id" value="[local_id]">
+ <input type="submit" name="undelete" value="Undelete">
+ </form>
+ [end]
+[end]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-original-page.ezt b/appengine/monorail/templates/tracker/issue-original-page.ezt
new file mode 100644
index 0000000..b071f2d
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-original-page.ezt
@@ -0,0 +1,17 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+ <head>
+ <title>Issue [projectname]:[local_id] comment #[seq]</title>
+ <meta name="ROBOTS" content="NOINDEX">
+ <meta name="referrer" content="no-referrer">
+ <link type="text/css" rel="stylesheet" href="/static/css/ph_core.css?version=[app_version]">
+ </head>
+ <body>
+ <h3>Original email for issue [projectname]:[local_id] comment #[seq]</h3>
+ [if-any is_binary]
+ <i>The message could not be displayed.</i>
+ [else]
+ <pre>[message_body]</pre>
+ [end]
+ </body>
+</html>
diff --git a/appengine/monorail/templates/tracker/issue-peek-ajah.ezt b/appengine/monorail/templates/tracker/issue-peek-ajah.ezt
new file mode 100644
index 0000000..4315e74
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-peek-ajah.ezt
@@ -0,0 +1,124 @@
+[# Note: this file is not currently in use, but it could come back into use
+ before launch or soon after. See monorail:317.]
+<div id="color_control" class="[if-any issue.is_open][else]closed_colors[end]">
+<div class="issuepage peek" id="meta-container">
+
+ <div id="peekheading">
+ <div id="hc_controls"
+ ><a href="#" title="First comment (Key: F)" class="paginate" [#_firstComment()]
+ ><img src="/static/images/pagination-first.png" width="16" height="16"></a
+ ><a href="#" title="Previous comment (Key: P)" class="paginate" [#_prevComment()]
+ ><img src="/static/images/pagination-prev.png" width="16" height="16"></a
+ ><a href="#" title="Next comment (Key: N)" class="paginate" [#_nextComment()]
+ ><img src="/static/images/pagination-next.png" width="16" height="16"></a
+ ><a href="#" title="Last comment (Key: L)" class="paginate" [#_lastComment()]
+ ><img src="/static/images/pagination-last.png" width="16" height="16"></a
+ ><a href="#" title="Close (Key: Esc)" class="close" [#_hideInfoPeek()]
+ ><img src="/static/images/close_icon.png" width="16" height="16"></a
+ ></div>
+
+ [if-any page_perms.SetStar]
+ [if-any read_only][else]
+ <a class="star" id="peekstar" style="text-decoration:none; cursor:pointer;
+ color:[if-any starred]cornflowerblue[else]gray[end]"
+ title="[if-any starred]Un-s[else]S[end]tar this issue">
+ [if-any starred]★[else]☆[end]
+ </a>
+      [# TODO(jrobbins): re-implement starring event handlers when this feature comes back.]
+ [end]
+ [end]
+
+    Issue <a id="issuelink" href="[issue.detail_relative_url]"><span id="issueid">[issue.local_id]</span></a>:
+ <span id="issuesummary">[issue.summary]</span>
+ </div>[#peekheading]
+
+
+<div style="border:1px solid #ccc">
+<div style="width:16em; float:left; padding-left: 1px" id="issuemeta" class="vt">
+ [include "issue-meta-part.ezt" ""]
+</div>[#issuemeta]
+
+<div id="issuecomments" style="padding:3px">
+ <div class="issuecommentheading" style="background:none; padding:2px 6px">
+ Reported by
+ [include "../framework/user-link.ezt" description.creator],
+ <span class="date" title="[description.date_tooltip]">[description.date_string][description.date_relative]</span>
+ </div>
+ [include "issue-comments-part.ezt" ""]
+
+ [# To allow the final comment to be scrolled up to the top of the
+ div's scroll area.]
+ <div style="height:23em"> </div>
+</div>[#issuecomments]
+
+[if-any read_only][else]
+[if-any page_perms.EditIssue]
+ <div id="infobuttons">
+ <form action="[quick_edit_submit_url]" method="POST" id="quickeditform">
+ <input type="hidden" name="token" value="[form_token]">
+ <input type="hidden" name="pagegen" value="[pagegen]">
+ <input type="hidden" name="slot_used" id="slot_used" value="[default_slot_num]">
+ <input type="hidden" name="can" value="[can]">
+ <input type="hidden" name="q" value="[query]">
+ <input type="hidden" name="colspec" value="[colspec]">
+ <input type="hidden" name="sort" value="[sortspec]">
+ <input type="hidden" name="groupby" value="[groupby]">
+ <input type="hidden" name="start" value="[start]">
+ <input type="hidden" name="num" value="[num]">
+
+ <table width="100%" cellpadding="0" cellspacing="3" style="padding:3px">
+ <tr>
+ <td title="Key: M"><b>Commands:</b></td>
+        <td width="90%" style="padding-right:4px"><input type="text" style="width:100%;" name="cmd" id="cmd"
+ autocomplete="off" [# TODO(jrobbins): on key down ="_qeDirty=true;"]
+ value="[for cmd_slots][is cmd_slots.slot_num default_slot_num][cmd_slots.command][end][end]"
+ title="Type commands to update this issue."
+ ><input type="button" id="redo" value="▼" [# TODO(jrobbins): on mouse down ="return _toggleRedoMenu(this)"]
+ title="Command slots"
+ style="background:none; border:none; margin-left:-2em; font-size:90%"
+ ></td>
+ <td style="padding:0 3px" nowrap="nowrap">
+ <input type="submit" name="btn" id="execute" value="Execute" title="Key: E">
+ </td>
+ </tr>
+ <tr>
+ <td class="vt" style="padding-top:2px; text-align:right"><b>Comment:</b></td>
+ <td><textarea style="width:100%; height:2.5em" name="comment" id="comment"
+ [# TODO(jrobbins): on key down ="_qeDirty=true"]
+ >[for cmd_slots][is cmd_slots.slot_num default_slot_num][cmd_slots.comment][end][end]</textarea></td>
+ <td class="vt" nowrap="nowrap">
+ <input type="checkbox" checked="checked" name="send_email"
+ id="send_email">
+ <label for="send_email"
+ title="Send issue change notifications to interested users"
+ >Send email</label>
+ </td>
+
+ </tr>
+
+ </table>
+ </form>
+ </div>
+
+ [for cmd_slots]
+ <input type="hidden" id="recent_cmd_[cmd_slots.slot_num]"
+ value="[cmd_slots.command]">
+ <input type="hidden" id="recent_comment_[cmd_slots.slot_num]"
+ value="[cmd_slots.comment]">
+ [end]
+
+ <div class="popup" id="redoMenu">
+ <table cellspacing="0" cellpadding="0" border="0">
+ [for cmd_slots]
+ <tr [# TODO(jrobbins): on mouse up ="return _selectRecentCommand(:cmd_slots.slot_num:)"]
+ ><td nowrap="nowrap" title="Key: [cmd_slots.slot_num]">[cmd_slots.slot_num]: [cmd_slots.command]</td></tr>
+ [end]
+ </table>
+ </div>
+
+[end]
+[end]
+
+</div>
+
+</div></div>
diff --git a/appengine/monorail/templates/tracker/issue-reindex-page.ezt b/appengine/monorail/templates/tracker/issue-reindex-page.ezt
new file mode 100644
index 0000000..ba0ecaf
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-reindex-page.ezt
@@ -0,0 +1,45 @@
+[define title]Reindex Issues[end]
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[if-any read_only][include "../framework/read-only-rejection.ezt"]
+[else]
+
+<form action="reindex.do" method="POST" id="form">
+ <input type="hidden" name="token" value="[form_token]">
+ <table>
+ <tr>
+ <td>Start:</td>
+      <td><input type="text" name="start" value="[start]"></td>
+ </tr>
+ <tr>
+ <td>Num:</td>
+      <td><input type="text" name="num" value="[num]"></td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <input type="submit" id="submit_btn" name="btn" value="Re-index"></td>
+ </tr>
+ <tr>
+ <td><label for="autosubmit">Autosubmit:</label></td>
+ <td><input type="checkbox" name="auto_submit" id="autosubmit"
+ [is auto_submit "True"]checked="checked"[end] ></td>
+ </tr>
+ </table>
+</form>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ function autosubmit() {
+ if (document.getElementById('autosubmit').checked) {
+ document.getElementById('form').submit();
+ }
+ }
+ if (document.getElementById('autosubmit').checked) {
+ setTimeout(autosubmit, 5000);
+ }
+});
+</script>
+
+[end]
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-search-form.ezt b/appengine/monorail/templates/tracker/issue-search-form.ezt
new file mode 100644
index 0000000..f4953ea
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-search-form.ezt
@@ -0,0 +1,97 @@
+<div class="[issue_tab_mode]">
+<div class="isf">
+ [define offer_new_issue]Yes[end]
+ [if-any read_only][define offer_new_issue]No[end][end]
+ [if-any logged_in_user][# Note: rather than hide the New Issue link when the user is not logged in, we let them try. ]
+ [if-any page_perms.CreateIssue][else][define offer_new_issue]No[end][end]
+ [end]
+ [is offer_new_issue "Yes"]
+ <span class="inIssueEntry" style="margin-right:.4em">
+ <a class="buttonify" href="[issue_entry_url]">New issue</a>
+ </span>
+ [end]
+
+ <span class="inIssueList" style="margin:0 .4em">
+ <label for="can">Search</label>
+ <form action="list" method="GET" style="display:inline">
+ <select id="can" name="can">
+ [include "issue-can-widget.ezt" "search"]
+ </select>
+ <label for="searchq">for</label>
+ <span id="qq"><input type="text" size="[q_field_size]" id="searchq" name="q"
+ value="[query]" autocomplete="off"></span>
+ [if-any sortspec]<input type="hidden" id="sort" name="sort" value="[sortspec]">[end]
+ [if-any groupby]<input type="hidden" id="groupby" name="groupby" value="[groupby]">[end]
+ [if-any colspec]<span id="search_colspec"><input type="hidden" name="colspec" value="[colspec]"></span>[end]
+ [if-any grid_x_attr]<input type="hidden" name="x" value="[grid_x_attr]">[end]
+ [if-any grid_y_attr]<input type="hidden" name="y" value="[grid_y_attr]">[end]
+ [if-any grid_mode]<input type="hidden" name="mode" value="[if-any grid_mode]grid[end]">[end]
+ [if-any grid_cell_mode]<input type="hidden" name="cells" value="[grid_cell_mode]">[end]
+ <input type="submit" value="Search">
+ </form>
+ </span>
+
+ <span class="inIssueAdvSearch" style="margin:0 .4em">
+ <a href="advsearch">Advanced search</a>
+ </span>
+
+ <span class="inIssueSearchTips" style="margin:0 .4em">
+ <a href="searchtips">Search tips</a>
+ </span>
+
+ [if-any logged_in_user]
+ <span class="inSavedQueries" style="margin:0 .4em">
+ <a href="[logged_in_user.profile_url]queries">Saved queries</a>
+ </span>
+ [end]
+
+</div>
+</div>
+
+
+[if-any warnings]
+ <table align="center" border="0" cellspacing="0" cellpadding="0" style="margin-bottom: 6px">
+ [for warnings]
+ <tr><td class="notice">
+ [warnings]
+ </td></tr>
+ [end]
+ </table>
+[end]
+[if-any errors.query]
+ <table align="center" border="0" cellspacing="0" cellpadding="0" style="margin-bottom: 6px">
+ <tr><td class="notice">
+ [errors.query]
+ </td></tr>
+ </table>
+[end]
+
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ [# Keep track of the old scope and set it back to the old value if the
+ user selects one of the "manage" options. That ensures that if
+ the user returns to the page via the browser Back button, he/she
+ will not see the "manage" item as the current scope value.]
+ var oldScope = "2";
+ function scopeChange() {
+ [if-any logged_in_user]
+ var scopeWidget = document.getElementById('can');
+ if (scopeWidget.value == 'manageprojectqueries') {
+ scopeWidget.value = oldScope;
+ _go('../adminViews');
+ }
+ else if (scopeWidget.value == 'managemyqueries') {
+ scopeWidget.value = oldScope;
+ _go('[format "js"][logged_in_user.profile_url][end]queries');
+ }
+ else {
+ oldScope = scopeWidget.value;
+ }
+ [end]
+ }
+
+ if ($("can"))
+ $("can").addEventListener("change", scopeChange);
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/issue-search-tips.ezt b/appengine/monorail/templates/tracker/issue-search-tips.ezt
new file mode 100644
index 0000000..a57c2e2
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-search-tips.ezt
@@ -0,0 +1,331 @@
+[define category_css]css/ph_detail.css[end]
+[include "../framework/master-header.ezt" "showtabs"]
+
+[# Note: No UI element permission checking needed on this page. ]
+
+<div id="searchtips">
+
+[# TODO(jrobbins): this page should be redone in the standard Google]
+[# on-line help system. For now, it helps to document our planned search]
+[# features. ]
+
+
+<h3>Basic issue search</h3>
+
+<p>In most cases you can find the issues that you want to work with
+very easily by using the issue list headers or by entering a few
+simple keywords into the main search field.</p>
+
+<p>Whenever you visit the "<a href="list">Issues</a>" tab in your
+project, you are presented with a table of all open issues. If you
+see too many results, you can quickly filter your results by clicking
+on the table headers and choosing a specific value from the "Show
+only:" submenu.</p>
+
+[# TODO screenshot ]
+
+<p>The main search field consists of two parts:</p>
+
+<ul>
+  <li>A drop-down selection of search scopes, e.g., "All issues" or just "Open issues".</li>
+ <li>A search text field where you can enter search terms.</li>
+</ul>
+
+[# TODO screenshot ]
+
+<p>In the text field, you may enter simple search terms, or add any of
+the search operators described below.</p>
+
+<p>You can also use the search text field to jump directly to any
+issue by entering its issue number. If you wish to search for issues
+that contain a number, rather than jumping to that issue, enclose the
+number in quotation marks.</p>
+
+<p>Behind the scenes, the search scope is simply an additional set of
+search terms that is automatically combined with the user's search
+terms to make a complete query.</p>
+
+
+<h3>Advanced issue search</h3>
+
+<p>The <a href="advsearch">Advanced Search</a> page helps you
+compose a complex query. The advanced search form breaks the search
+down into several popular criteria and allows you to specify each one
+easily. The search criteria boil down to the same thing as the search
+operators described below, but you don't need to remember the operator
+names.</p>
+
+
+
+<h3>Search operators</h3>
+
+<p>Just like the Google web search that everyone knows and loves, you
+can search for issues by simply entering a few words. However, you
+may get a few more results than you expected. When you need to search
+more precisely, you can use search operators for more power.</p>
+
+<h4>Searching in specific fields</h4>
+
+<p>Normal search terms will match words found in any field of an
+issue. You can narrow the search to a specific field by using the
+name of the field. The built-in field operators are summary:,
+description:, comment:, status:, reporter:, owner:, cc:, commentby:,
+and label:. You can limit your search to just open issues by using
+is:open, or to just closed issues by using isnot:open.</p>
+[# TODO(jrobbins): dateopened:]
+
+<p>For example, here's how to search for issues with the word
+<b>calculation</b> in the summary field.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="summary:calculation">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>When searching for issues owned by a specific user, you can use his
+or her username. When referring to yourself, you can also use the
+special term <b>me</b>. For example, this restricts the search to
+issues that are owned by you.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="owner:username">
+ <input type="submit" name="btn" value="Search">
+</form>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="owner:me">
+  <input type="submit" name="btn" [if-any logged_in_user][else]disabled="disabled"[end] value="Search">
+ [if-any logged_in_user][else]
+ <span style="white-space:nowrap"><a href="[login_url]"
+ >Sign in</a> to try this example</span>
+ [end]</p>
+</form>
+
+<p>Rather than have a large number of predefined fields, our issue
+tracker stores many issue details as labels.</p>
+
+<p>For example, if you labeled security-related issues with the label
+<b>Security</b>, here's how to search for them.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="label:security">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<p>In addition to simple one-word labels, you can use two part labels
+that specify an attribute and a value, like <b>Priority-High</b>,
+<b>Priority-Medium</b>, and <b>Priority-Low</b>. You can search for
+these with the label: operator, or you can use the first part of the
+label name like an operator.</p>
+
+<p>For example, if you labeled high priority issues with
+<b>Priority-High</b>, here's one way to search for them.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="label:Priority-High">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>And, here is a more compact way to do the same search.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="Priority:High">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>For <b>components</b>, the default search will find issues in that component
+and all of its subcomponents.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="component:UI">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>And of course, you can combine any of these field operators.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="is:open owner:me Hostlist:Fixit">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<h3>Empty (or non-empty) field search</h3>
+
+<p>For each built-in field operator, you can use the 'has' operator to search for
+issues with empty or non-empty fields.</p>
+
+<p>For example, here's how to search for issues whose owner field is non-empty,
+assigned to someone.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="has:owner">
+ <input type="submit" name="btn" value="Search">
+</form>
+
+<p>Or, you can use '-has' operator for negation, to search for issues with empty
+fields.</p>
+
+<p>For example, here's how to search for issues whose owner field is empty
+because no one is assigned to it.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="-has:owner">
+ <input type="submit" name="btn" value="Search">
+</form>
+
+
+<h3>Multiple values in search terms</h3>
+
+[# TODO(jrobbins): put back OR documentation when OR is working.]
+
+<p>You can search for two values for one field, or two labels
+with the same prefix, by using a comma-separated list of values.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="Priority:High,Medium">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<h3>Exact value search</h3>
+
+<p>You can search for issues that exactly match the given term by using
+the search operator '='.</p>
+
+<p>For example, searching for 'Milestone=2009' only matches issues with the
+label 'Milestone-2009', while searching for 'Milestone:2009' matches issues
+with the labels 'Milestone-2009', 'Milestone-2009-Q1', 'Milestone-2009-Q3',
+etc.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="Milestone=2009">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>Similarly, using exact matching on components will get you only those issues
+that are in that component, not including any of its descendants.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="component=UI">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<h3>Star search</h3>
+
+<p>Any logged in user can mark any issue with a star. The star
+indicates interest in the issue.</p>
+
+<p>For example, to quickly see all the issues in this project that you
+have starred, you could use the following:</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="is:starred">
+ <input type="submit" name="btn" [if-any logged_in_user][else]disabled="disabled"[end] value="Search">
+ [if-any logged_in_user][else]
+ <span style="white-space:nowrap"><a href="[login_url]"
+ >Sign in</a> to try this example</span>
+ [end]</p>
+</form>
+
+<p>And, to see the issues that exactly three users have starred, use the following:</p>
+[# TODO(jrobbins): should search for issues with *at least* N stars.]
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="stars:3">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<h3>Jump to issue and numeric search</h3>
+
+<p>You can jump directly to a specific issue by entering its ID in the
+search field.</p>
+
+<p>For example, to jump to issue 1, just search for 1. If there is no
+existing issue with that ID, the system will search for issues that
+contain that number anywhere in the issue.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="1">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>If you just want to search for issues that contain the number 1, without
+jumping to issue 1, enclose the number in quotation marks.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value=""1"">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<h3>Attachment search</h3>
+
+<p>Users can attach files to any issues, either when issues are created or as
+part of issue comments.</p>
+
+<p>To quickly see all the issues that have attachments, use the following:</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="has:attachment">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>Or, you can search for a specific filename of the attachment.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="attachment:screenshot">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>You can also search for the file extension of the attachment.</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="attachment:png">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+<h3>Date range search</h3>
+
+<p>You can perform searches based on date ranges.</p>
+
+<p>This search syntax is divided into two parts, the action and the date,
+[[]action]:[[]date]</p>
+
+<p>Supported actions are: 'opened-after', 'opened-before',
+'modified-after', 'modified-before', 'closed-after', and 'closed-before'.
+And the date must be specified as 'YYYY-MM-DD', 'YYYY/MM/DD' or 'today-N'.</p>
+
+<p>For example, if you want to search for issues opened after 2009/4/1, you
+could do the following:</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="opened-after:2009/4/1">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+<p>Or, if you want to search for issues modified 20 days before today's date,
+you could do the following:</p>
+
+<form action="list" method="GET">
+ <p><input type="text" size="25" name="q" value="modified-before:today-20">
+ <input type="submit" name="btn" value="Search"></p>
+</form>
+
+
+</div>
+
+[include "../framework/footer-script.ezt"]
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ _fetchOptions(
+ '[projectname]', 'issueOptions',
+ CS_env.token, [project.cached_content_timestamp]);
+ _onload();
+});
+</script>
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/templates/tracker/issue-update-form.ezt b/appengine/monorail/templates/tracker/issue-update-form.ezt
new file mode 100644
index 0000000..3b12500
--- /dev/null
+++ b/appengine/monorail/templates/tracker/issue-update-form.ezt
@@ -0,0 +1,490 @@
+<div id="makechanges">
+ [# Note: user must have AddIssueComment permission to even include this file. ]
+
+ <div class="h4" style="margin-bottom: 0"
+ ><label for="addCommentTextArea">Add a comment
+ [if-any offer_make_changes]and make changes[end]</label>
+ </div>
+ <div id="makechangesarea" class="closed" style="margin-top:0; padding:5px">
+
+ [if-any discourage_plus_one][if-any page_perms.SetStar][if-any read_only][else]
+ <div class="updates" style="margin-bottom: 1em; padding-left:5px">
+ <a class="star" id="star2" style="text-decoration:none; cursor:pointer; color:[if-any starred]cornflowerblue[else]gray[end]" title="[if-any starred]Un-s[else]S[end]tar this issue">
+ [if-any starred]★[else]☆[end]
+ </a>
+ <b id="vote_feedback">Vote for this issue and get email change notifications</b>
+ </div>
+ [end][end][end]
+
+ <div>
+ <form action="detail.do" id="issue_update_form"
+ method="POST" enctype="multipart/form-data">
+ <input type="hidden" name="_charset_" value="">
+ <input type="hidden" name="token" value="[form_token]">
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="can" value="[can]">
+ <input type="hidden" name="q" value="[query]">
+ <input type="hidden" name="colspec" value="[colspec]">
+ <input type="hidden" name="sort" value="[sortspec]">
+ <input type="hidden" name="groupby" value="[groupby]">
+ <input type="hidden" name="start" value="[start]">
+ <input type="hidden" name="num" value="[num]">
+ <input type="hidden" name="pagegen" value="[pagegen]">
+ <table cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td>
+ [if-any errors.comment]
+ <div class="fielderror">[errors.comment]</div>
+ [end]
+ <textarea cols="80" rows="8" name="comment" id="addCommentTextArea"
+ class="issue_text">[initial_comment]</textarea><br>
+ [if-any allow_attachments]
+ <div id="attachmentarea"></div>
+ <span id="attachprompt"><img width="16" height="16" src="/static/images/paperclip.png" border="0"
+ alt="A paperclip">
+ <a href="#" id="attachafile">Attach a file</a></span>
+ <span id="attachmaxsize" style="margin-left:2em; display:none">Max. attachments per comment: [max_attach_size]</span><br>
+ [if-any errors.attachments]
+ <div class="fielderror">[errors.attachments]</div>
+ [end]
+ [else]
+ <div style="color:#666">Issue attachment storage quota exceeded.</div>
+ [end]
+ <br>
+ </td>
+ [if-any discourage_plus_one]
+ <td valign="top">
+ <div class="tip">
+ Each comment triggers notification emails.
+ So, please do not post
+ "<tt style="white-space:nowrap">+1 Me too!</tt>".<br>
+ Instead, click the star icon.
+ </div>
+ </td>
+ [end]
+ </tr>
+ <tr>
+ <td>
+ <table cellspacing="0" cellpadding="3" border="0" class="rowmajor">
+ [# Only show specific issue fields if the user can edit them.]
+ [if-any page_perms.EditIssue page_perms.EditIssueSummary]
+ <tr><th style="width: 1em"><label for="summary">Summary:</label></th>
+ <td class="inplace" colspan="2">
+ <input type="text" size="90"
+ name="summary" id="summary" value="[initial_summary]">
+ [if-any errors.summary]
+ <div class="fielderror">[errors.summary]</div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+
+ [if-any page_perms.EditIssue page_perms.EditIssueStatus]
+ <tr><th><label for="statusedit">Status:</label></th><td class="inplace" colspan="2">
+ <input type="text" id="statusedit" style="width: 12em" autocomplete="off"
+ name="status" value="[initial_status]">
+ <span id="merge_area" style="margin-left:2em;">
+ Merge into issue:
+ <input type="text" id="merge_into" name="merge_into" style="width: 5em"
+ value="[is initial_merge_into "0"][else][initial_merge_into][end]">
+ </span>
+ [if-any errors.merge_into_id]
+ <div class="fielderror">[errors.merge_into_id]</div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [if-any page_perms.EditIssue page_perms.EditIssueOwner]
+ <tr><th><label for="owneredit">Owner:</label></th><td class="inplace">
+ <input type="text" id="owneredit" autocomplete="off"
+ style="width: 12em"
+ name="owner" value="[is initial_owner "----"][else][initial_owner][end]">
+ [if-any errors.owner]
+ <div class="fielderror" id="ownererror">[errors.owner]</div>
+ [end]
+ </td>
+ </tr>
+ [end]
+
+ [if-any page_perms.EditIssue page_perms.EditIssueCc]
+ <tr><th><label for="memberccedit">Cc:</label></th><td class="inplace" colspan="2">
+ <input type="text" multiple id="memberccedit" size="90" autocomplete="off"
+ name="cc" value="[initial_cc]">
+ [if-any errors.cc]
+ <div class="fielderror">[errors.cc]</div>
+ [end]
+ </td></tr>
+ [end]
+
+ [if-any page_perms.EditIssue] [# TODO(jrobbins): page_perms.EditIssueComponent]
+ <tr><th><label for="componentedit">Components:</label></th><td class="inplace" colspan="2">
+ <input type="text" id="componentedit" size="90" autocomplete="off"
+ name="components" value="[initial_components]">
+ [if-any errors.components]
+ <div class="fielderror">[errors.components]</div>
+ [end]
+ </td></tr>
+ [end]
+
+ [if-any page_perms.EditIssue][# Show field editing elements iff user can edit.]
+ [define any_fields_to_reveal]No[end]
+ <tbody class="collapse">
+ [for fields]
+ [if-any fields.applicable]
+ [# TODO(jrobbins): determine applicability dynamically and update fields in JS]
+ <tr [if-any fields.display][else]class="ifExpand"[define any_fields_to_reveal]Yes[end][end]>
+ <th class="vt" title="[fields.field_def.docstring_short][if-any fields.field_def.validate_help]
+ [fields.field_def.validate_help][end]">[fields.field_name]:</th>
+ <td class="vt">
+ [include "field-value-widgets.ezt" fields.field_def.is_multivalued_bool]
+ <div class="fielderror" style="display:none" id="error_custom_[fields.field_id]"></div>
+ </td>
+              </tr>
+ [end]
+ [end]
+ [is any_fields_to_reveal "Yes"]
+ <tr class="ifCollapse">
+               <td colspan="2"><a href="#" class="toggleCollapse">Show all fields</a></td>
+ </tr>
+ [end]
+ </tbody>
+ [end]
+
+
+ [if-any page_perms.EditIssue][# Show label editing elements iff the user can edit.]
+ <tr><th class="vt"><label for="label0">Labels:</label></th>
+ <td class="labelediting" colspan="2">
+ [include "label-fields.ezt" "just-two"]
+ </td>
+ </tr>
+
+ <tr><th style="white-space:nowrap"><label for="blocked_on">Blocked on:</label></th>
+ <td class="inplace" colspan="2">
+ <input type="text" name="blocked_on" id="blocked_on" value="[initial_blocked_on]">
+ [if-any errors.blocked_on]
+ <div class="fielderror">[errors.blocked_on]</div>
+ [end]
+ </td>
+ </tr>
+ <tr><th><label for="blocking">Blocking:</label></th>
+ <td class="inplace" colspan="2">
+ <input type="text" name="blocking" id="blocking" value="[initial_blocking]" />
+ [if-any errors.blocking]
+ <div class="fielderror">[errors.blocking]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr id="copy_issue_form_fragment" style="display:none">
+ <td style="white-space:nowrap">
+ <b><label for="copy_to">Copy to project:</label> </b>
+ </td>
+ <td>
+ <input type="text" name="copy_to" id="copy_to" autocomplete="off"
+ [if-any errors.copy_to_project]value="[errors.copy_to_project]"[else]value="[projectname]"[end] >
+ <span id="derived_labels_target"></span>
+ [if-any errors.copy_to]
+ <div class="fielderror">[errors.copy_to]</div>
+ [end]
+ </td>
+ </tr>
+
+ <tr id="move_issue_form_fragment" style="display:none">
+ <td style="white-space:nowrap">
+ <b><label for="move_to">Move to project:</label> </b>
+ </td>
+ <td>
+ <input type="text" name="move_to" id="move_to" autocomplete="off"
+ [if-any errors.move_to_project]value="[errors.move_to_project]"[end] >
+ <span id="derived_labels_target"></span>
+ [if-any errors.move_to]
+ <div class="fielderror">[errors.move_to]</div>
+ [end]
+ </td>
+ </tr>
+
+ [end][# if page_perms.EditIssue]
+
+ [if-any show_captcha]
+ <tr><th class="vt" style="white-space:nowrap">Human Verification:</th>
+ <td colspan="2">
+ [include "../framework/captcha-field.ezt"]
+ </td>
+ </tr>
+ [end]
+
+ [include "../framework/label-validation-row.ezt"]
+ </table>
+
+ <input type="submit" id="submit_btn" name="btn" value="Save changes">
+ <input type="button" id="discard" name="nobtn" value="Discard" data-local-id="[issue.local_id]">
+ [if-any page_perms.EditIssue]
+ <span style="margin-left:1.5em"><label for="after_issue_update">And then:</label></span>
+ <select name="after_issue_update" id="after_issue_update">
+ <option value="0" [is after_issue_update "0"]selected=selected[end]>Go up to issue list</option>
+ <option value="1" [is after_issue_update "1"]selected=selected[end]>Stay on this issue</option>
+ <option value="2" [is after_issue_update "2"]selected=selected[end]>Go to next issue</option>
+ </select>
+ <input type="hidden" name="next_id" value="[if-any flipper.show][flipper.next_id][else][end]">
+ [end]
+
+ [if-any page_perms.EditIssue]
+ <input type="checkbox" checked="checked" name="send_email" id="send_email"
+ style="margin-left:1.5em">
+ <label for="send_email" title="Send issue change notifications to interested users">Send email</label>
+ [end]
+
+ [if-any page_perms.DeleteIssue]
+ <label for="more_actions" style="margin-left:3em">More actions:</label>
+ <select name="more_actions" id="more_actions" style="display:none">
+ <option value="0" selected="selected" disabled="1">More actions...</option>
+ <option value="delete">Delete issue</option>
+ <option value="copy" [if-any offer_issue_copy_move][else]disabled="disabled"[end]>Copy issue</option>
+ <option value="move" [if-any offer_issue_copy_move][else]disabled="disabled"[end]>Move issue</option>
+ </select>
+ [end]
+
+ </td>
+ </tr>
+ </table>
+ </form>
+
+ [if-any page_perms.DeleteIssue]
+ <div id="delete_div"><br><br>
+ <form action="delete.do" method="post" id="delete_form">
+ <input type="hidden" name="token" value="[delete_form_token]">
+ <input type="hidden" name="id" value="[issue.local_id]">
+ <input type="hidden" name="delete" value="true">
+ <input type="submit" name="deletebtn" value="Delete issue">
+ </form>
+ </div>
+
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ [# Hide the non-js UI and show a better UI to users that have JS enabled.]
+ [if-any errors.copy_to][else]
+ document.getElementById('copy_issue_form_fragment').style.display = "none";
+ [end]
+ [if-any errors.move_to][else]
+ document.getElementById('move_issue_form_fragment').style.display = "none";
+ [end]
+ [if-any page_perms.DeleteIssue]
+ document.getElementById('delete_div').style.display = "none";
+ [end]
+
+  // TODO(jrobbins): _attachIssueMoveValidator('move_to', '[issue.local_id]');
+});
+ </script>
+
+ <script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var more_actions = document.getElementById('more_actions');
+ more_actions.style.display = "";
+});
+ </script>
+ [end]
+
+
+ </div>[# makechangesarea]
+</div>[# makechanges]
+
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("attachafile")) {
+ $("attachafile").addEventListener("click", function(event) {
+ _addAttachmentFields("attachmentarea");
+ event.preventDefault();
+ });
+ }
+
+ if ($("addCommentTextArea")) {
+ $("addCommentTextArea").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+
+ if ($("submit_btn")) {
+ $("submit_btn").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ $("submit_btn").addEventListener("mousedown", function(event) {
+ _acrob(null);
+ });
+ $("submit_btn").addEventListener("click", function(event) {
+ _trimCommas();
+ userMadeChanges = false;
+ TKR_isDirty = false;
+ });
+ }
+ if ($("discard")) {
+ $("discard").addEventListener("focus", function(event) {
+ _acrob(null);
+ });
+ $("discard").addEventListener("click", function(event) {
+ _acrob(null);
+ _confirmDiscardUpdate('detail?id=' + event.target.getAttribute("data-local-id"));
+ return false;
+ });
+ }
+ if ($("more_actions")) {
+ $("more_actions").addEventListener("change", function(event) {
+ _handleDetailActions();
+ });
+ }
+
+ if ($("summary")) {
+ $("summary").addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ $("summary").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ if ($("blocked_on")) {
+ $("blocked_on").addEventListener("focus", function(event) {
+ _acrob(null);
+ _acof(event);
+ });
+ $("blocked_on").addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ if ($("statusedit")) {
+ $("statusedit").addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $("statusedit").addEventListener("keyup", function(event) {
+ _dirty();
+ return _confirmNovelStatus($("statusedit"));
+ });
+ }
+
+ var _idsToAddDefaultListeners = [[]
+ "owneredit", "memberccedit", "componentedit", "copy_to", "move_to"];
+ for (var i = 0; i < _idsToAddDefaultListeners.length; i++) {
+ var id = _idsToAddDefaultListeners[[]i];
+ if ($(id)) {
+ $(id).addEventListener("focus", function(event) {
+ _acof(event);
+ });
+ $(id).addEventListener("keyup", function(event) {
+ _dirty();
+ return true;
+ });
+ }
+ }
+
+ _allOrigLabels = [[]
+ "[label0]", "[label1]", "[label2]", "[label3]", "[label4]",
+ "[label5]", "[label6]", "[label7]", "[label8]", "[label9]",
+ "[label10]", "[label11]", "[label12]", "[label13]", "[label14]",
+ "[label15]", "[label16]", "[label17]", "[label18]", "[label19]",
+ "[label20]", "[label21]", "[label22]", "[label23]"];
+
+ _lfidprefix = 'labeledit';
+
+ window.allowSubmit = true;
+ $("issue_update_form").addEventListener("submit", function(event) {
+ if (!_checkPlusOne())
+ event.preventDefault();
+ [# User cannot remove restricted labels if they cannot see them.]
+ [if-any page_perms.EditIssue]
+ if (!_checkUnrestrict([if-any prevent_restriction_removal]true[else]false[end]))
+ event.preventDefault();
+ [end]
+ if (allowSubmit) {
+ allowSubmit = false;
+ $("submit_btn").value = "Adding comment...";
+ $("submit_btn").disabled = "disabled";
+ }
+ else {
+ event.preventDefault();
+ }
+ });
+
+ if ($("star2"))
+ $("star2").addEventListener("click", function (event) {
+ _TKR_toggleStar($("star2"), "[projectname]", [issue.local_id], "[set_star_token]");
+ _TKR_syncStarIcons($("star2"), "star");
+ });
+
+ if ($("flag_spam")) {
+ $("flag_spam").addEventListener("click", function(event) {
+ $("spam_form").submit();
+ return;
+ });
+ }
+});
+</script>
+
+[# If the user can edit issue metadata, we need to do a bunch of JS setup for editing features.
+ Otherwise, if they can only enter comments, they still might need to retry a failed CAPTCHA. ]
+
+[if-any page_perms.EditIssue page_perms.EditIssueStatus page_perms.EditIssueOwner page_perms.EditIssueCc]
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ [if-any page_perms.EditIssue]
+ _exposeExistingLabelFields();
+ [end]
+
+ [if-any errors.custom_fields]
+ [for errors.custom_fields]
+ var field_error;
+ field_error = document.getElementById('error_custom_' + [errors.custom_fields.field_id]);
+ field_error.innerText = "[errors.custom_fields.message]";
+ field_error.style.display = "";
+ [end]
+ [end]
+
+
+function checksubmit() {
+ var restrict_to_known = [if-any restrict_to_known]true[else]false[end];
+ var submit = $('submit_btn');
+ var blocksubmitmsg = $('blocksubmitmsg');
+ var cg = $('cg');
+ if (cg != undefined) { submit.disabled='disabled'; }
+ var confirmmsg = $('confirmmsg');
+ var statusedit = $('statusedit');
+ var merge_area = $('merge_area');
+ var statuses_offer_merge = [[] [for statuses_offer_merge]"[statuses_offer_merge]"[if-index statuses_offer_merge last][else],[end][end] ];
+ if ((restrict_to_known && confirmmsg && confirmmsg.innerText) ||
+ (blocksubmitmsg && blocksubmitmsg.innerHTML) ||
+ (cg && cg.value == "") ||
+ (!allowSubmit)) {
+ submit.disabled='disabled';
+ } else {
+ submit.disabled='';
+ }
+
+ if (statusedit) {
+ var offer_merge = 'none';
+ for (var i = 0; i < statuses_offer_merge.length; i++) {
+ if (statusedit.value == statuses_offer_merge[[]i]) offer_merge = '';
+ }
+ merge_area.style.display = offer_merge;
+ }
+}
+
+[if-any any_errors]
+ // Take user directly to the errors.
+ if (!location.hash) {
+ location.hash = "#makechanges";
+ }
+[end]
+
+checksubmit();
+setInterval(checksubmit, 700); [# catch changes that were not keystrokes, e.g., paste menu item.]
+
+});
+</script>
+[end]
+
+
+[include "field-value-widgets-js.ezt"]
diff --git a/appengine/monorail/templates/tracker/label-fields.ezt b/appengine/monorail/templates/tracker/label-fields.ezt
new file mode 100644
index 0000000..c48adf0
--- /dev/null
+++ b/appengine/monorail/templates/tracker/label-fields.ezt
@@ -0,0 +1,126 @@
+[# Make a 3x8 grid of label entry form fields with autocomplete on each one.
+
+ Args:
+ arg0: if "just-two" is passed, only show the first two rows
+ and give the user links to click to expose more rows.
+]
+
+<div id="LF_row1" class="nowrap">
+ <input type="text" class="labelinput" id="label0" size="20" autocomplete="off"
+ name="label" value="[label0]">
+ <input type="text" class="labelinput" id="label1" size="20" autocomplete="off"
+ name="label" value="[label1]">
+ <input type="text" class="labelinput" id="label2" size="20" autocomplete="off"
+ name="label" value="[label2]">
+</div>
+
+<div id="LF_row2" class="nowrap">
+ <input type="text" class="labelinput" id="label3" size="20" autocomplete="off"
+ name="label" value="[label3]">
+ <input type="text" class="labelinput" id="label4" size="20" autocomplete="off"
+ name="label" value="[label4]">
+ <input type="text" class="labelinput" id="label5" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row3" data-hide-id="addrow2"[end]
+ name="label" value="[label5]">
+ [is arg0 "just-two"]<span id="addrow2" class="fakelink" data-instead="LF_row3">Add a row</span>[end]
+</div>
+
+<div id="LF_row3" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label6" size="20" autocomplete="off"
+ name="label" value="[label6]">
+ <input type="text" class="labelinput" id="label7" size="20" autocomplete="off"
+ name="label" value="[label7]">
+ <input type="text" class="labelinput" id="label8" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row4" data-hide-id="addrow3"[end]
+ name="label" value="[label8]">
+ [is arg0 "just-two"]<span id="addrow3" class="fakelink" data-instead="LF_row4">Add a row</span>[end]
+</div>
+
+<div id="LF_row4" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label9" size="20" autocomplete="off"
+ name="label" value="[label9]">
+ <input type="text" class="labelinput" id="label10" size="20" autocomplete="off"
+ name="label" value="[label10]">
+ <input type="text" class="labelinput" id="label11" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row5" data-hide-id="addrow4"[end]
+ name="label" value="[label11]">
+ [is arg0 "just-two"]<span id="addrow4" class="fakelink" data-instead="LF_row5">Add a row</span>[end]
+</div>
+
+<div id="LF_row5" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label12" size="20" autocomplete="off"
+ name="label" value="[label12]">
+ <input type="text" class="labelinput" id="label13" size="20" autocomplete="off"
+ name="label" value="[label13]">
+ <input type="text" class="labelinput" id="label14" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row6" data-hide-id="addrow5"[end]
+ name="label" value="[label14]">
+ [is arg0 "just-two"]<span id="addrow5" class="fakelink" data-instead="LF_row6">Add a row</span>[end]
+</div>
+
+<div id="LF_row6" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label15" size="20" autocomplete="off"
+ name="label" value="[label15]">
+ <input type="text" class="labelinput" id="label16" size="20" autocomplete="off"
+ name="label" value="[label16]">
+ <input type="text" class="labelinput" id="label17" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row7" data-hide-id="addrow6"[end]
+ name="label" value="[label17]">
+ [is arg0 "just-two"]<span id="addrow6" class="fakelink" data-instead="LF_row7">Add a row</span>[end]
+</div>
+
+<div id="LF_row7" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label18" size="20" autocomplete="off"
+ name="label" value="[label18]">
+ <input type="text" class="labelinput" id="label19" size="20" autocomplete="off"
+ name="label" value="[label19]">
+ <input type="text" class="labelinput" id="label20" size="20" autocomplete="off"
+ [is arg0 "just-two"]data-show-id="LF_row8" data-hide-id="addrow7"[end]
+ name="label" value="[label20]">
+ [is arg0 "just-two"]<span id="addrow7" class="fakelink" data-instead="LF_row8">Add a row</span>[end]
+</div>
+
+<div id="LF_row8" [is arg0 "just-two"]style="display:none"[end] class="nowrap">
+ <input type="text" class="labelinput" id="label21" size="20" autocomplete="off"
+ name="label" value="[label21]">
+ <input type="text" class="labelinput" id="label22" size="20" autocomplete="off"
+ name="label" value="[label22]">
+ <input type="text" class="labelinput" id="label23" size="20" autocomplete="off"
+ name="label" value="[label23]">
+</div>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ var labelInputs = document.getElementsByClassName("labelinput");
+ for (var i = 0; i < labelInputs.length; ++i) {
+ var labelInput = labelInputs[[]i];
+ labelInput.addEventListener("keyup", function (event) {
+ _dirty();
+ if (event.target.getAttribute("data-show-id") &&
+ event.target.getAttribute("data-hide-id") &&
+ event.target.value) {
+ _showID(event.target.getAttribute("data-show-id"));
+ _hideID(event.target.getAttribute("data-hide-id"));
+ }
+ return _vallab(event.target);
+ });
+ labelInput.addEventListener("blur", function (event) {
+ return _vallab(event.target);
+ });
+ labelInput.addEventListener("focus", function (event) {
+ return _acof(event);
+ });
+ }
+
+ var addRowLinks = document.getElementsByClassName("fakelink");
+ for (var i = 0; i < addRowLinks.length; ++i) {
+ var rowLink = addRowLinks[[]i];
+ rowLink.addEventListener("click", function (event) {
+ _acrob(null);
+ var insteadID = event.target.getAttribute("data-instead");
+ if (insteadID)
+ _showInstead(insteadID, this);
+ });
+ }
+});
+</script>
diff --git a/appengine/monorail/templates/tracker/render-plain-text.ezt b/appengine/monorail/templates/tracker/render-plain-text.ezt
new file mode 100644
index 0000000..e79bb2a
--- /dev/null
+++ b/appengine/monorail/templates/tracker/render-plain-text.ezt
@@ -0,0 +1,8 @@
+[# Safely display some text that includes some markup, completely removing the markup.
+
+   arg0 is a list of element EZT objects that have a tag and content and maybe some
+ other attributes.
+
+ We do not use extra whitespace in this template because it generates text into a
+ context where whitespace is significant.
+][arg0.content]
\ No newline at end of file
diff --git a/appengine/monorail/templates/tracker/render-rich-text.ezt b/appengine/monorail/templates/tracker/render-rich-text.ezt
new file mode 100644
index 0000000..1d44902
--- /dev/null
+++ b/appengine/monorail/templates/tracker/render-rich-text.ezt
@@ -0,0 +1,10 @@
+[# Safely display some text that includes some markup. Only the tags
+ that we explicitly whitelist are allowed, everything else gets
+ escaped.
+
+ description.text_runs is a list of element EZT objects that have a
+ tag and content and maybe some other attributes.
+
+ We do not use extra whitespace in this template because it
+ generates text into a context where whitespace is significant.
+][is arg0.tag ""][arg0.content][end][is arg0.tag "a"]<a href="[arg0.href]" title="[arg0.title]" class="[arg0.css_class]" rel="nofollow">[arg0.content]</a>[end][is arg0.tag "b"]<b>[arg0.content]</b>[end]
\ No newline at end of file
diff --git a/appengine/monorail/templates/tracker/spam-moderation-queue.ezt b/appengine/monorail/templates/tracker/spam-moderation-queue.ezt
new file mode 100644
index 0000000..86aac23
--- /dev/null
+++ b/appengine/monorail/templates/tracker/spam-moderation-queue.ezt
@@ -0,0 +1,106 @@
+[define title]Spam Moderation Queue[end]
+[define category_css]css/ph_list.css[end]
+[define page_css]css/ph_detail.css[end][# needed for infopeek]
+
+[if-any projectname]
+ [include "../framework/master-header.ezt" "showtabs"]
+[else]
+ [include "../framework/master-header.ezt" "hidetabs"]
+[end]
+[include "../framework/js-placeholders.ezt" "showtabs"]
+
+<h2>Spam Moderation Queue</h2>
+[include "../framework/artifact-list-pagination-part.ezt"]
+
+<form method="POST" action="spamqueue.do">
+<button type="submit" value="mark_spam" disabled="true">Mark as Spam</button>
+<button type="submit" value="mark_ham" disabled="true">Mark as Ham</button>
+
+<span style="margin:0 .7em">Select:
+ <a id="selectall" href="#">All</a>
+ <a id="selectnone" href="#">None</a>
+</span>
+
+<table id='resultstable'>
+<tr>
+ <td>
+ </td>
+ <td>ID</td>
+ <td>Author</td>
+ <td>Summary</td>
+ <td>Snippet</td>
+ <td>Opened at</td>
+ <td>Spam?</td>
+ <td>Verdict reason</td>
+ <td>Confidence</td>
+ <td>Verdict at</td>
+ <td>Flag count</td>
+</tr>
+[for spam_queue]
+<tr>
+ <td><input type='checkbox' name='issue_local_id' value='[spam_queue.issue.local_id]'/></td>
+ <td><a href='/p/[projectname]/issues/detail?id=[spam_queue.issue.local_id]'>[spam_queue.issue.local_id]</a></td>
+ <td><a href='/u/[spam_queue.reporter.email]'>[spam_queue.reporter.email]</a></td>
+ <td><a href='/p/[projectname]/issues/detail?id=[spam_queue.issue.local_id]'>[spam_queue.summary]</a></td>
+ <td>
+ [spam_queue.comment_text]
+ </td>
+ <td>[spam_queue.issue.opened_timestamp]</td>
+ <td>[spam_queue.issue.is_spam]</td>
+
+ <td>[spam_queue.reason]</td>
+ <td>[spam_queue.classifier_confidence]</td>
+ <td>[spam_queue.verdict_time]</td>
+ <td>[spam_queue.flag_count]</td>
+</tr>
+[end]
+</table>
+
+[include "../framework/artifact-list-pagination-part.ezt"]
+<input type="hidden" name="token" value="[moderate_spam_token]">
+<button type="submit" value="mark_spam" disabled="true">Mark as Spam</button>
+<button type="submit" value="mark_ham" disabled="true">Mark as Ham</button>
+
+</form>
+
+<script type="text/javascript" nonce="[nonce]">
+runOnLoad(function() {
+ if ($("selectall")) {
+ $("selectall").addEventListener("click", function() {
+ _selectAllIssues();
+ setDisabled(false);
+ });
+ }
+ if ($("selectnone")) {
+ $("selectnone").addEventListener("click", function() {
+ _selectNoneIssues();
+ setDisabled(true);
+ });
+ }
+ var checkboxNodes = document.querySelectorAll("input[type=checkbox]");
+ var checkboxes = Array();
+ for (var i = 0; i < checkboxNodes.length; ++i) {
+ var checkbox = checkboxNodes.item(i);
+ checkboxes.push(checkbox);
+ checkbox.addEventListener("change", updateEnabled);
+ }
+
+ function updateEnabled() {
+ var anySelected = checkboxes.some(function(checkbox) {
+ return checkbox.checked;
+ });
+ setDisabled(!anySelected);
+ }
+
+ function setDisabled(disabled) {
+ var buttons = document.querySelectorAll("button[type=submit]");
+ for (var i = 0; i < buttons.length; ++i) {
+ buttons.item(i).disabled = disabled;
+ }
+ }
+});
+</script>
+
+[include "../framework/footer-script.ezt"]
+
+[include "../framework/master-footer.ezt"]
diff --git a/appengine/monorail/testing/__init__.py b/appengine/monorail/testing/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/testing/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/testing/api_clients.cfg b/appengine/monorail/testing/api_clients.cfg
new file mode 100644
index 0000000..9b0e686
--- /dev/null
+++ b/appengine/monorail/testing/api_clients.cfg
@@ -0,0 +1,29 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# Defines fake monorail api clients for testing
+
+clients {
+ client_email: "123456789@developer.gserviceaccount.com"
+ client_id: "123456789.apps.googleusercontent.com"
+ display_name: "johndoe@example.com"
+ description: "John Doe needs api access"
+ project_permissions {
+ project: "chromium"
+ role: contributor
+ }
+ contacts: "johndoe@example.com"
+}
+
+clients {
+ client_email: "bugdroid1@chromium.org"
+ client_id: "987654321.apps.googleusercontent.com"
+ description: "bugdroid"
+ project_permissions {
+ project: "chromium"
+ role: committer
+ }
+ contacts: "bugdroidowner@example.com"
+}
diff --git a/appengine/monorail/testing/fake.py b/appengine/monorail/testing/fake.py
new file mode 100644
index 0000000..6447078
--- /dev/null
+++ b/appengine/monorail/testing/fake.py
@@ -0,0 +1,1531 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Fake object classes that are useful for unit tests."""
+
+import collections
+import logging
+import re
+
+import settings
+from framework import framework_helpers
+from framework import monorailrequest
+from framework import permissions
+from framework import validate
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from proto import usergroup_pb2
+from services import caches
+from services import issue_svc
+from services import project_svc
+from services import user_svc
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+# Many fakes return partial or constant values, regardless of their arguments.
+# pylint: disable=unused-argument
+
+BOUNDARY = '-----thisisaboundary'
+OWNER_ROLE = 'OWNER_ROLE'
+COMMITTER_ROLE = 'COMMITTER_ROLE'
+CONTRIBUTOR_ROLE = 'CONTRIBUTOR_ROLE'
+
+
+def Project(
+ project_name='proj', project_id=None, state=project_pb2.ProjectState.LIVE,
+ access=project_pb2.ProjectAccess.ANYONE, moved_to=None,
+ cached_content_timestamp=None,
+ owner_ids=None, committer_ids=None, contributor_ids=None):
+ """Returns a project protocol buffer with the given attributes."""
+ project_id = project_id or hash(project_name)
+ return project_pb2.MakeProject(
+ project_name, project_id=project_id, state=state, access=access,
+ moved_to=moved_to, cached_content_timestamp=cached_content_timestamp,
+ owner_ids=owner_ids, committer_ids=committer_ids,
+ contributor_ids=contributor_ids)
+
+
+def MakeTestIssue(
+ project_id, local_id, summary, status, owner_id, labels=None,
+ derived_labels=None, derived_status=None, merged_into=0, star_count=0,
+ derived_owner_id=0, issue_id=None, reporter_id=None, opened_timestamp=None,
+ closed_timestamp=None, modified_timestamp=None, is_spam=False,
+ component_ids=None, project_name=None, field_values=None):
+ """Easily make an Issue for testing."""
+ issue = tracker_pb2.Issue()
+ issue.project_id = project_id
+ issue.project_name = project_name
+ issue.local_id = local_id
+ issue.issue_id = issue_id if issue_id else 100000 + local_id
+ issue.reporter_id = reporter_id if reporter_id else owner_id
+ issue.summary = summary
+ issue.status = status
+ issue.owner_id = owner_id
+ issue.derived_owner_id = derived_owner_id
+ issue.star_count = star_count
+ issue.merged_into = merged_into
+ issue.is_spam = is_spam
+ if opened_timestamp:
+ issue.opened_timestamp = opened_timestamp
+ if modified_timestamp:
+ issue.modified_timestamp = modified_timestamp
+ if closed_timestamp:
+ issue.closed_timestamp = closed_timestamp
+ if labels is not None:
+ if isinstance(labels, basestring):
+ labels = labels.split()
+ issue.labels.extend(labels)
+ if derived_labels is not None:
+ if isinstance(derived_labels, basestring):
+ derived_labels = derived_labels.split()
+ issue.derived_labels.extend(derived_labels)
+ if derived_status is not None:
+ issue.derived_status = derived_status
+ if component_ids is not None:
+ issue.component_ids = component_ids
+ if field_values is not None:
+ issue.field_values = field_values
+ return issue
+
+
+def MakeTestConfig(project_id, labels, statuses):
+ """Convenient function to make a ProjectIssueConfig object."""
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
+ if isinstance(labels, basestring):
+ labels = labels.split()
+ if isinstance(statuses, basestring):
+ statuses = statuses.split()
+ config.well_known_labels = [
+ tracker_pb2.LabelDef(label=lab) for lab in labels]
+ config.well_known_statuses = [
+ tracker_pb2.StatusDef(status=stat) for stat in statuses]
+ return config
+
+
+class MonorailConnection(object):
+ """Fake connection to databases for use in tests."""
+
+ def Commit(self):
+ pass
+
+ def Close(self):
+ pass
+
+
+class MonorailRequest(monorailrequest.MonorailRequest):
+ """Subclass of MonorailRequest suitable for testing."""
+
+ def __init__(self, user_info=None, project=None, perms=None, **kwargs):
+ """Construct a test MonorailRequest.
+
+ Typically, this is constructed via testing.helpers.GetRequestObjects,
+ which also causes url parsing and optionally initializes the user,
+ project, and permissions info.
+
+ Args:
+ user_info: a dict of user attributes to set on a MonorailRequest object.
+ For example, "user_id: 5" causes self.auth.user_id=5.
+ project: the Project pb for this request.
+ perms: a PermissionSet for this request.
+ """
+ super(MonorailRequest, self).__init__(**kwargs)
+
+ if user_info is not None:
+ for key in user_info:
+ setattr(self.auth, key, user_info[key])
+ if 'user_id' in user_info:
+ self.auth.effective_ids = {user_info['user_id']}
+
+ self.perms = perms or permissions.ADMIN_PERMISSIONSET
+ self.project = project
+
+
+class UserGroupService(object):
+ """Fake UserGroupService class for testing other code."""
+
+ def __init__(self):
+ self.group_settings = {}
+ self.group_members = {}
+ self.group_addrs = {}
+ self.role_dict = {}
+
+ def TestAddGroupSettings(
+ self, group_id, email, who_can_view=None, anyone_can_join=False,
+ who_can_add=None, external_group_type=None,
+ last_sync_time=0, friend_projects=None):
+ """Set up a fake group for testing.
+
+ Args:
+ group_id: int user ID of the new user group.
+ email: string email address to identify the user group.
+ who_can_view: string enum 'owners', 'members', or 'anyone'.
+ anyone_can_join: optional boolean to allow any users to join the group.
+ who_can_add: optional list of int user IDs of users who can add
+ more members to the group.
+ """
+ friend_projects = friend_projects or []
+ group_settings = usergroup_pb2.MakeSettings(
+ who_can_view or 'members',
+ external_group_type, last_sync_time, friend_projects)
+ self.group_settings[group_id] = group_settings
+ self.group_addrs[group_id] = email
+ # TODO(jrobbins): store the other settings.
+
+ def TestAddMembers(self, group_id, user_ids, role='member'):
+ self.group_members.setdefault(group_id, []).extend(user_ids)
+ for user_id in user_ids:
+ self.role_dict.setdefault(group_id, {})[user_id] = role
+
+ def LookupMemberships(self, _cnxn, user_id):
+ memberships = {
+ group_id for group_id, member_ids in self.group_members.iteritems()
+ if user_id in member_ids}
+ return memberships
+
+ def DetermineWhichUserIDsAreGroups(self, _cnxn, user_ids):
+ return [uid for uid in user_ids
+ if uid in self.group_settings]
+
+ def GetAllUserGroupsInfo(self, cnxn):
+ infos = []
+ for group_id in self.group_settings:
+ infos.append(
+ (self.group_addrs[group_id],
+ len(self.group_members.get(group_id, [])),
+ self.group_settings[group_id], group_id))
+
+ return infos
+
+ def GetAllGroupSettings(self, _cnxn, group_ids):
+ return {gid: self.group_settings[gid]
+ for gid in group_ids
+ if gid in self.group_settings}
+
+ def GetGroupSettings(self, cnxn, group_id):
+ return self.GetAllGroupSettings(cnxn, [group_id]).get(group_id)
+
+ def CreateGroup(self, cnxn, services, email, who_can_view_members,
+ ext_group_type=None, friend_projects=None):
+ friend_projects = friend_projects or []
+ group_id = services.user.LookupUserID(
+ cnxn, email, autocreate=True, allowgroups=True)
+ group_settings = usergroup_pb2.MakeSettings(
+ who_can_view_members, ext_group_type, 0, friend_projects)
+ self.UpdateSettings(cnxn, group_id, group_settings)
+ return group_id
+
+ def DeleteGroups(self, cnxn, group_ids):
+ member_ids_dict, owner_ids_dict = self.LookupMembers(cnxn, group_ids)
+ citizens_id_dict = collections.defaultdict(list)
+ for g_id, user_ids in member_ids_dict.iteritems():
+ citizens_id_dict[g_id].extend(user_ids)
+ for g_id, user_ids in owner_ids_dict.iteritems():
+ citizens_id_dict[g_id].extend(user_ids)
+ for g_id, citizen_ids in citizens_id_dict.iteritems():
+ # Remove group members, friend projects and settings
+ self.RemoveMembers(cnxn, g_id, citizen_ids)
+ self.group_settings.pop(g_id, None)
+
+ def LookupMembers(self, _cnxn, group_id_list):
+ members_dict = {}
+ owners_dict = {}
+ for gid in group_id_list:
+ members_dict[gid] = []
+ owners_dict[gid] = []
+ for mid in self.group_members.get(gid, []):
+ if self.role_dict.get(gid, {}).get(mid) == 'owner':
+ owners_dict[gid].append(mid)
+ elif self.role_dict.get(gid, {}).get(mid) == 'member':
+ members_dict[gid].append(mid)
+ return members_dict, owners_dict
+
+ def LookupAllMembers(self, _cnxn, group_id_list):
+ direct_members, direct_owners = self.LookupMembers(
+ _cnxn, group_id_list)
+ members_dict = {}
+ owners_dict = {}
+ for gid in group_id_list:
+ members = direct_members[gid]
+ owners = direct_owners[gid]
+ owners_dict[gid] = owners
+ members_dict[gid] = members
+ group_ids = set([uid for uid in members + owners
+ if uid in self.group_settings])
+ while group_ids:
+ indirect_members, indirect_owners = self.LookupMembers(
+ _cnxn, group_ids)
+ child_members = set()
+ child_owners = set()
+ for _, children in indirect_members.iteritems():
+ child_members.update(children)
+ for _, children in indirect_owners.iteritems():
+ child_owners.update(children)
+ members_dict[gid].extend(list(child_members))
+ owners_dict[gid].extend(list(child_owners))
+ group_ids = set(self.DetermineWhichUserIDsAreGroups(
+ _cnxn, list(child_members) + list(child_owners)))
+ members_dict[gid] = list(set(members_dict[gid]))
+ return members_dict, owners_dict
+
+
+ def RemoveMembers(self, _cnxn, group_id, old_member_ids):
+ current_member_ids = self.group_members.get(group_id, [])
+ revised_member_ids = [mid for mid in current_member_ids
+ if mid not in old_member_ids]
+ self.group_members[group_id] = revised_member_ids
+
+ def UpdateMembers(self, _cnxn, group_id, member_ids, new_role):
+ self.RemoveMembers(_cnxn, group_id, member_ids)
+ self.TestAddMembers(group_id, member_ids, new_role)
+
+ def UpdateSettings(self, _cnxn, group_id, group_settings):
+ self.group_settings[group_id] = group_settings
+
+ def ExpandAnyUserGroups(self, cnxn, user_ids):
+ group_ids = set(self.DetermineWhichUserIDsAreGroups(cnxn, user_ids))
+ direct_ids = [uid for uid in user_ids if uid not in group_ids]
+ member_ids_dict, owner_ids_dict = self.LookupAllMembers(cnxn, group_ids)
+
+ indirect_ids = set()
+ for gid in group_ids:
+ indirect_ids.update(member_ids_dict[gid])
+ indirect_ids.update(owner_ids_dict[gid])
+ # It's possible that a user has both direct and indirect memberships of
+ # one group. In this case, mark the user as direct member only.
+ indirect_ids = [iid for iid in indirect_ids if iid not in direct_ids]
+
+ return direct_ids, list(indirect_ids)
+
+ def LookupVisibleMembers(
+ self, cnxn, group_id_list, perms, effective_ids, services):
+ settings_dict = self.GetAllGroupSettings(cnxn, group_id_list)
+ group_ids = settings_dict.keys()
+
+ direct_member_ids_dict, direct_owner_ids_dict = self.LookupMembers(
+ cnxn, group_ids)
+ all_member_ids_dict, all_owner_ids_dict = self.LookupAllMembers(
+ cnxn, group_ids)
+ visible_member_ids_dict = {}
+ visible_owner_ids_dict = {}
+ for gid in group_ids:
+ member_ids = all_member_ids_dict[gid]
+ owner_ids = all_owner_ids_dict[gid]
+ if permissions.CanViewGroup(perms, effective_ids, settings_dict[gid],
+ member_ids, owner_ids, []):
+ visible_member_ids_dict[gid] = direct_member_ids_dict[gid]
+ visible_owner_ids_dict[gid] = direct_owner_ids_dict[gid]
+
+ return visible_member_ids_dict, visible_owner_ids_dict
+
+ def ValidateFriendProjects(self, cnxn, services, friend_projects):
+ project_names = filter(None, re.split('; |, | |;|,', friend_projects))
+ id_dict = services.project.LookupProjectIDs(cnxn, project_names)
+ missed_projects = []
+ result = []
+ for p_name in project_names:
+ if p_name in id_dict:
+ result.append(id_dict[p_name])
+ else:
+ missed_projects.append(p_name)
+ error_msg = ''
+ if missed_projects:
+ error_msg = 'Project(s) %s do not exist' % ', '.join(missed_projects)
+ return None, error_msg
+ else:
+ return result, None
+
+
+class CacheManager(object):
+
+ def __init__(self, invalidate_tbl=None):
+ self.last_call = None
+ self.processed_invalidations_up_to = 0
+
+ def MakeCache(self, kind, max_size=None, use_value_centric_cache=False):
+ """Make a new cache and register it for future invalidations."""
+ if use_value_centric_cache:
+ cache = caches.ValueCentricRamCache(self, kind, max_size=max_size)
+ else:
+ cache = caches.RamCache(self, kind, max_size=max_size)
+ return cache
+
+ def DoDistributedInvalidation(self, cnxn):
+ """Drop any cache entries that were invalidated by other jobs."""
+ self.last_call = 'DoDistributedInvalidation', cnxn
+
+ def StoreInvalidateRows(self, cnxn, kind, keys):
+ """Store database rows to let all frontends know to invalidate."""
+ self.last_call = 'StoreInvalidateRows', cnxn, kind, keys
+
+ def StoreInvalidateAll(self, cnxn, kind):
+ """Store a database row to let all frontends know to invalidate."""
+ self.last_call = 'StoreInvalidateAll', cnxn, kind
+
+
+
+class UserService(object):
+
+ def __init__(self):
+ """Creates a test-appropriate UserService object."""
+ self.users_by_email = {}
+ self.users_by_id = {}
+ self.test_users = {}
+
+ def TestAddUser(self, email, user_id, add_user=True, banned=False):
+ """Add a user to the fake UserService instance.
+
+ Args:
+ email: Email of the user.
+ user_id: int user ID.
+ add_user: Flag whether user pb should be created, i.e. whether a
+ Monorail account should be created
+ banned: Boolean to set the user as banned
+
+ Returns:
+ The User PB that was added, or None.
+ """
+ self.users_by_email[email] = user_id
+ self.users_by_id[user_id] = email
+
+ user = None
+ if add_user:
+ user = user_pb2.MakeUser()
+ user.is_site_admin = False
+ user.email = email
+ user.obscure_email = True
+ if banned:
+ user.banned = 'is banned'
+ self.test_users[user_id] = user
+
+ return user
+
+ def GetUser(self, _cnxn, user_id):
+ return self.test_users.get(user_id)
+
+ def _CreateUser(self, _cnxn, email):
+ if email in self.users_by_email:
+ return
+ user_id = framework_helpers.MurmurHash3_x86_32(email)
+ self.users_by_id[user_id] = email
+ self.users_by_email[email] = user_id
+
+ def _CreateUsers(self, cnxn, emails):
+ for email in emails:
+ self._CreateUser(cnxn, email)
+
+ def LookupUserID(self, cnxn, email, autocreate=False, allowgroups=False):
+ user_id = self.users_by_email.get(email)
+ if not user_id and validate.IsValidEmail(email):
+ if autocreate:
+ self._CreateUser(cnxn, email)
+ user_id = self.users_by_email.get(email)
+ else:
+ raise user_svc.NoSuchUserException(email)
+
+ return user_id
+
+ def GetUsersByIDs(self, cnxn, user_ids, use_cache=True):
+ user_dict = {}
+ for user_id in user_ids:
+ if user_id and self.test_users.get(user_id):
+ user_dict[user_id] = self.test_users[user_id]
+ return user_dict
+
+ def LookupExistingUserIDs(self, cnxn, emails):
+ email_dict = {
+ email: self.users_by_email[email]
+ for email in emails
+ if email in self.users_by_email}
+ return email_dict
+
+ def LookupUserIDs(self, cnxn, emails, autocreate=False,
+ allowgroups=False):
+ email_dict = {}
+ for email in emails:
+ user_id = self.LookupUserID(
+ cnxn, email, autocreate=autocreate, allowgroups=allowgroups)
+ if user_id:
+ email_dict[email] = user_id
+ return email_dict
+
+ def LookupUserEmail(self, _cnxn, user_id):
+ email = self.users_by_id.get(user_id)
+ return email
+
+ def LookupUserEmails(self, cnxn, user_ids):
+ user_dict = {
+ user_id: self.LookupUserEmail(cnxn, user_id)
+ for user_id in user_ids}
+ return user_dict
+
+ def UpdateUser(self, _cnxn, user_id, user):
+ """Updates the user pb."""
+ self.test_users[user_id] = user
+
+ def UpdateUserSettings(
+ self, cnxn, user_id, user, notify=None, notify_starred=None,
+ obscure_email=None, after_issue_update=None,
+ is_site_admin=None, ignore_action_limits=None,
+ is_banned=None, banned_reason=None, action_limit_updates=None,
+ dismissed_cues=None, keep_people_perms_open=None, preview_on_hover=None):
+ self.UpdateUser(cnxn, user_id, user)
+
+
+class AbstractStarService(object):
+ """Fake StarService."""
+
+ def __init__(self):
+ self.stars_by_item_id = {}
+ self.stars_by_starrer_id = {}
+ self.expunged_item_ids = []
+
+ def ExpungeStars(self, _cnxn, item_id):
+ self.expunged_item_ids.append(item_id)
+ old_starrer = self.stars_by_item_id.get(item_id)
+ self.stars_by_item_id[item_id] = []
+ if self.stars_by_starrer_id.get(old_starrer):
+ self.stars_by_starrer_id[old_starrer] = [
+ it for it in self.stars_by_starrer_id[old_starrer]
+ if it != item_id]
+
+ def LookupItemStarrers(self, _cnxn, item_id):
+ return self.stars_by_item_id.get(item_id, [])
+
+ def LookupStarredItemIDs(self, _cnxn, starrer_user_id):
+ return self.stars_by_starrer_id.get(starrer_user_id, [])
+
+ def IsItemStarredBy(self, cnxn, item_id, starrer_user_id):
+ return item_id in self.LookupStarredItemIDs(cnxn, starrer_user_id)
+
+ def CountItemStars(self, cnxn, item_id):
+ return len(self.LookupItemStarrers(cnxn, item_id))
+
+ def CountItemsStars(self, cnxn, item_ids):
+ return {item_id: self.CountItemStars(cnxn, item_id)
+ for item_id in item_ids}
+
+ def SetStar(self, cnxn, item_id, starrer_user_id, starred):
+ if starred and not self.IsItemStarredBy(cnxn, item_id, starrer_user_id):
+ self.stars_by_item_id.setdefault(item_id, []).append(starrer_user_id)
+ self.stars_by_starrer_id.setdefault(starrer_user_id, []).append(item_id)
+
+ elif not starred and self.IsItemStarredBy(cnxn, item_id, starrer_user_id):
+ self.stars_by_item_id[item_id].remove(starrer_user_id)
+ self.stars_by_starrer_id[starrer_user_id].remove(item_id)
+
+
+class UserStarService(AbstractStarService):
+ pass
+
+
+class ProjectStarService(AbstractStarService):
+ pass
+
+
+class IssueStarService(AbstractStarService):
+
+ # pylint: disable=arguments-differ
+ def SetStar(
+ self, cnxn, _service, _config, issue_id, starrer_user_id,
+ starred):
+ super(IssueStarService, self).SetStar(
+ cnxn, issue_id, starrer_user_id, starred)
+
+
+class ProjectService(object):
+ """Fake ProjectService object.
+
+ Provides methods for creating users and projects, which are accessible
+ through parts of the real ProjectService interface.
+ """
+
+ def __init__(self):
+ self.test_projects = {} # project_name -> project_pb
+ self.projects_by_id = {}
+ self.test_star_manager = None
+ self.indexed_projects = {}
+ self.unindexed_projects = set()
+ self.index_counter = 0
+ self.project_commitments = {}
+
+ def TestAddProject(
+ self, name, summary='', state=project_pb2.ProjectState.LIVE,
+ owner_ids=None, committer_ids=None, contrib_ids=None,
+ issue_notify_address=None, state_reason='',
+ description=None, project_id=None, process_inbound_email=None,
+ access=None):
+ """Add a project to the fake ProjectService object.
+
+ Args:
+ name: The name of the project. Will replace any existing project under
+ the same name.
+ summary: The summary string of the project.
+ state: Initial state for the project from project_pb2.ProjectState.
+ owner_ids: List of user ids for project owners
+ committer_ids: List of user ids for project committers
+ contrib_ids: List of user ids for project contributors
+ issue_notify_address: email address to send issue change notifications
+ state_reason: string describing the reason the project is in its current
+ state.
+ description: The description string for this project
+ project_id: A unique integer identifier for the created project.
+ process_inbound_email: True to make this project accept inbound email.
+ access: One of the values of enum project_pb2.ProjectAccess.
+
+ Returns:
+ A populated project PB.
+ """
+ proj_pb = project_pb2.Project()
+ proj_pb.project_id = project_id or hash(name) % 100000
+ proj_pb.project_name = name
+ proj_pb.summary = summary
+ proj_pb.state = state
+ proj_pb.state_reason = state_reason
+ if description is not None:
+ proj_pb.description = description
+
+ self.TestAddProjectMembers(owner_ids, proj_pb, OWNER_ROLE)
+ self.TestAddProjectMembers(committer_ids, proj_pb, COMMITTER_ROLE)
+ self.TestAddProjectMembers(contrib_ids, proj_pb, CONTRIBUTOR_ROLE)
+
+ if issue_notify_address is not None:
+ proj_pb.issue_notify_address = issue_notify_address
+ if process_inbound_email is not None:
+ proj_pb.process_inbound_email = process_inbound_email
+ if access is not None:
+ proj_pb.access = access
+
+ self.test_projects[name] = proj_pb
+ self.projects_by_id[proj_pb.project_id] = proj_pb
+ return proj_pb
+
+ def TestAddProjectMembers(self, user_id_list, proj_pb, role):
+ if user_id_list is not None:
+ for user_id in user_id_list:
+ if role == OWNER_ROLE:
+ proj_pb.owner_ids.append(user_id)
+ elif role == COMMITTER_ROLE:
+ proj_pb.committer_ids.append(user_id)
+ elif role == CONTRIBUTOR_ROLE:
+ proj_pb.contributor_ids.append(user_id)
+
+ def LookupProjectIDs(self, cnxn, project_names):
+ return {
+ project_name: self.test_projects[project_name].project_id
+ for project_name in project_names
+ if project_name in self.test_projects}
+
+ def LookupProjectNames(self, cnxn, project_ids):
+ projects_dict = self.GetProjects(cnxn, project_ids)
+ return {p.project_id: p.project_name
+ for p in projects_dict.itervalues()}
+
+ def CreateProject(
+ self, _cnxn, project_name, owner_ids, committer_ids,
+ contributor_ids, summary, description,
+ state=project_pb2.ProjectState.LIVE, access=None, read_only=None,
+ home_page=None, docs_url=None, logo_gcs_id=None, logo_file_name=None):
+ """Create and store a Project with the given attributes."""
+ if project_name in self.test_projects:
+ raise project_svc.ProjectAlreadyExists()
+ self.TestAddProject(
+ project_name, summary=summary, state=state,
+ owner_ids=owner_ids, committer_ids=committer_ids,
+ contrib_ids=contributor_ids, description=description,
+ access=access)
+
+ def ExpungeProject(self, _cnxn, project_id):
+ project = self.projects_by_id.get(project_id)
+ if project:
+ self.test_projects.pop(project.project_name, None)
+
+ def GetProjectsByName(self, _cnxn, project_name_list, use_cache=True):
+ return {
+ pn: self.test_projects[pn] for pn in project_name_list
+ if pn in self.test_projects}
+
+ def GetProjectByName(self, _cnxn, name, use_cache=True):
+ return self.test_projects.get(name)
+
+ def GetProjectList(self, cnxn, project_id_list, use_cache=True):
+ project_dict = self.GetProjects(cnxn, project_id_list, use_cache=use_cache)
+ return [project_dict[pid] for pid in project_id_list
+ if pid in project_dict]
+
+ def GetVisibleLiveProjects(self, _cnxn, logged_in_user, effective_ids,
+ use_cache=True):
+ return self.projects_by_id.keys()
+
+ def GetProjects(self, _cnxn, project_ids, use_cache=True):
+ result = {}
+ for project_id in project_ids:
+ project = self.projects_by_id.get(project_id)
+ if project:
+ result[project_id] = project
+ return result
+
+ def GetProject(self, cnxn, project_id, use_cache=True):
+ """Load the specified project from the database."""
+ project_id_dict = self.GetProjects(cnxn, [project_id], use_cache=use_cache)
+ return project_id_dict.get(project_id)
+
+ @staticmethod
+ def IsValidProjectName(string):
+ """Return true if the given string is a valid project name."""
+ return project_svc.RE_PROJECT_NAME.match(string)
+
+ def GetProjectCommitments(self, _cnxn, project_id):
+ if project_id in self.project_commitments:
+ return self.project_commitments[project_id]
+
+ project_commitments = project_pb2.ProjectCommitments()
+ project_commitments.project_id = project_id
+ return project_commitments
+
+ def TestStoreProjectCommitments(self, project_commitments):
+ key = project_commitments.project_id
+ self.project_commitments[key] = project_commitments
+
+ def UpdateProject(
+ self, _cnxn, project_id, summary=None, description=None,
+ state=None, state_reason=None, access=None,
+ issue_notify_address=None, attachment_bytes_used=None,
+ attachment_quota=None, moved_to=None, process_inbound_email=None,
+ only_owners_remove_restrictions=None,
+ read_only_reason=None, cached_content_timestamp=None,
+ only_owners_see_contributors=None, delete_time=None,
+ recent_activity=None, revision_url_format=None, home_page=None,
+ docs_url=None, logo_gcs_id=None, logo_file_name=None):
+ project = self.projects_by_id.get(project_id)
+ if not project:
+ raise project_svc.NoSuchProjectException(
+ 'Project "%s" not found!' % project_id)
+
+ # TODO(jrobbins): implement all passed arguments - probably as a utility
+ # method shared with the real persistence implementation.
+ if read_only_reason is not None:
+ project.read_only_reason = read_only_reason
+
+ def UpdateProjectRoles(
+ self, _cnxn, project_id, owner_ids, committer_ids,
+ contributor_ids, now=None):
+ project = self.projects_by_id.get(project_id)
+ if not project:
+ raise project_svc.NoSuchProjectException(
+ 'Project "%s" not found!' % project_id)
+
+ project.owner_ids = owner_ids
+ project.committer_ids = committer_ids
+ project.contributor_ids = contributor_ids
+
+ def MarkProjectDeletable(
+ self, _cnxn, project_id, _config_service):
+ project = self.projects_by_id[project_id]
+ project.project_name = 'DELETABLE_%d' % project_id
+ project.state = project_pb2.ProjectState.DELETABLE
+
+ def UpdateRecentActivity(self, _cnxn, _project_id, now=None):
+ pass
+
+ def GetUserRolesInAllProjects(self, _cnxn, effective_ids):
+ owned_project_ids = set()
+ membered_project_ids = set()
+ contrib_project_ids = set()
+
+ for project in self.projects_by_id.itervalues():
+ if not effective_ids.isdisjoint(project.owner_ids):
+ owned_project_ids.add(project.project_id)
+ elif not effective_ids.isdisjoint(project.committer_ids):
+ membered_project_ids.add(project.project_id)
+ elif not effective_ids.isdisjoint(project.contributor_ids):
+ contrib_project_ids.add(project.project_id)
+
+ return owned_project_ids, membered_project_ids, contrib_project_ids
+
+
+class ConfigService(object):
+ """Fake version of ConfigService that just works in-RAM."""
+
+ def __init__(self, user_id=None):
+ self.project_configs = {}
+ self.next_field_id = 123
+ self.next_component_id = 345
+ self.expunged_configs = []
+ self.component_ids_to_templates = {}
+
+ def TemplatesWithComponent(self, _cnxn, component_id, _config):
+ return self.component_ids_to_templates.get(component_id, [])
+
+ def ExpungeConfig(self, _cnxn, project_id):
+ self.expunged_configs.append(project_id)
+
+ def GetLabelDefRows(self, cnxn, project_id):
+ """This always returns empty results. Mock it to test other cases."""
+ return []
+
+ def GetLabelDefRowsAnyProject(self, cnxn, where=None):
+ """This always returns empty results. Mock it to test other cases."""
+ return []
+
+ def LookupLabel(self, cnxn, project_id, label_id):
+ if label_id == 999:
+ return None
+ return 'label_%d_%d' % (project_id, label_id)
+
+ def LookupLabelID(self, cnxn, project_id, label, autocreate=True):
+ return 1
+
+ def LookupLabelIDs(self, cnxn, project_id, labels, autocreate=False):
+ return [idx for idx, _label in enumerate(labels)]
+
+ def LookupIDsOfLabelsMatching(self, cnxn, project_id, regex):
+ return [1, 2, 3]
+
+ def LookupStatus(self, cnxn, project_id, status_id):
+ return 'status_%d_%d' % (project_id, status_id)
+
+ def LookupStatusID(self, cnxn, project_id, status, autocreate=True):
+ if status:
+ return 1
+ else:
+ return 0
+
+ def LookupStatusIDs(self, cnxn, project_id, statuses):
+ return [idx for idx, _status in enumerate(statuses)]
+
+ def LookupClosedStatusIDs(self, cnxn, project_id):
+ return [7, 8, 9]
+
+ def StoreConfig(self, _cnxn, config):
+ self.project_configs[config.project_id] = config
+
+ def GetProjectConfig(self, _cnxn, project_id, use_cache=True):
+ if project_id in self.project_configs:
+ return self.project_configs[project_id]
+ else:
+ return tracker_bizobj.MakeDefaultProjectIssueConfig(project_id)
+
+ def GetProjectConfigs(self, _cnxn, project_ids, use_cache=True):
+ config_dict = {}
+ for project_id in project_ids:
+ if project_id in self.project_configs:
+ config_dict[project_id] = self.project_configs[project_id]
+ else:
+ config_dict[project_id] = tracker_bizobj.MakeDefaultProjectIssueConfig(
+ project_id)
+
+ return config_dict
+
+ def UpdateConfig(
+ self, cnxn, project, well_known_statuses=None,
+ statuses_offer_merge=None, well_known_labels=None,
+ excl_label_prefixes=None, templates=None,
+ default_template_for_developers=None, default_template_for_users=None,
+ list_prefs=None, restrict_to_known=None):
+ project_id = project.project_id
+ project_config = self.GetProjectConfig(cnxn, project_id, use_cache=False)
+
+ if well_known_statuses is not None:
+ tracker_bizobj.SetConfigStatuses(project_config, well_known_statuses)
+
+ if statuses_offer_merge is not None:
+ project_config.statuses_offer_merge = statuses_offer_merge
+
+ if well_known_labels is not None:
+ tracker_bizobj.SetConfigLabels(project_config, well_known_labels)
+
+ if excl_label_prefixes is not None:
+ project_config.exclusive_label_prefixes = excl_label_prefixes
+
+ if templates is not None:
+ project_config.templates = templates
+
+ if default_template_for_developers is not None:
+ project_config.default_template_for_developers = (
+ default_template_for_developers)
+ if default_template_for_users is not None:
+ project_config.default_template_for_users = default_template_for_users
+
+ if list_prefs:
+ default_col_spec, default_sort_spec, x_attr, y_attr = list_prefs
+ project_config.default_col_spec = default_col_spec
+ project_config.default_sort_spec = default_sort_spec
+ project_config.default_x_attr = x_attr
+ project_config.default_y_attr = y_attr
+
+ if restrict_to_known is not None:
+ project_config.restrict_to_known = restrict_to_known
+
+ self.StoreConfig(cnxn, project_config)
+ return project_config
+
+ def CreateFieldDef(
+ self, cnxn, project_id, field_name, field_type_str, applic_type,
+ applic_pred, is_required, is_multivalued,
+ min_value, max_value, regex, needs_member, needs_perm,
+ grants_perm, notify_on, docstring, admin_ids):
+ config = self.GetProjectConfig(cnxn, project_id)
+ field_type = tracker_pb2.FieldTypes(field_type_str)
+ field_id = self.next_field_id
+ self.next_field_id += 1
+ fd = tracker_bizobj.MakeFieldDef(
+ field_id, project_id, field_name, field_type, applic_type, applic_pred,
+ is_required, is_multivalued, min_value, max_value, regex,
+ needs_member, needs_perm, grants_perm, notify_on, docstring, False)
+ config.field_defs.append(fd)
+ self.StoreConfig(cnxn, config)
+
+ def SoftDeleteFieldDef(self, cnxn, project_id, field_id):
+ config = self.GetProjectConfig(cnxn, project_id)
+ fd = tracker_bizobj.FindFieldDefByID(field_id, config)
+ fd.is_deleted = True
+ self.StoreConfig(cnxn, config)
+
+ def UpdateFieldDef(
+ self, cnxn, project_id, field_id, field_name=None,
+ applicable_type=None, applicable_predicate=None, is_required=None,
+ is_multivalued=None, min_value=None, max_value=None, regex=None,
+ needs_member=None, needs_perm=None, grants_perm=None, notify_on=None,
+ docstring=None, admin_ids=None):
+ config = self.GetProjectConfig(cnxn, project_id)
+ fd = tracker_bizobj.FindFieldDefByID(field_id, config)
+ # pylint: disable=multiple-statements
+ if field_name is not None: fd.field_name = field_name
+ if applicable_type is not None: fd.applicable_type = applicable_type
+ if applicable_predicate is not None:
+ fd.applicable_predicate = applicable_predicate
+ if is_required is not None: fd.is_required = is_required
+ if is_multivalued is not None: fd.is_multivalued = is_multivalued
+ if min_value is not None: fd.min_value = min_value
+ if max_value is not None: fd.max_value = max_value
+ if regex is not None: fd.regex = regex
+ if docstring is not None: fd.docstring = docstring
+ if admin_ids is not None: fd.admin_ids = admin_ids
+ self.StoreConfig(cnxn, config)
+
+ def CreateComponentDef(
+ self, cnxn, project_id, path, docstring, deprecated, admin_ids, cc_ids,
+ created, creator_id):
+ config = self.GetProjectConfig(cnxn, project_id)
+ cd = tracker_bizobj.MakeComponentDef(
+ self.next_component_id, project_id, path, docstring, deprecated,
+ admin_ids, cc_ids, created, creator_id)
+ config.component_defs.append(cd)
+ self.next_component_id += 1
+ self.StoreConfig(cnxn, config)
+ return self.next_component_id - 1
+
+ def UpdateComponentDef(
+ self, cnxn, project_id, component_id, path=None, docstring=None,
+ deprecated=None, admin_ids=None, cc_ids=None, created=None,
+ creator_id=None, modified=None, modifier_id=None):
+ config = self.GetProjectConfig(cnxn, project_id)
+ cd = tracker_bizobj.FindComponentDefByID(component_id, config)
+ if path is not None:
+ assert path
+ cd.path = path
+ # pylint: disable=multiple-statements
+ if docstring is not None: cd.docstring = docstring
+ if deprecated is not None: cd.deprecated = deprecated
+ if admin_ids is not None: cd.admin_ids = admin_ids
+ if cc_ids is not None: cd.cc_ids = cc_ids
+ if created is not None: cd.created = created
+ if creator_id is not None: cd.creator_id = creator_id
+ if modified is not None: cd.modified = modified
+ if modifier_id is not None: cd.modifier_id = modifier_id
+ self.StoreConfig(cnxn, config)
+
+ def DeleteComponentDef(self, cnxn, project_id, component_id):
+ """Delete the specified component definition."""
+ config = self.GetProjectConfig(cnxn, project_id)
+ config.component_defs = [
+ cd for cd in config.component_defs
+ if cd.component_id != component_id]
+ self.StoreConfig(cnxn, config)
+
  def InvalidateMemcache(self, issues, key_prefix=''):
    # No-op: this fake keeps everything in RAM, so there is no memcache
    # layer to invalidate.
    pass
+
+
class IssueService(object):
  """Fake version of IssueService that just works in-RAM."""
  # pylint: disable=unused-argument

  def __init__(self, user_id=None):
    self.user_id = user_id
    # Dictionary {project_id: issue_pb_dict}
    # where issue_pb_dict is a dictionary of the form
    # {local_id: issue_pb}
    self.issues_by_project = {}
    self.issues_by_iid = {}
    # Dictionary {project_id: comment_pb_dict}
    # where comment_pb_dict is a dictionary of the form
    # {local_id: comment_pb_list}
    self.comments_by_project = {}
    self.comments_by_iid = {}
    self.comments_by_cid = {}
    # Dictionary {attachment_id: (attachment_pb, comment_id, issue_id)}
    self.attachments_by_id = {}

    # Set of issue IDs for issues that have been indexed by calling
    # IndexIssues().
    self.indexed_issue_iids = set()

    # Test-only indication that the indexer would have been called
    # by the real DITPersist.
    self.indexer_called = False

    # Test-only sequence of updated and enqueued.
    self.updated_issues = []
    self.enqueued_issues = []

    # Test-only sequence of expunged issues and projects.
    self.expunged_issues = []
    self.expunged_former_locations = []
    self.expunged_local_ids = []

    # Test-only indicators that methods were called.
    self.get_all_issues_in_project_called = False
    self.update_issues_called = False
    self.enqueue_issues_called = False

    # The next id to return if it is > 0.
    self.next_id = -1

  def UpdateIssues(
      self, cnxn, issues, update_cols=None, just_derived=False,
      commit=True, invalidate=True):
    """Record the updated issues so tests can inspect them."""
    self.update_issues_called = True
    self.updated_issues.extend(issues)

  def EnqueueIssuesForIndexing(self, _cnxn, issues):
    """Record the issues that would have been enqueued for indexing."""
    self.enqueue_issues_called = True
    self.enqueued_issues.extend(issues)

  def ExpungeIssues(self, _cnxn, issue_ids):
    """Record the issue IDs that would have been expunged."""
    self.expunged_issues.extend(issue_ids)

  def ExpungeFormerLocations(self, _cnxn, project_id):
    """Record that former issue locations in project_id were expunged."""
    self.expunged_former_locations.append(project_id)

  def ExpungeLocalIDCounters(self, _cnxn, project_id):
    """Record that local ID counters for project_id were expunged."""
    self.expunged_local_ids.append(project_id)

  def TestAddIssue(self, issue):
    """Directly add an issue to the in-RAM store, plus its first comment."""
    project_id = issue.project_id
    self.issues_by_project.setdefault(project_id, {})
    self.issues_by_project[project_id][issue.local_id] = issue
    self.issues_by_iid[issue.issue_id] = issue

    # Adding a new issue should add the first comment to the issue
    comment = tracker_pb2.IssueComment()
    comment.project_id = issue.project_id
    comment.issue_id = issue.issue_id
    comment.content = issue.summary
    comment.timestamp = issue.opened_timestamp
    if issue.reporter_id:
      comment.user_id = issue.reporter_id
    comment.sequence = 0
    self.TestAddComment(comment, issue.local_id)

  def TestAddComment(self, comment, local_id):
    """Directly add a comment to the in-RAM store."""
    pid = comment.project_id
    if not comment.id:
      comment.id = len(self.comments_by_cid)

    self.comments_by_project.setdefault(pid, {})
    self.comments_by_project[pid].setdefault(local_id, []).append(comment)
    self.comments_by_iid.setdefault(comment.issue_id, []).append(comment)
    self.comments_by_cid[comment.id] = comment

  def TestAddAttachment(self, attachment, comment_id, issue_id):
    """Directly add an attachment to the in-RAM store."""
    if not attachment.attachment_id:
      attachment.attachment_id = len(self.attachments_by_id)

    aid = attachment.attachment_id
    self.attachments_by_id[aid] = attachment, comment_id, issue_id
    comment = self.comments_by_cid[comment_id]
    if attachment not in comment.attachments:
      comment.attachments.extend([attachment])

  def GetAttachmentAndContext(self, _cnxn, attachment_id):
    """Return (attachment, comment_id, issue_id) or raise if not found."""
    if attachment_id in self.attachments_by_id:
      attach, comment_id, issue_id = self.attachments_by_id[attachment_id]
      if not attach.deleted:
        return attach, comment_id, issue_id

    raise issue_svc.NoSuchAttachmentException()

  def GetComments(self, _cnxn, where=None, order_by=None, **kwargs):
    # This is a very limited subset of what the real GetComments() can do:
    # only lookup by comment id is supported.
    cid = kwargs.get('id')

    comment = self.comments_by_cid.get(cid)
    if comment:
      return [comment]
    else:
      return []

  def GetComment(self, cnxn, comment_id):
    """Get the requested comment, or raise an exception."""
    comments = self.GetComments(cnxn, id=comment_id)
    if len(comments) == 1:
      return comments[0]

    raise issue_svc.NoSuchCommentException()

  def ResolveIssueRefs(self, cnxn, ref_projects, default_project_name, refs):
    """Return issue_ids for the given (project_name, local_id) refs."""
    result = []
    for project_name, local_id in refs:
      project = ref_projects.get(project_name or default_project_name)
      if not project or project.state == project_pb2.ProjectState.DELETABLE:
        continue  # ignore any refs to issues in deleted projects
      try:
        issue = self.GetIssueByLocalID(cnxn, project.project_id, local_id)
        result.append(issue.issue_id)
      except issue_svc.NoSuchIssueException:
        pass  # ignore any refs to issues that don't exist

    return result

  def GetAllIssuesInProject(self, _cnxn, project_id, min_local_id=None):
    """Return all issues in the given project (min_local_id is ignored)."""
    self.get_all_issues_in_project_called = True
    if project_id in self.issues_by_project:
      # list() so callers get a sequence on both Python 2 and 3.
      return list(self.issues_by_project[project_id].values())
    else:
      return []

  def GetIssuesByLocalIDs(
      self, _cnxn, project_id, local_id_list, shard_id=None):
    """Return the issues with the given local IDs, skipping missing ones."""
    results = []
    for local_id in local_id_list:
      if (project_id in self.issues_by_project
          and local_id in self.issues_by_project[project_id]):
        results.append(self.issues_by_project[project_id][local_id])

    return results

  def GetIssueByLocalID(self, _cnxn, project_id, local_id):
    """Return the issue with the given local ID, or raise."""
    try:
      return self.issues_by_project[project_id][local_id]
    except KeyError:
      raise issue_svc.NoSuchIssueException()

  def GetAnyOnHandIssue(self, issue_ids, start=None, end=None):
    return None  # Treat them all like misses.

  def GetIssue(self, _cnxn, issue_id):
    """Return the issue with the given global ID, or raise."""
    if issue_id in self.issues_by_iid:
      return self.issues_by_iid[issue_id]
    else:
      raise issue_svc.NoSuchIssueException()

  def LookupIssueID(self, _cnxn, project_id, local_id):
    """Return the global issue ID for (project_id, local_id), or raise."""
    try:
      issue = self.issues_by_project[project_id][local_id]
    except KeyError:
      raise issue_svc.NoSuchIssueException()
    return issue.issue_id

  def GetCommentsForIssue(self, _cnxn, issue_id):
    """Return the issue's comments with sequence numbers assigned."""
    comments = self.comments_by_iid.get(issue_id, [])
    for idx, c in enumerate(comments):
      c.sequence = idx

    return comments

  def InsertIssue(self, cnxn, issue):
    """Store the issue, deriving its global ID from project and local IDs."""
    issue.issue_id = issue.project_id * 1000000 + issue.local_id
    self.issues_by_project.setdefault(issue.project_id, {})
    self.issues_by_project[issue.project_id][issue.local_id] = issue
    self.issues_by_iid[issue.issue_id] = issue
    return issue.issue_id

  def CreateIssue(
      self, cnxn, services, project_id,
      summary, status, owner_id, cc_ids, labels, field_values,
      component_ids, reporter_id, marked_description, blocked_on=None,
      blocking=None, attachments=None, timestamp=None, index_now=True):
    """Create and store a new issue; return its local ID."""
    issue = tracker_pb2.Issue()
    issue.project_id = project_id
    issue.summary = summary
    issue.status = status
    if owner_id:
      issue.owner_id = owner_id
    issue.cc_ids.extend(cc_ids)
    issue.labels.extend(labels)
    issue.field_values.extend(field_values)
    issue.reporter_id = reporter_id
    if timestamp:
      issue.opened_timestamp = timestamp

    if blocked_on:
      issue.blocked_on_iids.extend(blocked_on)
    if blocking:
      # BUGFIX: a duplicated branch also extended issue.blocking, a field
      # that no other method in this class uses; blocking_iids is the field
      # read by ApplyIssueComment and the rest of the fake.
      issue.blocking_iids.extend(blocking)

    issue.local_id = self.AllocateNextLocalID(cnxn, project_id)
    issue.issue_id = project_id * 1000000 + issue.local_id

    self.TestAddIssue(issue)
    # The description is stored as the first comment on the issue.
    self.comments_by_iid[issue.issue_id][0].content = marked_description
    return issue.local_id

  def SetUsedLocalID(self, cnxn, project_id):
    self.next_id = self.GetHighestLocalID(cnxn, project_id) + 1

  def AllocateNextLocalID(self, cnxn, project_id):
    """Return the next available local ID in the project."""
    return self.GetHighestLocalID(cnxn, project_id) + 1

  def GetHighestLocalID(self, _cnxn, project_id):
    """Return the highest used local ID in the project, or 0 if none."""
    if self.next_id > 0:
      return self.next_id - 1
    else:
      issue_dict = self.issues_by_project.get(project_id, {})
      # .values() (not PY2-only .itervalues()) works on Python 2 and 3.
      highest = max([0] + [issue.local_id for issue in issue_dict.values()])
      return highest

  def ApplyIssueComment(
      self, cnxn, services, reporter_id, project_id,
      local_id, summary, status, owner_id, cc_ids, labels, field_values,
      component_ids, blocked_on, blocking, dangling_blocked_on_refs,
      dangling_blocking_refs, merged_into, index_now=True,
      page_gen_ts=None, comment=None, inbound_message=None, attachments=None,
      timestamp=None):
    """Feel free to implement a spec-compliant return value."""
    issue = self.issues_by_project[project_id][local_id]
    amendments = []

    # BUGFIX: build each amendment *before* overwriting the issue field,
    # otherwise the amendment compares the new value against itself.
    if summary and summary != issue.summary:
      amendments.append(tracker_bizobj.MakeSummaryAmendment(
          summary, issue.summary))
      issue.summary = summary

    if status and status != issue.status:
      amendments.append(tracker_bizobj.MakeStatusAmendment(
          status, issue.status))
      issue.status = status

    issue.owner_id = owner_id
    issue.cc_ids = cc_ids
    issue.labels = labels
    issue.field_values = field_values
    issue.component_ids = component_ids

    issue.blocked_on_iids.extend(blocked_on)
    issue.blocking_iids.extend(blocking)
    issue.dangling_blocked_on_refs.extend(dangling_blocked_on_refs)
    issue.dangling_blocking_refs.extend(dangling_blocking_refs)

    if merged_into is not None:
      issue.merged_into = merged_into

    if amendments or (comment and comment.strip()) or attachments:
      comment_pb = self.CreateIssueComment(
          cnxn, project_id, local_id, reporter_id, comment,
          amendments=amendments, inbound_message=inbound_message)
    else:
      comment_pb = None

    return amendments, comment_pb

  def GetCommentsForIssues(self, _cnxn, issue_ids):
    """Return {issue_id: [comment, ...]} for the given issue IDs."""
    comments_dict = {}
    for issue_id in issue_ids:
      comments_dict[issue_id] = self.comments_by_iid[issue_id]

    return comments_dict

  def InsertComment(self, cnxn, comment, commit=True):
    issue = self.GetIssue(cnxn, comment.issue_id)
    self.TestAddComment(comment, issue.local_id)

  # pylint: disable=unused-argument
  def DeltaUpdateIssue(
      self, cnxn, services, reporter_id, project_id,
      config, issue, status, owner_id, cc_add, cc_remove, comp_ids_add,
      comp_ids_remove, labels_add, labels_remove, field_vals_add,
      field_vals_remove, fields_clear, blocked_on_add=None,
      blocked_on_remove=None, blocking_add=None, blocking_remove=None,
      merged_into=None, index_now=False, comment=None, summary=None,
      iids_to_invalidate=None, rules=None, predicate_asts=None,
      timestamp=None):
    """Return a bogus amendments list if any of the fields changed."""
    amendments = []
    if (status or owner_id or cc_add or cc_remove or labels_add or
        labels_remove or field_vals_add or field_vals_remove or fields_clear or
        blocked_on_add or blocked_on_remove or blocking_add or
        blocking_remove or merged_into or summary):
      amendments.append(tracker_bizobj.MakeStatusAmendment(
          'Updated', issue.status))

    if not amendments and (not comment or not comment.strip()):
      return [], None

    comment_pb = self.CreateIssueComment(
        cnxn, project_id, issue.local_id, reporter_id, comment,
        amendments=amendments)

    self.indexer_called = index_now
    return amendments, comment_pb

  def InvalidateIIDs(self, cnxn, iids_to_invalidate):
    # No-op: the fake has no cache layer to invalidate.
    pass

  # pylint: disable=unused-argument
  def CreateIssueComment(
      self, _cnxn, project_id, local_id, user_id, content,
      inbound_message=None, amendments=None, attachments=None, timestamp=None,
      is_spam=False, commit=True):
    """Add a comment (and optional attachments) to an issue."""
    issue = self.issues_by_project[project_id][local_id]

    comment = tracker_pb2.IssueComment()
    comment.id = len(self.comments_by_cid)
    comment.project_id = project_id
    comment.issue_id = issue.issue_id
    comment.content = content
    comment.user_id = user_id
    if timestamp is not None:
      comment.timestamp = timestamp
    else:
      comment.timestamp = 1234567890
    if amendments:
      comment.amendments.extend(amendments)
    if inbound_message:
      comment.inbound_message = inbound_message

    pid = project_id
    self.comments_by_project.setdefault(pid, {})
    self.comments_by_project[pid].setdefault(local_id, []).append(comment)
    self.comments_by_iid.setdefault(issue.issue_id, []).append(comment)
    self.comments_by_cid[comment.id] = comment

    if attachments:
      for filename, filecontent, mimetype in attachments:
        aid = len(self.attachments_by_id)
        attach = comment.attachments_add(
            attachment_id=aid,
            filename=filename,
            filesize=len(filecontent),
            mimetype=mimetype,
            blobkey='blob(%s)' % filename)
        # BUGFIX: store (attachment, comment_id, issue_id) to match the
        # layout written by TestAddAttachment and read back by
        # GetAttachmentAndContext; previously (attach, pid, comment.id)
        # was stored here.
        self.attachments_by_id[aid] = attach, comment.id, issue.issue_id

    return comment

  def GetOpenAndClosedIssues(self, _cnxn, issue_ids):
    """Partition the given issues into (open, closed) lists."""
    open_issues = []
    closed_issues = []
    for issue_id in issue_ids:
      try:
        issue = self.issues_by_iid[issue_id]
        if issue.status == 'Fixed':
          closed_issues.append(issue)
        else:
          open_issues.append(issue)
      except KeyError:
        continue

    return open_issues, closed_issues

  def GetIssuesDict(
      self, _cnxn, issue_ids, use_cache=True, shard_id=None):
    """Return {iid: issue} for the issues that exist.

    Missing IDs are silently skipped, consistent with GetIssues() below
    (previously a missing ID raised KeyError).
    """
    return {iid: self.issues_by_iid[iid]
            for iid in issue_ids if iid in self.issues_by_iid}

  def GetIssues(self, _cnxn, issue_ids, use_cache=True, shard_id=None):
    """Return the issues that exist, preserving order; missing IDs skipped."""
    results = [self.issues_by_iid[issue_id] for issue_id in issue_ids
               if issue_id in self.issues_by_iid]

    return results

  def SoftDeleteIssue(
      self, _cnxn, project_id, local_id, deleted, user_service):
    """Set or clear the deleted bit on the given issue."""
    issue = self.issues_by_project[project_id][local_id]
    issue.deleted = deleted

  def SoftDeleteComment(
      self, cnxn, project_id, local_id, sequence_num,
      deleted_by_user_id, user_service, delete=True, reindex=True,
      is_spam=False):
    """Mark a comment as (un)deleted and/or spam by its sequence number."""
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
    comments = self.GetCommentsForIssue(cnxn, issue.issue_id)
    if not comments:
      raise Exception(
          'No comments for issue, project, seq (%s, %s, %s), cannot delete'
          % (local_id, project_id, sequence_num))
    # BUGFIX: was "<", which let sequence_num == len(comments) slip past
    # this guard and IndexError on the subscript below.
    if len(comments) <= sequence_num:
      raise Exception(
          'Attempting to delete comment %s only %s comments created' %
          (sequence_num, len(comments)))
    comments[sequence_num].is_spam = is_spam
    if delete:
      comments[sequence_num].deleted_by = deleted_by_user_id
    else:
      comments[sequence_num].reset('deleted_by')

  def DeleteComponentReferences(self, _cnxn, component_id):
    """Remove component_id from every issue's component_ids list."""
    # .values() (not PY2-only .iteritems()) works on Python 2 and 3;
    # the keys were unused anyway.
    for issue in self.issues_by_iid.values():
      issue.component_ids = [
          cid for cid in issue.component_ids if cid != component_id]

  def RunIssueQuery(
      self, cnxn, left_joins, where, order_by, shard_id=None, limit=None):
    """This always returns empty results.  Mock it to test other cases."""
    return [], False

  def GetIIDsByLabelIDs(self, cnxn, label_ids, project_id, shard_id):
    """This always returns empty results.  Mock it to test other cases."""
    return []

  def GetIIDsByParticipant(self, cnxn, user_ids, project_ids, shard_id):
    """This always returns empty results.  Mock it to test other cases."""
    return []

  def MoveIssues(self, cnxn, dest_project, issues, user_service):
    """Move the given issues into dest_project, assigning new local IDs."""
    move_to = dest_project.project_id
    self.issues_by_project.setdefault(move_to, {})
    for issue in issues:
      project_id = issue.project_id
      self.issues_by_project[project_id].pop(issue.local_id)
      issue.local_id = self.AllocateNextLocalID(cnxn, move_to)
      self.issues_by_project[move_to][issue.local_id] = issue
      issue.project_id = move_to
    return []
+
+
class SpamService(object):
  """Fake version of SpamService that just works in-RAM."""

  def __init__(self, user_id=None):
    self.user_id = user_id
    # {issue_id: [user_id, ...]} of users who flagged the issue as spam.
    self.reports_by_issue_id = collections.defaultdict(list)
    # {issue_id: {comment_id: [user_id, ...]}} of comment spam flags.
    self.comment_reports_by_issue_id = collections.defaultdict(dict)
    # {issue_id: {user_id: is_spam}} of manual issue verdicts.
    self.manual_verdicts_by_issue_id = collections.defaultdict(dict)
    # {comment_id: {user_id: is_spam}} of manual comment verdicts.
    self.manual_verdicts_by_comment_id = collections.defaultdict(dict)

  def FlagIssues(self, cnxn, issue_service, issues, user_id, flagged_spam):
    """Add or withdraw user_id's spam flag on each of the given issues."""
    for issue in issues:
      reporters = self.reports_by_issue_id[issue.issue_id]
      if flagged_spam:
        reporters.append(user_id)
      else:
        reporters.remove(user_id)

  def FlagComment(self, cnxn, issue_id, comment_id, reported_user_id, user_id,
                  flagged_spam):
    """Add or withdraw user_id's spam flag on one comment."""
    reporters = self.comment_reports_by_issue_id[issue_id].setdefault(
        comment_id, [])
    if flagged_spam:
      reporters.append(user_id)
    else:
      reporters.remove(user_id)

  def RecordManualIssueVerdicts(
      self, cnxn, issue_service, issues, user_id, is_spam):
    """Store a manual spam verdict by user_id for each issue."""
    for issue in issues:
      self.manual_verdicts_by_issue_id[issue.issue_id][user_id] = is_spam

  def RecordManualCommentVerdict(
      self, cnxn, issue_service, user_service, comment_id,
      sequnce_num, user_id, is_spam):
    """Store a manual spam verdict by user_id for one comment."""
    self.manual_verdicts_by_comment_id[comment_id][user_id] = is_spam

  def RecordClassifierIssueVerdict(self, cnxn, issue, is_spam, confidence):
    """No-op: the fake does not persist classifier verdicts."""
    return

  def RecordClassifierCommentVerdict(self, cnxn, issue, is_spam, confidence):
    """No-op: the fake does not persist classifier verdicts."""
    return

  def ClassifyComment(self, comment):
    """Always classify the comment as ham with full confidence."""
    return {'outputLabel': 'ham',
            'outputMulti': [{'label': 'ham', 'score': '1.0'}]}

  def ClassifyIssue(self, issue, firstComment):
    """Always classify the issue as ham with full confidence."""
    return {'outputLabel': 'ham',
            'outputMulti': [{'label': 'ham', 'score': '1.0'}]}
+
+
class FeaturesService(object):
  """A fake implementation of FeaturesService."""

  def __init__(self):
    # Test-only records of which projects had data expunged.
    self.expunged_saved_queries = []
    self.expunged_filter_rules = []
    self.expunged_quick_edit = []

  def ExpungeSavedQueriesExecuteInProject(self, _cnxn, project_id):
    """Record that saved queries for project_id were expunged."""
    self.expunged_saved_queries.append(project_id)

  def ExpungeFilterRules(self, _cnxn, project_id):
    """Record that filter rules for project_id were expunged."""
    self.expunged_filter_rules.append(project_id)

  def ExpungeQuickEditHistory(self, _cnxn, project_id):
    """Record that quick-edit history for project_id was expunged."""
    self.expunged_quick_edit.append(project_id)

  def GetFilterRules(self, cnxn, project_id):
    """Return no filter rules."""
    return []

  def GetCannedQueriesByProjectID(self, cnxn, project_id):
    """Return no canned queries."""
    return []

  def UpdateCannedQueries(self, cnxn, project_id, canned_queries):
    """No-op: the fake does not store canned queries."""
    pass

  def GetSubscriptionsInProjects(self, cnxn, project_ids):
    """Return no subscriptions."""
    return {}

  def GetSavedQuery(self, cnxn, query_id):
    """Return a blank saved query."""
    return tracker_pb2.SavedQuery()
+
+
class PostData(object):
  """A dictionary-like object that also implements getall().

  Every stored value is assumed to be a list of strings, mirroring how
  webapp2 exposes POST data.
  """

  def __init__(self, *args, **kwargs):
    self.dictionary = dict(*args, **kwargs)

  def getall(self, key):
    """Return all values, assume that the value at key is already a list."""
    return self.dictionary.get(key, [])

  def get(self, key, default=None):
    """Return the first value for key, or default if absent.

    BUGFIX: previously `self.dictionary.get(key, [default])[0]` raised
    IndexError when key was present but mapped to an empty list; now the
    default is returned in that case too.
    """
    values = self.dictionary.get(key)
    if values:
      return values[0]
    return default

  def __getitem__(self, key):
    """Return first value, assume that the value at key is already a list."""
    return self.dictionary[key][0]

  def __contains__(self, key):
    return key in self.dictionary

  def keys(self):
    """Return the keys in the POST data."""
    return self.dictionary.keys()
+
+
class FakeFile(object):
  """A minimal file-like object whose read() returns fixed data.

  Supports use as a context manager so it can stand in for open().
  """

  def __init__(self, data=None):
    self.data = data

  def read(self):
    """Return the canned data."""
    return self.data

  def write(self, content):
    """Discard the written content."""
    return

  def __enter__(self):
    return self

  def __exit__(self, __1, __2, __3):
    return None
+
+
def gcs_open(filename, mode):
  """Fake replacement for cloudstorage.open().

  Returns a FakeFile whose read() yields the filename; mode is ignored.
  """
  return FakeFile(data=filename)
diff --git a/appengine/monorail/testing/test/__init__.py b/appengine/monorail/testing/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/testing/test/__init__.py
diff --git a/appengine/monorail/testing/test/fake_test.py b/appengine/monorail/testing/test/fake_test.py
new file mode 100644
index 0000000..a58a44d
--- /dev/null
+++ b/appengine/monorail/testing/test/fake_test.py
@@ -0,0 +1,67 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the fake module."""
+
+import inspect
+import unittest
+
+from services import cachemanager_svc
+from services import config_svc
+from services import issue_svc
+from services import project_svc
+from services import star_svc
+from services import user_svc
+from services import usergroup_svc
+from testing import fake
+
# Maps each fake class to the real service class whose interface it mimics;
# FakeMetaTest below compares the method signatures of each pair.
fake_class_map = {
  fake.AbstractStarService: star_svc.AbstractStarService,
  fake.CacheManager: cachemanager_svc.CacheManager,
  fake.ProjectService: project_svc.ProjectService,
  fake.ConfigService: config_svc.ConfigService,
  fake.IssueService: issue_svc.IssueService,
  fake.UserGroupService: usergroup_svc.UserGroupService,
  fake.UserService: user_svc.UserService,
  }
+
+
class FakeMetaTest(unittest.TestCase):
  """Meta-test: fake service classes must mirror the real service classes."""

  def testFunctionsHaveSameSignatures(self):
    """Verify that the fake class methods match the real ones."""
    # dict.items() works on both Python 2 and 3; iteritems() is PY2-only.
    for fake_cls, real_cls in fake_class_map.items():
      fake_attrs = set(dir(fake_cls))
      real_attrs = set(dir(real_cls))
      both_attrs = fake_attrs.intersection(real_attrs)
      to_test = [x for x in both_attrs if '__' not in x]
      for name in to_test:
        real_attr = getattr(real_cls, name)
        if inspect.ismethod(real_attr):
          real_spec = inspect.getargspec(real_attr)
          fake_spec = inspect.getargspec(getattr(fake_cls, name))
          # Number of defaulted (keyword) args in each spec.
          real_kw_len = len(real_spec[3]) if real_spec[3] else 0
          fake_kw_len = len(fake_spec[3]) if fake_spec[3] else 0

          # assertEqual (assertEquals is a deprecated alias): same number
          # of positional args, same number of kwargs, same kwarg names
          # and defaults.
          self.assertEqual(
              len(real_spec[0]) - real_kw_len,
              len(fake_spec[0]) - fake_kw_len,
              'Unequal number of args on %s.%s' % (fake_cls.__name__, name))
          self.assertEqual(
              real_kw_len, fake_kw_len,
              'Unequal number of kwargs on %s.%s' % (fake_cls.__name__, name))
          if real_kw_len:
            self.assertEqual(
                real_spec[0][-real_kw_len:],
                fake_spec[0][-fake_kw_len:],
                'Mismatched kwargs on %s.%s' % (fake_cls.__name__, name))
          self.assertEqual(
              real_spec[3], fake_spec[3],
              'Mismatched kwarg defaults on %s.%s' % (fake_cls.__name__, name))


if __name__ == '__main__':
  unittest.main()
diff --git a/appengine/monorail/testing/test/testing_helpers_test.py b/appengine/monorail/testing/test/testing_helpers_test.py
new file mode 100644
index 0000000..f36595c
--- /dev/null
+++ b/appengine/monorail/testing/test/testing_helpers_test.py
@@ -0,0 +1,75 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the testing_helpers module."""
+
+import unittest
+
+from testing import testing_helpers
+
+
class TestingHelpersTest(unittest.TestCase):
  """Tests for MakeMonorailRequest and GetRequestObjects."""

  def testMakeMonorailRequest(self):
    mr = testing_helpers.MakeMonorailRequest(
        path='/foo?key1=2&key2=a%20string&key3')

    # Non-integer or absent values come back as None unless a default is given.
    self.assertEqual(None, mr.GetIntParam('foo'))
    self.assertEqual(2, mr.GetIntParam('key1'))
    self.assertEqual(None, mr.GetIntParam('key2'))
    self.assertEqual(None, mr.GetIntParam('key3'))
    self.assertEqual(3, mr.GetIntParam('key2', default_value=3))
    self.assertEqual(3, mr.GetIntParam('foo', default_value=3))

  def testGetRequestObjectsBasics(self):
    request, mr = testing_helpers.GetRequestObjects(
        path='/foo/bar/wee?sna=foo',
        params={'ya': 'hoo'}, method='POST')

    # supplied as part of the url
    self.assertEqual('foo', mr.GetParam('sna'))

    # supplied as a param
    self.assertEqual('hoo', mr.GetParam('ya'))

    # default Host header
    self.assertEqual('127.0.0.1', request.host)

  def testGetRequestObjectsHeaders(self):
    # with some headers
    request, _mr = testing_helpers.GetRequestObjects(
        headers={'Accept-Language': 'en', 'Host': 'pickledsheep.com'},
        path='/foo/bar/wee?sna=foo')

    # Host header supplied above overrides the default ('127.0.0.1').
    self.assertEqual('pickledsheep.com', request.host)

    # user specified headers
    self.assertEqual('en', request.headers['Accept-Language'])

  def testGetRequestObjectsUserInfo(self):
    user_id = '123'

    _request, mr = testing_helpers.GetRequestObjects(
        user_info={'user_id': user_id})

    self.assertEqual(user_id, mr.auth.user_id)
+
+
class BlankTest(unittest.TestCase):
  """Tests for the testing_helpers.Blank helper class."""

  def testBlank(self):
    obj = testing_helpers.Blank(
        foo='foo',
        bar=123,
        inc=lambda x: x + 1)

    # Named args become attributes; a lambda acts as a method.
    self.assertEqual('foo', obj.foo)
    self.assertEqual(123, obj.bar)
    self.assertEqual(5, obj.inc(4))


if __name__ == '__main__':
  unittest.main()
diff --git a/appengine/monorail/testing/testing_helpers.py b/appengine/monorail/testing/testing_helpers.py
new file mode 100644
index 0000000..d314294
--- /dev/null
+++ b/appengine/monorail/testing/testing_helpers.py
@@ -0,0 +1,108 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helpers for testing."""
+
+import email
+
+from framework import profiler
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+import webapp2
+
+DEFAULT_HOST = '127.0.0.1'
+
+
+MINIMAL_HEADER_LINES = [
+ ('From', 'user@example.com'),
+ ('To', 'proj@monorail.example.com'),
+ ('Cc', 'ningerso@chromium.org'),
+ ('Subject', 'Issue 123 in proj: broken link'),
+ ]
+
+# Add one more (long) line for In-Reply-To
+HEADER_LINES = MINIMAL_HEADER_LINES + [
+ ('In-Reply-To', '<0=969704940193871313=13442892928193434663='
+ 'proj@monorail.example.com>'),
+ ]
+
+
def MakeMessage(header_list, body):
  """Convenience function to make an email.message.Message.

  Args:
    header_list: iterable of (header_name, value) pairs.
    body: string payload of the message.
  """
  msg = email.message.Message()
  for header_name, header_value in header_list:
    msg[header_name] = header_value
  msg.set_payload(body)
  return msg
+
+
def MakeMonorailRequest(*args, **kwargs):
  """Return only the MonorailRequest half of GetRequestObjects()."""
  unused_request, mr = GetRequestObjects(*args, **kwargs)
  return mr
+
+
def GetRequestObjects(
    headers=None, path='/', params=None, user_info=None, project=None,
    method='GET', perms=None, services=None):
  """Make fake request and MonorailRequest objects for testing.

  The 'Host' header defaults to '127.0.0.1' unless one is supplied in the
  headers dict.

  Args:
    headers: Dict of HTTP header strings.  The caller's dict is not mutated.
    path: Path part of the URL in the request.
    params: Dict of query-string parameters.
    user_info: Dict of user attributes to set on a MonorailRequest object.
        For example, "user_id: 5" causes self.auth.user_id=5.
    project: optional Project object for the current request.
    method: 'GET' or 'POST'.
    perms: PermissionSet to use for this request.
    services: Connections to backends.

  Returns:
    A tuple of (http Request, monorailrequest.MonorailRequest()).
  """
  # Copy the headers so the 'Host' default never mutates the caller's dict.
  headers = dict(headers or {})
  params = params or {}

  headers.setdefault('Host', DEFAULT_HOST)
  post_items = params if method == 'POST' else None

  if not services:
    services = service_manager.Services(
        project=fake.ProjectService(),
        user=fake.UserService(),
        usergroup=fake.UserGroupService())
    services.project.TestAddProject('proj')

  request = webapp2.Request.blank(path, headers=headers, POST=post_items)
  mr = fake.MonorailRequest(
      user_info=user_info, project=project, perms=perms, params=params)
  mr.ParseRequest(
      request, services, profiler.Profiler(), do_user_lookups=False)
  mr.auth.user_pb = user_pb2.MakeUser()

  return request, mr
+
+
class Blank(object):
  """Simple class that assigns all named args to attributes.

  Tip: supply a lambda to define a method.
  """

  def __init__(self, **kwargs):
    vars(self).update(kwargs)

  def __repr__(self):
    return '%s(%s)' % (self.__class__.__name__, str(vars(self)))

  def __eq__(self, other):
    if other is None:
      return False
    return vars(self) == vars(other)

  def __ne__(self, other):
    # Python 2 does not derive != from ==, so define __ne__ explicitly to
    # keep the two operators consistent.
    return not self.__eq__(other)
diff --git a/appengine/monorail/testing_utils b/appengine/monorail/testing_utils
new file mode 120000
index 0000000..83b5d32
--- /dev/null
+++ b/appengine/monorail/testing_utils
@@ -0,0 +1 @@
+../../../infra/appengine_module/testing_utils/
\ No newline at end of file
diff --git a/appengine/monorail/third_party/__init__.py b/appengine/monorail/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/third_party/__init__.py
diff --git a/appengine/monorail/third_party/apiclient b/appengine/monorail/third_party/apiclient
new file mode 120000
index 0000000..dc34fc0
--- /dev/null
+++ b/appengine/monorail/third_party/apiclient
@@ -0,0 +1 @@
+../../third_party/google-api-python-client/apiclient
\ No newline at end of file
diff --git a/appengine/monorail/third_party/cloudstorage b/appengine/monorail/third_party/cloudstorage
new file mode 120000
index 0000000..2f7613d
--- /dev/null
+++ b/appengine/monorail/third_party/cloudstorage
@@ -0,0 +1 @@
+../../third_party/cloudstorage/python/src/cloudstorage
\ No newline at end of file
diff --git a/appengine/monorail/third_party/ezt.README b/appengine/monorail/third_party/ezt.README
new file mode 100644
index 0000000..07ef777
--- /dev/null
+++ b/appengine/monorail/third_party/ezt.README
@@ -0,0 +1,8 @@
+The license (BSD) is contained within the file ezt.py.
+
+This copy was fetched with the following command:
+
+$ svn cat https://ezt.googlecode.com/svn/trunk/ezt.py > ezt.py
+
+For documentation, see:
+http://code.google.com/p/ezt/
diff --git a/appengine/monorail/third_party/ezt.py b/appengine/monorail/third_party/ezt.py
new file mode 100644
index 0000000..e55c988
--- /dev/null
+++ b/appengine/monorail/third_party/ezt.py
@@ -0,0 +1,660 @@
+#!/usr/bin/env python
+"""ezt.py -- EaZy Templating
+
+For documentation, please see: http://code.google.com/p/ezt/wiki/Syntax
+"""
+#
+# Copyright (C) 2001-2011 Greg Stein. All Rights Reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+#
+# This software is maintained by Greg and is available at:
+# http://code.google.com/p/ezt/
+#
+
+__author__ = 'Greg Stein'
+__version__ = '1.0'
+__license__ = 'BSD'
+
+import re
+from types import IntType, FloatType, LongType
+import os
+import urllib
+import StringIO
+
+#
+# Formatting types
+#
+FORMAT_RAW = 'raw'
+FORMAT_HTML = 'html'
+FORMAT_XML = 'xml'
+FORMAT_JS = 'js'
+FORMAT_URL = 'url'
+
+#
+# This regular expression matches four alternatives:
+# expr: NEWLINE | DIRECTIVE | BRACKET | COMMENT
+# DIRECTIVE: '[' ITEM (whitespace ARG)* ']'
+# ITEM: STRING | NAME
+# ARG: STRING | NAME | NUMBER
+# STRING: '"' (not-slash-or-dquote | '\' anychar)* '"'
+# NAME: (alpha | '_') (alphanum | '_' | '-' | '.')*
+# NUMBER: digit+
+# BRACKET: '[[]'
+# COMMENT: '[#' not-rbracket* ']'
+#
+# Note: the above BNF is a bit loose around ITEM/ARG/NAME/NUMBER. The
+# important point is that the first value in a directive must
+# start with '_' or an alpha character (no digits). This greatly
+# helps to avoid simple errors like '[0]' in templates.
+#
+# When used with the split() method, the return value will be composed of
+# non-matching text and the three paren groups (NEWLINE, DIRECTIVE and
+# BRACKET). Since the COMMENT matches are not placed into a group, they are
+# considered a "splitting" value and simply dropped.
+#
+_item = r'(?:"(?:[^\\"]|\\.)*"|[A-Za-z_][-\w.]*)'
+_arg = r'(?:"(?:[^\\"]|\\.)*"|[-\w.]+)'
+_re_parse = re.compile(r'(\r?\n)|\[(%s(?: +%s)*)\]|(\[\[\])|\[#[^\]]*\]' %
+ (_item, _arg))
+
+_re_args = re.compile(r'"(?:[^\\"]|\\.)*"|[-\w.]+')
+
+# block commands and their argument counts
+_block_cmd_specs = { 'if-index':2, 'for':1, 'is':2, 'define':1, 'format':1 }
+_block_cmds = _block_cmd_specs.keys()
+
+# two regular expressions for compressing whitespace. the first is used to
+# compress any whitespace including a newline into a single newline. the
+# second regex is used to compress runs of whitespace into a single space.
+_re_newline = re.compile('[ \t\r\f\v]*\n\\s*')
+_re_whitespace = re.compile(r'\s\s+')
+
+# this regex is used to substitute arguments into a value. we split the value,
+# replace the relevant pieces, and then put it all back together. splitting
+# will produce a list of: TEXT ( splitter TEXT )*. splitter will be '%' or
+# an integer.
+_re_subst = re.compile('%(%|[0-9]+)')
+
+class Template:
+
+ def __init__(self, fname=None, compress_whitespace=1,
+ base_format=FORMAT_RAW):
+ self.compress_whitespace = compress_whitespace
+ if fname:
+ self.parse_file(fname, base_format)
+
+ def parse_file(self, fname, base_format=FORMAT_RAW):
+ "fname -> a string object with pathname of file containing an EZT template."
+
+ self.parse(_FileReader(fname), base_format)
+
+ def parse(self, text_or_reader, base_format=FORMAT_RAW):
+ """Parse the template specified by text_or_reader.
+
+ The argument should be a string containing the template, or it should
+ specify a subclass of ezt.Reader which can read templates. The base
+ format for printing values is given by base_format.
+ """
+ if not isinstance(text_or_reader, Reader):
+ # assume the argument is a plain text string
+ text_or_reader = _TextReader(text_or_reader)
+
+ self.program = self._parse(text_or_reader,
+ base_printer=_parse_format(base_format))
+
+ def generate(self, fp, data):
+ if hasattr(data, '__getitem__') or callable(getattr(data, 'keys', None)):
+ # a dictionary-like object was passed. convert it to an
+ # attribute-based object.
+ class _data_ob:
+ def __init__(self, d):
+ vars(self).update(d)
+ data = _data_ob(data)
+
+ ctx = _context()
+ ctx.data = data
+ ctx.for_index = { }
+ ctx.defines = { }
+ self._execute(self.program, fp, ctx)
+
+ def _parse(self, reader, for_names=None, file_args=(), base_printer=None):
+ """text -> string object containing the template.
+
+ This is a private helper function doing the real work for method parse.
+ It returns the parsed template as a 'program'. This program is a sequence
+ made out of strings or (function, argument) 2-tuples.
+
+ Note: comment directives [# ...] are automatically dropped by _re_parse.
+ """
+
+ filename = reader.filename()
+ # parse the template program into: (TEXT NEWLINE DIRECTIVE BRACKET)* TEXT
+ parts = _re_parse.split(reader.text)
+
+ program = [ ]
+ stack = [ ]
+ if not for_names:
+ for_names = [ ]
+
+ if base_printer is None:
+ base_printer = ()
+ printers = [ base_printer ]
+
+ one_newline_copied = False
+ line_number = 1
+ for i in range(len(parts)):
+ piece = parts[i]
+ which = i % 4 # discriminate between: TEXT NEWLINE DIRECTIVE BRACKET
+ if which == 0:
+ # TEXT. append if non-empty.
+ if piece:
+ if self.compress_whitespace:
+ piece = _re_whitespace.sub(' ', piece)
+ program.append(piece)
+ one_newline_copied = False
+ elif which == 1:
+ # NEWLINE. append unless compress_whitespace requested
+ if piece:
+ line_number += 1
+ if self.compress_whitespace:
+ if not one_newline_copied:
+ program.append('\n')
+ one_newline_copied = True
+ else:
+ program.append(piece)
+ elif which == 3:
+ # BRACKET directive. append '[' if present.
+ if piece:
+ program.append('[')
+ one_newline_copied = False
+ elif piece:
+ # DIRECTIVE is present.
+ one_newline_copied = False
+ args = _re_args.findall(piece)
+ cmd = args[0]
+ if cmd == 'else':
+ if len(args) > 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ ### check: don't allow for 'for' cmd
+ idx = stack[-1][1]
+ true_section = program[idx:]
+ del program[idx:]
+ stack[-1][3] = true_section
+ elif cmd == 'end':
+ if len(args) > 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ # note: true-section may be None
+ try:
+ cmd, idx, args, true_section, start_line_number = stack.pop()
+ except IndexError:
+ raise UnmatchedEndError(None, filename, line_number)
+ else_section = program[idx:]
+ if cmd == 'format':
+ printers.pop()
+ else:
+ func = getattr(self, '_cmd_' + re.sub('-', '_', cmd))
+ program[idx:] = [ (func, (args, true_section, else_section),
+ filename, line_number) ]
+ if cmd == 'for':
+ for_names.pop()
+ elif cmd in _block_cmds:
+ if len(args) > _block_cmd_specs[cmd] + 1:
+ raise ArgCountSyntaxError(str(args[1:]), filename, line_number)
+ ### this assumes arg1 is always a ref unless cmd is 'define'
+ if cmd != 'define':
+ args[1] = _prepare_ref(args[1], for_names, file_args)
+
+ # handle arg2 for the 'is' command
+ if cmd == 'is':
+ args[2] = _prepare_ref(args[2], for_names, file_args)
+ elif cmd == 'for':
+ for_names.append(args[1][0]) # append the refname
+ elif cmd == 'format':
+ if args[1][0]:
+ raise BadFormatConstantError(str(args[1:]), filename, line_number)
+ printers.append(_parse_format(args[1][1]))
+
+ # remember the cmd, current pos, args, and a section placeholder
+ stack.append([cmd, len(program), args[1:], None, line_number])
+ elif cmd == 'include' or cmd == 'insertfile':
+ is_insertfile = (cmd == 'insertfile')
+ # extra arguments are meaningless when using insertfile
+ if is_insertfile and len(args) != 2:
+ raise ArgCountSyntaxError(str(args), filename, line_number)
+ if args[1][0] == '"':
+ include_filename = args[1][1:-1]
+ if is_insertfile:
+ program.append(reader.read_other(include_filename).text)
+ else:
+ f_args = [ ]
+ for arg in args[2:]:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ program.extend(self._parse(reader.read_other(include_filename),
+ for_names, f_args, printers[-1]))
+ else:
+ if len(args) != 2:
+ raise ArgCountSyntaxError(str(args), filename, line_number)
+ if is_insertfile:
+ cmd = self._cmd_insertfile
+ else:
+ cmd = self._cmd_include
+ program.append((cmd,
+ (_prepare_ref(args[1], for_names, file_args),
+ reader, printers[-1]), filename, line_number))
+ elif cmd == 'if-any':
+ f_args = [ ]
+ for arg in args[1:]:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ stack.append(['if-any', len(program), f_args, None, line_number])
+ else:
+ # implied PRINT command
+ if len(args) > 1:
+ f_args = [ ]
+ for arg in args:
+ f_args.append(_prepare_ref(arg, for_names, file_args))
+ program.append((self._cmd_subst,
+ (printers[-1], f_args[0], f_args[1:]),
+ filename, line_number))
+ else:
+ valref = _prepare_ref(args[0], for_names, file_args)
+ program.append((self._cmd_print, (printers[-1], valref),
+ filename, line_number))
+
+ if stack:
+ raise UnclosedBlocksError('Block opened at line %s' % stack[-1][4],
+ filename=filename)
+ return program
+
+ def _execute(self, program, fp, ctx):
+ """This private helper function takes a 'program' sequence as created
+ by the method '_parse' and executes it step by step. strings are written
+ to the file object 'fp' and functions are called.
+ """
+ for step in program:
+ if isinstance(step, basestring):
+ fp.write(step)
+ else:
+ method, method_args, filename, line_number = step
+ method(method_args, fp, ctx, filename, line_number)
+
+ def _cmd_print(self, (transforms, valref), fp, ctx, filename, line_number):
+ value = _get_value(valref, ctx, filename, line_number)
+ # if the value has a 'read' attribute, then it is a stream: copy it
+ if hasattr(value, 'read'):
+ while 1:
+ chunk = value.read(16384)
+ if not chunk:
+ break
+ for t in transforms:
+ chunk = t(chunk)
+ fp.write(chunk)
+ else:
+ for t in transforms:
+ value = t(value)
+ fp.write(value)
+
+ def _cmd_subst(self, (transforms, valref, args), fp, ctx, filename,
+ line_number):
+ fmt = _get_value(valref, ctx, filename, line_number)
+ parts = _re_subst.split(fmt)
+ for i in range(len(parts)):
+ piece = parts[i]
+ if i%2 == 1 and piece != '%':
+ idx = int(piece)
+ if idx < len(args):
+ piece = _get_value(args[idx], ctx, filename, line_number)
+ else:
+ piece = '<undef>'
+ for t in transforms:
+ piece = t(piece)
+ fp.write(piece)
+
+ def _cmd_include(self, (valref, reader, printer), fp, ctx, filename,
+ line_number):
+ fname = _get_value(valref, ctx, filename, line_number)
+ ### note: we don't have the set of for_names to pass into this parse.
+ ### I don't think there is anything to do but document it
+ self._execute(self._parse(reader.read_other(fname), base_printer=printer),
+ fp, ctx)
+
+ def _cmd_insertfile(self, (valref, reader, printer), fp, ctx, filename,
+ line_number):
+ fname = _get_value(valref, ctx, filename, line_number)
+ fp.write(reader.read_other(fname).text)
+
+ def _cmd_if_any(self, args, fp, ctx, filename, line_number):
+ "If any value is a non-empty string or non-empty list, then T else F."
+ (valrefs, t_section, f_section) = args
+ value = 0
+ for valref in valrefs:
+ if _get_value(valref, ctx, filename, line_number):
+ value = 1
+ break
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _cmd_if_index(self, args, fp, ctx, filename, line_number):
+ ((valref, value), t_section, f_section) = args
+ list, idx = ctx.for_index[valref[0]]
+ if value == 'even':
+ value = idx % 2 == 0
+ elif value == 'odd':
+ value = idx % 2 == 1
+ elif value == 'first':
+ value = idx == 0
+ elif value == 'last':
+ value = idx == len(list)-1
+ else:
+ value = idx == int(value)
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _cmd_is(self, args, fp, ctx, filename, line_number):
+ ((left_ref, right_ref), t_section, f_section) = args
+ right_value = _get_value(right_ref, ctx, filename, line_number)
+ left_value = _get_value(left_ref, ctx, filename, line_number)
+ value = left_value.lower() == right_value.lower()
+ self._do_if(value, t_section, f_section, fp, ctx)
+
+ def _do_if(self, value, t_section, f_section, fp, ctx):
+ if t_section is None:
+ t_section = f_section
+ f_section = None
+ if value:
+ section = t_section
+ else:
+ section = f_section
+ if section is not None:
+ self._execute(section, fp, ctx)
+
+ def _cmd_for(self, args, fp, ctx, filename, line_number):
+ ((valref,), unused, section) = args
+ list = _get_value(valref, ctx, filename, line_number)
+ refname = valref[0]
+ if isinstance(list, basestring):
+ raise NeedSequenceError(refname, filename, line_number)
+ ctx.for_index[refname] = idx = [ list, 0 ]
+ for item in list:
+ self._execute(section, fp, ctx)
+ idx[1] = idx[1] + 1
+ del ctx.for_index[refname]
+
+ def _cmd_define(self, args, fp, ctx, filename, line_number):
+ ((name,), unused, section) = args
+ valfp = StringIO.StringIO()
+ if section is not None:
+ self._execute(section, valfp, ctx)
+ ctx.defines[name] = valfp.getvalue()
+
+def boolean(value):
+ "Return a value suitable for [if-any bool_var] usage in a template."
+ if value:
+ return 'yes'
+ return None
+
+
+def _prepare_ref(refname, for_names, file_args):
+ """refname -> a string containing a dotted identifier. example:"foo.bar.bang"
+ for_names -> a list of active for sequences.
+
+ Returns a `value reference', a 3-tuple made out of (refname, start, rest),
+ for fast access later.
+ """
+ # is the reference a string constant?
+ if refname[0] == '"':
+ return None, refname[1:-1], None
+
+ parts = refname.split('.')
+ start = parts[0]
+ rest = parts[1:]
+
+ # if this is an include-argument, then just return the prepared ref
+ if start[:3] == 'arg':
+ try:
+ idx = int(start[3:])
+ except ValueError:
+ pass
+ else:
+ if idx < len(file_args):
+ orig_refname, start, more_rest = file_args[idx]
+ if more_rest is None:
+ # the include-argument was a string constant
+ return None, start, None
+
+ # prepend the argument's "rest" for our further processing
+ rest[:0] = more_rest
+
+ # rewrite the refname to ensure that any potential 'for' processing
+ # has the correct name
+ ### this can make it hard for debugging include files since we lose
+ ### the 'argNNN' names
+ if not rest:
+ return start, start, [ ]
+ refname = start + '.' + '.'.join(rest)
+
+ if for_names:
+ # From last to first part, check if this reference is part of a for loop
+ for i in range(len(parts), 0, -1):
+ name = '.'.join(parts[:i])
+ if name in for_names:
+ return refname, name, parts[i:]
+
+ return refname, start, rest
+
+def _get_value((refname, start, rest), ctx, filename, line_number):
+ """(refname, start, rest) -> a prepared `value reference' (see above).
+ ctx -> an execution context instance.
+
+ Does a name space lookup within the template name space. Active
+ for blocks take precedence over data dictionary members with the
+ same name.
+ """
+ if rest is None:
+ # it was a string constant
+ return start
+
+ # get the starting object
+ if ctx.for_index.has_key(start):
+ list, idx = ctx.for_index[start]
+ ob = list[idx]
+ elif ctx.defines.has_key(start):
+ ob = ctx.defines[start]
+ elif hasattr(ctx.data, start):
+ ob = getattr(ctx.data, start)
+ else:
+ raise UnknownReference(refname, filename, line_number)
+
+ # walk the rest of the dotted reference
+ for attr in rest:
+ try:
+ if isinstance(ob, dict):
+ ob = ob[attr]
+ else:
+ ob = getattr(ob, attr)
+ except (AttributeError, KeyError):
+ raise UnknownReference(refname, filename, line_number)
+
+ # make sure we return a string instead of some various Python types
+ if isinstance(ob, (IntType, FloatType, LongType)):
+ return str(ob)
+ if ob is None:
+ return ''
+
+ # string or a sequence
+ return ob
+
+def _replace(s, replace_map):
+ for orig, repl in replace_map:
+ s = s.replace(orig, repl)
+ return s
+
+REPLACE_JS_MAP = (
+ ('\\', r'\\'), ('\t', r'\t'), ('\n', r'\n'), ('\r', r'\r'),
+ ('"', r'\x22'), ('\'', r'\x27'), ('&', r'\x26'),
+ ('<', r'\x3c'), ('>', r'\x3e'), ('=', r'\x3d'),
+)
+
+# Various unicode whitespace
+REPLACE_JS_UNICODE_MAP = (
+ (u'\u0085', r'\u0085'), (u'\u2028', r'\u2028'), (u'\u2029', r'\u2029'),
+)
+
+# Why not cgi.escape? It doesn't do single quotes which are occasionally
+# used to contain HTML attributes and event handler definitions (unfortunately)
+REPLACE_HTML_MAP = (
+ ('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;'),
+ ('"', '&quot;'), ('\'', '&#39;'),
+)
+
+def _js_escape(s):
+ s = _replace(s, REPLACE_JS_MAP)
+ ### perhaps attempt to coerce the string to unicode and then replace?
+ if isinstance(s, unicode):
+ s = _replace(s, REPLACE_JS_UNICODE_MAP)
+ return s
+
+def _html_escape(s):
+ return _replace(s, REPLACE_HTML_MAP)
+
+def _url_escape(s):
+ ### quote_plus barfs on non-ASCII characters. According to
+ ### http://www.w3.org/International/O-URL-code.html URIs should be
+ ### UTF-8 encoded first.
+ if isinstance(s, unicode):
+ s = s.encode('utf8')
+ return urllib.quote_plus(s)
+
+FORMATTERS = {
+ FORMAT_RAW: None,
+ FORMAT_HTML: _html_escape,
+ FORMAT_XML: _html_escape, ### use the same quoting as HTML for now
+ FORMAT_JS: _js_escape,
+ FORMAT_URL: _url_escape,
+}
+
+def _parse_format(format_string=FORMAT_RAW):
+ format_funcs = []
+ try:
+ for fspec in format_string.split(','):
+ format_func = FORMATTERS[fspec]
+ if format_func is not None:
+ format_funcs.append(format_func)
+ except KeyError:
+ raise UnknownFormatConstantError(format_string)
+ return format_funcs
+
+class _context:
+ """A container for the execution context"""
+
+
+class Reader:
+ """Abstract class which allows EZT to detect Reader objects."""
+ def filename(self):
+ return '(%s does not provide filename() method)' % repr(self)
+
+class _FileReader(Reader):
+ """Reads templates from the filesystem."""
+ def __init__(self, fname):
+ self.text = open(fname, 'rb').read()
+ self._dir = os.path.dirname(fname)
+ self.fname = fname
+ def read_other(self, relative):
+ return _FileReader(os.path.join(self._dir, relative))
+ def filename(self):
+ return self.fname
+
+class _TextReader(Reader):
+ """'Reads' a template from provided text."""
+ def __init__(self, text):
+ self.text = text
+ def read_other(self, relative):
+ raise BaseUnavailableError()
+ def filename(self):
+ return '(text)'
+
+
+class EZTException(Exception):
+ """Parent class of all EZT exceptions."""
+ def __init__(self, message=None, filename=None, line_number=None):
+ self.message = message
+ self.filename = filename
+ self.line_number = line_number
+ def __str__(self):
+ ret = []
+ if self.message is not None:
+ ret.append(self.message)
+ if self.filename is not None:
+ ret.append('in file ' + str(self.filename))
+ if self.line_number is not None:
+ ret.append('at line ' + str(self.line_number))
+ return ' '.join(ret)
+
+class ArgCountSyntaxError(EZTException):
+ """A bracket directive got the wrong number of arguments."""
+
+class UnknownReference(EZTException):
+ """The template references an object not contained in the data dictionary."""
+
+class NeedSequenceError(EZTException):
+ """The object dereferenced by the template is no sequence (tuple or list)."""
+
+class UnclosedBlocksError(EZTException):
+ """This error may be simply a missing [end]."""
+
+class UnmatchedEndError(EZTException):
+ """This error may be caused by a misspelled if directive."""
+
+class BaseUnavailableError(EZTException):
+ """Base location is unavailable, which disables includes."""
+
+class BadFormatConstantError(EZTException):
+ """Format specifiers must be string constants."""
+
+class UnknownFormatConstantError(EZTException):
+ """The format specifier is an unknown value."""
+
+
+# --- standard test environment ---
+def test_parse():
+ assert _re_parse.split('[a]') == ['', '[a]', None, '']
+ assert _re_parse.split('[a] [b]') == \
+ ['', '[a]', None, ' ', '[b]', None, '']
+ assert _re_parse.split('[a c] [b]') == \
+ ['', '[a c]', None, ' ', '[b]', None, '']
+ assert _re_parse.split('x [a] y [b] z') == \
+ ['x ', '[a]', None, ' y ', '[b]', None, ' z']
+ assert _re_parse.split('[a "b" c "d"]') == \
+ ['', '[a "b" c "d"]', None, '']
+ assert _re_parse.split(r'["a \"b[foo]" c.d f]') == \
+ ['', '["a \\"b[foo]" c.d f]', None, '']
+
+def _test(argv):
+ import doctest, ezt
+ verbose = "-v" in argv
+ return doctest.testmod(ezt, verbose=verbose)
+
+if __name__ == "__main__":
+ # invoke unit test for this module:
+ import sys
+ sys.exit(_test(sys.argv)[0])
diff --git a/appengine/monorail/third_party/googleapiclient b/appengine/monorail/third_party/googleapiclient
new file mode 120000
index 0000000..3015f99
--- /dev/null
+++ b/appengine/monorail/third_party/googleapiclient
@@ -0,0 +1 @@
+../../third_party/google-api-python-client/googleapiclient
\ No newline at end of file
diff --git a/appengine/monorail/third_party/httplib2 b/appengine/monorail/third_party/httplib2
new file mode 120000
index 0000000..15b666b
--- /dev/null
+++ b/appengine/monorail/third_party/httplib2
@@ -0,0 +1 @@
+../../third_party/httplib2/python2/httplib2
\ No newline at end of file
diff --git a/appengine/monorail/third_party/markdown.py b/appengine/monorail/third_party/markdown.py
new file mode 100644
index 0000000..cd415dc
--- /dev/null
+++ b/appengine/monorail/third_party/markdown.py
@@ -0,0 +1,677 @@
+#!/usr/bin/python
+import re, md5, sys, string
+
+"""markdown.py: A Markdown-styled-text to HTML converter in Python.
+
+Usage:
+ ./markdown.py textfile.markdown
+
+Calling:
+ import markdown
+ somehtml = markdown.markdown(sometext)
+
+For other versions of markdown, see:
+ http://www.freewisdom.org/projects/python-markdown/
+ http://en.wikipedia.org/wiki/Markdown
+"""
+
+__version__ = '1.0.1-2' # port of 1.0.1
+__license__ = "GNU GPL 2"
+__author__ = [
+ 'John Gruber <http://daringfireball.net/>',
+ 'Tollef Fog Heen <tfheen@err.no>',
+ 'Aaron Swartz <me@aaronsw.com>'
+]
+
+def htmlquote(text):
+ """Encodes `text` for raw use in HTML."""
+ text = text.replace("&", "&amp;") # Must be done first!
+ text = text.replace("<", "&lt;")
+ text = text.replace(">", "&gt;")
+ text = text.replace("'", "&#39;")
+ text = text.replace('"', "&quot;")
+ return text
+
+def semirandom(seed):
+ x = 0
+ for c in md5.new(seed).digest(): x += ord(c)
+ return x / (255*16.)
+
+class _Markdown:
+ emptyelt = " />"
+ tabwidth = 4
+
+ escapechars = '\\`*_{}[]()>#+-.!'
+ escapetable = {}
+ for char in escapechars:
+ escapetable[char] = md5.new(char).hexdigest()
+
+ r_multiline = re.compile("\n{2,}")
+ r_stripspace = re.compile(r"^[ \t]+$", re.MULTILINE)
+ def parse(self, text):
+ self.urls = {}
+ self.titles = {}
+ self.html_blocks = {}
+ self.list_level = 0
+
+ text = text.replace("\r\n", "\n")
+ text = text.replace("\r", "\n")
+ text += "\n\n"
+ text = self._Detab(text)
+ text = self.r_stripspace.sub("", text)
+ text = self._HashHTMLBlocks(text)
+ text = self._StripLinkDefinitions(text)
+ text = self._RunBlockGamut(text)
+ text = self._UnescapeSpecialChars(text)
+ return text
+
+ r_StripLinkDefinitions = re.compile(r"""
+ ^[ ]{0,%d}\[(.+)\]: # id = $1
+ [ \t]*\n?[ \t]*
+ <?(\S+?)>? # url = $2
+ [ \t]*\n?[ \t]*
+ (?:
+ (?<=\s) # lookbehind for whitespace
+ [\"\(] # " is backlashed so it colorizes our code right
+ (.+?) # title = $3
+ [\"\)]
+ [ \t]*
+ )? # title is optional
+ (?:\n+|\Z)
+ """ % (tabwidth-1), re.MULTILINE|re.VERBOSE)
+ def _StripLinkDefinitions(self, text):
+ def replacefunc(matchobj):
+ (t1, t2, t3) = matchobj.groups()
+ #@@ case sensitivity?
+ self.urls[t1.lower()] = self._EncodeAmpsAndAngles(t2)
+ if t3 is not None:
+ self.titles[t1.lower()] = t3.replace('"', '&quot;')
+ return ""
+
+ text = self.r_StripLinkDefinitions.sub(replacefunc, text)
+ return text
+
+ blocktagsb = r"p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|math"
+ blocktagsa = blocktagsb + "|ins|del"
+
+ r_HashHTMLBlocks1 = re.compile(r"""
+ ( # save in $1
+ ^ # start of line (with /m)
+ <(%s) # start tag = $2
+ \b # word break
+ (.*\n)*? # any number of lines, minimally matching
+ </\2> # the matching end tag
+ [ \t]* # trailing spaces/tabs
+ (?=\n+|$) # followed by a newline or end of document
+ )
+ """ % blocktagsa, re.MULTILINE | re.VERBOSE)
+
+ r_HashHTMLBlocks2 = re.compile(r"""
+ ( # save in $1
+ ^ # start of line (with /m)
+ <(%s) # start tag = $2
+ \b # word break
+ (.*\n)*? # any number of lines, minimally matching
+ .*</\2> # the matching end tag
+ [ \t]* # trailing spaces/tabs
+ (?=\n+|\Z) # followed by a newline or end of document
+ )
+ """ % blocktagsb, re.MULTILINE | re.VERBOSE)
+
+ r_HashHR = re.compile(r"""
+ (?:
+ (?<=\n\n) # Starting after a blank line
+ | # or
+ \A\n? # the beginning of the doc
+ )
+ ( # save in $1
+ [ ]{0,%d}
+ <(hr) # start tag = $2
+ \b # word break
+ ([^<>])*? #
+ /?> # the matching end tag
+ [ \t]*
+ (?=\n{2,}|\Z)# followed by a blank line or end of document
+ )
+ """ % (tabwidth-1), re.VERBOSE)
+ r_HashComment = re.compile(r"""
+ (?:
+ (?<=\n\n) # Starting after a blank line
+ | # or
+ \A\n? # the beginning of the doc
+ )
+ ( # save in $1
+ [ ]{0,%d}
+ (?:
+ <!
+ (--.*?--\s*)+
+ >
+ )
+ [ \t]*
+ (?=\n{2,}|\Z)# followed by a blank line or end of document
+ )
+ """ % (tabwidth-1), re.VERBOSE)
+
+ def _HashHTMLBlocks(self, text):
+ def handler(m):
+ key = md5.new(m.group(1)).hexdigest()
+ self.html_blocks[key] = m.group(1)
+ return "\n\n%s\n\n" % key
+
+ text = self.r_HashHTMLBlocks1.sub(handler, text)
+ text = self.r_HashHTMLBlocks2.sub(handler, text)
+ oldtext = text
+ text = self.r_HashHR.sub(handler, text)
+ text = self.r_HashComment.sub(handler, text)
+ return text
+
+ #@@@ wrong!
+ r_hr1 = re.compile(r'^[ ]{0,2}([ ]?\*[ ]?){3,}[ \t]*$', re.M)
+ r_hr2 = re.compile(r'^[ ]{0,2}([ ]?-[ ]?){3,}[ \t]*$', re.M)
+ r_hr3 = re.compile(r'^[ ]{0,2}([ ]?_[ ]?){3,}[ \t]*$', re.M)
+
+ def _RunBlockGamut(self, text):
+ text = self._DoHeaders(text)
+ for x in [self.r_hr1, self.r_hr2, self.r_hr3]:
+ text = x.sub("\n<hr%s\n" % self.emptyelt, text);
+ text = self._DoLists(text)
+ text = self._DoCodeBlocks(text)
+ text = self._DoBlockQuotes(text)
+
+ # We did this in parse()
+ # to escape the source
+ # now it's stuff _we_ made
+ # so we don't wrap it in <p>s.
+ text = self._HashHTMLBlocks(text)
+ text = self._FormParagraphs(text)
+ return text
+
+ r_NewLine = re.compile(" {2,}\n")
+ def _RunSpanGamut(self, text):
+ text = self._DoCodeSpans(text)
+ text = self._EscapeSpecialChars(text)
+ text = self._DoImages(text)
+ text = self._DoAnchors(text)
+ text = self._DoAutoLinks(text)
+ text = self._EncodeAmpsAndAngles(text)
+ text = self._DoItalicsAndBold(text)
+ text = self.r_NewLine.sub(" <br%s\n" % self.emptyelt, text)
+ return text
+
+ def _EscapeSpecialChars(self, text):
+ tokens = self._TokenizeHTML(text)
+ text = ""
+ for cur_token in tokens:
+ if cur_token[0] == "tag":
+ cur_token[1] = cur_token[1].replace('*', self.escapetable["*"])
+ cur_token[1] = cur_token[1].replace('_', self.escapetable["_"])
+ text += cur_token[1]
+ else:
+ text += self._EncodeBackslashEscapes(cur_token[1])
+ return text
+
+ r_DoAnchors1 = re.compile(
+ r""" ( # wrap whole match in $1
+ \[
+ (.*?) # link text = $2
+ # [for bracket nesting, see below]
+ \]
+
+ [ ]? # one optional space
+ (?:\n[ ]*)? # one optional newline followed by spaces
+
+ \[
+ (.*?) # id = $3
+ \]
+ )
+ """, re.S|re.VERBOSE)
+ r_DoAnchors2 = re.compile(
+ r""" ( # wrap whole match in $1
+ \[
+ (.*?) # link text = $2
+ \]
+ \( # literal paren
+ [ \t]*
+ <?(.+?)>? # href = $3
+ [ \t]*
+ ( # $4
+ ([\'\"]) # quote char = $5
+ (.*?) # Title = $6
+ \5 # matching quote
+ )? # title is optional
+ \)
+ )
+ """, re.S|re.VERBOSE)
+ def _DoAnchors(self, text):
+ # We here don't do the same as the perl version, as python's regex
+ # engine gives us no way to match brackets.
+
+ def handler1(m):
+ whole_match = m.group(1)
+ link_text = m.group(2)
+ link_id = m.group(3).lower()
+ if not link_id: link_id = link_text.lower()
+ title = self.titles.get(link_id, None)
+
+
+ if self.urls.has_key(link_id):
+ url = self.urls[link_id]
+ url = url.replace("*", self.escapetable["*"])
+ url = url.replace("_", self.escapetable["_"])
+ res = '<a href="%s"' % htmlquote(url)
+
+ if title:
+ title = title.replace("*", self.escapetable["*"])
+ title = title.replace("_", self.escapetable["_"])
+ res += ' title="%s"' % htmlquote(title)
+ res += ">%s</a>" % htmlquote(link_text)
+ else:
+ res = whole_match
+ return res
+
+ def handler2(m):
+ whole_match = m.group(1)
+ link_text = m.group(2)
+ url = m.group(3)
+ title = m.group(6)
+
+ url = url.replace("*", self.escapetable["*"])
+ url = url.replace("_", self.escapetable["_"])
+ res = '''<a href="%s"''' % htmlquote(url)
+
+ if title:
+ title = title.replace('"', '&quot;')
+ title = title.replace("*", self.escapetable["*"])
+ title = title.replace("_", self.escapetable["_"])
+ res += ' title="%s"' % htmlquote(title)
+ res += ">%s</a>" % htmlquote(link_text)
+ return res
+
+ text = self.r_DoAnchors1.sub(handler1, text)
+ text = self.r_DoAnchors2.sub(handler2, text)
+ return text
+
+ r_DoImages1 = re.compile(
+ r""" ( # wrap whole match in $1
+ !\[
+ (.*?) # alt text = $2
+ \]
+
+ [ ]? # one optional space
+ (?:\n[ ]*)? # one optional newline followed by spaces
+
+ \[
+ (.*?) # id = $3
+ \]
+
+ )
+ """, re.VERBOSE|re.S)
+
+ r_DoImages2 = re.compile(
+ r""" ( # wrap whole match in $1
+ !\[
+ (.*?) # alt text = $2
+ \]
+ \( # literal paren
+ [ \t]*
+ <?(\S+?)>? # src url = $3
+ [ \t]*
+ ( # $4
+ ([\'\"]) # quote char = $5
+ (.*?) # title = $6
+ \5 # matching quote
+ [ \t]*
+ )? # title is optional
+ \)
+ )
+ """, re.VERBOSE|re.S)
+
+ def _DoImages(self, text):
+ def handler1(m):
+ whole_match = m.group(1)
+ alt_text = m.group(2)
+ link_id = m.group(3).lower()
+
+ if not link_id:
+ link_id = alt_text.lower()
+
+ alt_text = alt_text.replace('"', '&quot;')
+ if self.urls.has_key(link_id):
+ url = self.urls[link_id]
+ url = url.replace("*", self.escapetable["*"])
+ url = url.replace("_", self.escapetable["_"])
+ res = '''<img src="%s" alt="%s"''' % (htmlquote(url), htmlquote(alt_text))
+ if self.titles.has_key(link_id):
+ title = self.titles[link_id]
+ title = title.replace("*", self.escapetable["*"])
+ title = title.replace("_", self.escapetable["_"])
+ res += ' title="%s"' % htmlquote(title)
+ res += self.emptyelt
+ else:
+ res = whole_match
+ return res
+
+ def handler2(m):
+ whole_match = m.group(1)
+ alt_text = m.group(2)
+ url = m.group(3)
+ title = m.group(6) or ''
+
+ alt_text = alt_text.replace('"', '&quot;')
+ title = title.replace('"', '&quot;')
+ url = url.replace("*", self.escapetable["*"])
+ url = url.replace("_", self.escapetable["_"])
+ res = '<img src="%s" alt="%s"' % (htmlquote(url), htmlquote(alt_text))
+ if title is not None:
+ title = title.replace("*", self.escapetable["*"])
+ title = title.replace("_", self.escapetable["_"])
+ res += ' title="%s"' % htmlquote(title)
+ res += self.emptyelt
+ return res
+
+ text = self.r_DoImages1.sub(handler1, text)
+ text = self.r_DoImages2.sub(handler2, text)
+ return text
+
+ r_DoHeaders = re.compile(r"^(\#{1,6})[ \t]*(.+?)[ \t]*\#*\n+", re.VERBOSE|re.M)
+ def _DoHeaders(self, text):
+ # Convert setext-style headers (underlined with = or -) and atx-style
+ # headers (# prefixed, via r_DoHeaders) into <h1>..<h6> tags.
+ def findheader(text, c, n):
+ # Scan for a line made entirely of character c (the underline),
+ # preceded by a non-blank line (the header text) and followed by a
+ # blank line; wrap the header text in <hN> and drop the underline.
+ textl = text.split('\n')
+ for i in xrange(len(textl)):
+ # Guard against the list shrinking below (mutated inside the loop).
+ if i >= len(textl): continue
+ count = textl[i].strip().count(c)
+ # NOTE(review): textl[i+1] can raise IndexError when the underline is
+ # the last line of the input -- confirm callers always end with \n.
+ if count > 0 and count == len(textl[i].strip()) and textl[i+1].strip() == '' and textl[i-1].strip() != '':
+ # First removal drops the underline; second drops the (now
+ # shifted) blank line that followed it.
+ textl = textl[:i] + textl[i+1:]
+ textl[i-1] = '<h'+n+'>'+self._RunSpanGamut(textl[i-1])+'</h'+n+'>'
+ textl = textl[:i] + textl[i+1:]
+ text = '\n'.join(textl)
+ return text
+
+ def handler(m):
+ # atx header: the number of leading # characters sets the level.
+ level = len(m.group(1))
+ header = self._RunSpanGamut(m.group(2))
+ return "<h%s>%s</h%s>\n\n" % (level, header, level)
+
+ text = findheader(text, '=', '1')
+ text = findheader(text, '-', '2')
+ text = self.r_DoHeaders.sub(handler, text)
+ return text
+
+ rt_l = r"""
+ (
+ (
+ [ ]{0,%d}
+ ([*+-]|\d+[.])
+ [ \t]+
+ )
+ (?:.+?)
+ (
+ \Z
+ |
+ \n{2,}
+ (?=\S)
+ (?![ \t]* ([*+-]|\d+[.])[ \t]+)
+ )
+ )
+ """ % (tabwidth - 1)
+ r_DoLists = re.compile('^'+rt_l, re.M | re.VERBOSE | re.S)
+ r_DoListsTop = re.compile(
+ r'(?:\A\n?|(?<=\n\n))'+rt_l, re.M | re.VERBOSE | re.S)
+
+ def _DoLists(self, text):
+ # Convert Markdown list blocks into <ul>/<ol>. The marker character
+ # captured in group 3 decides the list type; list items themselves are
+ # handled by _ProcessListItems.
+ def handler(m):
+ list_type = "ol"
+ if m.group(3) in [ "*", "-", "+" ]:
+ list_type = "ul"
+ listn = m.group(1)
+ # Normalize paragraph breaks inside the list before item processing.
+ listn = self.r_multiline.sub("\n\n\n", listn)
+ res = self._ProcessListItems(listn)
+ res = "<%s>\n%s</%s>\n" % (list_type, res, list_type)
+ return res
+
+ # Nested lists (list_level > 0) use the looser pattern; top-level lists
+ # must be anchored at a paragraph boundary.
+ if self.list_level:
+ text = self.r_DoLists.sub(handler, text)
+ else:
+ text = self.r_DoListsTop.sub(handler, text)
+ return text
+
+ r_multiend = re.compile(r"\n{2,}\Z")
+ r_ProcessListItems = re.compile(r"""
+ (\n)? # leading line = $1
+ (^[ \t]*) # leading whitespace = $2
+ ([*+-]|\d+[.]) [ \t]+ # list marker = $3
+ ((?:.+?) # list item text = $4
+ (\n{1,2}))
+ (?= \n* (\Z | \2 ([*+-]|\d+[.]) [ \t]+))
+ """, re.VERBOSE | re.M | re.S)
+
+ def _ProcessListItems(self, text):
+ # Turn the body of a Markdown list into a sequence of <li> elements.
+ # list_level tracks recursion depth so _DoLists knows whether it is
+ # processing a nested list.
+ self.list_level += 1
+ # Trim trailing blank lines so the last item is not treated as loose.
+ text = self.r_multiend.sub("\n", text)
+
+ def handler(m):
+ item = m.group(4)
+ leading_line = m.group(1)
+ leading_space = m.group(2)  # captured for the lookahead; unused here
+
+ # A "loose" item (blank line before it, or internal paragraph break)
+ # gets full block processing; a tight item only recurses into lists.
+ if leading_line or self.r_multiline.search(item):
+ item = self._RunBlockGamut(self._Outdent(item))
+ else:
+ item = self._DoLists(self._Outdent(item))
+ if item[-1] == "\n": item = item[:-1] # chomp
+ item = self._RunSpanGamut(item)
+ return "<li>%s</li>\n" % item
+
+ text = self.r_ProcessListItems.sub(handler, text)
+ self.list_level -= 1
+ return text
+
+ r_DoCodeBlocks = re.compile(r"""
+ (?:\n\n|\A)
+ ( # $1 = the code block
+ (?:
+ (?:[ ]{%d} | \t) # Lines must start with a tab or equiv
+ .*\n+
+ )+
+ )
+ ((?=^[ ]{0,%d}\S)|\Z) # Lookahead for non-space/end of doc
+ """ % (tabwidth, tabwidth), re.M | re.VERBOSE)
+ def _DoCodeBlocks(self, text):
+ # Convert tab/space-indented blocks into <pre><code> blocks. The regex
+ # r_DoCodeBlocks (class-level) captures the indented run in group 1.
+ def handler(m):
+ codeblock = m.group(1)
+ # Strip one indent level, escape HTML-special chars, expand tabs.
+ codeblock = self._EncodeCode(self._Outdent(codeblock))
+ codeblock = self._Detab(codeblock)
+ # Trim leading newlines and all trailing whitespace.
+ codeblock = codeblock.lstrip("\n")
+ codeblock = codeblock.rstrip()
+ res = "\n\n<pre><code>%s\n</code></pre>\n\n" % codeblock
+ return res
+
+ text = self.r_DoCodeBlocks.sub(handler, text)
+ return text
+ r_DoCodeSpans = re.compile(r"""
+ (`+) # $1 = Opening run of `
+ (.+?) # $2 = The code block
+ (?<!`)
+ \1 # Matching closer
+ (?!`)
+ """, re.I|re.VERBOSE)
+ def _DoCodeSpans(self, text):
+ # Convert backtick-delimited spans (`code`) into <code> elements.
+ def handler(m):
+ c = m.group(2)
+ c = c.strip()
+ c = self._EncodeCode(c)
+ return "<code>%s</code>" % c
+
+ text = self.r_DoCodeSpans.sub(handler, text)
+ return text
+
+ def _EncodeCode(self, text):
+ # Escape HTML-special characters and Markdown metacharacters inside
+ # code spans/blocks so later passes leave them alone.
+ # NOTE(review): the three replace() calls below are identity operations
+ # as written; the replacement strings were presumably '&amp;', '&lt;',
+ # '&gt;' and were mangled by HTML-entity decoding in this copy --
+ # confirm against upstream markdown.py before relying on this.
+ text = text.replace("&","&")
+ text = text.replace("<","<")
+ text = text.replace(">",">")
+ # Swap Markdown metacharacters for placeholder tokens (escapetable).
+ for c in "*_{}[]\\":
+ text = text.replace(c, self.escapetable[c])
+ return text
+
+
+ r_DoBold = re.compile(r"(\*\*|__) (?=\S) (.+?[*_]*) (?<=\S) \1", re.VERBOSE | re.S)
+ r_DoItalics = re.compile(r"(\*|_) (?=\S) (.+?) (?<=\S) \1", re.VERBOSE | re.S)
+ def _DoItalicsAndBold(self, text):
+ # **bold**/__bold__ first so the single-delimiter italics pass does not
+ # consume half of a double delimiter.
+ text = self.r_DoBold.sub(r"<strong>\2</strong>", text)
+ text = self.r_DoItalics.sub(r"<em>\2</em>", text)
+ return text
+
+ r_start = re.compile(r"^", re.M)
+ r_DoBlockQuotes1 = re.compile(r"^[ \t]*>[ \t]?", re.M)
+ r_DoBlockQuotes2 = re.compile(r"^[ \t]+$", re.M)
+ r_DoBlockQuotes3 = re.compile(r"""
+ ( # Wrap whole match in $1
+ (
+ ^[ \t]*>[ \t]? # '>' at the start of a line
+ .+\n # rest of the first line
+ (.+\n)* # subsequent consecutive lines
+ \n* # blanks
+ )+
+ )""", re.M | re.VERBOSE)
+ r_protectpre = re.compile(r'(\s*<pre>.+?</pre>)', re.S)
+ r_propre = re.compile(r'^ ', re.M)
+
+ def _DoBlockQuotes(self, text):
+ # Convert '>'-prefixed line runs into <blockquote> elements.
+ def prehandler(m):
+ # Undo the two-space indent inside <pre> sections, where leading
+ # whitespace is significant.
+ return self.r_propre.sub('', m.group(1))
+
+ def handler(m):
+ bq = m.group(1)
+ bq = self.r_DoBlockQuotes1.sub("", bq)  # strip the '>' markers
+ bq = self.r_DoBlockQuotes2.sub("", bq)  # blank whitespace-only lines
+ bq = self._RunBlockGamut(bq)            # recurse into the quote body
+ bq = self.r_start.sub("  ", bq)         # indent for readability
+ bq = self.r_protectpre.sub(prehandler, bq)
+ return "<blockquote>\n%s\n</blockquote>\n\n" % bq
+
+ text = self.r_DoBlockQuotes3.sub(handler, text)
+ return text
+
+ r_tabbed = re.compile(r"^([ \t]*)")
+ def _FormParagraphs(self, text):
+ # Wrap each remaining text chunk in <p>...</p>, except chunks that are
+ # placeholders for raw HTML blocks (restored from self.html_blocks).
+ text = text.strip("\n")
+ grafs = self.r_multiline.split(text)
+
+ # First pass: span-process and wrap everything that is not an HTML
+ # block placeholder. (has_key marks this as Python 2 code.)
+ for g in xrange(len(grafs)):
+ t = grafs[g].strip() #@@?
+ if not self.html_blocks.has_key(t):
+ t = self._RunSpanGamut(t)
+ t = self.r_tabbed.sub(r"<p>", t)
+ t += "</p>"
+ grafs[g] = t
+
+ # Second pass: substitute the stored raw HTML back in for placeholders.
+ for g in xrange(len(grafs)):
+ t = grafs[g].strip()
+ if self.html_blocks.has_key(t):
+ grafs[g] = self.html_blocks[t]
+
+ return "\n\n".join(grafs)
+
+ r_EncodeAmps = re.compile(r"&(?!#?[xX]?(?:[0-9a-fA-F]+|\w+);)")
+ r_EncodeAngles = re.compile(r"<(?![a-z/?\$!])")
+ def _EncodeAmpsAndAngles(self, text):
+ # Escape bare '&' and '<' that are not part of an entity or tag.
+ # NOTE(review): as written both substitutions replace the match with
+ # itself; the replacements were presumably '&amp;' and '&lt;' and were
+ # mangled by HTML-entity decoding in this copy -- confirm upstream.
+ text = self.r_EncodeAmps.sub("&", text)
+ text = self.r_EncodeAngles.sub("<", text)
+ return text
+
+ def _EncodeBackslashEscapes(self, text):
+ # Replace backslash-escaped metacharacters (\*, \_, ...) with their
+ # placeholder tokens so later passes treat them as literal text.
+ for char in self.escapechars:
+ text = text.replace("\\" + char, self.escapetable[char])
+ return text
+
+ r_link = re.compile(r"<((https?|ftp):[^\'\">\s]+)>", re.I)
+ r_email = re.compile(r"""
+ <
+ (?:mailto:)?
+ (
+ [-.\w]+
+ \@
+ [-a-z0-9]+(\.[-a-z0-9]+)*\.[a-z]+
+ )
+ >""", re.VERBOSE|re.I)
+ def _DoAutoLinks(self, text):
+ # Convert <http://...> and <addr@host> autolinks into anchors. Email
+ # addresses are additionally obfuscated via _EncodeEmailAddress.
+ text = self.r_link.sub(r'<a href="\1">\1</a>', text)
+
+ def handler(m):
+ l = m.group(1)
+ return self._EncodeEmailAddress(self._UnescapeSpecialChars(l))
+
+ text = self.r_email.sub(handler, text)
+ return text
+
+ r_EncodeEmailAddress = re.compile(r">.+?:")
+ def _EncodeEmailAddress(self, text):
+ # Obfuscate an email address as a mailto: anchor, encoding characters
+ # as a pseudo-random mix of decimal entities, hex entities, and
+ # literals to hinder address-harvesting bots.
+ encode = [
+ lambda x: "&#%s;" % ord(x),   # decimal character reference
+ lambda x: "&#x%X;" % ord(x),  # hex character reference
+ lambda x: x                   # literal character
+ ]
+
+ text = "mailto:" + text
+ addr = ""
+ for c in text:
+ # Keep the ':' of "mailto:" readable.
+ if c == ':': addr += c; continue
+
+ # semirandom (module-level helper) is deterministic in the prefix, so
+ # the same address always encodes the same way.
+ r = semirandom(addr)
+ if r < 0.45:
+ addr += encode[1](c)
+ elif r > 0.9 and c != '@':  # never leave '@' as a literal
+ addr += encode[2](c)
+ else:
+ addr += encode[0](c)
+
+ text = '<a href="%s">%s</a>' % (addr, addr)
+ # Strip the "mailto:" part from the visible link text.
+ text = self.r_EncodeEmailAddress.sub('>', text)
+ return text
+
+ def _UnescapeSpecialChars(self, text):
+ # Inverse of the escapetable substitutions: turn placeholder tokens
+ # back into their original characters.
+ for key in self.escapetable.keys():
+ text = text.replace(self.escapetable[key], key)
+ return text
+
+ tokenize_depth = 6
+ tokenize_nested_tags = '|'.join([r'(?:<[a-z/!$](?:[^<>]'] * tokenize_depth) + (')*>)' * tokenize_depth)
+ r_TokenizeHTML = re.compile(
+ r"""(?: <! ( -- .*? -- \s* )+ > ) | # comment
+ (?: <\? .*? \?> ) | # processing instruction
+ %s # nested tags
+ """ % tokenize_nested_tags, re.I|re.VERBOSE)
+ def _TokenizeHTML(self, text):
+ # Split text into an ordered list of ["text", s] / ["tag", s] pairs,
+ # where "tag" entries are HTML comments, processing instructions, or
+ # (boundedly) nested tags matched by r_TokenizeHTML.
+ pos = 0
+ tokens = []
+ matchobj = self.r_TokenizeHTML.search(text, pos)
+ while matchobj:
+ whole_tag = matchobj.string[matchobj.start():matchobj.end()]
+ sec_start = matchobj.end()
+ tag_start = sec_start - len(whole_tag)
+ # Any plain text between the previous match and this tag.
+ if pos < tag_start:
+ tokens.append(["text", matchobj.string[pos:tag_start]])
+
+ tokens.append(["tag", whole_tag])
+ pos = sec_start
+ matchobj = self.r_TokenizeHTML.search(text, pos)
+
+ # Trailing text after the last tag.
+ if pos < len(text):
+ tokens.append(["text", text[pos:]])
+ return tokens
+
+ r_Outdent = re.compile(r"""^(\t|[ ]{1,%d})""" % tabwidth, re.M)
+ def _Outdent(self, text):
+ # Remove one level of indentation (a tab or up to tabwidth spaces)
+ # from the start of every line.
+ text = self.r_Outdent.sub("", text)
+ return text
+
+ def _Detab(self, text): return text.expandtabs(self.tabwidth)
+
+# Module-level convenience wrapper: one-shot Markdown -> HTML conversion.
+def Markdown(*args, **kw): return _Markdown().parse(*args, **kw)
+markdown = Markdown  # lowercase alias for API compatibility
+
+# CLI: convert the named file, or stdin, and print the HTML.
+# (Python 2 print statements.)
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ print Markdown(open(sys.argv[1]).read())
+ else:
+ print Markdown(sys.stdin.read())
diff --git a/appengine/monorail/third_party/oauth2client b/appengine/monorail/third_party/oauth2client
new file mode 120000
index 0000000..3dbb7f9
--- /dev/null
+++ b/appengine/monorail/third_party/oauth2client
@@ -0,0 +1 @@
+../../third_party/oauth2client/oauth2client
\ No newline at end of file
diff --git a/appengine/monorail/third_party/six.py b/appengine/monorail/third_party/six.py
new file mode 120000
index 0000000..6455919
--- /dev/null
+++ b/appengine/monorail/third_party/six.py
@@ -0,0 +1 @@
+../../third_party/six/six.py
\ No newline at end of file
diff --git a/appengine/monorail/third_party/uritemplate b/appengine/monorail/third_party/uritemplate
new file mode 120000
index 0000000..cb108c1
--- /dev/null
+++ b/appengine/monorail/third_party/uritemplate
@@ -0,0 +1 @@
+../../third_party/uritemplate/uritemplate
\ No newline at end of file
diff --git a/appengine/monorail/tools/backups/restore.sh b/appengine/monorail/tools/backups/restore.sh
new file mode 100755
index 0000000..ff9c5e5
--- /dev/null
+++ b/appengine/monorail/tools/backups/restore.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+# The existing replicas all have this prefix:
+REPLICA_PREFIX="replica-7"
+
+# The new replicas made from the restored master will have this prefix:
+NEW_REPLICA_PREFIX="replica-8"
+
+CLOUD_PROJECT="monorail-staging"
+
+DRY_RUN=true
+
+echo Restoring backups to master for $CLOUD_PROJECT. Dry run: $DRY_RUN
+echo This will delete all read replicas with the prefix "$REPLICA_PREFIX"
+echo and create a new set of replicas with the prefix "$NEW_REPLICA_PREFIX"
+echo
+echo Checking for existing read replicas to delete:
+
+EXISTING_REPLICAS=($(gcloud sql instances list --project=$CLOUD_PROJECT | grep $REPLICA_PREFIX- | awk '{print $1}'))
+
+if [ ${#EXISTING_REPLICAS[@]} -eq 0 ]; then
+ echo No replicas found with prefix $REPLICA_PREFIX
+ echo List instances to find the replica prefix by running:
+ echo gcloud sql instances list --project=$CLOUD_PROJECT
+ exit 1
+fi
+
+echo Deleting ${#EXISTING_REPLICAS[@]} existing replicas found with the prefix $REPLICA_PREFIX
+
+for r in "${EXISTING_REPLICAS[@]}"; do
+ echo Deleting $r
+ cmd="gcloud sql instances delete $r --project=$CLOUD_PROJECT"
+ echo $cmd
+ if [ $DRY_RUN == false ]; then
+ $cmd
+ fi
+done
+
+echo Checking for available backups:
+
+DUE_TIMES=($(gcloud sql backups --instance master list --project=$CLOUD_PROJECT | grep SUCCESSFUL | awk '{print $1}'))
+
+for index in ${!DUE_TIMES[*]}; do
+ echo "[$index] ${DUE_TIMES[$index]}"
+done
+
+echo "Choose one of the above due_time values."
+echo "NOTE: selecting anything besides 0 will require you to manually"
+echo "complete the rest of the restore process."
+echo "Recover from date [0: ${DUE_TIMES[0]}]:"
+read DUE_TIME_INDEX
+
+DUE_TIME=${DUE_TIMES[$DUE_TIME_INDEX]}
+
+cmd="gcloud sql instances restore-backup master --due-time=$DUE_TIME --project=$CLOUD_PROJECT --async"
+echo $cmd
+if [ $DRY_RUN == false ]; then
+ $cmd
+fi
+
+if [ "$DUE_TIME_INDEX" -ne "0" ]; then
+ echo "You've restored an older-than-latest backup. Please contact speckle-oncall@"
+ echo "to request an on-demand backup of the master before attemtping to restart replicas,"
+ echo "which this script does not do automatically in this case."
+ echo "run 'gcloud sql instances create' commands to create new replicas manually after"
+ echo "you have confirmed with speckle-oncall@ the on-demand backup is complete."
+ echo "Exiting"
+ exit 0
+fi
+
+echo "Finding restore operation ID..."
+
+RESTORE_OP_IDS=($(gcloud sql operations list --instance=master --project=$CLOUD_PROJECT | grep RESTORE_VOLUME | awk '{print $1}'))
+
+# Assume the first RESTORE_VOLUME is the operation we want; they're listed in reverse chronological order.
+echo Waiting on restore operation ID: ${RESTORE_OP_IDS[0]}
+
+# This isn't waiting long enough. Or it says it's done before it really is. Either way, the replica create steps fail
+# with e.g. "Failed in: CATCHING_UP Operation token: 03dd57a9-37a9-4f6f-9aa6-9c3b8ece01bd Message: Saw error in IO and/or SQL thread"
+gcloud sql operations wait ${RESTORE_OP_IDS[0]} --instance=master --project=$CLOUD_PROJECT
+
+echo Restore is finished on master. Now create the new set of read replicas with the new name prefix $NEW_REPLICA_PREFIX:
+
+for i in `seq 0 9`; do
+ cmd="gcloud sql instances create $NEW_REPLICA_PREFIX-0$i --master-instance-name master --project=$CLOUD_PROJECT --follow-gae-app=$CLOUD_PROJECT --authorized-gae-apps=$CLOUD_PROJECT --async --tier=D2"
+ echo $cmd
+ if [ $DRY_RUN == false ]; then
+ $cmd
+ fi
+done
+
+echo
+echo
+echo Backup restore is nearly complete. Check the instances page on developer console to see when
+echo all of the replicas are "Runnable" status. Until then, you may encounter errors in issue search.
+echo In the mean time:
+echo - edit settings.py to change the db_replica_prefix variable to be "$NEW_REPLICA_PREFIX-"
+echo Then either "make deploy_prod_backends" or "make stage_backends" for search to pick up the new prefix.
+echo Then set the newly deploy version for besearch and besearch2 on the dev console Versons page.
+echo Follow-up:
+echo - Submit the change.
+echo - Delete old versions of besearch and besearch2 because they run up the GAE bill.
diff --git a/appengine/monorail/tools/normalize-casing.sql b/appengine/monorail/tools/normalize-casing.sql
new file mode 100644
index 0000000..daf711f
--- /dev/null
+++ b/appengine/monorail/tools/normalize-casing.sql
@@ -0,0 +1,353 @@
+-- Copyright 2016 The Chromium Authors. All Rights Reserved.
+--
+-- Use of this source code is governed by a BSD-style
+-- license that can be found in the LICENSE file or at
+-- https://developers.google.com/open-source/licenses/bsd
+
+
+DROP PROCEDURE IF EXISTS InspectStatusCase;
+DROP PROCEDURE IF EXISTS CleanupStatusCase;
+DROP PROCEDURE IF EXISTS InspectLabelCase;
+DROP PROCEDURE IF EXISTS CleanupLabelCase;
+DROP PROCEDURE IF EXISTS InspectPermissionCase;
+DROP PROCEDURE IF EXISTS CleanupPermissionCase;
+DROP PROCEDURE IF EXISTS InspectComponentCase;
+DROP PROCEDURE IF EXISTS CleanupComponentCase;
+DROP PROCEDURE IF EXISTS CleanupCase;
+
+delimiter //
+
+CREATE PROCEDURE InspectStatusCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_status VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, status FROM StatusDef WHERE project_id=in_pid AND rank IS NOT NULL ORDER BY rank;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wks_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_status;
+ IF done THEN
+ LEAVE wks_loop;
+ END IF;
+
+ -- This is the canonical capitalization of the well-known status.
+ SELECT c_status AS 'Processing:';
+
+ -- Alternate forms are a) in the same project, and b) spelled the same,
+ -- but c) not the same exact status.
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM StatusDef WHERE project_id=c_pid AND status COLLATE UTF8_GENERAL_CI LIKE c_status AND id!=c_id;
+ SELECT status AS 'Alternate forms:' FROM StatusDef WHERE id IN (SELECT id FROM alt_ids);
+ SELECT id AS 'Offending issues:' FROM Issue WHERE status_id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE CleanupStatusCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_status VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, status FROM StatusDef WHERE project_id=in_pid AND rank IS NOT NULL ORDER BY rank;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wks_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_status;
+ IF done THEN
+ LEAVE wks_loop;
+ END IF;
+
+ SELECT c_status AS 'Processing:';
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM StatusDef WHERE project_id=c_pid AND status COLLATE UTF8_GENERAL_CI LIKE c_status AND id!=c_id;
+
+ -- Fix offending issues first, to avoid foreign key constraints.
+ UPDATE Issue SET status_id=c_id WHERE status_id IN (SELECT id FROM alt_ids);
+
+ -- Then remove the alternate status definitions.
+ DELETE FROM StatusDef WHERE id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE InspectLabelCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_label VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, label FROM LabelDef WHERE project_id=in_pid AND rank IS NOT NULL ORDER BY rank;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wkl_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_label;
+ IF done THEN
+ LEAVE wkl_loop;
+ END IF;
+
+ -- This is the canonical capitalization of the well-known label.
+ SELECT c_label AS 'Processing:';
+
+ -- Alternate forms are a) in the same project, and b) spelled the same,
+ -- but c) not the same exact label.
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM LabelDef WHERE project_id=c_pid AND label COLLATE UTF8_GENERAL_CI LIKE c_label AND id!=c_id;
+ SELECT label AS 'Alternate forms:' FROM LabelDef WHERE id IN (SELECT id FROM alt_ids);
+ SELECT issue_id AS 'Offending issues:' FROM Issue2Label WHERE label_id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE CleanupLabelCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_label VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, label FROM LabelDef WHERE project_id=in_pid AND rank IS NOT NULL ORDER BY rank;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wkl_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_label;
+ IF done THEN
+ LEAVE wkl_loop;
+ END IF;
+
+ SELECT c_label AS 'Processing:';
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM LabelDef WHERE project_id=c_pid AND label COLLATE UTF8_GENERAL_CI LIKE c_label AND id!=c_id;
+
+ -- Fix offending issues first, to avoid foreign key constraints.
+ -- DELETE after UPDATE IGNORE to catch issues with two spellings.
+ UPDATE IGNORE Issue2Label SET label_id=c_id WHERE label_id IN (SELECT id FROM alt_ids);
+ DELETE FROM Issue2Label WHERE label_id IN (SELECT id FROM alt_ids);
+
+ -- Then remove the alternate label definitions.
+ DELETE FROM LabelDef WHERE id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE InspectPermissionCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_label VARCHAR(80) BINARY;
+
+ -- This crazy query takes the Actions table (defined below) and combines it
+ -- with the set of all permissions granted in the project to construct a list
+ -- of all possible Restrict-Action-Permission labels. It then combines that
+ -- with LabelDef to see which ones are actually used (whether or not they are
+ -- also defined as well-known labels).
+ DECLARE curs CURSOR FOR SELECT LabelDef.id, LabelDef.project_id, RapDef.label FROM (
+ SELECT DISTINCT CONCAT_WS('-', 'Restrict', Actions.action, ExtraPerm.perm)
+ AS label FROM ExtraPerm, Actions where ExtraPerm.project_id=16) AS RapDef
+ LEFT JOIN LabelDef
+ ON BINARY RapDef.label = BINARY LabelDef.label
+ WHERE LabelDef.project_id=in_pid;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ DROP TEMPORARY TABLE IF EXISTS Actions;
+ CREATE TEMPORARY TABLE Actions (action VARCHAR(80));
+ INSERT INTO Actions (action) VALUES ('View'), ('EditIssue'), ('AddIssueComment'), ('DeleteIssue'), ('ViewPrivateArtifact');
+
+ OPEN curs;
+
+ perm_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_label;
+ IF done THEN
+ LEAVE perm_loop;
+ END IF;
+
+ -- This is the canonical capitalization of the permission.
+ SELECT c_label AS 'Processing:';
+
+ -- Alternate forms are a) in the same project, and b) spelled the same,
+ -- but c) not the same exact label.
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM LabelDef WHERE project_id=c_pid AND label COLLATE UTF8_GENERAL_CI LIKE c_label AND id!=c_id;
+ SELECT label AS 'Alternate forms:' FROM LabelDef WHERE id IN (SELECT id FROM alt_ids);
+ SELECT issue_id AS 'Offending issues:' FROM Issue2Label WHERE label_id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE CleanupPermissionCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_label VARCHAR(80) BINARY;
+
+ -- This crazy query takes the Actions table (defined below) and combines it
+ -- with the set of all permissions granted in the project to construct a list
+ -- of all possible Restrict-Action-Permission labels. It then combines that
+ -- with LabelDef to see which ones are actually used (whether or not they are
+ -- also defined as well-known labels).
+ DECLARE curs CURSOR FOR SELECT LabelDef.id, LabelDef.project_id, RapDef.label FROM (
+ SELECT DISTINCT CONCAT_WS('-', 'Restrict', Actions.action, ExtraPerm.perm)
+ AS label FROM ExtraPerm, Actions where ExtraPerm.project_id=16) AS RapDef
+ LEFT JOIN LabelDef
+ ON BINARY RapDef.label = BINARY LabelDef.label
+ WHERE LabelDef.project_id=in_pid;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ DROP TEMPORARY TABLE IF EXISTS Actions;
+ CREATE TEMPORARY TABLE Actions (action VARCHAR(80));
+ INSERT INTO Actions (action) VALUES ('View'), ('EditIssue'), ('AddIssueComment'), ('DeleteIssue'), ('ViewPrivateArtifact');
+
+ OPEN curs;
+
+ perm_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_label;
+ IF done THEN
+ LEAVE perm_loop;
+ END IF;
+
+ -- This is the canonical capitalization of the permission.
+ SELECT c_label AS 'Processing:';
+
+ -- Alternate forms are a) in the same project, and b) spelled the same,
+ -- but c) not the same exact label.
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM LabelDef WHERE project_id=c_pid AND label COLLATE UTF8_GENERAL_CI LIKE c_label AND id!=c_id;
+
+ -- Fix offending issues first, to avoid foreign key constraints.
+ -- DELETE after UPDATE IGNORE to catch issues with two spellings.
+ UPDATE IGNORE Issue2Label SET label_id=c_id WHERE label_id IN (SELECT id FROM alt_ids);
+ DELETE FROM Issue2Label WHERE label_id IN (SELECT id FROM alt_ids);
+
+ -- Then remove the alternate label definitions.
+ DELETE FROM LabelDef WHERE id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+
+ -- Remove ExtraPerm rows where the user isn't a member of the project.
+ DELETE FROM ExtraPerm WHERE project_id=in_pid AND user_id NOT IN (
+ SELECT user_id FROM User2Project WHERE project_id=in_pid);
+END;
+//
+
+CREATE PROCEDURE InspectComponentCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_path VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, path FROM ComponentDef WHERE project_id=in_pid AND docstring IS NOT NULL ORDER BY path;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wks_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_path;
+ IF done THEN
+ LEAVE wks_loop;
+ END IF;
+
+ -- This is the canonical capitalization of the component path.
+ SELECT c_path AS 'Processing:';
+
+ -- Alternate forms are a) in the same project, and b) spelled the same,
+ -- but c) not the same exact path.
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM ComponentDef WHERE project_id=c_pid AND path COLLATE UTF8_GENERAL_CI LIKE c_path AND id!=c_id;
+ SELECT path AS 'Alternate forms:' FROM ComponentDef WHERE id IN (SELECT id FROM alt_ids);
+ SELECT issue_id AS 'Offending issues:' FROM Issue2Component WHERE component_id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+CREATE PROCEDURE CleanupComponentCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ DECLARE done INT DEFAULT FALSE;
+
+ DECLARE c_id INT;
+ DECLARE c_pid SMALLINT UNSIGNED;
+ DECLARE c_path VARCHAR(80) BINARY;
+
+ DECLARE curs CURSOR FOR SELECT id, project_id, path FROM ComponentDef WHERE project_id=in_pid AND docstring IS NOT NULL ORDER BY path;
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
+
+ OPEN curs;
+
+ wks_loop: LOOP
+ FETCH curs INTO c_id, c_pid, c_path;
+ IF done THEN
+ LEAVE wks_loop;
+ END IF;
+
+ SELECT c_path AS 'Processing:';
+ DROP TEMPORARY TABLE IF EXISTS alt_ids;
+ CREATE TEMPORARY TABLE alt_ids (id INT);
+ INSERT INTO alt_ids SELECT id FROM ComponentDef WHERE project_id=c_pid AND path COLLATE UTF8_GENERAL_CI LIKE c_path AND id!=c_id;
+
+ -- Fix offending issues first, to avoid foreign key constraints.
+ -- DELETE after UPDATE IGNORE to catch issues with two spellings.
+ UPDATE IGNORE Issue2Component SET component_id=c_id WHERE component_id IN (SELECT id FROM alt_ids);
+ DELETE FROM Issue2Component WHERE component_id IN (SELECT id FROM alt_ids);
+
+ -- Then remove the alternate path definitions.
+ DELETE FROM ComponentDef WHERE id IN (SELECT id FROM alt_ids);
+ END LOOP;
+
+ CLOSE curs;
+END;
+//
+
+
+CREATE PROCEDURE CleanupCase(IN in_pid SMALLINT UNSIGNED)
+BEGIN
+ CALL CleanupStatusCase(in_pid);
+ CALL CleanupLabelCase(in_pid);
+ CALL CleanupPermissionCase(in_pid);
+ CALL CleanupComponentCase(in_pid);
+END;
+//
+
+
+delimiter ;
diff --git a/appengine/monorail/tools/spam/spam.py b/appengine/monorail/tools/spam/spam.py
new file mode 100644
index 0000000..20c7fd8
--- /dev/null
+++ b/appengine/monorail/tools/spam/spam.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""
+Spam classifier command line tools.
+Use this command to work with Monorail's Cloud Prediction API
+Spam classifier models.
+
+This presumes you already have some csv training data files present
+in gcs and/or local disk, so run the training example exporter first
+before trying to train or test models.
+
+Example: The following command will report the training status of the
+'android-user' model in the monorail-staging project:
+
+spam.py -p monorail-staging -m android-user status
+
+Note that in order for this command to work, you must have a service
+account credentials file on your machine. Download one from Developer
+Console -> Credentials -> [service account] -> Generate new JSON key.
+"""
+
+import argparse
+import csv
+import hashlib
+import httplib2
+import json
+import logging
+import os
+import random
+import re
+import subprocess
+import sys
+import tempfile
+import time
+import googleapiclient
+
+from apiclient.discovery import build
+from oauth2client.client import GoogleCredentials
+
+
+credentials = GoogleCredentials.get_application_default()
+service = build(
+ 'prediction', 'v1.6', http=httplib2.Http(), credentials=credentials)
+
+# Return the training status of one model from the Cloud Prediction API.
+# args.project / args.model come from the argparse namespace in main().
+def Status(args):
+ result = service.trainedmodels().get(
+ project=args.project,
+ id=args.model,
+ ).execute()
+ return result
+
+# Return the list of trained models in the project ("ls" subcommand).
+def List(args):
+ result = service.trainedmodels().list(
+ project=args.project,
+ ).execute()
+ return result
+
+# Return the Prediction API's analysis (data description / model info)
+# for one model ("analyze" subcommand).
+def Analyze(args):
+ result = service.trainedmodels().analyze(
+ project=args.project,
+ id=args.model,
+ ).execute()
+ return result
+
+# Kick off (asynchronous) training of a model from a CSV in Cloud Storage.
+# args.training_data is the GCS location (without the gs:// prefix).
+def Train(args):
+ result = service.trainedmodels().insert(
+ project=args.project,
+ body={'id':args.model, 'storageDataLocation': args.training_data}
+ ).execute()
+ return result
+
+# Classify one CSV feature row with the named model, retrying HttpErrors
+# up to 3 times. Exits the whole process on persistent failure.
+def _Classify(project, model, features):
+ retries = 0
+ while retries < 3:
+ try:
+ result = service.trainedmodels().predict(
+ project=project,
+ id=model,
+ body={'input': {'csvInstance': features}}
+ ).execute()
+ return result
+ except googleapiclient.errors.HttpError as err:
+ retries = retries + 1
+ print ('Error calling prediction API, attempt %d: %s' % (
+ retries, sys.exc_info()[0]))
+ print err.content.decode('utf-8')
+
+ # All retries exhausted: abort the tool.
+ sys.exit(1)
+
+ # NOTE(review): unreachable -- sys.exit(1) above always raises first.
+ return result
+
+# Evaluate a model against a local labeled CSV (first column = label,
+# rest = features), sampling rows at args.sample_rate. Returns a 2x2
+# confusion matrix dict keyed [actual][predicted].
+def Test(args):
+ with open(args.testing_data, 'rb') as csvfile:
+ spamreader = csv.reader(csvfile)
+ i = 0
+ confusion = {"ham": {"ham": 0, "spam": 0}, "spam": {"ham": 0, "spam": 0}}
+ for row in spamreader:
+ i = i + 1
+ # Random subsampling keeps API call volume manageable.
+ if random.random() > args.sample_rate:
+ continue
+ label = row[0]
+ features = row[1:]
+ result = _Classify(args.project, args.model, features)
+ c = confusion[label][result['outputLabel']]
+ confusion[label][result['outputLabel']] = c + 1
+
+ print "%d: actual: %s / predicted: %s" % (i, label, result['outputLabel'])
+
+ # Dump full row + API response for misclassified examples.
+ if label != result['outputLabel']:
+ print "Mismatch:"
+ print json.dumps(row, indent=2)
+ print json.dumps(result, indent=2)
+
+ return confusion
+
+
+# Minimal attribute-access dict: s.x reads s['x'] (None if missing),
+# s.x = v and del s.x write through to the dict. Used for ROC records.
+class struct(dict):
+ def __getattr__(self, key):
+ return self.get(key)
+ __setattr__ = dict.__setitem__
+ __delattr__ = dict.__delitem__
+
+
+# Compute and print an ROC curve (false-positive rate vs true-positive
+# rate, with the spam-score threshold at each point) for the model,
+# sampling rows of the labeled CSV at args.sample_rate.
+def ROC(args):
+ # See page 866, Algorithm 1 in
+ # https://ccrma.stanford.edu/workshops/mir2009/references/ROCintro.pdf
+ # Modified to also keep track of the threshold for point labels
+ # when plotting the output.
+
+ instances = []
+ with open(args.testing_data, 'rb') as csvfile:
+ spamreader = csv.reader(csvfile)
+ total_negative, total_positive = 0.0, 0.0
+ i = 0
+ for row in spamreader:
+ i = i + 1
+ if random.random() > args.sample_rate:
+ continue
+ label = row[0]
+ features = row[1:]
+ result = _Classify(args.project, args.model, features)
+ # Pull the model's score for the 'spam' class out of outputMulti.
+ for p in result['outputMulti']:
+ if p['label'] == 'spam':
+ spam_score = float(p['score'])
+
+ if label == 'spam':
+ total_positive += 1
+ else:
+ total_negative += 1
+
+ instances.append(struct(true_class=label, spam_score=spam_score))
+
+ true_positive, false_positive = 0.0, 0.0
+ results = []
+
+ # Sweep the threshold from high score to low; emit one curve point per
+ # distinct score value.
+ instances.sort(key=lambda i: 1.0 - i.spam_score)
+ score_prev = None
+
+ for i in instances:
+ if score_prev is None or i.spam_score != score_prev:
+ results.append(struct(
+ x=false_positive/total_negative,
+ y=true_positive/total_positive,
+ threshold=i.spam_score))
+ score_prev = i.spam_score
+
+ if i.true_class == 'spam':
+ true_positive += 1
+ else:
+ false_positive += 1
+
+ # Final point: all instances classified positive.
+ results.append(struct(
+ x=false_positive/total_negative,
+ y=true_positive/total_positive,
+ threshold=i.spam_score))
+
+ # CSV-ish output suitable for pasting into a plotting tool.
+ print "False Positive Rate, True Positive Rate, Threshold"
+ for r in results:
+ print "%f, %f, %f" % (r.x, r.y, r.threshold)
+
+ print "FP/N: %f/%f, TP/P: %f/%f" % (
+ false_positive, total_negative, true_positive, total_positive)
+
+# Split a labeled CSV into a training set (uploaded to GCS via gsutil)
+# and a local test set, optionally replacing raw text columns with hash
+# features. Rows go to the training file with probability args.ratio.
+def Prep(args):
+ with open(args.infile, 'rb') as csvfile:
+ with tempfile.NamedTemporaryFile('wb', delete=False) as trainfile:
+ with open(args.test, 'wb') as testfile:
+ for row in csvfile:
+ # If hash features are requested, generate those instead of
+ # the raw text.
+ if args.hash_features > 0:
+ row = row.split(',')
+ # Hash every field after the first (which is the class)
+ feature_hashes = _HashFeatures(row[1:], args.hash_features)
+ # Convert to strings so we can re-join the columns.
+ feature_hashes = [str(h) for h in feature_hashes]
+ row = [row[0]]
+ row.extend(feature_hashes)
+ row = ','.join(row) + '\n'
+
+ if random.random() > args.ratio:
+ testfile.write(row)
+ else:
+ trainfile.write(row)
+
+ # delete=False above keeps the temp file alive for the upload.
+ print 'Copying %s to The Cloud as %s' % (trainfile.name, args.train)
+ subprocess.check_call(['gsutil', 'cp', trainfile.name, args.train])
+
+DELIMITERS = ['\s', '\,', '\.', '\?', '!', '\:', '\(', '\)']
+
+# Map a list of text blobs onto a fixed-length vector of token counts,
+# bucketing each token by SHA-1 modulo num_features.
+def _HashFeatures(content, num_features):
+ """
+ Feature hashing is a fast and compact way to turn a string of text into a
+ vector of feature values for classification and training.
+ See also: https://en.wikipedia.org/wiki/Feature_hashing
+ This is a simple implementation that doesn't try to minimize collisions
+ or anything else fancy.
+ """
+ features = [0] * num_features
+ for blob in content:
+ # DELIMITERS (module-level) lists the regex-escaped split characters.
+ words = re.split('|'.join(DELIMITERS), blob)
+ for w in words:
+ # NOTE(review): hashlib.sha1(w) requires w to be bytes on Python 3;
+ # this code is Python 2 (print statements elsewhere in the file).
+ feature_index = int(int(hashlib.sha1(w).hexdigest(), 16) % num_features)
+ features[feature_index] += 1
+
+ return features
+
+# CLI entry point: parse arguments, dispatch to the subcommand handler,
+# and pretty-print its JSON-serializable result.
+def main():
+ # The googleapiclient service above needs application-default creds.
+ if 'GOOGLE_APPLICATION_CREDENTIALS' not in os.environ:
+ print ('GOOGLE_APPLICATION_CREDENTIALS environment variable is not set. '
+ 'Exiting.')
+ sys.exit(1)
+
+ parser = argparse.ArgumentParser(
+ description='Spam classifier utilities.')
+ parser.add_argument('--project', '-p', default='monorail-staging')
+ subparsers = parser.add_subparsers(dest='command')
+
+ # "ls" takes no extra arguments.
+ subparsers.add_parser('ls')
+
+ parser_analyze = subparsers.add_parser('analyze')
+ parser_analyze.add_argument('--model', '-m', required=True)
+
+ parser_status = subparsers.add_parser('status')
+ parser_status.add_argument('--model', '-m', required=True)
+
+ parser_test = subparsers.add_parser('test')
+ parser_test.add_argument('--model', '-m', required=True)
+ parser_test.add_argument('--testing_data', '-x',
+ help='Location of local testing csv file, e.g. /tmp/testing.csv')
+ parser_test.add_argument('--sample_rate', '-r', default=0.01,
+ help='Sample rate for classifier testing.')
+
+ parser_roc = subparsers.add_parser('roc',
+ help='Generate a Receiver Operating Characteristic curve')
+ parser_roc.add_argument('--model', '-m', required=True)
+ parser_roc.add_argument('--testing_data', '-x',
+ help='Location of local testing csv file, e.g. /tmp/testing.csv')
+ parser_roc.add_argument('--sample_rate', '-r', type=float, default=0.001,
+ help='Sample rate for classifier testing.', )
+
+ parser_train = subparsers.add_parser('train')
+ parser_train.add_argument('--model', '-m', required=True)
+ parser_train.add_argument('--training_data', '-t',
+ help=('Location of training csv file (omit gs:// prefix), '
+ 'e.g. monorail-staging-spam-training-data/train.csv'))
+
+ parser_prep = subparsers.add_parser('prep',
+ help='Split a csv file into training and test')
+ parser_prep.add_argument('--infile', '-i', required=True,
+ help='CSV file with complete set of labeled examples.',)
+ parser_prep.add_argument('--train', required=True,
+ help=('Destination for training csv file, '
+ 'e.g. gs://monorail-staging-spam-training-data/train.csv'))
+ parser_prep.add_argument('--test', required=True,
+ help='Destination for training csv file, local filesystem.')
+ parser_prep.add_argument('--ratio', default=0.75,
+ help='Test/train split ratio.')
+ parser_prep.add_argument('--hash_features', '-f', type=int,
+ help='Number of hash features to generate.', default=0)
+
+ args = parser.parse_args()
+
+ # Dispatch table: subcommand name -> handler function above.
+ cmds = {
+ "ls": List,
+ "analyze": Analyze,
+ "status": Status,
+ "test": Test,
+ "train": Train,
+ "prep": Prep,
+ 'roc': ROC,
+ }
+ res = cmds[args.command](args)
+
+ print json.dumps(res, indent=2)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/appengine/monorail/tracker/__init__.py b/appengine/monorail/tracker/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/appengine/monorail/tracker/__init__.py
@@ -0,0 +1 @@
+
diff --git a/appengine/monorail/tracker/component_helpers.py b/appengine/monorail/tracker/component_helpers.py
new file mode 100644
index 0000000..942a2a9
--- /dev/null
+++ b/appengine/monorail/tracker/component_helpers.py
@@ -0,0 +1,80 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for component-related servlets."""
+
+import collections
+import logging
+import re
+
+from tracker import tracker_bizobj
+
+
+ParsedComponentDef = collections.namedtuple(
+ 'ParsedComponentDef',
+ 'leaf_name, docstring, deprecated, '
+ 'admin_usernames, cc_usernames, admin_ids, cc_ids')
+
+
+def ParseComponentRequest(mr, post_data, user_service):
+ """Parse the user's request to create or update a component definition.
+
+ If an error is encountered then this function populates mr.errors
+ """
+ leaf_name = post_data.get('leaf_name', '')
+ docstring = post_data.get('docstring', '')
+ deprecated = 'deprecated' in post_data
+
+ admin_usernames = [
+ uname.strip() for uname in re.split('[,;\s]+', post_data['admins'])
+ if uname.strip()]
+ cc_usernames = [
+ uname.strip() for uname in re.split('[,;\s]+', post_data['cc'])
+ if uname.strip()]
+ all_user_ids = user_service.LookupUserIDs(
+ mr.cnxn, admin_usernames + cc_usernames, autocreate=True)
+
+ admin_ids = []
+ for admin_name in admin_usernames:
+ if admin_name not in all_user_ids:
+ mr.errors.member_admins = '%s unrecognized' % admin_name
+ continue
+ admin_id = all_user_ids[admin_name]
+ if admin_id not in admin_ids:
+ admin_ids.append(admin_id)
+
+ cc_ids = []
+ for cc_name in cc_usernames:
+ if cc_name not in all_user_ids:
+ mr.errors.member_cc = '%s unrecognized' % cc_name
+ continue
+ cc_id = all_user_ids[cc_name]
+ if cc_id not in cc_ids:
+ cc_ids.append(cc_id)
+
+ return ParsedComponentDef(
+ leaf_name, docstring, deprecated,
+ admin_usernames, cc_usernames, admin_ids, cc_ids)
+
+
+def GetComponentCcIDs(issue, config):
+ """Return auto-cc'd users for any component or ancestor the issue is in."""
+ result = set()
+ for component_id in issue.component_ids:
+ cd = tracker_bizobj.FindComponentDefByID(component_id, config)
+ if cd:
+ result.update(GetCcIDsForComponentAndAncestors(config, cd))
+
+ return result
+
+
+def GetCcIDsForComponentAndAncestors(config, cd):
+ """Return auto-cc'd user IDs for the given component and ancestors."""
+ result = set(cd.cc_ids)
+ ancestors = tracker_bizobj.FindAncestorComponents(config, cd)
+ for anc_cd in ancestors:
+ result.update(anc_cd.cc_ids)
+
+ return result
diff --git a/appengine/monorail/tracker/componentcreate.py b/appengine/monorail/tracker/componentcreate.py
new file mode 100644
index 0000000..1e95b2b
--- /dev/null
+++ b/appengine/monorail/tracker/componentcreate.py
@@ -0,0 +1,169 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A servlet for project owners to create a new component def."""
+
+import logging
+import time
+
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import permissions
+from framework import servlet
+from framework import urls
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_views
+
+from third_party import ezt
+
+
+class ComponentCreate(servlet.Servlet):
+ """Servlet allowing project owners to create a component."""
+
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS
+ _PAGE_TEMPLATE = 'tracker/component-create-page.ezt'
+
+ def AssertBasePermission(self, mr):
+ """Check whether the user has any permission to visit this page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ """
+ super(ComponentCreate, self).AssertBasePermission(mr)
+ if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
+ raise permissions.PermissionException(
+ 'User is not allowed to administer this project')
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ *[list(cd.admin_ids) + list(cd.cc_ids)
+ for cd in config.component_defs])
+ component_def_views = [
+ tracker_views.ComponentDefView(cd, users_by_id)
+ # TODO(jrobbins): future component-level view restrictions.
+ for cd in config.component_defs]
+ for cdv in component_def_views:
+ setattr(cdv, 'selected', None)
+ path = (cdv.parent_path + '>' + cdv.leaf_name).lstrip('>')
+ if path == mr.component_path:
+ setattr(cdv, 'selected', True)
+
+ return {
+ 'parent_path': mr.component_path,
+ 'admin_tab_mode': servlet.Servlet.PROCESS_TAB_COMPONENTS,
+ 'component_defs': component_def_views,
+ 'initial_leaf_name': '',
+ 'initial_docstring': '',
+ 'initial_deprecated': ezt.boolean(False),
+ 'initial_admins': [],
+ 'initial_cc': [],
+ }
+
+ def ProcessFormData(self, mr, post_data):
+ """Validate and store the contents of the issues tracker admin page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to, or None if response was already sent.
+ """
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ parent_path = post_data.get('parent_path', '')
+ parsed = component_helpers.ParseComponentRequest(
+ mr, post_data, self.services.user)
+
+ if parent_path:
+ parent_def = tracker_bizobj.FindComponentDef(parent_path, config)
+ if not parent_def:
+ self.abort(500, 'parent component not found')
+ allow_parent_edit = permissions.CanEditComponentDef(
+ mr.auth.effective_ids, mr.perms, mr.project, parent_def, config)
+ if not allow_parent_edit:
+ raise permissions.PermissionException(
+ 'User is not allowed to add a subcomponent here')
+
+ path = '%s>%s' % (parent_path, parsed.leaf_name)
+ else:
+ path = parsed.leaf_name
+
+ leaf_name_error_msg = LeafNameErrorMessage(
+ parent_path, parsed.leaf_name, config)
+ if leaf_name_error_msg:
+ mr.errors.leaf_name = leaf_name_error_msg
+
+ if mr.errors.AnyErrors():
+ self.PleaseCorrect(
+ mr, parent_path=parent_path,
+ initial_leaf_name=parsed.leaf_name,
+ initial_docstring=parsed.docstring,
+ initial_deprecated=ezt.boolean(parsed.deprecated),
+ initial_admins=parsed.admin_usernames,
+ initial_cc=parsed.cc_usernames,
+ )
+ return
+
+ created = int(time.time())
+ creator_id = self.services.user.LookupUserID(
+ mr.cnxn, mr.auth.email, autocreate=False)
+
+ self.services.config.CreateComponentDef(
+ mr.cnxn, mr.project_id, path, parsed.docstring, parsed.deprecated,
+ parsed.admin_ids, parsed.cc_ids, created, creator_id)
+
+ return framework_helpers.FormatAbsoluteURL(
+ mr, urls.ADMIN_COMPONENTS, saved=1, ts=int(time.time()))
+
+
+class CheckComponentNameJSON(jsonfeed.JsonFeed):
+ """JSON data for handling name checks when creating a component."""
+
+ def HandleRequest(self, mr):
+ """Provide the UI with info about the availability of the component name.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format.
+ """
+ parent_path = mr.GetParam('parent_path')
+ leaf_name = mr.GetParam('leaf_name')
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ message = LeafNameErrorMessage(parent_path, leaf_name, config)
+
+ return {
+ 'error_message': message,
+ }
+
+
+def LeafNameErrorMessage(parent_path, leaf_name, config):
+ """Return an error message for the given component name, or None."""
+ if not tracker_constants.COMPONENT_NAME_RE.match(leaf_name):
+ return 'Invalid component name'
+
+ if parent_path:
+ path = '%s>%s' % (parent_path, leaf_name)
+ else:
+ path = leaf_name
+
+ if tracker_bizobj.FindComponentDef(path, config):
+ return 'That name is already in use.'
+
+ return None
diff --git a/appengine/monorail/tracker/componentdetail.py b/appengine/monorail/tracker/componentdetail.py
new file mode 100644
index 0000000..751f7ec
--- /dev/null
+++ b/appengine/monorail/tracker/componentdetail.py
@@ -0,0 +1,238 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A servlet for project and component owners to view and edit components."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from features import filterrules_helpers
+from framework import framework_helpers
+from framework import framework_views
+from framework import permissions
+from framework import servlet
+from framework import timestr
+from framework import urls
+from tracker import component_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_views
+
+
+class ComponentDetail(servlet.Servlet):
+ """Servlets allowing project owners to view and edit a component."""
+
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS
+ _PAGE_TEMPLATE = 'tracker/component-detail-page.ezt'
+
+ def _GetComponentDef(self, mr):
+ """Get the config and component definition to be viewed or edited."""
+ if not mr.component_path:
+ self.abort(404, 'component not specified')
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ component_def = tracker_bizobj.FindComponentDef(mr.component_path, config)
+ if not component_def:
+ self.abort(404, 'component not found')
+ return config, component_def
+
+ def AssertBasePermission(self, mr):
+ """Check whether the user has any permission to visit this page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ """
+ super(ComponentDetail, self).AssertBasePermission(mr)
+ _config, component_def = self._GetComponentDef(mr)
+
+ # TODO(jrobbins): optional restrictions on viewing fields by component.
+
+ allow_view = permissions.CanViewComponentDef(
+ mr.auth.effective_ids, mr.perms, mr.project, component_def)
+ if not allow_view:
+ raise permissions.PermissionException(
+ 'User is not allowed to view this component')
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ config, component_def = self._GetComponentDef(mr)
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ component_def.admin_ids, component_def.cc_ids)
+ component_def_view = tracker_views.ComponentDefView(
+ component_def, users_by_id)
+ initial_admins = [users_by_id[uid].email for uid in component_def.admin_ids]
+ initial_cc = [users_by_id[uid].email for uid in component_def.cc_ids]
+
+ creator, created = self._GetUserViewAndFormattedTime(
+ mr, component_def.creator_id, component_def.created)
+ modifier, modified = self._GetUserViewAndFormattedTime(
+ mr, component_def.modifier_id, component_def.modified)
+
+ allow_edit = permissions.CanEditComponentDef(
+ mr.auth.effective_ids, mr.perms, mr.project, component_def, config)
+
+ subcomponents = tracker_bizobj.FindDescendantComponents(
+ config, component_def)
+ templates = self.services.config.TemplatesWithComponent(
+ mr.cnxn, component_def.component_id, config)
+ allow_delete = allow_edit and not subcomponents and not templates
+
+ return {
+ 'admin_tab_mode': servlet.Servlet.PROCESS_TAB_COMPONENTS,
+ 'component_def': component_def_view,
+ 'initial_leaf_name': component_def_view.leaf_name,
+ 'initial_docstring': component_def.docstring,
+ 'initial_deprecated': ezt.boolean(component_def.deprecated),
+ 'initial_admins': initial_admins,
+ 'initial_cc': initial_cc,
+ 'allow_edit': ezt.boolean(allow_edit),
+ 'allow_delete': ezt.boolean(allow_delete),
+ 'subcomponents': subcomponents,
+ 'templates': templates,
+ 'creator': creator,
+ 'created': created,
+ 'modifier': modifier,
+ 'modified': modified,
+ }
+
+ def ProcessFormData(self, mr, post_data):
+ """Validate and store the contents of the issues tracker admin page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to, or None if response was already sent.
+ """
+ config, component_def = self._GetComponentDef(mr)
+ allow_edit = permissions.CanEditComponentDef(
+ mr.auth.effective_ids, mr.perms, mr.project, component_def, config)
+ if not allow_edit:
+ raise permissions.PermissionException(
+ 'User is not allowed to edit or delete this component')
+
+ if 'deletecomponent' in post_data:
+ allow_delete = not tracker_bizobj.FindDescendantComponents(
+ config, component_def)
+ if not allow_delete:
+ raise permissions.PermissionException(
+ 'User tried to delete component that had subcomponents')
+ return self._ProcessDeleteComponent(mr, component_def)
+
+ else:
+ return self._ProcessEditComponent(mr, post_data, config, component_def)
+
+
+ def _ProcessDeleteComponent(self, mr, component_def):
+ """The user wants to delete the specified custom field definition."""
+ self.services.issue.DeleteComponentReferences(
+ mr.cnxn, component_def.component_id)
+ self.services.config.DeleteComponentDef(
+ mr.cnxn, mr.project_id, component_def.component_id)
+ return framework_helpers.FormatAbsoluteURL(
+ mr, urls.ADMIN_COMPONENTS, deleted=1, ts=int(time.time()))
+
+ def _GetUserViewAndFormattedTime(self, mr, user_id, timestamp):
+ formatted_time = (timestr.FormatAbsoluteDate(timestamp)
+ if timestamp else None)
+ user = self.services.user.GetUser(mr.cnxn, user_id) if user_id else None
+ user_view = None
+ if user:
+ user_view = framework_views.UserView(
+ user_id, user.email, user.obscure_email)
+ viewing_self = mr.auth.user_id == user_id
+ # Do not obscure email if current user is a site admin. Do not obscure
+ # email if current user is the same as the creator. For all other
+ # cases do whatever obscure_email setting for the user is.
+ email_obscured = (not(mr.auth.user_pb.is_site_admin or viewing_self)
+ and user_view.obscure_email)
+ if not email_obscured:
+ user_view.RevealEmail()
+
+ return user_view, formatted_time
+
+ def _ProcessEditComponent(self, mr, post_data, config, component_def):
+ """The user wants to edit this component definition."""
+ parsed = component_helpers.ParseComponentRequest(
+ mr, post_data, self.services.user)
+
+ if not tracker_constants.COMPONENT_NAME_RE.match(parsed.leaf_name):
+ mr.errors.leaf_name = 'Invalid component name'
+
+ original_path = component_def.path
+ if mr.component_path and '>' in mr.component_path:
+ parent_path = mr.component_path[:mr.component_path.rindex('>')]
+ new_path = '%s>%s' % (parent_path, parsed.leaf_name)
+ else:
+ new_path = parsed.leaf_name
+
+ conflict = tracker_bizobj.FindComponentDef(new_path, config)
+ if conflict and conflict.component_id != component_def.component_id:
+ mr.errors.leaf_name = 'That name is already in use.'
+
+ creator, created = self._GetUserViewAndFormattedTime(
+ mr, component_def.creator_id, component_def.created)
+ modifier, modified = self._GetUserViewAndFormattedTime(
+ mr, component_def.modifier_id, component_def.modified)
+
+ if mr.errors.AnyErrors():
+ self.PleaseCorrect(
+ mr, initial_leaf_name=parsed.leaf_name,
+ initial_docstring=parsed.docstring,
+ initial_deprecated=ezt.boolean(parsed.deprecated),
+ initial_admins=parsed.admin_usernames,
+ initial_cc=parsed.cc_usernames,
+ created=created,
+ creator=creator,
+ modified=modified,
+ modifier=modifier,
+ )
+ return None
+
+ new_modified = int(time.time())
+ new_modifier_id = self.services.user.LookupUserID(
+ mr.cnxn, mr.auth.email, autocreate=False)
+ self.services.config.UpdateComponentDef(
+ mr.cnxn, mr.project_id, component_def.component_id,
+ path=new_path, docstring=parsed.docstring, deprecated=parsed.deprecated,
+ admin_ids=parsed.admin_ids, cc_ids=parsed.cc_ids, modified=new_modified,
+ modifier_id=new_modifier_id)
+
+ update_rule = False
+ if new_path != original_path:
+ update_rule = True
+ # If the name changed then update all of its subcomponents as well.
+ subcomponent_ids = tracker_bizobj.FindMatchingComponentIDs(
+ original_path, config, exact=False)
+ for subcomponent_id in subcomponent_ids:
+ if subcomponent_id == component_def.component_id:
+ continue
+ subcomponent_def = tracker_bizobj.FindComponentDefByID(
+ subcomponent_id, config)
+ subcomponent_new_path = subcomponent_def.path.replace(
+ original_path, new_path, 1)
+ self.services.config.UpdateComponentDef(
+ mr.cnxn, mr.project_id, subcomponent_def.component_id,
+ path=subcomponent_new_path)
+
+ if set(parsed.cc_ids) != set(component_def.cc_ids):
+ update_rule = True
+ if update_rule:
+ filterrules_helpers.RecomputeAllDerivedFields(
+ mr.cnxn, self.services, mr.project, config)
+
+ return framework_helpers.FormatAbsoluteURL(
+ mr, urls.COMPONENT_DETAIL,
+ component=new_path, saved=1, ts=int(time.time()))
diff --git a/appengine/monorail/tracker/field_helpers.py b/appengine/monorail/tracker/field_helpers.py
new file mode 100644
index 0000000..452f329
--- /dev/null
+++ b/appengine/monorail/tracker/field_helpers.py
@@ -0,0 +1,225 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions for custom field sevlets."""
+
+import collections
+import logging
+import re
+
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import monorailrequest
+from framework import permissions
+from proto import tracker_pb2
+from services import config_svc
+from services import user_svc
+from tracker import tracker_bizobj
+
+
+INVALID_USER_ID = -1
+
+ParsedFieldDef = collections.namedtuple(
+ 'ParsedFieldDef',
+ 'field_name, field_type_str, min_value, max_value, regex, '
+ 'needs_member, needs_perm, grants_perm, notify_on, is_required, '
+ 'is_multivalued, field_docstring, choices_text, applicable_type, '
+ 'applicable_predicate, revised_labels')
+
+
+def ParseFieldDefRequest(post_data, config):
+ """Parse the user's HTML form data to update a field definition."""
+ field_name = post_data.get('name', '')
+ field_type_str = post_data.get('field_type')
+ # TODO(jrobbins): once a min or max is set, it cannot be completely removed.
+ min_value_str = post_data.get('min_value')
+ try:
+ min_value = int(min_value_str)
+ except (ValueError, TypeError):
+ min_value = None
+ max_value_str = post_data.get('max_value')
+ try:
+ max_value = int(max_value_str)
+ except (ValueError, TypeError):
+ max_value = None
+ regex = post_data.get('regex')
+ needs_member = 'needs_member' in post_data
+ needs_perm = post_data.get('needs_perm', '').strip()
+ grants_perm = post_data.get('grants_perm', '').strip()
+ notify_on_str = post_data.get('notify_on')
+ if notify_on_str in config_svc.NOTIFY_ON_ENUM:
+ notify_on = config_svc.NOTIFY_ON_ENUM.index(notify_on_str)
+ else:
+ notify_on = 0
+ is_required = 'is_required' in post_data
+ is_multivalued = 'is_multivalued' in post_data
+ field_docstring = post_data.get('docstring', '')
+ choices_text = post_data.get('choices', '')
+ applicable_type = post_data.get('applicable_type', '')
+ applicable_predicate = '' # TODO(jrobbins): placeholder for future feature
+ revised_labels = _ParseChoicesIntoWellKnownLabels(
+ choices_text, field_name, config)
+
+ return ParsedFieldDef(
+ field_name, field_type_str, min_value, max_value, regex,
+ needs_member, needs_perm, grants_perm, notify_on, is_required,
+ is_multivalued, field_docstring, choices_text, applicable_type,
+ applicable_predicate, revised_labels)
+
+
+def _ParseChoicesIntoWellKnownLabels(choices_text, field_name, config):
+ """Parse a field's possible choices and integrate them into the config.
+
+ Args:
+ choices_text: string with one label and optional docstring per line.
+ field_name: string name of the field definition being edited.
+ config: ProjectIssueConfig PB of the current project.
+
+ Returns:
+ A revised list of labels that can be used to update the config.
+ """
+ matches = framework_constants.IDENTIFIER_DOCSTRING_RE.findall(choices_text)
+ new_labels = [
+ ('%s-%s' % (field_name, label), choice_docstring.strip(), False)
+ for label, choice_docstring in matches]
+ kept_labels = [
+ (wkl.label, wkl.label_docstring, False)
+ for wkl in config.well_known_labels
+ if not tracker_bizobj.LabelIsMaskedByField(
+ wkl.label, [field_name.lower()])]
+ revised_labels = kept_labels + new_labels
+ return revised_labels
+
+
+def ShiftEnumFieldsIntoLabels(
+ labels, labels_remove, field_val_strs, field_val_strs_remove, config):
+ """Look at the custom field values and treat enum fields as labels.
+
+ Args:
+ labels: list of labels to add/set on the issue.
+ labels_remove: list of labels to remove from the issue.
+ field_val_strs: {field_id: [val_str, ...]} of custom fields to add/set.
+ field_val_strs_remove: {field_id: [val_str, ...]} of custom fields to
+ remove.
+ config: ProjectIssueConfig PB including custom field definitions.
+
+ SIDE-EFFECT: the labels and labels_remove lists will be extended with
+ key-value labels corresponding to the enum field values. Those field
+ entries will be removed from field_vals and field_vals_remove.
+ """
+ for fd in config.field_defs:
+ if fd.field_type != tracker_pb2.FieldTypes.ENUM_TYPE:
+ continue
+
+ if fd.field_id in field_val_strs:
+ labels.extend(
+ '%s-%s' % (fd.field_name, val)
+ for val in field_val_strs[fd.field_id]
+ if val and val != '--')
+ del field_val_strs[fd.field_id]
+
+ if fd.field_id in field_val_strs_remove:
+ labels_remove.extend(
+ '%s-%s' % (fd.field_name, val)
+ for val in field_val_strs_remove[fd.field_id]
+ if val and val != '--')
+ del field_val_strs_remove[fd.field_id]
+
+
+def _ParseOneFieldValue(cnxn, user_service, fd, val_str):
+ """Make one FieldValue PB from the given user-supplied string."""
+ if fd.field_type == tracker_pb2.FieldTypes.INT_TYPE:
+ try:
+ return tracker_bizobj.MakeFieldValue(
+ fd.field_id, int(val_str), None, None, False)
+ except ValueError:
+ return None # TODO(jrobbins): should bounce
+
+ elif fd.field_type == tracker_pb2.FieldTypes.STR_TYPE:
+ return tracker_bizobj.MakeFieldValue(
+ fd.field_id, None, val_str, None, False)
+
+ elif fd.field_type == tracker_pb2.FieldTypes.USER_TYPE:
+ if val_str:
+ try:
+ user_id = user_service.LookupUserID(cnxn, val_str, autocreate=False)
+ except user_svc.NoSuchUserException:
+ # Set to invalid user ID to display error during the validation step.
+ user_id = INVALID_USER_ID
+ return tracker_bizobj.MakeFieldValue(
+ fd.field_id, None, None, user_id, False)
+ else:
+ return None
+
+ else:
+ logging.error('Cant parse field with unexpected type %r', fd.field_type)
+ return None
+
+
+def ParseFieldValues(cnxn, user_service, field_val_strs, config):
+ """Return a list of FieldValue PBs based on the the given dict of strings."""
+ field_values = []
+ for fd in config.field_defs:
+ if fd.field_id not in field_val_strs:
+ continue
+ for val_str in field_val_strs[fd.field_id]:
+ fv = _ParseOneFieldValue(cnxn, user_service, fd, val_str)
+ if fv:
+ field_values.append(fv)
+
+ return field_values
+
+
+def _ValidateOneCustomField(mr, services, field_def, field_val):
+ """Validate one custom field value and return an error string or None."""
+ if field_def.field_type == tracker_pb2.FieldTypes.INT_TYPE:
+ if (field_def.min_value is not None and
+ field_val.int_value < field_def.min_value):
+ return 'Value must be >= %d' % field_def.min_value
+ if (field_def.max_value is not None and
+ field_val.int_value > field_def.max_value):
+ return 'Value must be <= %d' % field_def.max_value
+
+ elif field_def.field_type == tracker_pb2.FieldTypes.STR_TYPE:
+ if field_def.regex and field_val.str_value:
+ try:
+ regex = re.compile(field_def.regex)
+ if not regex.match(field_val.str_value):
+ return 'Value must match regular expression: %s' % field_def.regex
+ except re.error:
+ logging.info('Failed to process regex %r with value %r. Allowing.',
+ field_def.regex, field_val.str_value)
+ return None
+
+ elif field_def.field_type == tracker_pb2.FieldTypes.USER_TYPE:
+ if field_val.user_id == INVALID_USER_ID:
+ return 'User not found'
+ if field_def.needs_member:
+ auth = monorailrequest.AuthData.FromUserID(
+ mr.cnxn, field_val.user_id, services)
+ user_value_in_project = framework_bizobj.UserIsInProject(
+ mr.project, auth.effective_ids)
+ if not user_value_in_project:
+ return 'User must be a member of the project'
+ if field_def.needs_perm:
+ field_val_user = services.user.GetUser(mr.cnxn, field_val.user_id)
+ user_perms = permissions.GetPermissions(
+ field_val_user, auth.effective_ids, mr.project)
+ has_perm = user_perms.CanUsePerm(
+ field_def.needs_perm, auth.effective_ids, mr.project, [])
+ if not has_perm:
+ return 'User must have permission "%s"' % field_def.needs_perm
+
+ return None
+
+
+def ValidateCustomFields(mr, services, field_values, config, errors):
+ """Validate each of the given fields and report problems in errors object."""
+ for fv in field_values:
+ fd = tracker_bizobj.FindFieldDefByID(fv.field_id, config)
+ if fd:
+ err_msg = _ValidateOneCustomField(mr, services, fd, fv)
+ if err_msg:
+ errors.SetCustomFieldError(fv.field_id, err_msg)
diff --git a/appengine/monorail/tracker/fieldcreate.py b/appengine/monorail/tracker/fieldcreate.py
new file mode 100644
index 0000000..ce54353
--- /dev/null
+++ b/appengine/monorail/tracker/fieldcreate.py
@@ -0,0 +1,185 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A servlet for project owners to create a new field def."""
+
+import logging
+import re
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import jsonfeed
+from framework import permissions
+from framework import servlet
+from framework import urls
+from tracker import field_helpers
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
+class FieldCreate(servlet.Servlet):
+ """Servlet allowing project owners to create a custom field."""
+
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS
+ _PAGE_TEMPLATE = 'tracker/field-create-page.ezt'
+
+ def AssertBasePermission(self, mr):
+ """Check whether the user has any permission to visit this page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ """
+ super(FieldCreate, self).AssertBasePermission(mr)
+ if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
+ raise permissions.PermissionException(
+ 'You are not allowed to administer this project')
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ well_known_issue_types = tracker_helpers.FilterIssueTypes(config)
+
+ return {
+ 'admin_tab_mode': servlet.Servlet.PROCESS_TAB_LABELS,
+ 'initial_field_name': '',
+ 'initial_field_docstring': '',
+ 'initial_is_required': ezt.boolean(False),
+ 'initial_is_multivalued': ezt.boolean(False),
+ 'initial_choices': '',
+ 'initial_admins': '',
+ 'initial_type': 'enum_type',
+ 'initial_applicable_type': '', # That means any issue type
+ 'initial_applicable_predicate': '',
+ 'initial_needs_member': ezt.boolean(False),
+ 'initial_needs_perm': '',
+ 'initial_grants_perm': '',
+ 'initial_notify_on': 0,
+ 'well_known_issue_types': well_known_issue_types,
+ }
+
+ def ProcessFormData(self, mr, post_data):
+ """Validate and store the contents of the issues tracker admin page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to, or None if response was already sent.
+ """
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ parsed = field_helpers.ParseFieldDefRequest(post_data, config)
+
+ if not tracker_constants.FIELD_NAME_RE.match(parsed.field_name):
+ mr.errors.field_name = 'Invalid field name'
+
+ field_name_error_msg = FieldNameErrorMessage(parsed.field_name, config)
+ if field_name_error_msg:
+ mr.errors.field_name = field_name_error_msg
+
+ if (parsed.min_value is not None and parsed.max_value is not None and
+ parsed.min_value > parsed.max_value):
+ mr.errors.min_value = 'Minimum value must be less than maximum.'
+
+ if parsed.regex:
+ try:
+ re.compile(parsed.regex)
+ except re.error:
+ mr.errors.regex = 'Invalid regular expression.'
+
+ admin_ids, admin_str = tracker_helpers.ParseAdminUsers(
+ mr.cnxn, post_data['admin_names'], self.services.user)
+
+ if mr.errors.AnyErrors():
+ self.PleaseCorrect(
+ mr, initial_field_name=parsed.field_name,
+ initial_type=parsed.field_type_str,
+ initial_field_docstring=parsed.field_docstring,
+ initial_applicable_type=parsed.applicable_type,
+ initial_applicable_predicate=parsed.applicable_predicate,
+ initial_needs_member=ezt.boolean(parsed.needs_member),
+ initial_needs_perm=parsed.needs_perm,
+ initial_is_required=ezt.boolean(parsed.is_required),
+ initial_is_multivalued=ezt.boolean(parsed.is_multivalued),
+ initial_grants_perm=parsed.grants_perm,
+ initial_notify_on=parsed.notify_on,
+ initial_choices=parsed.choices_text,
+ initial_admins=admin_str)
+ return
+
+ self.services.config.CreateFieldDef(
+ mr.cnxn, mr.project_id, parsed.field_name, parsed.field_type_str,
+ parsed.applicable_type, parsed.applicable_predicate,
+ parsed.is_required, parsed.is_multivalued,
+ parsed.min_value, parsed.max_value, parsed.regex, parsed.needs_member,
+ parsed.needs_perm, parsed.grants_perm, parsed.notify_on,
+ parsed.field_docstring, admin_ids)
+ if parsed.field_type_str == 'enum_type':
+ self.services.config.UpdateConfig(
+ mr.cnxn, mr.project, well_known_labels=parsed.revised_labels)
+
+ return framework_helpers.FormatAbsoluteURL(
+ mr, urls.ADMIN_LABELS, saved=1, ts=int(time.time()))
+
+
+class CheckFieldNameJSON(jsonfeed.JsonFeed):
+ """JSON data for handling name checks when creating a field."""
+
+ def HandleRequest(self, mr):
+ """Provide the UI with info about the availability of the field name.
+
+ Args:
+ mr: common information parsed from the HTTP request.
+
+ Returns:
+ Results dictionary in JSON format.
+ """
+ field_name = mr.GetParam('field')
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ choices = ExistingEnumChoices(field_name, config)
+ choices_dicts = [dict(name=choice.name_padded, doc=choice.docstring)
+ for choice in choices]
+ message = FieldNameErrorMessage(field_name, config)
+
+ return {
+ 'error_message': message,
+ 'choices': choices_dicts,
+ }
+
+
+def FieldNameErrorMessage(field_name, config):
+ """Return an error message for the given field name, or None."""
+ field_name_lower = field_name.lower()
+ if field_name_lower in tracker_constants.RESERVED_PREFIXES:
+ return 'That name is reserved.'
+
+ for fd in config.field_defs:
+ fn_lower = fd.field_name.lower()
+ if field_name_lower == fn_lower:
+ return 'That name is already in use.'
+ if field_name_lower.startswith(fn_lower + '-'):
+ return 'An existing field name is a prefix of that name.'
+ if fn_lower.startswith(field_name_lower + '-'):
+ return 'That name is a prefix of an existing field name.'
+
+ return None
+
+
+def ExistingEnumChoices(field_name, config):
+ """Return a list of existing label choices for the given prefix."""
+ # If there are existing labels with that prefix, then it must be enum.
+ # The existing labels will be treated as field values.
+ choices = tracker_helpers.LabelsMaskedByFields(
+ config, [field_name], trim_prefix=True)
+ return choices
diff --git a/appengine/monorail/tracker/fielddetail.py b/appengine/monorail/tracker/fielddetail.py
new file mode 100644
index 0000000..e3e6e5d
--- /dev/null
+++ b/appengine/monorail/tracker/fielddetail.py
@@ -0,0 +1,154 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A servlet for project and component owners to view and edit field defs."""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import framework_views
+from framework import permissions
+from framework import servlet
+from framework import urls
+from proto import tracker_pb2
+from tracker import field_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
class FieldDetail(servlet.Servlet):
  """Servlet allowing project owners to view and edit a custom field."""

  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS
  _PAGE_TEMPLATE = 'tracker/field-detail-page.ezt'

  def _GetFieldDef(self, mr):
    """Get the config and field definition to be viewed or edited.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Tuple of (project config, field definition).  Aborts with a 404
      response if the named field does not exist in this project.
    """
    # TODO(jrobbins): since so many requests get the config object, and
    # it is usually cached in RAM, just always get it and include it
    # in the MonorailRequest, mr.
    config = self.services.config.GetProjectConfig(
        mr.cnxn, mr.project_id)
    field_def = tracker_bizobj.FindFieldDef(mr.field_name, config)
    if not field_def:
      self.abort(404, 'custom field not found')
    return config, field_def

  def AssertBasePermission(self, mr):
    """Check whether the user has any permission to visit this page.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      permissions.PermissionException: the user may not view this field.
    """
    super(FieldDetail, self).AssertBasePermission(mr)
    _config, field_def = self._GetFieldDef(mr)

    allow_view = permissions.CanViewFieldDef(
        mr.auth.effective_ids, mr.perms, mr.project, field_def)
    if not allow_view:
      raise permissions.PermissionException(
          'User is not allowed to view this field definition')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    config, field_def = self._GetFieldDef(mr)
    user_views = framework_views.MakeAllUserViews(
        mr.cnxn, self.services.user, field_def.admin_ids)
    field_def_view = tracker_views.FieldDefView(
        field_def, config, user_views=user_views)

    well_known_issue_types = tracker_helpers.FilterIssueTypes(config)

    allow_edit = permissions.CanEditFieldDef(
        mr.auth.effective_ids, mr.perms, mr.project, field_def)

    # Right now we do not allow renaming of enum fields.
    uneditable_name = field_def.field_type == tracker_pb2.FieldTypes.ENUM_TYPE

    # Pre-populate the admins text field with a sorted email list.
    initial_admins = ', '.join(sorted([
        uv.email for uv in field_def_view.admins]))

    return {
        'admin_tab_mode': servlet.Servlet.PROCESS_TAB_LABELS,
        'field_def': field_def_view,
        'allow_edit': ezt.boolean(allow_edit),
        'uneditable_name': ezt.boolean(uneditable_name),
        'initial_admins': initial_admins,
        'initial_applicable_type': field_def.applicable_type,
        'initial_applicable_predicate': field_def.applicable_predicate,
        'well_known_issue_types': well_known_issue_types,
        }

  def ProcessFormData(self, mr, post_data):
    """Validate and store the contents of the issues tracker admin page.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.

    Raises:
      permissions.PermissionException: the user may not edit this field.
    """
    config, field_def = self._GetFieldDef(mr)
    allow_edit = permissions.CanEditFieldDef(
        mr.auth.effective_ids, mr.perms, mr.project, field_def)
    if not allow_edit:
      raise permissions.PermissionException(
          'User is not allowed to delete this field')

    if 'deletefield' in post_data:
      # Deletion: soft-delete the field and return to the labels admin page.
      self._ProcessDeleteField(mr, field_def)
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.ADMIN_LABELS, deleted=1, ts=int(time.time()))

    else:
      # Edit: update the definition and redisplay this detail page.
      self._ProcessEditField(mr, post_data, config, field_def)
      return framework_helpers.FormatAbsoluteURL(
          mr, urls.FIELD_DETAIL, field=field_def.field_name,
          saved=1, ts=int(time.time()))

  def _ProcessDeleteField(self, mr, field_def):
    """The user wants to delete the specified custom field definition."""
    self.services.config.SoftDeleteFieldDef(
        mr.cnxn, mr.project_id, field_def.field_id)

    # TODO(jrobbins): add logic to reaper cron task to look for
    # soft deleted field definitions that have no issues with
    # any value and hard delete them.

  def _ProcessEditField(self, mr, post_data, config, field_def):
    """The user wants to edit this field definition."""
    # TODO(jrobbins): future feature: editable field names

    parsed = field_helpers.ParseFieldDefRequest(post_data, config)

    admin_ids, _admin_str = tracker_helpers.ParseAdminUsers(
        mr.cnxn, post_data['admin_names'], self.services.user)

    # TODO(jrobbins): bounce on validation errors

    self.services.config.UpdateFieldDef(
        mr.cnxn, mr.project_id, field_def.field_id,
        applicable_type=parsed.applicable_type,
        applicable_predicate=parsed.applicable_predicate,
        is_required=parsed.is_required,
        min_value=parsed.min_value, max_value=parsed.max_value,
        regex=parsed.regex, needs_member=parsed.needs_member,
        needs_perm=parsed.needs_perm, grants_perm=parsed.grants_perm,
        notify_on=parsed.notify_on, is_multivalued=parsed.is_multivalued,
        docstring=parsed.field_docstring, admin_ids=admin_ids)
    # Enum field values are stored as well-known labels, so an edit may
    # also revise the project's label list.
    self.services.config.UpdateConfig(
        mr.cnxn, mr.project, well_known_labels=parsed.revised_labels)
diff --git a/appengine/monorail/tracker/issue-blocking-change-notification-email.ezt b/appengine/monorail/tracker/issue-blocking-change-notification-email.ezt
new file mode 100644
index 0000000..9e45c69
--- /dev/null
+++ b/appengine/monorail/tracker/issue-blocking-change-notification-email.ezt
@@ -0,0 +1,7 @@
+Issue [issue.local_id]: [format "raw"][summary][end]
+[detail_url]
+
+[if-any is_blocking]This issue is now blocking issue [downstream_issue_ref].
+See [downstream_issue_url]
+[else]This issue is no longer blocking issue [downstream_issue_ref].
+See [downstream_issue_url][end]
diff --git a/appengine/monorail/tracker/issue-bulk-change-notification-email.ezt b/appengine/monorail/tracker/issue-bulk-change-notification-email.ezt
new file mode 100644
index 0000000..2051220
--- /dev/null
+++ b/appengine/monorail/tracker/issue-bulk-change-notification-email.ezt
@@ -0,0 +1,18 @@
+[if-any amendments]Updates:
+[amendments]
+[end]
+Comment[if-any commenter] by [commenter.display_name][end]:
+[if-any comment_text][format "raw"][comment_text][end][else](No comment was entered for this change.)[end]
+
+Affected issues:
+[for issues] issue [issues.local_id]: [format "raw"][issues.summary][end]
+ [format "raw"]http://[hostport][issues.detail_relative_url][end]
+
+[end]
+[is body_type "email"]
+--
+You received this message because you are listed in the owner
+or CC fields of these issues, or because you starred them.
+You may adjust your issue notification preferences at:
+http://[hostport]/hosting/settings
+[end]
diff --git a/appengine/monorail/tracker/issueadmin.py b/appengine/monorail/tracker/issueadmin.py
new file mode 100644
index 0000000..2fb367f
--- /dev/null
+++ b/appengine/monorail/tracker/issueadmin.py
@@ -0,0 +1,656 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlets for issue tracker configuration.
+
+These classes implement the Statuses, Labels and fields, Components, Rules, and
+Views subtabs under the Process tab. Unlike most servlet modules, this single
+file holds a base class and several related servlet classes.
+"""
+
+import collections
+import itertools
+import logging
+import time
+
+from features import filterrules_helpers
+from features import filterrules_views
+from features import savedqueries_helpers
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import monorailrequest
+from framework import permissions
+from framework import servlet
+from framework import urls
+from tracker import field_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
class IssueAdminBase(servlet.Servlet):
  """Base class for servlets allowing project owners to configure tracker."""

  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_PROCESS
  _PROCESS_SUBTAB = None  # specified in subclasses

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    project_config = self.services.config.GetProjectConfig(
        mr.cnxn, mr.project_id)
    return {
        'admin_tab_mode': self._PROCESS_SUBTAB,
        'config': tracker_views.ConfigView(mr, self.services, project_config),
        }

  def ProcessFormData(self, mr, post_data):
    """Validate and store the contents of the issues tracker admin page.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.
    """
    redirect_page = self.ProcessSubtabForm(post_data, mr)

    if mr.errors.AnyErrors():
      # Redisplay the form with error messages instead of redirecting.
      self.PleaseCorrect(mr)  # TODO(jrobbins): echo more user-entered text.
      return None

    return framework_helpers.FormatAbsoluteURL(
        mr, redirect_page, saved=1, ts=int(time.time()))
+
+
class AdminStatuses(IssueAdminBase):
  """Servlet allowing project owners to configure well-known statuses."""

  _PAGE_TEMPLATE = 'tracker/admin-statuses-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_STATUSES

  def ProcessSubtabForm(self, post_data, mr):
    """Process the status definition section of the admin page.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.

    Returns:
      The URL of the page to show after processing.
    """
    # Each parsed status becomes a tuple of (name with any leading '#'
    # stripped, docstring, means_open, whether the line started with '#').
    wks_open_text = post_data.get('predefinedopen', '')
    wks_open_matches = framework_constants.IDENTIFIER_DOCSTRING_RE.findall(
        wks_open_text)
    wks_open_tuples = [
        (status.lstrip('#'), docstring.strip(), True, status.startswith('#'))
        for status, docstring in wks_open_matches]

    wks_closed_text = post_data.get('predefinedclosed', '')
    wks_closed_matches = framework_constants.IDENTIFIER_DOCSTRING_RE.findall(
        wks_closed_text)
    wks_closed_tuples = [
        (status.lstrip('#'), docstring.strip(), False, status.startswith('#'))
        for status, docstring in wks_closed_matches]

    # Status names that should prompt the user about merging issues.
    statuses_offer_merge_text = post_data.get('statuses_offer_merge', '')
    statuses_offer_merge = framework_constants.IDENTIFIER_RE.findall(
        statuses_offer_merge_text)

    if not mr.errors.AnyErrors():
      self.services.config.UpdateConfig(
          mr.cnxn, mr.project, statuses_offer_merge=statuses_offer_merge,
          well_known_statuses=wks_open_tuples + wks_closed_tuples)

    # TODO(jrobbins): define a "strict" mode that affects only statuses.

    return urls.ADMIN_STATUSES
+
+
class AdminLabels(IssueAdminBase):
  """Servlet allowing project owners to edit labels and fields."""

  _PAGE_TEMPLATE = 'tracker/admin-labels-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_LABELS

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    page_data = super(AdminLabels, self).GatherPageData(mr)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    # Soft-deleted field definitions are excluded from the admin page.
    field_def_views = [
        tracker_views.FieldDefView(fd, config)
        # TODO(jrobbins): future field-level view restrictions.
        for fd in config.field_defs
        if not fd.is_deleted]
    page_data.update({
        'field_defs': field_def_views,
        })
    return page_data

  def ProcessSubtabForm(self, post_data, mr):
    """Process changes to labels and custom field definitions.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.

    Returns:
      The URL of the page to show after processing.
    """
    # Each parsed label becomes a tuple of (label with any leading '#'
    # stripped, docstring, whether the line started with '#').
    wkl_text = post_data.get('predefinedlabels', '')
    wkl_matches = framework_constants.IDENTIFIER_DOCSTRING_RE.findall(wkl_text)
    wkl_tuples = [
        (label.lstrip('#'), docstring.strip(), label.startswith('#'))
        for label, docstring in wkl_matches]

    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    field_names = [fd.field_name for fd in config.field_defs
                   if not fd.is_deleted]
    # Labels masked by custom fields are appended so that they are
    # preserved by the config update below.
    masked_labels = tracker_helpers.LabelsMaskedByFields(config, field_names)
    wkl_tuples.extend([
        (masked.name, masked.docstring, False) for masked in masked_labels])

    excl_prefix_text = post_data.get('excl_prefixes', '')
    excl_prefixes = framework_constants.IDENTIFIER_RE.findall(excl_prefix_text)

    if not mr.errors.AnyErrors():
      self.services.config.UpdateConfig(
          mr.cnxn, mr.project,
          well_known_labels=wkl_tuples, excl_label_prefixes=excl_prefixes)

    # TODO(jrobbins): define a "strict" mode that affects only labels.

    return urls.ADMIN_LABELS
+
+
class AdminTemplates(IssueAdminBase):
  """Servlet allowing project owners to configure templates."""

  _PAGE_TEMPLATE = 'tracker/admin-templates-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_TEMPLATES

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    page_data = super(AdminTemplates, self).GatherPageData(mr)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    field_views = [
        tracker_views.MakeFieldValueView(fd, config, [], [], [], {})
        # TODO(jrobbins): field-level view restrictions, display options
        for fd in config.field_defs
        if not fd.is_deleted]

    page_data.update({
        'fields': field_views,
        })
    return page_data

  def ProcessSubtabForm(self, post_data, mr):
    """Process changes to new issue templates.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.

    Returns:
      The URL of the page to show after processing.
    """
    templates = self._ParseAllTemplates(post_data, mr)

    default_template_id_for_developers = None
    default_template_id_for_users = None
    # Only users with EDIT_PROJECT may change the default template choices.
    if self.CheckPerm(mr, permissions.EDIT_PROJECT):
      default_template_id_for_developers, default_template_id_for_users = (
          self._ParseDefaultTemplateSelections(post_data, templates))

    if not mr.errors.AnyErrors():
      self.services.config.UpdateConfig(
          mr.cnxn, mr.project, templates=templates,
          default_template_for_developers=default_template_id_for_developers,
          default_template_for_users=default_template_id_for_users)

    # Preserve the currently selected template across the redirect.
    # (Fixed: removed a stray trailing semicolon on the assignment.)
    params = ''
    if post_data.get('current_template_index'):
      params = '?tindex=' + post_data['current_template_index']
    return urls.ADMIN_TEMPLATES + params

  def _ParseAllTemplates(self, post_data, mr):
    """Iterate over the post_data and parse all templates in it.

    Form fields are named with a trailing index, e.g. 'template_id_0';
    parsing stops at the first missing index.
    """
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    orig_templates = {tmpl.template_id: tmpl for tmpl in config.templates}

    templates = []
    for i in itertools.count():
      if ('template_id_%s' % i) not in post_data:
        break
      template_id = int(post_data['template_id_%s' % i])
      orig_template = orig_templates.get(template_id)
      new_template = self._ParseTemplate(
          post_data, mr, i, orig_template, config)
      if new_template:
        templates.append(new_template)

    return templates

  def _ParseTemplate(self, post_data, mr, i, orig_template, config):
    """Parse one issue template out of the form data.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.
      i: index of this template in the form field names.
      orig_template: existing template PB, or None for a new template.
      config: project issue tracker configuration.

    Returns:
      The parsed template PB, orig_template unchanged if the user may not
      edit it, or None if the template was deleted.
    """
    if not self._CanEditTemplate(mr, orig_template):
      return orig_template

    name = post_data['name_%s' % i]
    if name == tracker_constants.DELETED_TEMPLATE_NAME:
      return None

    members_only = False
    if ('members_only_%s' % i) in post_data:
      members_only = (
          post_data['members_only_%s' % i] == 'yes')
    summary = ''
    if ('summary_%s' % i) in post_data:
      summary = post_data['summary_%s' % i]
    summary_must_be_edited = False
    if ('summary_must_be_edited_%s' % i) in post_data:
      summary_must_be_edited = (
          post_data['summary_must_be_edited_%s' % i] == 'yes')
    content = ''
    if ('content_%s' % i) in post_data:
      content = post_data['content_%s' % i]
      # wrap="hard" has no effect on the content because we copy it to
      # a hidden form field before submission. So, server-side word wrap.
      content = framework_helpers.WordWrapSuperLongLines(content, max_cols=75)
    status = ''
    if ('status_%s' % i) in post_data:
      status = post_data['status_%s' % i]
    owner_id = 0
    if ('owner_%s' % i) in post_data:
      owner = post_data['owner_%s' % i]
      if owner:
        user_id = self.services.user.LookupUserID(mr.cnxn, owner)
        auth = monorailrequest.AuthData.FromUserID(
            mr.cnxn, user_id, self.services)
        # Only project members may be the default owner.
        if framework_bizobj.UserIsInProject(mr.project, auth.effective_ids):
          owner_id = user_id

    labels = post_data.getall('label_%s' % i)
    labels_remove = []

    field_val_strs = collections.defaultdict(list)
    for fd in config.field_defs:
      field_value_key = 'field_value_%d_%d' % (i, fd.field_id)
      if post_data.get(field_value_key):
        field_val_strs[fd.field_id].append(post_data[field_value_key])

    # Enum field values are stored as labels, so move them over.
    field_helpers.ShiftEnumFieldsIntoLabels(
        labels, labels_remove, field_val_strs, {}, config)
    field_values = field_helpers.ParseFieldValues(
        mr.cnxn, self.services.user, field_val_strs, config)
    for fv in field_values:
      logging.info('field_value is %r: %r',
                   fv.field_id, tracker_bizobj.GetFieldValue(fv, {}))

    admin_ids = []
    if ('admin_names_%s' % i) in post_data:
      admin_ids, _admin_str = tracker_helpers.ParseAdminUsers(
          mr.cnxn, post_data['admin_names_%s' % i], self.services.user)

    component_ids = []
    if ('components_%s' % i) in post_data:
      component_paths = []
      # De-dupe the user-entered component paths while keeping their order.
      for component_path in post_data['components_%s' % i].split(','):
        if component_path.strip() not in component_paths:
          component_paths.append(component_path.strip())
      component_ids = tracker_helpers.LookupComponentIDs(
          component_paths, config, mr.errors)

    owner_defaults_to_member = False
    if ('owner_defaults_to_member_%s' % i) in post_data:
      owner_defaults_to_member = (
          post_data['owner_defaults_to_member_%s' % i] == 'yes')

    component_required = False
    if ('component_required_%s' % i) in post_data:
      component_required = post_data['component_required_%s' % i] == 'yes'

    template = tracker_bizobj.MakeIssueTemplate(
        name, summary, status, owner_id,
        content, labels, field_values, admin_ids, component_ids,
        summary_must_be_edited=summary_must_be_edited,
        owner_defaults_to_member=owner_defaults_to_member,
        component_required=component_required,
        members_only=members_only)
    template_id = int(post_data['template_id_%s' % i])
    if template_id:  # new templates have ID 0, so leave that None in PB.
      template.template_id = template_id
    logging.info('template is %r', template)

    return template

  def _CanEditTemplate(self, mr, template):
    """Return True if the user is allowed to edit this template."""
    if self.CheckPerm(mr, permissions.EDIT_PROJECT):
      return True

    # Template admins may edit that template even without EDIT_PROJECT.
    if template and not mr.auth.effective_ids.isdisjoint(template.admin_ids):
      return True

    return False

  def _ParseDefaultTemplateSelections(self, post_data, templates):
    """Parse the input for the default templates to offer users.

    Returns:
      Tuple of (template ID for developers, template ID for users), either
      of which may be None if no valid selection was made.
    """
    def GetSelectedTemplateID(name):
      """Find the ID of the template specified in post_data[name]."""
      if name not in post_data:
        return None
      selected_template_name = post_data[name]
      for template in templates:
        if selected_template_name == template.name:
          return template.template_id

      logging.error('User somehow selected an invalid template: %r',
                    selected_template_name)
      return None

    return (GetSelectedTemplateID('default_template_for_developers'),
            GetSelectedTemplateID('default_template_for_users'))
+
+
class AdminComponents(IssueAdminBase):
  """Servlet allowing project owners to view the list of components."""

  _PAGE_TEMPLATE = 'tracker/admin-components-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_COMPONENTS

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    page_data = super(AdminComponents, self).GatherPageData(mr)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    users_by_id = framework_views.MakeAllUserViews(
        mr.cnxn, self.services.user,
        *[list(cd.admin_ids) + list(cd.cc_ids)
          for cd in config.component_defs])
    framework_views.RevealAllEmailsToMembers(mr, users_by_id)
    component_def_views = [
        tracker_views.ComponentDefView(cd, users_by_id)
        # TODO(jrobbins): future component-level view restrictions.
        for cd in config.component_defs]
    # Tag components where the signed-in user is an admin or CC'd, so
    # the page can style those rows.
    for cd in component_def_views:
      if mr.auth.email in [user.email for user in cd.admins]:
        cd.classes += 'myadmin '
      if mr.auth.email in [user.email for user in cd.cc]:
        cd.classes += 'mycc '

    page_data.update({
        'component_defs': component_def_views,
        # Comma-joined component paths that failed in a previous attempt.
        'failed_perm': mr.GetParam('failed_perm'),
        'failed_subcomp': mr.GetParam('failed_subcomp'),
        'failed_templ': mr.GetParam('failed_templ'),
        })
    return page_data

  def _GetComponentDefs(self, _mr, post_data, config):
    """Get the component definitions named in the delete request."""
    component_defs = []
    component_paths = post_data.get('delete_components').split(',')
    for component_path in component_paths:
      component_def = tracker_bizobj.FindComponentDef(component_path, config)
      component_defs.append(component_def)
    return component_defs

  def _ProcessDeleteComponent(self, mr, component_def):
    """Delete the specified component and its references."""
    self.services.issue.DeleteComponentReferences(
        mr.cnxn, component_def.component_id)
    self.services.config.DeleteComponentDef(
        mr.cnxn, mr.project_id, component_def.component_id)

  def ProcessFormData(self, mr, post_data):
    """Processes a POST command to delete components.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to, or None if response was already sent.
    """
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    component_defs = self._GetComponentDefs(mr, post_data, config)
    # Reverse the component_defs so that we start deleting from subcomponents.
    component_defs.reverse()

    # Collect errors.
    perm_errors = []
    subcomponents_errors = []
    templates_errors = []
    # Collect successes.
    deleted_components = []

    for component_def in component_defs:
      allow_edit = permissions.CanEditComponentDef(
          mr.auth.effective_ids, mr.perms, mr.project, component_def, config)
      if not allow_edit:
        perm_errors.append(component_def.path)

      # A component with remaining subcomponents or templates cannot be
      # deleted; its path is reported back to the user instead.
      subcomponents = tracker_bizobj.FindDescendantComponents(
          config, component_def)
      if subcomponents:
        subcomponents_errors.append(component_def.path)

      templates = self.services.config.TemplatesWithComponent(
          mr.cnxn, component_def.component_id, config)
      if templates:
        templates_errors.append(component_def.path)

      allow_delete = allow_edit and not subcomponents and not templates
      if allow_delete:
        self._ProcessDeleteComponent(mr, component_def)
        deleted_components.append(component_def.path)
        # Refresh project config after the component deletion.
        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)

    return framework_helpers.FormatAbsoluteURL(
        mr, urls.ADMIN_COMPONENTS, ts=int(time.time()),
        failed_perm=','.join(perm_errors),
        failed_subcomp=','.join(subcomponents_errors),
        failed_templ=','.join(templates_errors),
        deleted=','.join(deleted_components))
+
+
class AdminViews(IssueAdminBase):
  """Servlet for project owners to set default columns, axes, and sorting."""

  _PAGE_TEMPLATE = 'tracker/admin-views-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_VIEWS

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    page_data = super(AdminViews, self).GatherPageData(mr)
    with self.profiler.Phase('getting canned queries'):
      canned_queries = self.services.features.GetCannedQueriesByProjectID(
          mr.cnxn, mr.project_id)

    page_data.update({
        # Form indexes for any additional queries the user might add,
        # numbered after the existing ones up to the allowed maximum.
        'new_query_indexes': range(
            len(canned_queries) + 1, savedqueries_helpers.MAX_QUERIES + 1),
        'issue_notify': mr.project.issue_notify_address,
        'max_queries': savedqueries_helpers.MAX_QUERIES,
        })
    return page_data

  def ProcessSubtabForm(self, post_data, mr):
    """Process the Views subtab.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.

    Returns:
      The URL of the page to show after processing.
    """
    # Parse both the already-saved queries and any newly added ones.
    existing_queries = savedqueries_helpers.ParseSavedQueries(
        mr.cnxn, post_data, self.services.project)
    added_queries = savedqueries_helpers.ParseSavedQueries(
        mr.cnxn, post_data, self.services.project, prefix='new_')
    canned_queries = existing_queries + added_queries

    list_prefs = _ParseListPreferences(post_data)

    if not mr.errors.AnyErrors():
      self.services.config.UpdateConfig(
          mr.cnxn, mr.project, list_prefs=list_prefs)
      self.services.features.UpdateCannedQueries(
          mr.cnxn, mr.project_id, canned_queries)

    return urls.ADMIN_VIEWS
+
+
def _ParseListPreferences(post_data):
  """Parse the part of a project admin form about artifact list preferences.

  Args:
    post_data: HTML form data from the request.

  Returns:
    Tuple of (default column spec, default sort spec, default x-axis
    attribute, default y-axis attribute), each a string.
  """
  default_col_spec = post_data.get('default_col_spec', '')
  # Don't allow empty column spec.
  if not default_col_spec:
    default_col_spec = tracker_constants.DEFAULT_COL_SPEC
  col_spec_words = monorailrequest.ParseColSpec(default_col_spec)
  col_spec = ' '.join(col_spec_words)

  default_sort_spec = post_data.get('default_sort_spec', '')
  sort_spec_words = monorailrequest.ParseColSpec(default_sort_spec)
  sort_spec = ' '.join(sort_spec_words)

  # The x- and y-axis defaults are single attributes: keep only the
  # first parsed word, or '' when nothing usable was entered.
  x_attr = _FirstSpecWord(post_data.get('default_x_attr', ''))
  y_attr = _FirstSpecWord(post_data.get('default_y_attr', ''))

  return col_spec, sort_spec, x_attr, y_attr


def _FirstSpecWord(spec_str):
  """Return the first parsed word of spec_str, or '' if there is none."""
  words = monorailrequest.ParseColSpec(spec_str)
  return words[0] if words else ''
+
+
class AdminRules(IssueAdminBase):
  """Servlet allowing project owners to configure filter rules."""

  _PAGE_TEMPLATE = 'tracker/admin-rules-page.ezt'
  _PROCESS_SUBTAB = servlet.Servlet.PROCESS_TAB_RULES

  def AssertBasePermission(self, mr):
    """Check whether the user has any permission to visit this page.

    Args:
      mr: commonly used info parsed from the request.

    Raises:
      permissions.PermissionException: the user may not administer rules.
    """
    super(AdminRules, self).AssertBasePermission(mr)
    if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
      raise permissions.PermissionException(
          'User is not allowed to administer this project')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    page_data = super(AdminRules, self).GatherPageData(mr)
    rules = self.services.features.GetFilterRules(
        mr.cnxn, mr.project_id)
    users_by_id = framework_views.MakeAllUserViews(
        mr.cnxn, self.services.user,
        [rule.default_owner_id for rule in rules],
        *[rule.add_cc_ids for rule in rules])
    framework_views.RevealAllEmailsToMembers(mr, users_by_id)
    rule_views = [filterrules_views.RuleView(rule, users_by_id)
                  for rule in rules]

    for idx, rule_view in enumerate(rule_views):
      rule_view.idx = idx + 1  # EZT has no loop index, so we set idx.

    page_data.update({
        'rules': rule_views,
        'new_rule_indexes': (
            range(len(rules) + 1, filterrules_helpers.MAX_RULES + 1)),
        'max_rules': filterrules_helpers.MAX_RULES,
        })
    return page_data

  def ProcessSubtabForm(self, post_data, mr):
    """Process the Rules subtab.

    Args:
      post_data: HTML form data for the HTTP request being processed.
      mr: commonly used info parsed from the request.

    Returns:
      The URL of the page to show after processing.
    """
    old_rules = self.services.features.GetFilterRules(mr.cnxn, mr.project_id)
    rules = filterrules_helpers.ParseRules(
        mr.cnxn, post_data, self.services.user, mr.errors)
    new_rules = filterrules_helpers.ParseRules(
        mr.cnxn, post_data, self.services.user, mr.errors, prefix='new_')
    rules.extend(new_rules)

    if not mr.errors.AnyErrors():
      # Fixed: the return value of UpdateFilterRules was previously bound
      # to a local that was never used (or immediately overwritten below).
      self.services.features.UpdateFilterRules(
          mr.cnxn, mr.project_id, rules)

      if old_rules != rules:
        logging.info('recomputing derived fields')
        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
        filterrules_helpers.RecomputeAllDerivedFields(
            mr.cnxn, self.services, mr.project, config)

    return urls.ADMIN_RULES
diff --git a/appengine/monorail/tracker/issueadvsearch.py b/appengine/monorail/tracker/issueadvsearch.py
new file mode 100644
index 0000000..c4880bb
--- /dev/null
+++ b/appengine/monorail/tracker/issueadvsearch.py
@@ -0,0 +1,99 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement the advanced search feature page.
+
+The advanced search page simply displays an HTML page with a form.
+The form handler converts the widget-based query into a googley query
+string and redirects the user to the issue list servlet.
+"""
+
+import logging
+import re
+
+from framework import framework_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+
# Pattern for search values that can be words, labels,
# component paths, or email addresses: alphanumerics plus
# the characters '-', '.', '_', '>', and '@'.
VALUE_RE = re.compile(r'[-a-zA-Z0-9._>@]+')
+
+
class IssueAdvancedSearch(servlet.Servlet):
  """IssueAdvancedSearch shows a form to enter an advanced search."""

  _PAGE_TEMPLATE = 'tracker/issue-advsearch-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES

  # This form *only* redirects to a GET request, and permissions are checked
  # in that handler.
  CHECK_SECURITY_TOKEN = False

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    # TODO(jrobbins): Allow deep-linking into this page.
    page_perms = self.MakePagePerms(mr, None, permissions.CREATE_ISSUE)
    return {
        'issue_tab_mode': 'issueAdvSearch',
        'page_perms': page_perms,
        }

  def ProcessFormData(self, mr, post_data):
    """Process a posted advanced query form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: HTML form data from the request.

    Returns:
      String URL to redirect the user to after processing.
    """
    # Default to searching open issues in this project.
    can = post_data.get('can', 2)

    terms = []
    # Fields where every entered value must match.
    for operator, form_field in [
        ('', 'words'), ('-', 'without'), ('label:', 'labels')]:
      self._AccumulateANDTerm(operator, form_field, post_data, terms)
    # Fields where any one of the entered values may match.
    for operator, form_field in [
        ('component:', 'components'), ('status:', 'statuses'),
        ('reporter:', 'reporters'), ('owner:', 'owners'),
        ('cc:', 'cc'), ('commentby:', 'commentby')]:
      self._AccumulateORTerm(operator, form_field, post_data, terms)

    if 'starcount' in post_data:
      starcount = int(post_data['starcount'])
      if starcount >= 0:
        terms.append('starcount:%s' % starcount)

    return framework_helpers.FormatAbsoluteURL(
        mr, urls.ISSUE_LIST, q=' '.join(terms), can=can)

  def _AccumulateANDTerm(self, operator, form_field, post_data, search_query):
    """Append one query term per entered value; all of them must match."""
    user_input = post_data.get(form_field)
    if not user_input:
      return
    search_query.extend(
        '%s%s' % (operator, value)
        for value in VALUE_RE.findall(user_input))

  def _AccumulateORTerm(self, operator, form_field, post_data, search_query):
    """Append one comma-joined term; any listed value may match."""
    user_input = post_data.get(form_field)
    if not user_input:
      return
    values = VALUE_RE.findall(user_input)
    search_query.append('%s%s' % (operator, ','.join(values)))
diff --git a/appengine/monorail/tracker/issueattachment.py b/appengine/monorail/tracker/issueattachment.py
new file mode 100644
index 0000000..27f84a1
--- /dev/null
+++ b/appengine/monorail/tracker/issueattachment.py
@@ -0,0 +1,126 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Issue Tracker code to serve out issue attachments.
+
+Summary of page classes:
+ AttachmentPage: Serve the content of an attachment w/ the appropriate
+ MIME type.
+ IssueAttachmentDeletion: Form handler for deleting attachments.
+"""
+
+import base64
+import logging
+import os
+import re
+import urllib
+
+import webapp2
+
+from google.appengine.api import app_identity
+from google.appengine.api import images
+
+from framework import framework_helpers
+from framework import gcs_helpers
+from framework import permissions
+from framework import servlet
+from framework import urls
+from services import issue_svc
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+# This will likely appear blank or as a broken image icon in the browser.
+NO_PREVIEW_ICON = ''
+NO_PREVIEW_MIME_TYPE = 'image/png'
+
+FILE_RE = re.compile('^[-_.a-zA-Z0-9 #+()]+$')
+
+
+class AttachmentPage(servlet.Servlet):
+ """AttachmentPage serves issue attachments."""
+
+ def GatherPageData(self, mr):
+ """Parse the attachment ID from the request and serve its content.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns: dict of values used by EZT for rendering the page.
+ """
+ try:
+ attachment, _issue = tracker_helpers.GetAttachmentIfAllowed(
+ mr, self.services)
+ except issue_svc.NoSuchIssueException:
+ webapp2.abort(404, 'issue not found')
+ except issue_svc.NoSuchAttachmentException:
+ webapp2.abort(404, 'attachment not found')
+ except issue_svc.NoSuchCommentException:
+ webapp2.abort(404, 'comment not found')
+
+ if not attachment.gcs_object_id:
+ webapp2.abort(404, 'attachment data not found')
+
+ bucket_name = app_identity.get_default_gcs_bucket_name()
+ object_path = '/' + bucket_name + attachment.gcs_object_id
+
+ if mr.thumb:
+ url = gcs_helpers.SignUrl(object_path + '-thumbnail')
+ self.redirect(url, abort=True)
+
+ # By default GCS will return images and attachments displayable inline.
+ url = gcs_helpers.SignUrl(object_path)
+ if not mr.inline:
+ filename = attachment.filename
+ if not FILE_RE.match(filename):
+ print "bad file name: %s" % attachment.attachment_id
+ filename = 'attachment-%d.dat' % attachment.attachment_id
+
+ url = url + '&' + urllib.urlencode(
+ {'response-content-disposition':
+ ('attachment; filename=%s' % filename)})
+
+ self.redirect(url, abort=True)
+
+
+class IssueAttachmentDeletion(servlet.Servlet):
+  """Form handler that allows a user to soft-delete or undelete attachments.
+
+  NOTE(review): despite the original "hard-delete" wording, this calls
+  SoftDeleteAttachment below, so the attachment is only marked deleted.
+  """
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the form that soft-deletes an issue attachment.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: HTML form data from the request.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    local_id = int(post_data['id'])
+    sequence_num = int(post_data['sequence_num'])
+    attachment_id = int(post_data['aid'])
+    # Presence of the 'delete' key means delete; absence means undelete.
+    delete = 'delete' in post_data
+
+    issue = self.services.issue.GetIssueByLocalID(
+        mr.cnxn, mr.project_id, local_id)
+
+    all_comments = self.services.issue.GetCommentsForIssue(
+        mr.cnxn, issue.issue_id)
+    logging.info('comments on %s are: %s', local_id, all_comments)
+    # The attachment hangs off the comment at the given sequence number.
+    comment = all_comments[sequence_num]
+
+    if not permissions.CanDelete(
+        mr.auth.user_id, mr.auth.effective_ids, mr.perms,
+        comment.deleted_by, comment.user_id, mr.project,
+        permissions.GetRestrictions(issue)):
+      raise permissions.PermissionException(
+          'Cannot un/delete attachment')
+
+    self.services.issue.SoftDeleteAttachment(
+        mr.cnxn, mr.project_id, local_id, sequence_num,
+        attachment_id, self.services.user, delete=delete)
+
+    return framework_helpers.FormatAbsoluteURL(
+        mr, urls.ISSUE_DETAIL, id=local_id)
diff --git a/appengine/monorail/tracker/issueattachmenttext.py b/appengine/monorail/tracker/issueattachmenttext.py
new file mode 100644
index 0000000..a864ba0
--- /dev/null
+++ b/appengine/monorail/tracker/issueattachmenttext.py
@@ -0,0 +1,102 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet to safely display textual issue attachments.
+
+Unlike most attachments, this is not a download, it is a full HTML page
+with safely escaped user content.
+"""
+
+import httplib
+import logging
+
+import webapp2
+
+from google.appengine.api import app_identity
+
+from third_party import cloudstorage
+from third_party import ezt
+
+from features import prettify
+from framework import filecontent
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from services import issue_svc
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+class AttachmentText(servlet.Servlet):
+ """AttachmentText displays textual attachments much like source browsing."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-attachment-text.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+
+ def GatherPageData(self, mr):
+ """Parse the attachment ID from the request and serve its content.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering almost the page.
+ """
+ with self.profiler.Phase('get issue, comment, and attachment'):
+ try:
+ attachment, issue = tracker_helpers.GetAttachmentIfAllowed(
+ mr, self.services)
+ except issue_svc.NoSuchIssueException:
+ webapp2.abort(404, 'issue not found')
+ except issue_svc.NoSuchAttachmentException:
+ webapp2.abort(404, 'attachment not found')
+ except issue_svc.NoSuchCommentException:
+ webapp2.abort(404, 'comment not found')
+
+ content = []
+ if attachment.gcs_object_id:
+ bucket_name = app_identity.get_default_gcs_bucket_name()
+ full_path = '/' + bucket_name + attachment.gcs_object_id
+ logging.info("reading gcs: %s" % full_path)
+ with cloudstorage.open(full_path, 'r') as f:
+ content = f.read()
+
+ filesize = len(content)
+
+ # This servlet only displays safe textual attachments. The user should
+ # not have been given a link to this servlet for any other kind.
+ if not tracker_views.IsViewableText(attachment.mimetype, filesize):
+ self.response.status = httplib.BAD_REQUEST
+ raise servlet.AlreadySentResponseException('not a text file')
+
+ u_text, is_binary, too_large = filecontent.DecodeFileContents(content)
+ lines = prettify.PrepareSourceLinesForHighlighting(u_text.encode('utf8'))
+
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ granted_perms = tracker_bizobj.GetGrantedPerms(
+ issue, mr.auth.effective_ids, config)
+ page_perms = self.MakePagePerms(
+ mr, issue, permissions.DELETE_ISSUE, permissions.CREATE_ISSUE,
+ granted_perms=granted_perms)
+
+ page_data = {
+ 'issue_tab_mode': 'issueDetail',
+ 'local_id': issue.local_id,
+ 'filename': attachment.filename,
+ 'filesize': template_helpers.BytesKbOrMb(filesize),
+ 'file_lines': lines,
+ 'is_binary': ezt.boolean(is_binary),
+ 'too_large': ezt.boolean(too_large),
+ 'code_reviews': None,
+ 'page_perms': page_perms,
+ }
+ if is_binary or too_large:
+ page_data['should_prettify'] = ezt.boolean(False)
+ else:
+ page_data.update(prettify.BuildPrettifyData(
+ len(lines), attachment.filename))
+
+ return page_data
diff --git a/appengine/monorail/tracker/issuebulkedit.py b/appengine/monorail/tracker/issuebulkedit.py
new file mode 100644
index 0000000..7d4871b
--- /dev/null
+++ b/appengine/monorail/tracker/issuebulkedit.py
@@ -0,0 +1,418 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement the issue bulk edit page and related forms.
+
+Summary of classes:
+ IssueBulkEdit: Show a form for editing multiple issues and allow the
+ user to update them all at once.
+"""
+
+import httplib
+import logging
+import time
+
+from third_party import ezt
+
+from features import filterrules_helpers
+from features import notify
+from framework import actionlimit
+from framework import framework_constants
+from framework import framework_views
+from framework import monorailrequest
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from services import tracker_fulltext
+from tracker import field_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+class IssueBulkEdit(servlet.Servlet):
+  """IssueBulkEdit lists multiple issues and allows an edit to all of them."""
+
+  _PAGE_TEMPLATE = 'tracker/issue-bulk-edit-page.ezt'
+  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+  _CAPTCHA_ACTION_TYPES = [actionlimit.ISSUE_BULK_EDIT]
+
+  # Rough time-estimate constants used to decide whether to show a
+  # progress indicator (see num_seconds in GatherPageData below).
+  _SECONDS_OVERHEAD = 4
+  _SECONDS_PER_UPDATE = 0.12
+  _SLOWNESS_THRESHOLD = 10
+
+  def AssertBasePermission(self, mr):
+    """Check whether the user has any permission to visit this page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Raises:
+      PermissionException: if the user is not allowed to enter an issue.
+    """
+    super(IssueBulkEdit, self).AssertBasePermission(mr)
+    can_edit = self.CheckPerm(mr, permissions.EDIT_ISSUE)
+    can_comment = self.CheckPerm(mr, permissions.ADD_ISSUE_COMMENT)
+    if not (can_edit and can_comment):
+      raise permissions.PermissionException('bulk edit forbidden')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    with self.profiler.Phase('getting issues'):
+      if not mr.local_id_list:
+        raise monorailrequest.InputException()
+      requested_issues = self.services.issue.GetIssuesByLocalIDs(
+          mr.cnxn, mr.project_id, sorted(mr.local_id_list))
+
+    with self.profiler.Phase('filtering issues'):
+      # TODO(jrobbins): filter out issues that the user cannot edit and
+      # provide that as feedback rather than just silently ignoring them.
+      open_issues, closed_issues = (
+          tracker_helpers.GetAllowedOpenedAndClosedIssues(
+              mr, [issue.issue_id for issue in requested_issues],
+              self.services))
+      issues = open_issues + closed_issues
+
+    if not issues:
+      self.abort(404, 'no issues found')
+
+    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+    # Intersect the 'type-' labels across all selected issues so only
+    # field defs applicable to every issue's type are offered.
+    type_label_set = {
+        lab.lower() for lab in issues[0].labels
+        if lab.lower().startswith('type-')}
+    for issue in issues[1:]:
+      new_type_set = {
+          lab.lower() for lab in issue.labels
+          if lab.lower().startswith('type-')}
+      type_label_set &= new_type_set
+
+    field_views = [
+        tracker_views.MakeFieldValueView(
+            fd, config, type_label_set, [], [], {})
+        # TODO(jrobbins): field-level view restrictions, display options
+        # TODO(jrobbins): custom fields in templates supply values to view.
+        for fd in config.field_defs
+        if not fd.is_deleted]
+    # Explicitly set all field views to not required. We do not want to force
+    # users to have to set it for issues missing required fields.
+    # See https://bugs.chromium.org/p/monorail/issues/detail?id=500 for more
+    # details.
+    for fv in field_views:
+      fv.field_def.is_required_bool = None
+
+    with self.profiler.Phase('making issue proxies'):
+      issue_views = [
+          template_helpers.EZTItem(
+              local_id=issue.local_id, summary=issue.summary,
+              closed=ezt.boolean(issue in closed_issues))
+          for issue in issues]
+
+    # Estimated processing time, used to decide on a progress indicator.
+    num_seconds = (int(len(issue_views) * self._SECONDS_PER_UPDATE) +
+                   self._SECONDS_OVERHEAD)
+
+    page_perms = self.MakePagePerms(
+        mr, None,
+        permissions.CREATE_ISSUE,
+        permissions.DELETE_ISSUE)
+
+    return {
+        'issue_tab_mode': 'issueBulkEdit',
+        'issues': issue_views,
+        'num_issues': len(issue_views),
+        'show_progress': ezt.boolean(num_seconds > self._SLOWNESS_THRESHOLD),
+        'num_seconds': num_seconds,
+
+        'initial_comment': '',
+        'initial_status': '',
+        'initial_owner': '',
+        'initial_merge_into': '',
+        'initial_cc': '',
+        'initial_components': '',
+        'labels': [],
+        'fields': field_views,
+
+        'restrict_to_known': ezt.boolean(config.restrict_to_known),
+        'page_perms': page_perms,
+        'statuses_offer_merge': config.statuses_offer_merge,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the posted issue update form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: HTML form data from the request.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    if not mr.local_id_list:
+      logging.info('missing issue local IDs, probably tampered')
+      self.response.status = httplib.BAD_REQUEST
+      return
+
+    # Check that the user is logged in; anon users cannot update issues.
+    if not mr.auth.user_id:
+      logging.info('user was not logged in, cannot update issue')
+      self.response.status = httplib.BAD_REQUEST  # TODO: raise an exception.
+      return
+
+    self.CountRateLimitedActions(
+        mr, {actionlimit.ISSUE_BULK_EDIT: len(mr.local_id_list)})
+
+    # Check that the user has permission to add a comment, and to enter
+    # metadata if they are trying to do that.
+    if not self.CheckPerm(mr, permissions.ADD_ISSUE_COMMENT):
+      logging.info('user has no permission to add issue comment')
+      self.response.status = httplib.BAD_REQUEST
+      return
+
+    if not self.CheckPerm(mr, permissions.EDIT_ISSUE):
+      logging.info('user has no permission to edit issue metadata')
+      self.response.status = httplib.BAD_REQUEST
+      return
+
+    move_to = post_data.get('move_to', '').lower()
+    if move_to and not self.CheckPerm(mr, permissions.DELETE_ISSUE):
+      logging.info('user has no permission to move issue')
+      self.response.status = httplib.BAD_REQUEST
+      return
+
+    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+    parsed = tracker_helpers.ParseIssueRequest(
+        mr.cnxn, post_data, self.services, mr.errors, mr.project_name)
+    field_helpers.ShiftEnumFieldsIntoLabels(
+        parsed.labels, parsed.labels_remove,
+        parsed.fields.vals, parsed.fields.vals_remove,
+        config)
+    field_vals = field_helpers.ParseFieldValues(
+        mr.cnxn, self.services.user, parsed.fields.vals, config)
+    field_vals_remove = field_helpers.ParseFieldValues(
+        mr.cnxn, self.services.user, parsed.fields.vals_remove, config)
+
+    # Treat status '' as no change and explicit 'clear' as clearing the status.
+    status = parsed.status
+    if status == '':
+      status = None
+    if post_data.get('op_statusenter') == 'clear':
+      status = ''
+
+    reporter_id = mr.auth.user_id
+    logging.info('bulk edit request by %s', reporter_id)
+    self.CheckCaptcha(mr, post_data)
+
+    if parsed.users.owner_id is None:
+      mr.errors.owner = 'Invalid owner username'
+    else:
+      valid, msg = tracker_helpers.IsValidIssueOwner(
+          mr.cnxn, mr.project, parsed.users.owner_id, self.services)
+      if not valid:
+        mr.errors.owner = msg
+
+    if (status in config.statuses_offer_merge and
+        not post_data.get('merge_into')):
+      mr.errors.merge_into_id = 'Please enter a valid issue ID'
+
+    move_to_project = None
+    if move_to:
+      if mr.project_name == move_to:
+        mr.errors.move_to = 'The issues are already in project ' + move_to
+      else:
+        move_to_project = self.services.project.GetProjectByName(
+            mr.cnxn, move_to)
+        if not move_to_project:
+          mr.errors.move_to = 'No such project: ' + move_to
+
+    # Treat owner '' as no change, and explicit 'clear' as NO_USER_SPECIFIED
+    owner_id = parsed.users.owner_id
+    if parsed.users.owner_username == '':
+      owner_id = None
+    if post_data.get('op_ownerenter') == 'clear':
+      owner_id = framework_constants.NO_USER_SPECIFIED
+
+    comp_ids = tracker_helpers.LookupComponentIDs(
+        parsed.components.paths, config, mr.errors)
+    comp_ids_remove = tracker_helpers.LookupComponentIDs(
+        parsed.components.paths_remove, config, mr.errors)
+    # The 'remove' operator swaps which list is added vs. removed.
+    if post_data.get('op_componententer') == 'remove':
+      comp_ids, comp_ids_remove = comp_ids_remove, comp_ids
+
+    cc_ids, cc_ids_remove = parsed.users.cc_ids, parsed.users.cc_ids_remove
+    if post_data.get('op_memberenter') == 'remove':
+      cc_ids, cc_ids_remove = parsed.users.cc_ids_remove, parsed.users.cc_ids
+
+    local_ids_actually_changed = []
+    old_owner_ids = []
+    combined_amendments = []
+    merge_into_issue = None
+    new_starrers = set()
+
+    if not mr.errors.AnyErrors():
+      issue_list = self.services.issue.GetIssuesByLocalIDs(
+          mr.cnxn, mr.project_id, mr.local_id_list)
+
+      # Skip any individual issues that the user is not allowed to edit.
+      editable_issues = [
+          issue for issue in issue_list
+          if permissions.CanEditIssue(
+              mr.auth.effective_ids, mr.perms, mr.project, issue)]
+
+      # Skip any restricted issues that cannot be moved
+      if move_to:
+        editable_issues = [
+            issue for issue in editable_issues
+            if not permissions.GetRestrictions(issue)]
+
+      # If 'Duplicate' status is specified ensure there are no permission issues
+      # with the issue we want to merge with.
+      if post_data.get('merge_into'):
+        for issue in editable_issues:
+          _, merge_into_issue = tracker_helpers.ParseMergeFields(
+              mr.cnxn, self.services, mr.project_name, post_data, parsed.status,
+              config, issue, mr.errors)
+          if merge_into_issue:
+            merge_allowed = tracker_helpers.IsMergeAllowed(
+                merge_into_issue, mr, self.services)
+            if not merge_allowed:
+              mr.errors.merge_into_id = 'Target issue %s cannot be modified' % (
+                  merge_into_issue.local_id)
+              break
+
+            # Update the new_starrers set.
+            new_starrers.update(tracker_helpers.GetNewIssueStarrers(
+                mr.cnxn, self.services, issue.issue_id,
+                merge_into_issue.issue_id))
+
+      # Proceed with amendments only if there are no reported errors.
+      if not mr.errors.AnyErrors():
+        # Sort the issues: we want them in this order so that the
+        # corresponding old_owner_id are found in the same order.
+        editable_issues.sort(lambda i1, i2: cmp(i1.local_id, i2.local_id))
+
+        iids_to_invalidate = set()
+        rules = self.services.features.GetFilterRules(
+            mr.cnxn, config.project_id)
+        predicate_asts = filterrules_helpers.ParsePredicateASTs(
+            rules, config, None)
+        for issue in editable_issues:
+          old_owner_id = tracker_bizobj.GetOwnerId(issue)
+          merge_into_iid = (
+              merge_into_issue.issue_id if merge_into_issue else None)
+
+          amendments, _ = self.services.issue.DeltaUpdateIssue(
+              mr.cnxn, self.services, mr.auth.user_id, mr.project_id, config,
+              issue, status, owner_id, cc_ids, cc_ids_remove, comp_ids,
+              comp_ids_remove, parsed.labels, parsed.labels_remove, field_vals,
+              field_vals_remove, parsed.fields.fields_clear,
+              merged_into=merge_into_iid, comment=parsed.comment,
+              iids_to_invalidate=iids_to_invalidate, rules=rules,
+              predicate_asts=predicate_asts)
+
+          if amendments or parsed.comment:  # Avoid empty comments.
+            local_ids_actually_changed.append(issue.local_id)
+            old_owner_ids.append(old_owner_id)
+            combined_amendments.extend(amendments)
+
+        self.services.issue.InvalidateIIDs(mr.cnxn, iids_to_invalidate)
+        self.services.project.UpdateRecentActivity(
+            mr.cnxn, mr.project.project_id)
+
+        # Add new_starrers and new CCs to merge_into_issue.
+        if merge_into_issue:
+          merge_into_project = self.services.project.GetProjectByName(
+              mr.cnxn, merge_into_issue.project_name)
+          tracker_helpers.AddIssueStarrers(
+              mr.cnxn, self.services, mr, merge_into_issue.issue_id,
+              merge_into_project, new_starrers)
+          tracker_helpers.MergeCCsAndAddCommentMultipleIssues(
+              self.services, mr, editable_issues, merge_into_project,
+              merge_into_issue)
+
+        if move_to and editable_issues:
+          tracker_fulltext.UnindexIssues(
+              [issue.issue_id for issue in editable_issues])
+          for issue in editable_issues:
+            old_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
+            moved_back_iids = self.services.issue.MoveIssues(
+                mr.cnxn, move_to_project, [issue], self.services.user)
+            new_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
+            if issue.issue_id in moved_back_iids:
+              content = 'Moved %s back to %s again.' % (
+                  old_text_ref, new_text_ref)
+            else:
+              content = 'Moved %s to now be %s.' % (old_text_ref, new_text_ref)
+            self.services.issue.CreateIssueComment(
+                mr.cnxn, move_to_project.project_id, issue.local_id,
+                mr.auth.user_id, content, amendments=[
+                    tracker_bizobj.MakeProjectAmendment(
+                        move_to_project.project_name)])
+
+      send_email = 'send_email' in post_data
+
+      users_by_id = framework_views.MakeAllUserViews(
+          mr.cnxn, self.services.user,
+          [owner_id], cc_ids, cc_ids_remove, old_owner_ids,
+          tracker_bizobj.UsersInvolvedInAmendments(combined_amendments))
+      if move_to and editable_issues:
+        project_id = move_to_project.project_id
+        local_ids_actually_changed = [
+            issue.local_id for issue in editable_issues]
+      else:
+        project_id = mr.project_id
+
+      notify.SendIssueBulkChangeNotification(
+          mr.request.host, project_id,
+          local_ids_actually_changed, old_owner_ids, parsed.comment,
+          reporter_id, combined_amendments, send_email, users_by_id)
+
+    if mr.errors.AnyErrors():
+      # Bounce the user's input back to the form for correction.
+      bounce_cc_parts = (
+          parsed.users.cc_usernames +
+          ['-%s' % ccur for ccur in parsed.users.cc_usernames_remove])
+      bounce_labels = (
+          parsed.labels +
+          ['-%s' % lr for lr in parsed.labels_remove])
+      self.PleaseCorrect(
+          mr, initial_status=parsed.status,
+          initial_owner=parsed.users.owner_username,
+          initial_merge_into=post_data.get('merge_into', 0),
+          initial_cc=', '.join(bounce_cc_parts),
+          initial_comment=parsed.comment,
+          initial_components=parsed.components.entered_str,
+          labels=bounce_labels)
+      return
+
+    with self.profiler.Phase('reindexing issues'):
+      logging.info('starting reindexing')
+      start = time.time()
+      # Get the updated issues and index them
+      issue_list = self.services.issue.GetIssuesByLocalIDs(
+          mr.cnxn, mr.project_id, mr.local_id_list)
+      tracker_fulltext.IndexIssues(
+          mr.cnxn, issue_list, self.services.user, self.services.issue,
+          self.services.config)
+      logging.info('reindexing %d issues took %s sec',
+                   len(issue_list), time.time() - start)
+
+    # TODO(jrobbins): These could be put into the form action attribute.
+    mr.can = int(post_data['can'])
+    mr.query = post_data['q']
+    mr.col_spec = post_data['colspec']
+    mr.sort_spec = post_data['sort']
+    mr.group_by_spec = post_data['groupby']
+    mr.start = int(post_data['start'])
+    mr.num = int(post_data['num'])
+
+    # TODO(jrobbins): implement bulk=N param for a better confirmation alert.
+    return tracker_helpers.FormatIssueListURL(
+        mr, config, saved=len(mr.local_id_list), ts=int(time.time()))
diff --git a/appengine/monorail/tracker/issuedetail.py b/appengine/monorail/tracker/issuedetail.py
new file mode 100644
index 0000000..4dc8601
--- /dev/null
+++ b/appengine/monorail/tracker/issuedetail.py
@@ -0,0 +1,1253 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement the issue detail page and related forms.
+
+Summary of classes:
+ IssueDetail: Show one issue in detail w/ all metadata and comments, and
+ process additional comments or metadata changes on it.
+ SetStarForm: Record the user's desire to star or unstar an issue.
+ FlagSpamForm: Record the user's desire to report the issue as spam.
+"""
+
+import httplib
+import logging
+import time
+from third_party import ezt
+
+import settings
+from features import notify
+from framework import actionlimit
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import monorailrequest
+from framework import paginate
+from framework import permissions
+from framework import servlet
+from framework import servlet_helpers
+from framework import sql
+from framework import template_helpers
+from framework import urls
+from framework import xsrf
+from proto import user_pb2
+from search import frontendsearchpipeline
+from services import issue_svc
+from services import tracker_fulltext
+from tracker import field_helpers
+from tracker import issuepeek
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+class IssueDetail(issuepeek.IssuePeek):
+ """IssueDetail is a page that shows the details of one issue."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-detail-page.ezt'
+ _MISSING_ISSUE_PAGE_TEMPLATE = 'tracker/issue-missing-page.ezt'
+ _MAIN_TAB_MODE = issuepeek.IssuePeek.MAIN_TAB_ISSUES
+ _CAPTCHA_ACTION_TYPES = [actionlimit.ISSUE_COMMENT]
+ _ALLOW_VIEWING_DELETED = True
+
+  def __init__(self, request, response, **kwargs):
+    """Load the missing-issue 404 template in addition to base setup."""
+    super(IssueDetail, self).__init__(request, response, **kwargs)
+    self.missing_issue_template = template_helpers.MonorailTemplate(
+        self._TEMPLATE_PATH + self._MISSING_ISSUE_PAGE_TEMPLATE)
+
+  def GetTemplate(self, page_data):
+    """Return a custom 404 page for skipped issue local IDs."""
+    # _GetMissingIssuePageData sets http_response_code to NOT_FOUND.
+    if page_data.get('http_response_code', httplib.OK) == httplib.NOT_FOUND:
+      return self.missing_issue_template
+    else:
+      return servlet.Servlet.GetTemplate(self, page_data)
+
+  def _GetMissingIssuePageData(
+      self, mr, issue_deleted=False, issue_missing=False,
+      issue_not_specified=False, issue_not_created=False,
+      moved_to_project_name=None, moved_to_id=None,
+      local_id=None, page_perms=None, delete_form_token=None):
+    """Return page data explaining why the requested issue is unavailable.
+
+    Exactly one of the issue_* flags is normally set by the caller; the
+    resulting dict drives the missing-issue 404 template (see GetTemplate).
+    """
+    if not page_perms:
+      # Make a default page perms.
+      page_perms = self.MakePagePerms(mr, None, granted_perms=None)
+      page_perms.CreateIssue = False
+    return {
+      'issue_tab_mode': 'issueDetail',
+      'http_response_code': httplib.NOT_FOUND,
+      'issue_deleted': ezt.boolean(issue_deleted),
+      'issue_missing': ezt.boolean(issue_missing),
+      'issue_not_specified': ezt.boolean(issue_not_specified),
+      'issue_not_created': ezt.boolean(issue_not_created),
+      'moved_to_project_name': moved_to_project_name,
+      'moved_to_id': moved_to_id,
+      'local_id': local_id,
+      'page_perms': page_perms,
+      'delete_form_token': delete_form_token,
+    }
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ with self.profiler.Phase('getting project issue config'):
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+ # The flipper is not itself a Promise, but it contains Promises.
+ flipper = _Flipper(mr, self.services, self.profiler)
+
+ if mr.local_id is None:
+ return self._GetMissingIssuePageData(mr, issue_not_specified=True)
+ with self.profiler.Phase('finishing getting issue'):
+ try:
+ issue = self._GetIssue(mr)
+ except issue_svc.NoSuchIssueException:
+ issue = None
+
+ # Show explanation of skipped issue local IDs or deleted issues.
+ if issue is None or issue.deleted:
+ missing = mr.local_id <= self.services.issue.GetHighestLocalID(
+ mr.cnxn, mr.project_id)
+ if missing or (issue and issue.deleted):
+ moved_to_ref = self.services.issue.GetCurrentLocationOfMovedIssue(
+ mr.cnxn, mr.project_id, mr.local_id)
+ moved_to_project_id, moved_to_id = moved_to_ref
+ if moved_to_project_id is not None:
+ moved_to_project = self.services.project.GetProject(
+ mr.cnxn, moved_to_project_id)
+ moved_to_project_name = moved_to_project.project_name
+ else:
+ moved_to_project_name = None
+
+ if issue:
+ granted_perms = tracker_bizobj.GetGrantedPerms(
+ issue, mr.auth.effective_ids, config)
+ else:
+ granted_perms = None
+ page_perms = self.MakePagePerms(
+ mr, issue,
+ permissions.DELETE_ISSUE, permissions.CREATE_ISSUE,
+ granted_perms=granted_perms)
+ return self._GetMissingIssuePageData(
+ mr,
+ issue_deleted=ezt.boolean(issue is not None),
+ issue_missing=ezt.boolean(issue is None and missing),
+ moved_to_project_name=moved_to_project_name,
+ moved_to_id=moved_to_id,
+ local_id=mr.local_id,
+ page_perms=page_perms,
+ delete_form_token=xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_DELETE_JSON)))
+ else:
+ # Issue is not "missing," moved, or deleted, it is just non-existent.
+ return self._GetMissingIssuePageData(mr, issue_not_created=True)
+
+ star_cnxn = sql.MonorailConnection()
+ star_promise = framework_helpers.Promise(
+ self.services.issue_star.IsItemStarredBy, star_cnxn,
+ issue.issue_id, mr.auth.user_id)
+
+ granted_perms = tracker_bizobj.GetGrantedPerms(
+ issue, mr.auth.effective_ids, config)
+
+ page_perms = self.MakePagePerms(
+ mr, issue,
+ permissions.CREATE_ISSUE,
+ permissions.FLAG_SPAM,
+ permissions.VERDICT_SPAM,
+ permissions.SET_STAR,
+ permissions.EDIT_ISSUE,
+ permissions.EDIT_ISSUE_SUMMARY,
+ permissions.EDIT_ISSUE_STATUS,
+ permissions.EDIT_ISSUE_OWNER,
+ permissions.EDIT_ISSUE_CC,
+ permissions.DELETE_ISSUE,
+ permissions.ADD_ISSUE_COMMENT,
+ permissions.DELETE_OWN,
+ permissions.DELETE_ANY,
+ permissions.VIEW_INBOUND_MESSAGES,
+ granted_perms=granted_perms)
+
+ spam_promise = None
+ spam_hist_promise = None
+
+ if page_perms.FlagSpam:
+ spam_cnxn = sql.MonorailConnection()
+ spam_promise = framework_helpers.Promise(
+ self.services.spam.LookupFlaggers, spam_cnxn,
+ issue.issue_id)
+
+ if page_perms.VerdictSpam:
+ spam_hist_cnxn = sql.MonorailConnection()
+ spam_hist_promise = framework_helpers.Promise(
+ self.services.spam.LookUpIssueVerdictHistory, spam_hist_cnxn,
+ [issue.issue_id])
+
+ with self.profiler.Phase('finishing getting comments and pagination'):
+ (description, visible_comments,
+ cmnt_pagination) = self._PaginatePartialComments(mr, issue)
+
+ with self.profiler.Phase('making user views'):
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([issue]),
+ tracker_bizobj.UsersInvolvedInCommentList(
+ [description] + visible_comments))
+ framework_views.RevealAllEmailsToMembers(mr, users_by_id)
+
+ issue_flaggers, comment_flaggers = [], {}
+ if spam_promise:
+ issue_flaggers, comment_flaggers = spam_promise.WaitAndGetValue()
+
+ (issue_view, description_view,
+ comment_views) = self._MakeIssueAndCommentViews(
+ mr, issue, users_by_id, description, visible_comments, config,
+ issue_flaggers, comment_flaggers)
+
+ with self.profiler.Phase('getting starring info'):
+ starred = star_promise.WaitAndGetValue()
+ star_cnxn.Close()
+ permit_edit = permissions.CanEditIssue(
+ mr.auth.effective_ids, mr.perms, mr.project, issue,
+ granted_perms=granted_perms)
+ page_perms.EditIssue = ezt.boolean(permit_edit)
+ permit_edit_cc = self.CheckPerm(
+ mr, permissions.EDIT_ISSUE_CC, art=issue, granted_perms=granted_perms)
+ discourage_plus_one = not (starred or permit_edit or permit_edit_cc)
+
+ # Check whether to allow attachments from the details page
+ allow_attachments = tracker_helpers.IsUnderSoftAttachmentQuota(mr.project)
+ mr.ComputeColSpec(config)
+ back_to_list_url = _ComputeBackToListURL(mr, issue, config)
+ flipper.SearchForIIDs(mr, issue)
+ restrict_to_known = config.restrict_to_known
+ field_name_set = {fd.field_name.lower() for fd in config.field_defs
+ if not fd.is_deleted} # TODO(jrobbins): restrictions
+ non_masked_labels = tracker_bizobj.NonMaskedLabels(
+ issue.labels, field_name_set)
+
+ component_paths = []
+ for comp_id in issue.component_ids:
+ cd = tracker_bizobj.FindComponentDefByID(comp_id, config)
+ if cd:
+ component_paths.append(cd.path)
+ else:
+ logging.warn(
+ 'Issue %r has unknown component %r', issue.issue_id, comp_id)
+ initial_components = ', '.join(component_paths)
+
+ after_issue_update = tracker_constants.DEFAULT_AFTER_ISSUE_UPDATE
+ if mr.auth.user_pb:
+ after_issue_update = mr.auth.user_pb.after_issue_update
+
+ prevent_restriction_removal = (
+ mr.project.only_owners_remove_restrictions and
+ not framework_bizobj.UserOwnsProject(
+ mr.project, mr.auth.effective_ids))
+
+ offer_issue_copy_move = True
+ for lab in tracker_bizobj.GetLabels(issue):
+ if lab.lower().startswith('restrict-'):
+ offer_issue_copy_move = False
+
+ previous_locations = self.GetPreviousLocations(mr, issue)
+
+ spam_verdict_history = []
+ if spam_hist_promise:
+ spam_hist = spam_hist_promise.WaitAndGetValue()
+
+ spam_verdict_history = [template_helpers.EZTItem(
+ created=verdict['created'].isoformat(),
+ is_spam=verdict['is_spam'],
+ reason=verdict['reason'],
+ user_id=verdict['user_id'],
+ classifier_confidence=verdict['classifier_confidence'],
+ overruled=verdict['overruled'],
+ ) for verdict in spam_hist]
+
+ return {
+ 'issue_tab_mode': 'issueDetail',
+ 'issue': issue_view,
+ 'title_summary': issue_view.summary, # used in <head><title>
+ 'description': description_view,
+ 'comments': comment_views,
+ 'num_detail_rows': len(comment_views) + 4,
+ 'noisy': ezt.boolean(tracker_helpers.IsNoisy(
+ len(comment_views), issue.star_count)),
+
+ 'flipper': flipper,
+ 'cmnt_pagination': cmnt_pagination,
+ 'searchtip': 'You can jump to any issue by number',
+ 'starred': ezt.boolean(starred),
+ 'discourage_plus_one': ezt.boolean(discourage_plus_one),
+ 'pagegen': str(long(time.time() * 1000000)),
+ 'attachment_form_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_ATTACHMENT_DELETION_JSON)),
+ 'delComment_form_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_COMMENT_DELETION_JSON)),
+ 'delete_form_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_DELETE_JSON)),
+ 'flag_spam_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_FLAGSPAM_JSON)),
+ 'set_star_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s.do' % (
+ mr.project_name, urls.ISSUE_SETSTAR_JSON)),
+
+
+ # For deep linking and input correction after a failed submit.
+ 'initial_summary': issue_view.summary,
+ 'initial_comment': '',
+ 'initial_status': issue_view.status.name,
+ 'initial_owner': issue_view.owner.email,
+ 'initial_cc': ', '.join([pb.email for pb in issue_view.cc]),
+ 'initial_blocked_on': issue_view.blocked_on_str,
+ 'initial_blocking': issue_view.blocking_str,
+ 'initial_merge_into': issue_view.merged_into_str,
+ 'labels': non_masked_labels,
+ 'initial_components': initial_components,
+ 'fields': issue_view.fields,
+
+ 'any_errors': ezt.boolean(mr.errors.AnyErrors()),
+ 'allow_attachments': ezt.boolean(allow_attachments),
+ 'max_attach_size': template_helpers.BytesKbOrMb(
+ framework_constants.MAX_POST_BODY_SIZE),
+ 'colspec': mr.col_spec,
+ 'back_to_list_url': back_to_list_url,
+ 'restrict_to_known': ezt.boolean(restrict_to_known),
+ 'after_issue_update': int(after_issue_update), # TODO(jrobbins): str
+ 'prevent_restriction_removal': ezt.boolean(
+ prevent_restriction_removal),
+ 'offer_issue_copy_move': ezt.boolean(offer_issue_copy_move),
+ 'statuses_offer_merge': config.statuses_offer_merge,
+ 'page_perms': page_perms,
+ 'previous_locations': previous_locations,
+ 'spam_verdict_history': spam_verdict_history,
+ }
+
+ def GatherHelpData(self, mr, _page_data):
+ """Return a dict of values to drive on-page user help.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ _page_data: Dictionary of base and page template data.
+
+ Returns:
+ A dict of values to drive on-page user help, to be added to page_data.
+ """
+ is_privileged_domain_user = framework_bizobj.IsPriviledgedDomainUser(
+ mr.auth.user_pb.email)
+ # Check if the user's query is just the ID of an existing issue.
+ # If so, display a "did you mean to search?" cue card.
+ jump_local_id = None
+ cue = None
+ if (tracker_constants.JUMP_RE.match(mr.query) and
+ mr.auth.user_pb and
+ 'search_for_numbers' not in mr.auth.user_pb.dismissed_cues):
+ jump_local_id = int(mr.query)
+ cue = 'search_for_numbers'
+
+ if (mr.auth.user_id and
+ 'privacy_click_through' not in mr.auth.user_pb.dismissed_cues):
+ cue = 'privacy_click_through'
+
+ return {
+ 'is_privileged_domain_user': ezt.boolean(is_privileged_domain_user),
+ 'jump_local_id': jump_local_id,
+ 'cue': cue,
+ }
+
+ # TODO(sheyang): Support comments incremental loading in API
+ def _PaginatePartialComments(self, mr, issue):
+ """Load and paginate the visible comments for the given issue."""
+ abbr_comment_rows = self.services.issue.GetAbbrCommentsForIssue(
+ mr.cnxn, issue.issue_id)
+ if not abbr_comment_rows:
+ return None, [], None
+
+ description = abbr_comment_rows[0]
+ comments = abbr_comment_rows[1:]
+ all_comment_ids = [row[0] for row in comments]
+
+ pagination_url = '%s?id=%d' % (urls.ISSUE_DETAIL, issue.local_id)
+ pagination = paginate.VirtualPagination(
+ mr, len(all_comment_ids),
+ framework_constants.DEFAULT_COMMENTS_PER_PAGE,
+ list_page_url=pagination_url,
+ count_up=False, start_param='cstart', num_param='cnum',
+ max_num=settings.max_comments_per_page)
+ if pagination.last == 1 and pagination.start == len(all_comment_ids):
+ pagination.visible = ezt.boolean(False)
+
+ visible_comment_ids = [description[0]] + all_comment_ids[
+ pagination.last - 1:pagination.start]
+ visible_comment_seqs = [0] + range(pagination.last, pagination.start + 1)
+ visible_comments = self.services.issue.GetCommentsByID(
+ mr.cnxn, visible_comment_ids, visible_comment_seqs)
+
+ return visible_comments[0], visible_comments[1:], pagination
+
+
+ def _ValidateOwner(self, mr, post_data_owner, parsed_owner_id,
+ original_issue_owner_id):
+ """Validates that the issue's owner was changed and is a valid owner.
+
+ Args:
+ mr: Commonly used info parsed from the request.
+ post_data_owner: The owner as specified in the request's data.
+ parsed_owner_id: The owner_id from the request.
+ original_issue_owner_id: The original owner id of the issue.
+
+ Returns:
+ String error message if the owner fails validation else returns None.
+ """
+ parsed_owner_valid, msg = tracker_helpers.IsValidIssueOwner(
+ mr.cnxn, mr.project, parsed_owner_id, self.services)
+ if not parsed_owner_valid:
+ # Only fail validation if the user actually changed the email address.
+ original_issue_owner = self.services.user.LookupUserEmail(
+ mr.cnxn, original_issue_owner_id)
+ if post_data_owner != original_issue_owner:
+ return msg
+ else:
+ # The user did not change the owner, thus do not fail validation.
+ # See https://bugs.chromium.org/p/monorail/issues/detail?id=28 for
+ # more details.
+ pass
+
  def ProcessFormData(self, mr, post_data):
    """Process the posted issue update form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing, or None when
      validation errors cause the form to be re-shown via PleaseCorrect().
    """
    issue = self._GetIssue(mr)
    if not issue:
      logging.warn('issue not found! project_name: %r local id: %r',
                   mr.project_name, mr.local_id)
      raise monorailrequest.InputException('Issue not found in project')

    # Check that the user is logged in; anon users cannot update issues.
    if not mr.auth.user_id:
      logging.info('user was not logged in, cannot update issue')
      raise permissions.PermissionException(
          'User must be logged in to update an issue')

    # Check that the user has permission to add a comment, and to enter
    # metadata if they are trying to do that.
    if not self.CheckPerm(mr, permissions.ADD_ISSUE_COMMENT,
                          art=issue):
      logging.info('user has no permission to add issue comment')
      raise permissions.PermissionException(
          'User has no permission to comment on issue')

    parsed = tracker_helpers.ParseIssueRequest(
        mr.cnxn, post_data, self.services, mr.errors, issue.project_name)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    # Keep copies of the user's raw input so it can be echoed back verbatim
    # if validation fails below (ShiftEnumFieldsIntoLabels mutates parsed).
    bounce_labels = parsed.labels[:]
    bounce_fields = tracker_views.MakeBounceFieldValueViews(
        parsed.fields.vals, config)
    field_helpers.ShiftEnumFieldsIntoLabels(
        parsed.labels, parsed.labels_remove,
        parsed.fields.vals, parsed.fields.vals_remove, config)
    field_values = field_helpers.ParseFieldValues(
        mr.cnxn, self.services.user, parsed.fields.vals, config)

    component_ids = tracker_helpers.LookupComponentIDs(
        parsed.components.paths, config, mr.errors)

    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, mr.auth.effective_ids, config)
    permit_edit = permissions.CanEditIssue(
        mr.auth.effective_ids, mr.perms, mr.project, issue,
        granted_perms=granted_perms)
    page_perms = self.MakePagePerms(
        mr, issue,
        permissions.CREATE_ISSUE,
        permissions.EDIT_ISSUE_SUMMARY,
        permissions.EDIT_ISSUE_STATUS,
        permissions.EDIT_ISSUE_OWNER,
        permissions.EDIT_ISSUE_CC,
        granted_perms=granted_perms)
    page_perms.EditIssue = ezt.boolean(permit_edit)

    if not permit_edit:
      # Without the overall EditIssue perm, a post that edits any restricted
      # field is treated as forged (or stale perms) and rejected outright.
      if not _FieldEditPermitted(
          parsed.labels, parsed.blocked_on.entered_str,
          parsed.blocking.entered_str, parsed.summary,
          parsed.status, parsed.users.owner_id,
          parsed.users.cc_ids, page_perms):
        raise permissions.PermissionException(
            'User lacks permission to edit fields')

    page_generation_time = long(post_data['pagegen'])
    reporter_id = mr.auth.user_id
    self.CheckCaptcha(mr, post_data)

    error_msg = self._ValidateOwner(
        mr, post_data.get('owner', '').strip(), parsed.users.owner_id,
        issue.owner_id)
    if error_msg:
      mr.errors.owner = error_msg

    if None in parsed.users.cc_ids:
      mr.errors.cc = 'Invalid Cc username'

    if len(parsed.comment) > tracker_constants.MAX_COMMENT_CHARS:
      mr.errors.comment = 'Comment is too long'
    if len(parsed.summary) > tracker_constants.MAX_SUMMARY_CHARS:
      mr.errors.summary = 'Summary is too long'

    old_owner_id = tracker_bizobj.GetOwnerId(issue)

    orig_merged_into_iid = issue.merged_into
    merge_into_iid = issue.merged_into
    merge_into_text, merge_into_issue = tracker_helpers.ParseMergeFields(
        mr.cnxn, self.services, mr.project_name, post_data,
        parsed.status, config, issue, mr.errors)
    # merge_allowed and new_starrers are only defined (and only referenced,
    # via short-circuit guards below) when a merge target was parsed.
    if merge_into_issue:
      merge_into_iid = merge_into_issue.issue_id
      merge_into_project = self.services.project.GetProjectByName(
          mr.cnxn, merge_into_issue.project_name)
      merge_allowed = tracker_helpers.IsMergeAllowed(
          merge_into_issue, mr, self.services)

      new_starrers = tracker_helpers.GetNewIssueStarrers(
          mr.cnxn, self.services, issue.issue_id, merge_into_iid)

    # For any fields that the user does not have permission to edit, use
    # the current values in the issue rather than whatever strings were parsed.
    labels = parsed.labels
    summary = parsed.summary
    status = parsed.status
    owner_id = parsed.users.owner_id
    cc_ids = parsed.users.cc_ids
    # An issue may never block itself, so filter out self-references.
    blocked_on_iids = [iid for iid in parsed.blocked_on.iids
                       if iid != issue.issue_id]
    blocking_iids = [iid for iid in parsed.blocking.iids
                     if iid != issue.issue_id]
    dangling_blocked_on_refs = [tracker_bizobj.MakeDanglingIssueRef(*ref)
                                for ref in parsed.blocked_on.dangling_refs]
    dangling_blocking_refs = [tracker_bizobj.MakeDanglingIssueRef(*ref)
                              for ref in parsed.blocking.dangling_refs]
    if not permit_edit:
      labels = issue.labels
      field_values = issue.field_values
      component_ids = issue.component_ids
      blocked_on_iids = issue.blocked_on_iids
      blocking_iids = issue.blocking_iids
      dangling_blocked_on_refs = issue.dangling_blocked_on_refs
      dangling_blocking_refs = issue.dangling_blocking_refs
      merge_into_iid = issue.merged_into
      if not page_perms.EditIssueSummary:
        summary = issue.summary
      if not page_perms.EditIssueStatus:
        status = issue.status
      if not page_perms.EditIssueOwner:
        owner_id = issue.owner_id
      if not page_perms.EditIssueCc:
        cc_ids = issue.cc_ids

    field_helpers.ValidateCustomFields(
        mr, self.services, field_values, config, mr.errors)

    orig_blocked_on = issue.blocked_on_iids
    if not mr.errors.AnyErrors():
      try:
        if parsed.attachments:
          new_bytes_used = tracker_helpers.ComputeNewQuotaBytesUsed(
              mr.project, parsed.attachments)
          self.services.project.UpdateProject(
              mr.cnxn, mr.project.project_id,
              attachment_bytes_used=new_bytes_used)

        # Store everything we got from the form. If the user lacked perms
        # any attempted edit would be a no-op because of the logic above.
        amendments, _ = self.services.issue.ApplyIssueComment(
            mr.cnxn, self.services,
            mr.auth.user_id, mr.project_id, mr.local_id, summary, status,
            owner_id, cc_ids, labels, field_values, component_ids,
            blocked_on_iids, blocking_iids, dangling_blocked_on_refs,
            dangling_blocking_refs, merge_into_iid,
            page_gen_ts=page_generation_time, comment=parsed.comment,
            attachments=parsed.attachments)
        self.services.project.UpdateRecentActivity(
            mr.cnxn, mr.project.project_id)

        # Also update the Issue PB we have in RAM so that the correct
        # CC list will be used for an issue merge.
        # TODO(jrobbins): refactor the call above to: 1. compute the updates
        # and update the issue PB in RAM, then 2. store the updated issue.
        issue.cc_ids = cc_ids
        issue.labels = labels

      except tracker_helpers.OverAttachmentQuota:
        # This sets an error, so PleaseCorrect() below re-shows the form
        # before amendments (undefined in this case) is ever referenced.
        mr.errors.attachments = 'Project attachment quota exceeded.'

      if (merge_into_issue and merge_into_iid != orig_merged_into_iid and
          merge_allowed):
        tracker_helpers.AddIssueStarrers(
            mr.cnxn, self.services, mr,
            merge_into_iid, merge_into_project, new_starrers)
        merge_comment = tracker_helpers.MergeCCsAndAddComment(
            self.services, mr, issue, merge_into_project, merge_into_issue)
      elif merge_into_issue:
        merge_comment = None
        logging.info('merge denied: target issue %s not modified',
                     merge_into_iid)
        # TODO(jrobbins): distinguish between EditIssue and
        # AddIssueComment and do just the part that is allowed.
        # And, give feedback in the source issue if any part of the
        # merge was not allowed. Maybe use AJAX to check as the
        # user types in the issue local ID.

    counts = {actionlimit.ISSUE_COMMENT: 1,
              actionlimit.ISSUE_ATTACHMENT: len(parsed.attachments)}
    self.CountRateLimitedActions(mr, counts)

    copy_to_project = CheckCopyIssueRequest(
        self.services, mr, issue, post_data.get('more_actions') == 'copy',
        post_data.get('copy_to'), mr.errors)
    move_to_project = CheckMoveIssueRequest(
        self.services, mr, issue, post_data.get('more_actions') == 'move',
        post_data.get('move_to'), mr.errors)

    if mr.errors.AnyErrors():
      # Echo back the user's input so they can correct and resubmit.
      self.PleaseCorrect(
          mr, initial_summary=parsed.summary,
          initial_status=parsed.status,
          initial_owner=parsed.users.owner_username,
          initial_cc=', '.join(parsed.users.cc_usernames),
          initial_components=', '.join(parsed.components.paths),
          initial_comment=parsed.comment,
          labels=bounce_labels, fields=bounce_fields,
          initial_blocked_on=parsed.blocked_on.entered_str,
          initial_blocking=parsed.blocking.entered_str,
          initial_merge_into=merge_into_text)
      return

    # Users without edit perms cannot suppress email notifications.
    send_email = 'send_email' in post_data or not permit_edit

    moved_to_project_name_and_local_id = None
    copied_to_project_name_and_local_id = None
    if move_to_project:
      moved_to_project_name_and_local_id = self.HandleCopyOrMove(
          mr.cnxn, mr, move_to_project, issue, send_email, move=True)
    elif copy_to_project:
      copied_to_project_name_and_local_id = self.HandleCopyOrMove(
          mr.cnxn, mr, copy_to_project, issue, send_email, move=False)

    # TODO(sheyang): use global issue id in case the issue gets moved again
    # before the task gets processed
    if amendments or parsed.comment.strip() or parsed.attachments:
      cmnts = self.services.issue.GetCommentsForIssue(mr.cnxn, issue.issue_id)
      notify.PrepareAndSendIssueChangeNotification(
          issue.project_id, issue.local_id, mr.request.host, reporter_id,
          len(cmnts) - 1, send_email=send_email, old_owner_id=old_owner_id)

    if merge_into_issue and merge_allowed and merge_comment:
      cmnts = self.services.issue.GetCommentsForIssue(
          mr.cnxn, merge_into_issue.issue_id)
      notify.PrepareAndSendIssueChangeNotification(
          merge_into_issue.project_id, merge_into_issue.local_id,
          mr.request.host, reporter_id, len(cmnts) - 1, send_email=send_email)

    if permit_edit:
      # Only users who can edit metadata could have edited blocking.
      blockers_added, blockers_removed = framework_helpers.ComputeListDeltas(
          orig_blocked_on, blocked_on_iids)
      delta_blockers = blockers_added + blockers_removed
      notify.PrepareAndSendIssueBlockingNotification(
          issue.project_id, mr.request.host, issue.local_id, delta_blockers,
          reporter_id, send_email=send_email)
      # We don't send notification emails to newly blocked issues: either they
      # know they are blocked, or they don't care and can be fixed anyway.
      # This is the same behavior as the issue entry page.

    after_issue_update = _DetermineAndSetAfterIssueUpdate(
        self.services, mr, post_data)
    return _Redirect(
        mr, post_data, issue.local_id, config,
        moved_to_project_name_and_local_id,
        copied_to_project_name_and_local_id, after_issue_update)
+
  def HandleCopyOrMove(self, cnxn, mr, dest_project, issue, send_email, move):
    """Handle Requests dealing with copying or moving an issue between projects.

    Args:
      cnxn: connection to the database.
      mr: commonly used info parsed from the request.
      dest_project: The project protobuf we are moving the issue to.
      issue: The issue protobuf being moved.
      send_email: True to send email for these actions.
      move: Whether this is a move request. The original issue will not exist if
        this is True.

    Returns:
      A tuple of (project_name, local_id) of the newly copied / moved issue.
    """
    # Capture the "issue project:id" reference before the move/copy changes it.
    old_text_ref = 'issue %s:%s' % (issue.project_name, issue.local_id)
    if move:
      # Remove the issue from the fulltext index under its old identity;
      # it is re-indexed under the new identity at the bottom.
      tracker_fulltext.UnindexIssues([issue.issue_id])
      moved_back_iids = self.services.issue.MoveIssues(
          cnxn, dest_project, [issue], self.services.user)
      # MoveIssues() mutates the issue PB, so these are the new name/id.
      ret_project_name_and_local_id = (issue.project_name, issue.local_id)
      new_text_ref = 'issue %s:%s' % ret_project_name_and_local_id
      # Word the comment differently when the issue returns to a project
      # that it previously lived in.
      if issue.issue_id in moved_back_iids:
        content = 'Moved %s back to %s again.' % (old_text_ref, new_text_ref)
      else:
        content = 'Moved %s to now be %s.' % (old_text_ref, new_text_ref)
      comment = self.services.issue.CreateIssueComment(
          mr.cnxn, dest_project.project_id, issue.local_id, mr.auth.user_id,
          content, amendments=[
              tracker_bizobj.MakeProjectAmendment(dest_project.project_name)])
    else:
      copied_issues = self.services.issue.CopyIssues(
          cnxn, dest_project, [issue], self.services.user, mr.auth.user_id)
      copied_issue = copied_issues[0]
      ret_project_name_and_local_id = (copied_issue.project_name,
                                       copied_issue.local_id)
      new_text_ref = 'issue %s:%s' % ret_project_name_and_local_id

      # Add comment to the copied issue.
      old_issue_content = 'Copied %s to %s' % (old_text_ref, new_text_ref)
      self.services.issue.CreateIssueComment(
          mr.cnxn, issue.project_id, issue.local_id, mr.auth.user_id,
          old_issue_content)

      # Add comment to the newly created issue.
      # Add project amendment only if the project changed.
      amendments = []
      if issue.project_id != copied_issue.project_id:
        amendments.append(
            tracker_bizobj.MakeProjectAmendment(dest_project.project_name))
      new_issue_content = 'Copied %s from %s' % (new_text_ref, old_text_ref)
      comment = self.services.issue.CreateIssueComment(
          mr.cnxn, dest_project.project_id, copied_issue.local_id,
          mr.auth.user_id, new_issue_content, amendments=amendments)

    # Re-index the (possibly mutated) original issue PB for fulltext search.
    tracker_fulltext.IndexIssues(
        mr.cnxn, [issue], self.services.user, self.services.issue,
        self.services.config)

    if send_email:
      logging.info('TODO(jrobbins): send email for a move? or combine? %r',
                   comment)

    return ret_project_name_and_local_id
+
+
def _DetermineAndSetAfterIssueUpdate(services, mr, post_data):
  """Determine, and persist if changed, the user's after-issue-update pref.

  Args:
    services: connections to backend services.
    mr: commonly used info parsed from the request.
    post_data: The post_data dict for the current request.

  Returns:
    The IssueUpdateNav value to use when choosing the next page, defaulting
    to tracker_constants.DEFAULT_AFTER_ISSUE_UPDATE when the form did not
    include a preference.
  """
  after_issue_update = tracker_constants.DEFAULT_AFTER_ISSUE_UPDATE
  if 'after_issue_update' in post_data:
    # NOTE(review): post_data values appear to be lists of submitted values;
    # [0] takes the first one -- confirm against the request parsing layer.
    after_issue_update = user_pb2.IssueUpdateNav(
        int(post_data['after_issue_update'][0]))
    if after_issue_update != mr.auth.user_pb.after_issue_update:
      # The user picked a different preference: remember it in their settings.
      logging.info('setting after_issue_update to %r', after_issue_update)
      services.user.UpdateUserSettings(
          mr.cnxn, mr.auth.user_id, mr.auth.user_pb,
          after_issue_update=after_issue_update)

  return after_issue_update
+
+
def _Redirect(
    mr, post_data, local_id, config, moved_to_project_name_and_local_id,
    copied_to_project_name_and_local_id, after_issue_update):
  """Prepare a redirect URL for the issuedetail servlets.

  Args:
    mr: common information parsed from the HTTP request.
    post_data: The post_data dict for the current request.
    local_id: int Issue ID for the current request.
    config: The ProjectIssueConfig pb for the current request.
    moved_to_project_name_and_local_id: tuple containing the project name the
      issue was moved to and the local id in that project.
    copied_to_project_name_and_local_id: tuple containing the project name the
      issue was copied to and the local id in that project.
    after_issue_update: User preference on where to go next.

  Returns:
    String URL to redirect the user to after processing.
  """
  # Restore the search context that was posted along with the form so the
  # next-page URL preserves the user's query, ordering, and pagination.
  mr.can = int(post_data['can'])
  mr.query = post_data['q']
  mr.col_spec = post_data['colspec']
  mr.sort_spec = post_data['sort']
  mr.group_by_spec = post_data['groupby']
  mr.start = int(post_data['start'])
  mr.num = int(post_data['num'])
  mr.local_id = local_id

  # Format a redirect URL based on the user's after-update preference.
  next_id = post_data.get('next_id', '')
  redirect_url = _ChooseNextPage(
      mr, local_id, config, moved_to_project_name_and_local_id,
      copied_to_project_name_and_local_id, after_issue_update, next_id)
  logging.debug('Redirecting user to: %s', redirect_url)
  return redirect_url
+
+
def _ComputeBackToListURL(mr, issue, config):
  """Construct a URL to return the user to the place that they came from.

  Returns None when the user jumped straight to the issue by ID, since
  there is no issue list to go back to in that case.
  """
  if tracker_constants.JUMP_RE.match(mr.query):
    return None

  cursor = '%s:%d' % (issue.project_name, issue.local_id)
  return tracker_helpers.FormatIssueListURL(mr, config, cursor=cursor)
+
+
+def _FieldEditPermitted(
+ labels, blocked_on_str, blocking_str, summary, status, owner_id, cc_ids,
+ page_perms):
+ """Check permissions on editing individual form fields.
+
+ This check is only done if the user does not have the overall
+ EditIssue perm. If the user edited any field that they do not have
+ permission to edit, then they could have forged a post, or maybe
+ they had a valid form open in a browser tab while at the same time
+ their perms in the project were reduced. Either way, the servlet
+ gives them a BadRequest HTTP error and makes them go back and try
+ again.
+
+ TODO(jrobbins): It would be better to show a custom error page that
+ takes the user back to the issue with a new page load rather than
+ having the user use the back button.
+
+ Args:
+ labels: list of label values parsed from the form.
+ blocked_on_str: list of blocked-on values parsed from the form.
+ blocking_str: list of blocking values parsed from the form.
+ summary: issue summary string parsed from the form.
+ status: issue status string parsed from the form.
+ owner_id: issue owner user ID parsed from the form and looked up.
+ cc_ids: list of user IDs for Cc'd users parsed from the form.
+ page_perms: object with fields for permissions the current user
+ has on the current issue.
+
+ Returns:
+ True if there was no permission violation. False if the user tried
+ to edit something that they do not have permission to edit.
+ """
+ if labels or blocked_on_str or blocking_str:
+ logging.info('user has no permission to edit issue metadata')
+ return False
+
+ if summary and not page_perms.EditIssueSummary:
+ logging.info('user has no permission to edit issue summary field')
+ return False
+
+ if status and not page_perms.EditIssueStatus:
+ logging.info('user has no permission to edit issue status field')
+ return False
+
+ if owner_id and not page_perms.EditIssueOwner:
+ logging.info('user has no permission to edit issue owner field')
+ return False
+
+ if cc_ids and not page_perms.EditIssueCc:
+ logging.info('user has no permission to edit issue cc field')
+ return False
+
+ return True
+
+
def _ChooseNextPage(
    mr, local_id, config, moved_to_project_name_and_local_id,
    copied_to_project_name_and_local_id, after_issue_update, next_id):
  """Choose the next page to show the user after an issue update.

  Args:
    mr: information parsed from the request.
    local_id: int Issue ID of the issue that was updated.
    config: project issue config object.
    moved_to_project_name_and_local_id: tuple containing the project name the
      issue was moved to and the local id in that project.
    copied_to_project_name_and_local_id: tuple containing the project name the
      issue was copied to and the local id in that project.
    after_issue_update: user pref on where to go next.
    next_id: string local ID of next issue at the time the form was generated.

  Returns:
    String absolute URL of next page to view.
  """
  issue_ref_str = '%s:%d' % (mr.project_name, local_id)
  kwargs = {
      'ts': int(time.time()),
      'cursor': issue_ref_str,
      }
  # Annotate the list URL with what just happened so the list page can
  # show an appropriate confirmation message.
  if moved_to_project_name_and_local_id:
    dest_project_name, dest_local_id = moved_to_project_name_and_local_id
    kwargs['moved_to_project'] = dest_project_name
    kwargs['moved_to_id'] = dest_local_id
  elif copied_to_project_name_and_local_id:
    dest_project_name, dest_local_id = copied_to_project_name_and_local_id
    kwargs['copied_from_id'] = local_id
    kwargs['copied_to_project'] = dest_project_name
    kwargs['copied_to_id'] = dest_local_id
  else:
    kwargs['updated'] = local_id
  url = tracker_helpers.FormatIssueListURL(mr, config, **kwargs)

  if after_issue_update == user_pb2.IssueUpdateNav.STAY_SAME_ISSUE:
    # If it was a move request then will have to switch to the new project to
    # stay on the same issue.
    if moved_to_project_name_and_local_id:
      mr.project_name = moved_to_project_name_and_local_id[0]
    url = framework_helpers.FormatAbsoluteURL(
        mr, urls.ISSUE_DETAIL, id=local_id)
  elif after_issue_update == user_pb2.IssueUpdateNav.NEXT_IN_LIST:
    if next_id:
      url = framework_helpers.FormatAbsoluteURL(
          mr, urls.ISSUE_DETAIL, id=next_id)

  return url
+
+
class SetStarForm(jsonfeed.JsonFeed):
  """Star or unstar the specified issue for the logged in user."""

  def AssertBasePermission(self, mr):
    """Check that the requester may star this specific issue.

    Raises:
      permissions.PermissionException: the user lacks the SET_STAR perm
        on the requested issue.
    """
    super(SetStarForm, self).AssertBasePermission(mr)
    issue = self.services.issue.GetIssueByLocalID(
        mr.cnxn, mr.project_id, mr.local_id)
    if not self.CheckPerm(mr, permissions.SET_STAR, art=issue):
      raise permissions.PermissionException(
          'You are not allowed to star issues')

  def HandleRequest(self, mr):
    """Build up a dictionary of data values to use when rendering the page.

    Args:
      mr: commonly used info parsed from the request.

    Returns:
      Dict of values used by EZT for rendering the page.
    """
    issue = self.services.issue.GetIssueByLocalID(
        mr.cnxn, mr.project_id, mr.local_id)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    # NOTE(review): mr.starred appears to carry the requested *new* star
    # state (not the current one) -- confirm against monorailrequest parsing.
    self.services.issue_star.SetStar(
        mr.cnxn, self.services, config, issue.issue_id, mr.auth.user_id,
        mr.starred)

    return {
        'starred': bool(mr.starred),
        }
+
+
def _ShouldShowFlipper(mr, services):
  """Return True if we should show the flipper."""

  # The user entered a specific issue ID of an existing issue: no flipper.
  if tracker_constants.JUMP_RE.match(mr.query):
    return False

  # Check if the user came directly to an issue without specifying any
  # query or sort. E.g., through crbug.com. Generating the issue ref
  # list can be too expensive in projects that have a large number of
  # issues. The all and open issues cans are broad queries, other
  # canned queries should be narrow enough to not need this special
  # treatment.
  broad_cans = [tracker_constants.ALL_ISSUES_CAN,
                tracker_constants.OPEN_ISSUES_CAN]
  if not mr.query and not mr.sort_spec and mr.can in broad_cans:
    num_issues_in_project = services.issue.GetHighestLocalID(
        mr.cnxn, mr.project_id)
    if num_issues_in_project > settings.threshold_to_suppress_prev_next:
      return False

  return True
+
+
class _Flipper(object):
  """Helper class for user to flip among issues within a search result."""

  def __init__(self, mr, services, prof):
    """Store info for issue flipper widget (prev & next navigation).

    Args:
      mr: commonly used info parsed from the request.
      services: connections to backend services.
      prof: a Profiler for the servlet's handling of the current request.
    """
    if not _ShouldShowFlipper(mr, services):
      self.show = ezt.boolean(False)
      self.pipeline = None
      # self.services is intentionally not stored here: SearchForIIDs()
      # returns early when pipeline is None, so it is never needed.
      return

    self.pipeline = frontendsearchpipeline.FrontendSearchPipeline(
        mr, services, prof, None)

    self.services = services

  def SearchForIIDs(self, mr, issue):
    """Do the next step of searching for issue IDs for the flipper.

    Args:
      mr: commonly used info parsed from the request.
      issue: the currently viewed issue.
    """
    if not self.pipeline:
      return

    if not mr.errors.AnyErrors():
      # Only do the search if the user's query parsed OK.
      self.pipeline.SearchForIIDs()

    # Note: we never call MergeAndSortIssues() because we don't need a unified
    # sorted list, we only need to know the position on such a list of the
    # current issue.
    prev_iid, cur_index, next_iid = self.pipeline.DetermineIssuePosition(issue)

    logging.info('prev_iid, cur_index, next_iid is %r %r %r',
                 prev_iid, cur_index, next_iid)
    # pylint: disable=attribute-defined-outside-init
    if cur_index is None or self.pipeline.total_count == 1:
      # The user probably edited the URL, or bookmarked an issue
      # in a search context that no longer matches the issue.
      self.show = ezt.boolean(False)
    else:
      # Use ezt.boolean() for consistency with the False branch above and
      # with the other EZT template values (was a bare Python True).
      self.show = ezt.boolean(True)
      self.current = cur_index + 1
      self.total_count = self.pipeline.total_count
      self.next_id = None
      self.next_project_name = None
      self.prev_url = ''
      self.next_url = ''

      if prev_iid:
        prev_issue = self.services.issue.GetIssue(mr.cnxn, prev_iid)
        prev_path = '/p/%s%s' % (prev_issue.project_name, urls.ISSUE_DETAIL)
        self.prev_url = framework_helpers.FormatURL(
            mr, prev_path, id=prev_issue.local_id)

      if next_iid:
        next_issue = self.services.issue.GetIssue(mr.cnxn, next_iid)
        self.next_id = next_issue.local_id
        self.next_project_name = next_issue.project_name
        next_path = '/p/%s%s' % (next_issue.project_name, urls.ISSUE_DETAIL)
        self.next_url = framework_helpers.FormatURL(
            mr, next_path, id=next_issue.local_id)

  def DebugString(self):
    """Return a string representation useful in debugging."""
    # NOTE(review): self.show is only set by __init__ or SearchForIIDs();
    # calling this before either has run would raise AttributeError.
    if self.show:
      return 'on %s of %s; prev_url:%s; next_url:%s' % (
          self.current, self.total_count, self.prev_url, self.next_url)
    else:
      return 'invisible flipper(show=%s)' % self.show
+
+
class IssueCommentDeletion(servlet.Servlet):
  """Form handler that allows user to delete/undelete comments."""

  def ProcessFormData(self, mr, post_data):
    """Process the form that un/deletes an issue comment.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing.

    Raises:
      permissions.PermissionException: the user may not un/delete this
        comment.
    """
    logging.info('post_data = %s', post_data)
    local_id = int(post_data['id'])
    sequence_num = int(post_data['sequence_num'])
    # mode == '1' requests deletion; any other value requests undeletion.
    delete = (post_data['mode'] == '1')

    issue = self.services.issue.GetIssueByLocalID(
        mr.cnxn, mr.project_id, local_id)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)

    # Comments are addressed by their sequence number within the issue.
    all_comments = self.services.issue.GetCommentsForIssue(
        mr.cnxn, issue.issue_id)
    logging.info('comments on %s are: %s', local_id, all_comments)
    comment = all_comments[sequence_num]

    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, mr.auth.effective_ids, config)

    # Reject when the user lacks the delete perm, or when the comment was
    # flagged as spam and the requester is its author (authors may not
    # un/delete their own spam-flagged comments).
    if ((comment.is_spam and mr.auth.user_id == comment.user_id) or
        not permissions.CanDelete(
            mr.auth.user_id, mr.auth.effective_ids, mr.perms,
            comment.deleted_by, comment.user_id, mr.project,
            permissions.GetRestrictions(issue), granted_perms=granted_perms)):
      raise permissions.PermissionException('Cannot delete comment')

    # Soft-delete records who deleted it, allowing later undeletion.
    self.services.issue.SoftDeleteComment(
        mr.cnxn, mr.project_id, local_id, sequence_num,
        mr.auth.user_id, self.services.user, delete=delete)

    return framework_helpers.FormatAbsoluteURL(
        mr, urls.ISSUE_DETAIL, id=local_id)
+
+
class IssueDeleteForm(servlet.Servlet):
  """A form handler to delete or undelete an issue.

  Project owners will see a button on every issue to delete it, and
  if they specifically visit a deleted issue they will see a button to
  undelete it.
  """

  def ProcessFormData(self, mr, post_data):
    """Process the form that un/deletes an issue.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing.

    Raises:
      permissions.PermissionException: the user lacks the DELETE_ISSUE perm.
    """
    local_id = int(post_data['id'])
    # Presence of the 'delete' key selects deletion; absence means undelete.
    delete = 'delete' in post_data
    logging.info('Marking issue %d as deleted: %r', local_id, delete)

    issue = self.services.issue.GetIssueByLocalID(
        mr.cnxn, mr.project_id, local_id)
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    granted_perms = tracker_bizobj.GetGrantedPerms(
        issue, mr.auth.effective_ids, config)
    permit_delete = self.CheckPerm(
        mr, permissions.DELETE_ISSUE, art=issue, granted_perms=granted_perms)
    if not permit_delete:
      raise permissions.PermissionException('Cannot un/delete issue')

    # Soft-delete so the issue can be restored via this same form.
    self.services.issue.SoftDeleteIssue(
        mr.cnxn, mr.project_id, local_id, delete, self.services.user)

    return framework_helpers.FormatAbsoluteURL(
        mr, urls.ISSUE_DETAIL, id=local_id)
+
+# TODO(jrobbins): do we want this?
+# class IssueDerivedLabelsJSON(jsonfeed.JsonFeed)
+
+
+def CheckCopyIssueRequest(
+    services, mr, issue, copy_selected, copy_to, errors):
+  """Process the copy issue portions of the issue update form.
+
+  Args:
+    services: A Services object
+    mr: commonly used info parsed from the request.
+    issue: Issue protobuf for the issue being copied.
+    copy_selected: True if the user selected the Copy action.
+    copy_to: A project_name or url to copy this issue to or None
+      if the project name wasn't sent in the form.
+    errors: The errors object for this request.
+
+  Returns:
+    The project pb for the project the issue will be copy to
+    or None if the copy cannot be performed. Perhaps because
+    the project does not exist, in which case copy_to and
+    copy_to_project will be set on the errors object. Perhaps
+    the user does not have permission to copy the issue to the
+    destination project, in which case the copy_to field will be
+    set on the errors object.
+  """
+  # Nothing to validate if the Copy action was not selected at all.
+  if not copy_selected:
+    return None
+
+  if not copy_to:
+    errors.copy_to = 'No destination project specified'
+    errors.copy_to_project = copy_to
+    return None
+
+  copy_to_project = services.project.GetProjectByName(mr.cnxn, copy_to)
+  if not copy_to_project:
+    errors.copy_to = 'No such project: ' + copy_to
+    errors.copy_to_project = copy_to
+    return None
+
+  # permissions enforcement
+  if not servlet_helpers.CheckPermForProject(
+      mr, permissions.EDIT_ISSUE, copy_to_project):
+    errors.copy_to = 'You do not have permission to copy issues to project'
+    errors.copy_to_project = copy_to
+    return None
+
+  # Restricted issues are never allowed to be copied to another project.
+  elif permissions.GetRestrictions(issue):
+    errors.copy_to = (
+        'Issues with Restrict labels are not allowed to be copied.')
+    errors.copy_to_project = ''
+    return None
+
+  return copy_to_project
+
+
+def CheckMoveIssueRequest(
+    services, mr, issue, move_selected, move_to, errors):
+  """Process the move issue portions of the issue update form.
+
+  Args:
+    services: A Services object
+    mr: commonly used info parsed from the request.
+    issue: Issue protobuf for the issue being moved.
+    move_selected: True if the user selected the Move action.
+    move_to: A project_name or url to move this issue to or None
+      if the project name wasn't sent in the form.
+    errors: The errors object for this request.
+
+  Returns:
+    The project pb for the project the issue will be moved to
+    or None if the move cannot be performed. Perhaps because
+    the project does not exist, in which case move_to and
+    move_to_project will be set on the errors object. Perhaps
+    the user does not have permission to move the issue to the
+    destination project, in which case the move_to field will be
+    set on the errors object.
+  """
+  # Nothing to validate if the Move action was not selected at all.
+  if not move_selected:
+    return None
+
+  if not move_to:
+    errors.move_to = 'No destination project specified'
+    errors.move_to_project = move_to
+    return None
+
+  # Moving an issue into the project it already lives in is a no-op,
+  # so reject it as a user error.
+  if issue.project_name == move_to:
+    errors.move_to = 'This issue is already in project ' + move_to
+    errors.move_to_project = move_to
+    return None
+
+  move_to_project = services.project.GetProjectByName(mr.cnxn, move_to)
+  if not move_to_project:
+    errors.move_to = 'No such project: ' + move_to
+    errors.move_to_project = move_to
+    return None
+
+  # permissions enforcement
+  if not servlet_helpers.CheckPermForProject(
+      mr, permissions.EDIT_ISSUE, move_to_project):
+    errors.move_to = 'You do not have permission to move issues to project'
+    errors.move_to_project = move_to
+    return None
+
+  # Restricted issues are never allowed to be moved to another project.
+  elif permissions.GetRestrictions(issue):
+    errors.move_to = (
+        'Issues with Restrict labels are not allowed to be moved.')
+    errors.move_to_project = ''
+    return None
+
+  return move_to_project
diff --git a/appengine/monorail/tracker/issueentry.py b/appengine/monorail/tracker/issueentry.py
new file mode 100644
index 0000000..bbc3544
--- /dev/null
+++ b/appengine/monorail/tracker/issueentry.py
@@ -0,0 +1,410 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet that implements the entry of new issues."""
+
+import logging
+import time
+from third_party import ezt
+
+from features import notify
+from framework import actionlimit
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import permissions
+from framework import servlet
+from framework import template_helpers
+from framework import urls
+from tracker import field_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+PLACEHOLDER_SUMMARY = 'Enter one-line summary'
+
+
+class IssueEntry(servlet.Servlet):
+  """IssueEntry shows a page with a simple form to enter a new issue."""
+
+  # EZT template rendered by GatherPageData, and the tab highlighted on it.
+  _PAGE_TEMPLATE = 'tracker/issue-entry-page.ezt'
+  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+  # Action types counted against the rate limit / captcha logic.
+  _CAPTCHA_ACTION_TYPES = [actionlimit.ISSUE_COMMENT]
+
+  def AssertBasePermission(self, mr):
+    """Check whether the user has any permission to visit this page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Raises:
+      PermissionException: if the user may not create issues here.
+    """
+    super(IssueEntry, self).AssertBasePermission(mr)
+    if not self.CheckPerm(mr, permissions.CREATE_ISSUE):
+      raise permissions.PermissionException(
+          'User is not allowed to enter an issue')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    with self.profiler.Phase('getting config'):
+      config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+    # In addition to checking perms, we adjust some default field values for
+    # project members.
+    is_member = framework_bizobj.UserIsInProject(
+        mr.project, mr.auth.effective_ids)
+    page_perms = self.MakePagePerms(
+        mr, None,
+        permissions.CREATE_ISSUE,
+        permissions.SET_STAR,
+        permissions.EDIT_ISSUE,
+        permissions.EDIT_ISSUE_SUMMARY,
+        permissions.EDIT_ISSUE_STATUS,
+        permissions.EDIT_ISSUE_OWNER,
+        permissions.EDIT_ISSUE_CC)
+
+    # wkp is the "well-known prompt" (issue template) used to pre-fill
+    # the form fields below.
+    wkp = _SelectTemplate(mr.template_name, config, is_member)
+
+    if wkp.summary:
+      initial_summary = wkp.summary
+      initial_summary_must_be_edited = wkp.summary_must_be_edited
+    else:
+      # No template summary: show a placeholder that must be replaced.
+      initial_summary = PLACEHOLDER_SUMMARY
+      initial_summary_must_be_edited = True
+
+    if wkp.status:
+      initial_status = wkp.status
+    elif is_member:
+      initial_status = 'Accepted'
+    else:
+      initial_status = 'New'  # not offering meta, only used in hidden field.
+
+    component_paths = []
+    for component_id in wkp.component_ids:
+      component_paths.append(
+          tracker_bizobj.FindComponentDefByID(component_id, config).path)
+    initial_components = ', '.join(component_paths)
+
+    if wkp.owner_id:
+      initial_owner = framework_views.MakeUserView(
+          mr.cnxn, self.services.user, wkp.owner_id)
+      initial_owner_name = initial_owner.email
+    elif wkp.owner_defaults_to_member and page_perms.EditIssue:
+      # Members filing through a default-to-member template become the owner.
+      initial_owner_name = mr.auth.user_view.email
+    else:
+      initial_owner_name = ''
+
+    # Check whether to allow attachments from the entry page
+    allow_attachments = tracker_helpers.IsUnderSoftAttachmentQuota(mr.project)
+
+    config_view = tracker_views.ConfigView(mr, self.services, config)
+    # If the user followed a link that specified the template name, make sure
+    # that it is also in the menu as the current choice.
+    for template_view in config_view.templates:
+      if template_view.name == mr.template_name:
+        template_view.can_view = ezt.boolean(True)
+
+    # Only offer a template chooser when there is more than one visible
+    # template.
+    offer_templates = len(list(
+        tmpl for tmpl in config_view.templates if tmpl.can_view)) > 1
+    restrict_to_known = config.restrict_to_known
+    field_name_set = {fd.field_name.lower() for fd in config.field_defs
+                      if not fd.is_deleted}  # TODO(jrobbins): restrictions
+    # Drop labels that would duplicate an enum custom field of the same name.
+    link_or_template_labels = mr.GetListParam('labels', wkp.labels)
+    labels = [lab for lab in link_or_template_labels
+              if not tracker_bizobj.LabelIsMaskedByField(lab, field_name_set)]
+
+    field_user_views = tracker_views.MakeFieldUserViews(
+        mr.cnxn, wkp, self.services.user)
+    field_views = [
+        tracker_views.MakeFieldValueView(
+            fd, config, link_or_template_labels, [], wkp.field_values,
+            field_user_views)
+        # TODO(jrobbins): field-level view restrictions, display options
+        for fd in config.field_defs
+        if not fd.is_deleted]
+
+    page_data = {
+        'issue_tab_mode': 'issueEntry',
+        'initial_summary': initial_summary,
+        'template_summary': initial_summary,
+        'clear_summary_on_click': ezt.boolean(
+            initial_summary_must_be_edited and
+            'initial_summary' not in mr.form_overrides),
+        'must_edit_summary': ezt.boolean(initial_summary_must_be_edited),
+
+        'initial_description': wkp.content,
+        'template_name': wkp.name,
+        'component_required': ezt.boolean(wkp.component_required),
+        'initial_status': initial_status,
+        'initial_owner': initial_owner_name,
+        'initial_components': initial_components,
+        'initial_cc': '',
+        'initial_blocked_on': '',
+        'initial_blocking': '',
+        'labels': labels,
+        'fields': field_views,
+
+        'any_errors': ezt.boolean(mr.errors.AnyErrors()),
+        'page_perms': page_perms,
+        'allow_attachments': ezt.boolean(allow_attachments),
+        'max_attach_size': template_helpers.BytesKbOrMb(
+            framework_constants.MAX_POST_BODY_SIZE),
+
+        'offer_templates': ezt.boolean(offer_templates),
+        'config': config_view,
+
+        'restrict_to_known': ezt.boolean(restrict_to_known),
+        }
+
+    return page_data
+
+  def GatherHelpData(self, mr, _page_data):
+    """Return a dict of values to drive on-page user help.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      _page_data: Dictionary of base and page template data.
+
+    Returns:
+      A dict of values to drive on-page user help, to be added to page_data.
+    """
+    is_privileged_domain_user = framework_bizobj.IsPriviledgedDomainUser(
+        mr.auth.user_pb.email)
+    # Show the privacy click-through cue until the user dismisses it.
+    cue = None
+    if (mr.auth.user_id and
+        'privacy_click_through' not in mr.auth.user_pb.dismissed_cues):
+      cue = 'privacy_click_through'
+
+    return {
+        'is_privileged_domain_user': ezt.boolean(is_privileged_domain_user),
+        'cue': cue,
+        }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the issue entry form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: The post_data dict for the current request.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+    parsed = tracker_helpers.ParseIssueRequest(
+        mr.cnxn, post_data, self.services, mr.errors, mr.project_name)
+    # Keep copies of the user's input for re-display if validation fails.
+    bounce_labels = parsed.labels[:]
+    bounce_fields = tracker_views.MakeBounceFieldValueViews(
+        parsed.fields.vals, config)
+    field_helpers.ShiftEnumFieldsIntoLabels(
+        parsed.labels, parsed.labels_remove, parsed.fields.vals,
+        parsed.fields.vals_remove, config)
+    field_values = field_helpers.ParseFieldValues(
+        mr.cnxn, self.services.user, parsed.fields.vals, config)
+
+    labels = _DiscardUnusedTemplateLabelPrefixes(parsed.labels)
+    component_ids = tracker_helpers.LookupComponentIDs(
+        parsed.components.paths, config, mr.errors)
+
+    reporter_id = mr.auth.user_id
+    self.CheckCaptcha(mr, post_data)
+
+    # Validation: any problem found below is recorded on mr.errors and
+    # reported back to the user via PleaseCorrect() further down.
+    if not parsed.summary.strip():
+      mr.errors.summary = 'Summary is required'
+
+    if not parsed.comment.strip():
+      mr.errors.comment = 'A description is required'
+
+    if len(parsed.comment) > tracker_constants.MAX_COMMENT_CHARS:
+      mr.errors.comment = 'Comment is too long'
+    if len(parsed.summary) > tracker_constants.MAX_SUMMARY_CHARS:
+      mr.errors.summary = 'Summary is too long'
+
+    if parsed.users.owner_id is None:
+      mr.errors.owner = 'Invalid owner username'
+    else:
+      valid, msg = tracker_helpers.IsValidIssueOwner(
+          mr.cnxn, mr.project, parsed.users.owner_id, self.services)
+      if not valid:
+        mr.errors.owner = msg
+
+    # A None entry means that a cc username failed to resolve to a user id.
+    if None in parsed.users.cc_ids:
+      mr.errors.cc = 'Invalid Cc username'
+
+    field_helpers.ValidateCustomFields(
+        mr, self.services, field_values, config, mr.errors)
+
+    new_local_id = None
+
+    if not mr.errors.AnyErrors():
+      try:
+        if parsed.attachments:
+          # Charge the attachments against the project's quota first.
+          new_bytes_used = tracker_helpers.ComputeNewQuotaBytesUsed(
+              mr.project, parsed.attachments)
+          self.services.project.UpdateProject(
+              mr.cnxn, mr.project.project_id,
+              attachment_bytes_used=new_bytes_used)
+
+        # Find the template the user filed through so that template-derived
+        # description lines can be highlighted in the saved comment.
+        template_content = ''
+        for wkp in config.templates:
+          if wkp.name == parsed.template_name:
+            template_content = wkp.content
+        marked_comment = _MarkupDescriptionOnInput(
+            parsed.comment, template_content)
+        has_star = 'star' in post_data and post_data['star'] == '1'
+
+        new_local_id = self.services.issue.CreateIssue(
+            mr.cnxn, self.services,
+            mr.project_id, parsed.summary, parsed.status, parsed.users.owner_id,
+            parsed.users.cc_ids, labels, field_values,
+            component_ids, reporter_id, marked_comment,
+            blocked_on=parsed.blocked_on.iids, blocking=parsed.blocking.iids,
+            attachments=parsed.attachments)
+        self.services.project.UpdateRecentActivity(
+            mr.cnxn, mr.project.project_id)
+
+        issue = self.services.issue.GetIssueByLocalID(
+            mr.cnxn, mr.project_id, new_local_id)
+
+        if has_star:
+          self.services.issue_star.SetStar(
+              mr.cnxn, self.services, config, issue.issue_id, reporter_id, True)
+
+      except tracker_helpers.OverAttachmentQuota:
+        mr.errors.attachments = 'Project attachment quota exceeded.'
+
+    # Count this submission against the user's action rate limits.
+    counts = {actionlimit.ISSUE_COMMENT: 1,
+              actionlimit.ISSUE_ATTACHMENT: len(parsed.attachments)}
+    self.CountRateLimitedActions(mr, counts)
+
+    if mr.errors.AnyErrors():
+      component_required = False
+      for wkp in config.templates:
+        if wkp.name == parsed.template_name:
+          component_required = wkp.component_required
+      # Re-render the entry form with the user's input and error messages.
+      self.PleaseCorrect(
+          mr, initial_summary=parsed.summary, initial_status=parsed.status,
+          initial_owner=parsed.users.owner_username,
+          initial_cc=', '.join(parsed.users.cc_usernames),
+          initial_components=', '.join(parsed.components.paths),
+          initial_comment=parsed.comment, labels=bounce_labels,
+          fields=bounce_fields,
+          initial_blocked_on=parsed.blocked_on.entered_str,
+          initial_blocking=parsed.blocking.entered_str,
+          component_required=ezt.boolean(component_required))
+      return
+
+    notify.PrepareAndSendIssueChangeNotification(
+        mr.project_id, new_local_id, mr.request.host,
+        reporter_id, 0)  # Initial description is comment 0.
+
+    notify.PrepareAndSendIssueBlockingNotification(
+        mr.project_id, mr.request.host, new_local_id,
+        parsed.blocked_on.iids, reporter_id)
+
+    # format a redirect url
+    return framework_helpers.FormatAbsoluteURL(
+        mr, urls.ISSUE_DETAIL, id=new_local_id)
+
+
+def _MarkupDescriptionOnInput(content, tmpl_text):
+  """Return HTML for the content of an issue description or comment.
+
+  Args:
+    content: the text submitted by the user, any user-entered markup
+             has already been escaped.
+    tmpl_text: the initial text that was put into the textarea.
+
+  Returns:
+    The description content text with template lines highlighted.
+  """
+  # Compare against non-empty, whitespace-stripped template lines only.
+  tmpl_lines = tmpl_text.split('\n')
+  tmpl_lines = [pl.strip() for pl in tmpl_lines if pl.strip()]
+
+  entered_lines = content.split('\n')
+  marked_lines = [_MarkupDescriptionLineOnInput(line, tmpl_lines)
+                  for line in entered_lines]
+  return '\n'.join(marked_lines)
+
+
+def _MarkupDescriptionLineOnInput(line, tmpl_lines):
+  """Markup one line of an issue description that was just entered.
+
+  Args:
+    line: string containing one line of the user-entered comment.
+    tmpl_lines: list of strings for the text of the template lines.
+
+  Returns:
+    The same user-entered line, or that line highlighted to
+    indicate that it came from the issue template.
+  """
+  # Only the template prefix is bolded; anything the user appended after it
+  # on the same line is kept as plain text.
+  for tmpl_line in tmpl_lines:
+    if line.startswith(tmpl_line):
+      return '<b>' + tmpl_line + '</b>' + line[len(tmpl_line):]
+
+  return line
+
+
+def _DiscardUnusedTemplateLabelPrefixes(labels):
+  """Drop any labels that end in '-?'.
+
+  Args:
+    labels: a list of label strings.
+
+  Returns:
+    A list of the same labels, but without any that end with '-?'.
+    Those label prefixes in the new issue templates are intended to
+    prompt the user to enter some label with that prefix, but if
+    nothing is entered there, we do not store anything.
+  """
+  return [lab for lab in labels
+          if not lab.endswith('-?')]
+
+
+def _SelectTemplate(requested_template_name, config, is_member):
+  """Return the template to show to the user in this situation.
+
+  Args:
+    requested_template_name: name of template requested by user, or None.
+    config: ProjectIssueConfig for this project.
+    is_member: True if user is a project member.
+
+  Returns:
+    A Template PB with info needed to populate the issue entry form.
+  """
+  # First preference: the template explicitly named in the request URL.
+  if requested_template_name:
+    for template in config.templates:
+      if requested_template_name == template.name:
+        return template
+    logging.info('Issue template name %s not found', requested_template_name)
+
+  # No template was specified, or it was not found, so go with a default.
+  if is_member:
+    default_id = config.default_template_for_developers
+  else:
+    default_id = config.default_template_for_users
+
+  # Newly created projects have no default templates specified, use hard-coded
+  # positions of the templates that are defined in tracker_constants.
+  if default_id == 0:
+    if is_member:
+      return config.templates[0]
+    elif len(config.templates) > 1:
+      return config.templates[1]
+
+  # This project has a relevant default template ID that we can use.
+  for template in config.templates:
+    if template.template_id == default_id:
+      return template
+
+  # If it was not found, just go with a template that we know exists.
+  return config.templates[0]
diff --git a/appengine/monorail/tracker/issueexport.py b/appengine/monorail/tracker/issueexport.py
new file mode 100644
index 0000000..4965193
--- /dev/null
+++ b/appengine/monorail/tracker/issueexport.py
@@ -0,0 +1,205 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet to export a range of issues in JSON format.
+"""
+
+import logging
+import time
+
+from third_party import ezt
+
+from framework import permissions
+from framework import jsonfeed
+from framework import servlet
+from tracker import tracker_bizobj
+
+
+class IssueExport(servlet.Servlet):
+  """IssueExportControls lets an admin choose how to export issues."""
+
+  _PAGE_TEMPLATE = 'tracker/issue-export-page.ezt'
+  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+
+  def AssertBasePermission(self, mr):
+    """Make sure that the logged in user has permission to view this page."""
+    super(IssueExport, self).AssertBasePermission(mr)
+    # Exporting reveals all issue data, so it is restricted to site admins.
+    if not mr.auth.user_pb.is_site_admin:
+      raise permissions.PermissionException(
+          'Only site admins may export issues')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+
+    return {
+        'issue_tab_mode': None,
+        # Pre-fill the range controls from any start/num query parameters.
+        'initial_start': mr.start,
+        'initial_num': mr.num,
+        'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
+    }
+
+
+class IssueExportJSON(jsonfeed.JsonFeed):
+  """IssueExport shows a range of issues in JSON format."""
+
+  # Pretty-print the JSON output.
+  JSON_INDENT = 4
+
+  def AssertBasePermission(self, mr):
+    """Make sure that the logged in user has permission to view this page."""
+    super(IssueExportJSON, self).AssertBasePermission(mr)
+    # Exporting reveals all issue data, so it is restricted to site admins.
+    if not mr.auth.user_pb.is_site_admin:
+      raise permissions.PermissionException(
+          'Only site admins may export issues')
+
+  def HandleRequest(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    # With no start/num parameters, export every issue in the project;
+    # otherwise export only the requested local-ID range.
+    if not mr.start and not mr.num:
+      issues = self.services.issue.GetAllIssuesInProject(
+          mr.cnxn, mr.project.project_id)
+    else:
+      local_id_range = range(mr.start, mr.start + mr.num)
+      issues = self.services.issue.GetIssuesByLocalIDs(
+          mr.cnxn, mr.project.project_id, local_id_range)
+    # Collect the IDs of every user referenced by the issues, their
+    # comments, and their stars, so emails can be resolved in one lookup.
+    user_id_set = tracker_bizobj.UsersInvolvedInIssues(issues)
+
+    comments_dict = self.services.issue.GetCommentsForIssues(
+        mr.cnxn, [issue.issue_id for issue in issues])
+    for comment_list in comments_dict.itervalues():
+      user_id_set.update(
+          tracker_bizobj.UsersInvolvedInCommentList(comment_list))
+
+    starrers_dict = self.services.issue_star.LookupItemsStarrers(
+        mr.cnxn, [issue.issue_id for issue in issues])
+    for starrer_id_list in starrers_dict.itervalues():
+      user_id_set.update(starrer_id_list)
+
+    # The value 0 indicates "no user", e.g., that an issue has no owner.
+    # We don't need to create a User row to represent that.
+    user_id_set.discard(0)
+    email_dict = self.services.user.LookupUserEmails(mr.cnxn, user_id_set)
+
+    # Deleted issues are skipped entirely.
+    issues_json = [
+        self._MakeIssueJSON(
+            mr, issue, email_dict,
+            comments_dict.get(issue.issue_id, []),
+            starrers_dict.get(issue.issue_id, []))
+        for issue in issues if not issue.deleted]
+
+    json_data = {
+        'metadata': {
+            'version': 1,
+            'when': int(time.time()),
+            'who': mr.auth.email,
+            'project': mr.project_name,
+            'start': mr.start,
+            'num': mr.num,
+        },
+        'issues': issues_json,
+        # This list could be derived from the 'issues', but we provide it for
+        # ease of processing.
+        'emails': email_dict.values(),
+    }
+    return json_data
+
+  def _MakeAmendmentJSON(self, amendment, email_dict):
+    """Return a dict describing one amendment; optional parts are omitted."""
+    amendment_json = {
+        'field': amendment.field.name,
+    }
+    if amendment.custom_field_name:
+      amendment_json.update({'custom_field_name': amendment.custom_field_name})
+    if amendment.newvalue:
+      amendment_json.update({'new_value': amendment.newvalue})
+    if amendment.added_user_ids:
+      amendment_json.update(
+          {'added_emails': [email_dict.get(user_id)
+                            for user_id in amendment.added_user_ids]})
+    if amendment.removed_user_ids:
+      amendment_json.update(
+          {'removed_emails': [email_dict.get(user_id)
+                              for user_id in amendment.removed_user_ids]})
+    return amendment_json
+
+  def _MakeAttachmentJSON(self, attachment):
+    """Return a dict describing one attachment, or None if it was deleted."""
+    if attachment.deleted:
+      return None
+    attachment_json = {
+        'name': attachment.filename,
+        'size': attachment.filesize,
+        'mimetype': attachment.mimetype,
+        'gcs_object_id': attachment.gcs_object_id,
+    }
+    return attachment_json
+
+  def _MakeCommentJSON(self, comment, email_dict):
+    """Return a dict describing one comment, or None if it was deleted."""
+    if comment.deleted_by:
+      return None
+    amendments = [self._MakeAmendmentJSON(a, email_dict)
+                  for a in comment.amendments]
+    attachments = [self._MakeAttachmentJSON(a)
+                   for a in comment.attachments]
+    comment_json = {
+        'timestamp': comment.timestamp,
+        'commenter': email_dict.get(comment.user_id),
+        'content': comment.content,
+        # Filter out the Nones produced for deleted amendments/attachments.
+        'amendments': [a for a in amendments if a],
+        'attachments': [a for a in attachments if a],
+    }
+    return comment_json
+
+  def _MakeFieldValueJSON(self, mr, field, email_dict):
+    """Return a dict describing one custom field value on an issue."""
+    field_value_json = {
+        'field': self.services.config.LookupField(
+            mr.cnxn, mr.project.project_id, field.field_id)
+    }
+    if field.int_value:
+      field_value_json['int_value'] = field.int_value
+    if field.str_value:
+      field_value_json['str_value'] = field.str_value
+    if field.user_id:
+      field_value_json['user_value'] = email_dict.get(field.user_id)
+    return field_value_json
+
+  def _MakeIssueJSON(
+      self, mr, issue, email_dict, comment_list, starrer_id_list):
+    """Return a dict of info about the issue and its comments."""
+    comments = [self._MakeCommentJSON(c, email_dict) for c in comment_list]
+    issue_json = {
+        'local_id': issue.local_id,
+        'reporter': email_dict.get(issue.reporter_id),
+        'summary': issue.summary,
+        'owner': email_dict.get(issue.owner_id),
+        'status': issue.status,
+        'cc': [email_dict[cc_id] for cc_id in issue.cc_ids],
+        'labels': issue.labels,
+        'fields': [self._MakeFieldValueJSON(mr, field, email_dict)
+                   for field in issue.field_values],
+        'starrers': [email_dict[starrer] for starrer in starrer_id_list],
+        'comments': [c for c in comments if c],
+        'opened': issue.opened_timestamp,
+        'modified': issue.modified_timestamp,
+        'closed': issue.closed_timestamp,
+    }
+    # TODO(agable): Export cross-project references as well.
+    # Blocking/blocked-on/merged-into references are only exported when the
+    # other issue lives in the same project.
+    if issue.blocked_on_iids:
+      issue_json['blocked_on'] = [i.local_id for i in
+          self.services.issue.GetIssues(mr.cnxn, issue.blocked_on_iids)
+          if i.project_id == mr.project.project_id]
+    if issue.blocking_iids:
+      issue_json['blocking'] = [i.local_id for i in
+          self.services.issue.GetIssues(mr.cnxn, issue.blocking_iids)
+          if i.project_id == mr.project.project_id]
+    if issue.merged_into:
+      merge = self.services.issue.GetIssue(mr.cnxn, issue.merged_into)
+      if merge.project_id == mr.project.project_id:
+        issue_json['merged_into'] = merge.local_id
+    return issue_json
diff --git a/appengine/monorail/tracker/issueimport.py b/appengine/monorail/tracker/issueimport.py
new file mode 100644
index 0000000..cc63f9a
--- /dev/null
+++ b/appengine/monorail/tracker/issueimport.py
@@ -0,0 +1,304 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet to import a file of issues in JSON format.
+"""
+
+import collections
+import json
+import logging
+import time
+
+from third_party import ezt
+
+from features import filterrules_helpers
+from framework import framework_helpers
+from framework import jsonfeed
+from framework import permissions
+from framework import servlet
+from framework import urls
+from proto import tracker_pb2
+
+
+# Bundle of everything parsed from an uploaded import JSON file.  Built by
+# IssueImport._ParseObjects and later handed to the save phase.
+ParserState = collections.namedtuple(
+    'ParserState',
+    'user_id_dict, nonexist_emails, issue_list, comments_dict, starrers_dict, '
+    'relations_dict')
+
+
+class IssueImport(servlet.Servlet):
+ """IssueImport loads a file of issues in JSON format."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-import-page.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+
+  def AssertBasePermission(self, mr):
+    """Make sure that the logged in user has permission to view this page."""
+    super(IssueImport, self).AssertBasePermission(mr)
+    # Importing writes issues and comments wholesale, so it is restricted
+    # to site administrators.
+    if not mr.auth.user_pb.is_site_admin:
+      raise permissions.PermissionException(
+          'Only site admins may import issues')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page."""
+    return {
+        'issue_tab_mode': None,
+        'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
+        # No errors yet on the initial page view; ProcessFormData fills
+        # this in on re-render.
+        'import_errors': [],
+    }
+
+  def ProcessFormData(self, mr, post_data):
+    """Process the issue entry form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: The post_data dict for the current request.
+
+    Returns:
+      String URL to redirect the user to after processing.
+    """
+    import_errors = []
+    json_data = None
+
+    # When checked, parse and validate the file but do not write anything.
+    pre_check_only = 'pre_check_only' in post_data
+
+    uploaded_file = post_data.get('jsonfile')
+    if uploaded_file is None:
+      import_errors.append('No file uploaded')
+    else:
+      try:
+        json_str = uploaded_file.value
+        # Exported files start with an XSSI-protection prefix; strip it
+        # before parsing.
+        if json_str.startswith(jsonfeed.XSSI_PREFIX):
+          json_str = json_str[len(jsonfeed.XSSI_PREFIX):]
+        json_data = json.loads(json_str)
+      except ValueError:
+        import_errors.append('error parsing JSON in file')
+
+    if uploaded_file and not json_data:
+      import_errors.append('JSON file was empty')
+
+    # Note that the project must already exist in order to even reach
+    # this servlet because it is hosted in the context of a project.
+    if json_data and mr.project_name != json_data['metadata']['project']:
+      import_errors.append(
+          'Project name does not match. '
+          'Edit the file if you want to import into this project anyway.')
+
+    if import_errors:
+      return self.PleaseCorrect(mr, import_errors=import_errors)
+
+    event_log = []  # We accumulate a list of messages to display to the user.
+
+    try:
+      # First we parse the JSON into objects, but we don't have DB IDs yet.
+      state = self._ParseObjects(mr.cnxn, mr.project_id, json_data, event_log)
+      # If that worked, go ahead and start saving the data to the DB.
+      if not pre_check_only:
+        self._SaveObjects(mr.cnxn, mr.project_id, state, event_log)
+    except JSONImportError:
+      # just report it to the user by displaying event_log
+      event_log.append('Aborted import processing')
+
+    # This is a little bit of a hack because it always uses the form validation
+    # error message display logic to show the results of this import run,
+    # which may include errors or not.
+    return self.PleaseCorrect(mr, import_errors=event_log)
+
+  def _ParseObjects(self, cnxn, project_id, json_data, event_log):
+    """Examine JSON data and return a parser state for further processing."""
+    # Decide which users need to be created.
+    needed_emails = json_data['emails']
+    user_id_dict = self.services.user.LookupExistingUserIDs(cnxn, needed_emails)
+    nonexist_emails = [email for email in needed_emails
+                       if email not in user_id_dict]
+
+    event_log.append('Need to create %d users: %r' %
+                     (len(nonexist_emails), nonexist_emails))
+    # Synthesize deterministic user IDs for not-yet-existing accounts by
+    # hashing the lowercased email, the same scheme used elsewhere.
+    user_id_dict.update({
+        email.lower(): framework_helpers.MurmurHash3_x86_32(email.lower())
+        for email in nonexist_emails})
+
+    num_comments = 0
+    num_stars = 0
+    issue_list = []
+    # All three dicts are keyed by the issue's local_id within the project.
+    comments_dict = collections.defaultdict(list)
+    starrers_dict = collections.defaultdict(list)
+    relations_dict = collections.defaultdict(list)
+    for issue_json in json_data.get('issues', []):
+      issue, comment_list, starrer_list, relation_list = self._ParseIssue(
+          cnxn, project_id, user_id_dict, issue_json, event_log)
+      issue_list.append(issue)
+      comments_dict[issue.local_id] = comment_list
+      starrers_dict[issue.local_id] = starrer_list
+      relations_dict[issue.local_id] = relation_list
+      num_comments += len(comment_list)
+      num_stars += len(starrer_list)
+
+    event_log.append(
+        'Found info for %d issues: %r' %
+        (len(issue_list), sorted([issue.local_id for issue in issue_list])))
+
+    event_log.append(
+        'Found %d total comments for %d issues' %
+        (num_comments, len(comments_dict)))
+
+    event_log.append(
+        'Found %d total stars for %d issues' %
+        (num_stars, len(starrers_dict)))
+
+    event_log.append(
+        'Found %d total relationships.' %
+        sum((len(dsts) for dsts in relations_dict.itervalues())))
+
+    event_log.append('Parsing phase finished OK')
+    return ParserState(
+        user_id_dict, nonexist_emails, issue_list,
+        comments_dict, starrers_dict, relations_dict)
+
+  def _ParseIssue(self, cnxn, project_id, user_id_dict, issue_json, event_log):
+    """Parse one issue dict into an Issue PB plus related info.
+
+    Returns:
+      A 4-tuple (issue, comments, starrers, relations) where relations is
+      a list of (other_local_id, kind) pairs with kind one of 'blockedon',
+      'blocking', or 'mergedinto'.
+    """
+    issue = tracker_pb2.Issue(
+        project_id=project_id,
+        local_id=issue_json['local_id'],
+        reporter_id=user_id_dict[issue_json['reporter']],
+        summary=issue_json['summary'],
+        opened_timestamp=issue_json['opened'],
+        modified_timestamp=issue_json['modified'],
+        # Cc emails that did not resolve to a user ID are silently dropped.
+        cc_ids=[user_id_dict[cc_email]
+                for cc_email in issue_json.get('cc', [])
+                if cc_email in user_id_dict],
+        status=issue_json.get('status', ''),
+        labels=issue_json.get('labels', []),
+        field_values=[self._ParseFieldValue(cnxn, project_id, user_id_dict, field)
+                      for field in issue_json.get('fields', [])])
+    if issue_json.get('owner'):
+      issue.owner_id = user_id_dict[issue_json['owner']]
+    if issue_json.get('closed'):
+      issue.closed_timestamp = issue_json['closed']
+    comments = [self._ParseComment(
+                    project_id, user_id_dict, comment_json, event_log)
+                for comment_json in issue_json.get('comments', [])]
+
+    starrers = [user_id_dict[starrer] for starrer in issue_json['starrers']]
+
+    relations = []
+    relations.extend(
+        [(i, 'blockedon') for i in issue_json.get('blocked_on', [])])
+    relations.extend(
+        [(i, 'blocking') for i in issue_json.get('blocking', [])])
+    if 'merged_into' in issue_json:
+      relations.append((issue_json['merged_into'], 'mergedinto'))
+
+    return issue, comments, starrers, relations
+
+  def _ParseFieldValue(self, cnxn, project_id, user_id_dict, field_json):
+    """Convert one JSON field dict into a FieldValue PB.
+
+    The field name is resolved to a field_id via the project config.
+    Whichever of the int_value/str_value/user_value keys are present are
+    copied onto the PB.
+    """
+    field = tracker_pb2.FieldValue(
+        field_id=self.services.config.LookupFieldID(cnxn, project_id,
+                                                    field_json['field']))
+    if 'int_value' in field_json:
+      field.int_value = field_json['int_value']
+    if 'str_value' in field_json:
+      field.str_value = field_json['str_value']
+    if 'user_value' in field_json:
+      # Unknown user emails map to None rather than raising KeyError.
+      field.user_value = user_id_dict.get(field_json['user_value'])
+
+    return field
+
+  def _ParseComment(self, project_id, user_id_dict, comment_json, event_log):
+    """Convert one JSON comment dict into an IssueComment PB.
+
+    The 'amendments' and 'attachments' keys are required (KeyError if
+    absent); 'content' is optional.
+    """
+    comment = tracker_pb2.IssueComment(
+        # Note: issue_id is filled in after the issue is saved.
+        project_id=project_id,
+        timestamp=comment_json['timestamp'],
+        user_id=user_id_dict[comment_json['commenter']],
+        content=comment_json.get('content'))
+
+    for amendment in comment_json['amendments']:
+      comment.amendments.append(
+          self._ParseAmendment(amendment, user_id_dict, event_log))
+
+    for attachment in comment_json['attachments']:
+      comment.attachments.append(
+          self._ParseAttachment(attachment, event_log))
+
+    return comment
+
+  def _ParseAmendment(self, amendment_json, user_id_dict, _event_log):
+    """Convert one JSON amendment dict into an Amendment PB.
+
+    'field' is required; new_value, custom_field_name, added_users, and
+    removed_users are all optional.  Emails in added_users/removed_users
+    must already be present in user_id_dict (KeyError otherwise).
+    """
+    amendment = tracker_pb2.Amendment(
+        field=tracker_pb2.FieldID(amendment_json['field']))
+
+    if 'new_value' in amendment_json:
+      amendment.newvalue = amendment_json['new_value']
+    if 'custom_field_name' in amendment_json:
+      amendment.custom_field_name = amendment_json['custom_field_name']
+    if 'added_users' in amendment_json:
+      amendment.added_user_ids.extend(
+          [user_id_dict[email] for email in amendment_json['added_users']])
+    if 'removed_users' in amendment_json:
+      amendment.removed_user_ids.extend(
+          [user_id_dict[email] for email in amendment_json['removed_users']])
+
+    return amendment
+
+  def _ParseAttachment(self, attachment_json, _event_log):
+    """Convert one JSON attachment dict into an Attachment PB.
+
+    All four keys (name, size, mimetype, gcs_object_id) are required.
+    """
+    attachment = tracker_pb2.Attachment(
+        filename=attachment_json['name'],
+        filesize=attachment_json['size'],
+        mimetype=attachment_json['mimetype'],
+        gcs_object_id=attachment_json['gcs_object_id']
+    )
+    return attachment
+
+  def _SaveObjects(self, cnxn, project_id, state, event_log):
+    """Examine JSON data and create users, issues, and comments.
+
+    Persists everything gathered into the ParserState: autocreates user
+    accounts, inserts issues with their comments and stars, then stores
+    issue relations.  Progress messages are appended to event_log.
+    Raises JSONImportError if a newly created user ID does not match the
+    ID predicted during parsing.
+    """
+
+    # Autocreate accounts for emails that did not already exist, and verify
+    # that the IDs assigned match those computed during the parsing phase.
+    created_user_ids = self.services.user.LookupUserIDs(
+        cnxn, state.nonexist_emails, autocreate=True)
+    for created_email, created_id in created_user_ids.items():
+      if created_id != state.user_id_dict[created_email]:
+        event_log.append('Mismatched user_id for %r' % created_email)
+        raise JSONImportError()
+    event_log.append('Created %d users' % len(state.nonexist_emails))
+
+    total_comments = 0
+    total_stars = 0
+    config = self.services.config.GetProjectConfig(cnxn, project_id)
+    for issue in state.issue_list:
+      # TODO(jrobbins): renumber issues if there is a local_id conflict.
+      if issue.local_id not in state.starrers_dict:
+        # Issues with stars will have filter rules applied in SetStar().
+        filterrules_helpers.ApplyFilterRules(
+            cnxn, self.services, issue, config)
+      issue_id = self.services.issue.InsertIssue(cnxn, issue)
+      for comment in state.comments_dict[issue.local_id]:
+        total_comments += 1
+        comment.issue_id = issue_id
+        self.services.issue.InsertComment(cnxn, comment)
+      for starrer in state.starrers_dict[issue.local_id]:
+        total_stars += 1
+        self.services.issue_star.SetStar(
+            cnxn, self.services, config, issue_id, starrer, True)
+
+    event_log.append('Created %d issues' % len(state.issue_list))
+    event_log.append('Created %d comments for %d issues' % (
+        total_comments, len(state.comments_dict)))
+    event_log.append('Set %d stars on %d issues' % (
+        total_stars, len(state.starrers_dict)))
+
+    # Translate relations from per-project local IDs to global issue IDs
+    # and store them all with a single RelateIssues call.
+    global_relations_dict = collections.defaultdict(list)
+    for issue, rels in state.relations_dict.iteritems():
+      src_iid = self.services.issue.GetIssueByLocalID(
+          cnxn, project_id, issue).issue_id
+      dst_iids = [i.issue_id for i in self.services.issue.GetIssuesByLocalIDs(
+          cnxn, project_id, [rel[0] for rel in rels])]
+      kinds = [rel[1] for rel in rels]
+      global_relations_dict[src_iid] = zip(dst_iids, kinds)
+    self.services.issue.RelateIssues(cnxn, global_relations_dict)
+
+    self.services.issue.SetUsedLocalID(cnxn, project_id)
+    event_log.append('Finished import')
+
+
+class JSONImportError(Exception):
+  """Exception to raise if imported JSON is invalid."""
+  # Carries no payload; context is recorded in the caller's event_log
+  # before this is raised.
+  pass
diff --git a/appengine/monorail/tracker/issuelist.py b/appengine/monorail/tracker/issuelist.py
new file mode 100644
index 0000000..293c1da
--- /dev/null
+++ b/appengine/monorail/tracker/issuelist.py
@@ -0,0 +1,427 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the issue list feature of the Monorail Issue Tracker.
+
+Summary of page classes:
+ IssueList: Shows a table of issue that satisfy search criteria.
+"""
+
+import logging
+from third_party import ezt
+
+import settings
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import grid_view_helpers
+from framework import permissions
+from framework import servlet
+from framework import sql
+from framework import table_view_helpers
+from framework import template_helpers
+from framework import urls
+from framework import xsrf
+from search import frontendsearchpipeline
+from search import searchpipeline
+from search import query2ast
+from services import issue_svc
+from tracker import tablecell
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+class IssueList(servlet.Servlet):
+  """IssueList shows a page with a list of issues (search results).
+
+  The issue list is actually a table with a configurable set of columns
+  that can be edited by the user.
+  """
+
+  _PAGE_TEMPLATE = 'tracker/issue-list-page.ezt'
+  _ELIMINATE_BLANK_LINES = True
+  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+  # Overridden by subclasses (e.g. the CSV export) to change page size.
+  _DEFAULT_RESULTS_PER_PAGE = tracker_constants.DEFAULT_RESULTS_PER_PAGE
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    # Check if the user's query is just the ID of an existing issue.
+    # TODO(jrobbins): consider implementing this for cross-project search.
+    if mr.project and tracker_constants.JUMP_RE.match(mr.query):
+      local_id = int(mr.query)
+      try:
+        _issue = self.services.issue.GetIssueByLocalID(
+            mr.cnxn, mr.project_id, local_id) # does it exist?
+        url = framework_helpers.FormatAbsoluteURL(
+            mr, urls.ISSUE_DETAIL, id=local_id)
+        self.redirect(url, abort=True) # Jump to specified issue.
+      except issue_svc.NoSuchIssueException:
+        pass # The user is searching for a number that is not an issue ID.
+
+    with self.profiler.Phase('finishing config work'):
+      if mr.project_id:
+        config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+      else:
+        # Site-wide search has no project; use a default config.
+        config = tracker_bizobj.MakeDefaultProjectIssueConfig(None)
+
+    with self.profiler.Phase('starting frontend search pipeline'):
+      pipeline = frontendsearchpipeline.FrontendSearchPipeline(
+          mr, self.services, self.profiler, self._DEFAULT_RESULTS_PER_PAGE)
+
+    # Perform promises that require authentication information.
+    with self.profiler.Phase('getting stars'):
+      starred_iid_set = _GetStarredIssues(
+          mr.cnxn, mr.auth.user_id, self.services)
+
+    with self.profiler.Phase('computing col_spec'):
+      mr.ComputeColSpec(config)
+
+    if not mr.errors.AnyErrors():
+      pipeline.SearchForIIDs()
+      pipeline.MergeAndSortIssues()
+      pipeline.Paginate()
+
+    with self.profiler.Phase('publishing emails'):
+      framework_views.RevealAllEmailsToMembers(mr, pipeline.users_by_id)
+      # TODO(jrobbins): get the configs for all result issues and
+      # harmonize them to get field defs including restrictions.
+
+    # Prefetch issues referenced by blockedon/blocking/mergedinto columns
+    # so that cell rendering does not do per-issue lookups.
+    with self.profiler.Phase('getting related issues'):
+      related_iids = set()
+      if pipeline.grid_mode:
+        results_needing_related = pipeline.allowed_results or []
+      else:
+        results_needing_related = pipeline.visible_results or []
+      lower_cols = mr.col_spec.lower().split()
+      for issue in results_needing_related:
+        if 'blockedon' in lower_cols:
+          related_iids.update(issue.blocked_on_iids)
+        if 'blocking' in lower_cols:
+          related_iids.update(issue.blocking_iids)
+        if 'mergedinto' in lower_cols:
+          related_iids.add(issue.merged_into)
+      related_issues_list = self.services.issue.GetIssues(
+          mr.cnxn, list(related_iids))
+      related_issues = {issue.issue_id: issue for issue in related_issues_list}
+
+    with self.profiler.Phase('building table/grid'):
+      if pipeline.grid_mode:
+        page_data = self.GetGridViewData(
+            mr, pipeline.allowed_results or [], config, pipeline.users_by_id,
+            starred_iid_set, pipeline.grid_limited)
+      else:
+        page_data = self.GetTableViewData(
+            mr, pipeline.visible_results or [], config, pipeline.users_by_id,
+            starred_iid_set, related_issues)
+
+    # We show a special message when no query will ever produce any results
+    # because the project has no issues in it.
+    with self.profiler.Phase('starting stars promise'):
+      if mr.project_id:
+        project_has_any_issues = (
+            pipeline.allowed_results or
+            self.services.issue.GetHighestLocalID(mr.cnxn, mr.project_id) != 0)
+      else:
+        project_has_any_issues = True  # Message only applies in a project.
+
+    with self.profiler.Phase('making page perms'):
+      page_perms = self.MakePagePerms(
+          mr, None,
+          permissions.SET_STAR,
+          permissions.CREATE_ISSUE,
+          permissions.EDIT_ISSUE)
+
+    # Update page data with variables that are shared between list and
+    # grid view.
+    page_data.update({
+        'issue_tab_mode': 'issueList',
+        'pagination': pipeline.pagination,
+        'is_cross_project': ezt.boolean(len(pipeline.query_project_ids) != 1),
+        'project_has_any_issues': ezt.boolean(project_has_any_issues),
+        'colspec': mr.col_spec,
+        'page_perms': page_perms,
+        'grid_mode': ezt.boolean(pipeline.grid_mode),
+        'panel_id': mr.panel_id,
+        'set_star_token': xsrf.GenerateToken(
+            mr.auth.user_id, '/p/%s%s.do' % (
+                mr.project_name, urls.ISSUE_SETSTAR_JSON)),
+        'is_missing_shards': ezt.boolean(len(pipeline.error_responses)),
+        'missing_shard_count': len(pipeline.error_responses),
+        })
+
+    return page_data
+
+  def GetGridViewData(
+      self, mr, results, config, users_by_id, starred_iid_set, grid_limited):
+    """EZT template values to render a Grid View of issues.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      results: The Issue PBs that are the search results to be displayed.
+      config: The ProjectConfig PB for the project this view is in.
+      users_by_id: A dictionary {user_id: user_view,...} for all the users
+          involved in results.
+      starred_iid_set: Set of issues that the user has starred.
+      grid_limited: True if the results were limited to fit within the grid.
+
+    Returns:
+      Dictionary for EZT template rendering of the Grid View.
+    """
+    # We need ordered_columns because EZT loops have no loop-counter available.
+    # And, we use column number in the Javascript to hide/show columns.
+    columns = mr.col_spec.split()
+    ordered_columns = [template_helpers.EZTItem(col_index=i, name=col)
+                       for i, col in enumerate(columns)]
+    unshown_columns = table_view_helpers.ComputeUnshownColumns(
+        results, columns, config, tracker_constants.OTHER_BUILT_IN_COLS)
+
+    # '--' is the sentinel for "no attribute chosen" on an axis.
+    grid_x_attr = (mr.x or config.default_x_attr or '--').lower()
+    grid_y_attr = (mr.y or config.default_y_attr or '--').lower()
+    all_label_values = {}
+    for art in results:
+      all_label_values[art.local_id] = (
+          grid_view_helpers.MakeLabelValuesDict(art))
+
+    if grid_x_attr == '--':
+      grid_x_headings = ['All']
+    else:
+      grid_x_items = table_view_helpers.ExtractUniqueValues(
+          [grid_x_attr], results, users_by_id, config)
+      grid_x_headings = grid_x_items[0].filter_values
+      if grid_view_helpers.AnyArtifactHasNoAttr(
+          results, grid_x_attr, users_by_id, all_label_values, config):
+        grid_x_headings.append(framework_constants.NO_VALUES)
+      grid_x_headings = grid_view_helpers.SortGridHeadings(
+          grid_x_attr, grid_x_headings, users_by_id, config,
+          tracker_helpers.SORTABLE_FIELDS)
+
+    if grid_y_attr == '--':
+      grid_y_headings = ['All']
+    else:
+      grid_y_items = table_view_helpers.ExtractUniqueValues(
+          [grid_y_attr], results, users_by_id, config)
+      grid_y_headings = grid_y_items[0].filter_values
+      if grid_view_helpers.AnyArtifactHasNoAttr(
+          results, grid_y_attr, users_by_id, all_label_values, config):
+        grid_y_headings.append(framework_constants.NO_VALUES)
+      grid_y_headings = grid_view_helpers.SortGridHeadings(
+          grid_y_attr, grid_y_headings, users_by_id, config,
+          tracker_helpers.SORTABLE_FIELDS)
+
+    logging.info('grid_x_headings = %s', grid_x_headings)
+    logging.info('grid_y_headings = %s', grid_y_headings)
+    grid_data = _MakeGridData(
+        results, mr.auth.user_id,
+        starred_iid_set, grid_x_attr, grid_x_headings,
+        grid_y_attr, grid_y_headings, users_by_id, all_label_values,
+        config)
+
+    grid_axis_choice_dict = {}
+    for oc in ordered_columns:
+      grid_axis_choice_dict[oc.name] = True
+    for uc in unshown_columns:
+      grid_axis_choice_dict[uc] = True
+    for bad_axis in tracker_constants.NOT_USED_IN_GRID_AXES:
+      if bad_axis in grid_axis_choice_dict:
+        del grid_axis_choice_dict[bad_axis]
+    # Python 2: dict.keys() returns a list, sorted in place below.
+    grid_axis_choices = grid_axis_choice_dict.keys()
+    grid_axis_choices.sort()
+
+    # Fall back from tiles to IDs when there are too many results to tile.
+    grid_cell_mode = mr.cells
+    if len(results) > settings.max_tiles_in_grid and mr.cells == 'tiles':
+      grid_cell_mode = 'ids'
+
+    grid_view_data = {
+        'grid_limited': ezt.boolean(grid_limited),
+        'grid_shown': len(results),
+        'grid_x_headings': grid_x_headings,
+        'grid_y_headings': grid_y_headings,
+        'grid_data': grid_data,
+        'grid_axis_choices': grid_axis_choices,
+        'grid_cell_mode': grid_cell_mode,
+        'results': results,  # Really only useful in if-any.
+    }
+    return grid_view_data
+
+  def GetCellFactories(self):
+    """Return the table-cell factory dict; overridden for CSV output."""
+    return tablecell.CELL_FACTORIES
+
+  def GetTableViewData(
+      self, mr, results, config, users_by_id, starred_iid_set, related_issues):
+    """EZT template values to render a Table View of issues.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      results: list of Issue PBs for the search results to be displayed.
+      config: The ProjectIssueConfig PB for the current project.
+      users_by_id: A dictionary {user_id: UserView} for all the users
+          involved in results.
+      starred_iid_set: Set of issues that the user has starred.
+      related_issues: dict {issue_id: issue} of pre-fetched related issues.
+
+    Returns:
+      Dictionary of page data for rendering of the Table View.
+    """
+    # We need ordered_columns because EZT loops have no loop-counter available.
+    # And, we use column number in the Javascript to hide/show columns.
+    columns = mr.col_spec.split()
+    ordered_columns = [template_helpers.EZTItem(col_index=i, name=col)
+                       for i, col in enumerate(columns)]
+    unshown_columns = table_view_helpers.ComputeUnshownColumns(
+        results, columns, config, tracker_constants.OTHER_BUILT_IN_COLS)
+
+    lower_columns = mr.col_spec.lower().split()
+    lower_group_by = mr.group_by_spec.lower().split()
+    table_data = _MakeTableData(
+        results, mr.auth.user_id,
+        starred_iid_set, lower_columns, lower_group_by,
+        users_by_id, self.GetCellFactories(), related_issues, config)
+
+    # Used to offer easy filtering of each unique value in each column.
+    column_values = table_view_helpers.ExtractUniqueValues(
+        lower_columns, results, users_by_id, config)
+
+    table_view_data = {
+        'table_data': table_data,
+        'column_values': column_values,
+        # Put ordered_columns inside a list of exactly 1 panel so that
+        # it can work the same as the dashboard initial panel list headers.
+        'panels': [template_helpers.EZTItem(ordered_columns=ordered_columns)],
+        'unshown_columns': unshown_columns,
+        'cursor': mr.cursor or mr.preview,
+        'preview': mr.preview,
+        'default_colspec': tracker_constants.DEFAULT_COL_SPEC,
+        'default_results_per_page': tracker_constants.DEFAULT_RESULTS_PER_PAGE,
+        'csv_link': framework_helpers.FormatURL(mr, 'csv'),
+        'preview_on_hover': ezt.boolean(
+            _ShouldPreviewOnHover(mr.auth.user_pb)),
+        }
+    return table_view_data
+
+  def GatherHelpData(self, mr, page_data):
+    """Return a dict of values to drive on-page user help.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+      page_data: Dictionary of base and page template data.
+
+    Returns:
+      A dict of values to drive on-page user help, to be added to page_data.
+    """
+    # At most one cue (contextual help message) is shown per page view;
+    # cues the user has dismissed are never shown again.
+    cue = None
+    dismissed = []
+    if mr.auth.user_pb:
+      dismissed = mr.auth.user_pb.dismissed_cues
+
+    if mr.project_id:
+      config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+    else:
+      config = tracker_bizobj.MakeDefaultProjectIssueConfig(None)
+
+    try:
+      _query_ast, is_fulltext_query = searchpipeline.ParseQuery(
+          mr, config, self.services)
+    except query2ast.InvalidQueryError:
+      is_fulltext_query = False
+
+    if mr.mode == 'grid' and mr.cells == 'tiles':
+      if len(page_data.get('results', [])) > settings.max_tiles_in_grid:
+        if 'showing_ids_instead_of_tiles' not in dismissed:
+          cue = 'showing_ids_instead_of_tiles'
+
+    if self.CheckPerm(mr, permissions.EDIT_ISSUE):
+      if ('italics_mean_derived' not in dismissed and
+          _AnyDerivedValues(page_data.get('table_data', []))):
+        cue = 'italics_mean_derived'
+      elif 'dit_keystrokes' not in dismissed and mr.mode != 'grid':
+        cue = 'dit_keystrokes'
+      elif 'stale_fulltext' not in dismissed and is_fulltext_query:
+        cue = 'stale_fulltext'
+
+    return {
+        'cue': cue,
+        }
+
+
+def _AnyDerivedValues(table_data):
+  """Return True if any value in the given table_data was derived."""
+  # Short-circuits on the first derived value found.
+  for row in table_data:
+    for cell in row.cells:
+      for item in cell.values:
+        if item.is_derived:
+          return True
+
+  return False
+
+
+def _MakeTableData(
+    visible_results, logged_in_user_id, starred_iid_set,
+    lower_columns, lower_group_by, users_by_id, cell_factories,
+    related_issues, config):
+  """Return a list of list row objects for display by EZT.
+
+  Delegates row construction to table_view_helpers.MakeTableData, then
+  decorates each row with issue identifiers and a detail-page URL.
+  """
+  table_data = table_view_helpers.MakeTableData(
+      visible_results, logged_in_user_id, starred_iid_set,
+      lower_columns, lower_group_by, users_by_id, cell_factories,
+      lambda issue: issue.issue_id, related_issues, config)
+
+  # Rows and visible_results are parallel lists: row i describes issue i.
+  for row, art in zip(table_data, visible_results):
+    row.local_id = art.local_id
+    row.project_name = art.project_name
+    row.issue_ref = '%s:%d' % (art.project_name, art.local_id)
+    row.issue_url = tracker_helpers.FormatRelativeIssueURL(
+        art.project_name, urls.ISSUE_DETAIL, id=art.local_id)
+
+  return table_data
+
+
+def _MakeGridData(
+    allowed_results, _logged_in_user_id, starred_iid_set, x_attr,
+    grid_col_values, y_attr, grid_row_values, users_by_id, all_label_values,
+    config):
+  """Return all data needed for EZT to render the body of the grid view."""
+
+  def IssueViewFactory(issue):
+    # Minimal per-tile view; starred is filled in below.
+    return template_helpers.EZTItem(
+        summary=issue.summary, local_id=issue.local_id, issue_id=issue.issue_id,
+        status=issue.status or issue.derived_status, starred=None)
+
+  grid_data = grid_view_helpers.MakeGridData(
+      allowed_results, x_attr, grid_col_values, y_attr, grid_row_values,
+      users_by_id, IssueViewFactory, all_label_values, config)
+  # Mark tiles for issues the logged-in user has starred.
+  for grid_row in grid_data:
+    for grid_cell in grid_row.cells_in_row:
+      for tile in grid_cell.tiles:
+        if tile.issue_id in starred_iid_set:
+          tile.starred = ezt.boolean(True)
+
+  return grid_data
+
+
+def _GetStarredIssues(cnxn, logged_in_user_id, services):
+  """Get the set of issues that the logged in user has starred."""
+  starred_iids = services.issue_star.LookupStarredItemIDs(
+      cnxn, logged_in_user_id)
+  # Return a set for O(1) membership tests during rendering.
+  return set(starred_iids)
+
+
+def _ShouldPreviewOnHover(user):
+  """Return true if we should show the issue preview when the user hovers.
+
+  Args:
+    user: User PB for the currently signed in user.
+
+  Returns:
+    True if the preview (peek) should open on hover over the issue ID.
+  """
+  # Requires both the site-wide setting and the user's preference.
+  return settings.enable_quick_edit and user.preview_on_hover
diff --git a/appengine/monorail/tracker/issuelistcsv.py b/appengine/monorail/tracker/issuelistcsv.py
new file mode 100644
index 0000000..f00c6ee
--- /dev/null
+++ b/appengine/monorail/tracker/issuelistcsv.py
@@ -0,0 +1,85 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Implementation of the issue list output as a CSV file."""
+
+import settings
+from framework import framework_helpers
+from framework import permissions
+from framework import urls
+from tracker import issuelist
+from tracker import tablecell
+from tracker import tracker_constants
+
+
+class IssueListCsv(issuelist.IssueList):
+  """IssueListCsv provides to the user a list of issues as a CSV document.
+
+  Overrides the standard IssueList servlet but uses a different EZT template
+  to provide the same content as the IssueList only as CSV. Adds the HTTP
+  header to offer the result as a download.
+  """
+
+  _PAGE_TEMPLATE = 'tracker/issue-list-csv.ezt'
+  # CSV export uses the site-wide maximum page size rather than the
+  # normal issue-list default.
+  _DEFAULT_RESULTS_PER_PAGE = settings.max_artifact_search_results_per_page
+
+  def GatherPageData(self, mr):
+    """Build CSV page data; raises PermissionException for anonymous users."""
+    if not mr.auth.user_id:
+      raise permissions.PermissionException(
+          'Anonymous users are not allowed to download issue list CSV')
+
+    # Sets headers to allow the response to be downloaded.
+    self.content_type = 'text/csv; charset=UTF-8'
+    download_filename = '%s-issues.csv' % mr.project_name
+    self.response.headers.add(
+        'Content-Disposition', 'attachment; filename=%s' % download_filename)
+    self.response.headers.add('X-Content-Type-Options', 'nosniff')
+
+    # Rewrite the colspec to add some extra columns that make the CSV
+    # file more complete.
+    with self.profiler.Phase('finishing config work'):
+      config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+    mr.ComputeColSpec(config)
+    mr.col_spec = _RewriteColspec(mr.col_spec)
+    page_data = issuelist.IssueList.GatherPageData(self, mr)
+
+    # CSV files are at risk for PDF content sniffing by Acrobat Reader.
+    page_data['prevent_sniffing'] = True
+
+    # If we're truncating the results, add a URL to the next page of results
+    page_data['next_csv_link'] = None
+    pagination = page_data['pagination']
+    if pagination.next_url:
+      page_data['next_csv_link'] = framework_helpers.FormatAbsoluteURL(
+          mr, urls.ISSUE_LIST_CSV, start=pagination.last)
+      page_data['item_count'] = pagination.last - pagination.start + 1
+
+    return page_data
+
+  def GetCellFactories(self):
+    """Use CSV-specific cell factories instead of the HTML ones."""
+    return tablecell.CSV_CELL_FACTORIES
+
+
+# Whenever the user requests one of these columns, we replace it with the
+# list of alternate columns. In effect, we split the requested column
+# into two CSV columns.  Keys must be lowercase because the lookup in
+# _RewriteColspec uses col.lower().
+_CSV_COLS_TO_REPLACE = {
+    'summary': ['Summary', 'AllLabels'],
+    'opened': ['Opened', 'OpenedTimestamp'],
+    'closed': ['Closed', 'ClosedTimestamp'],
+    'modified': ['Modified', 'ModifiedTimestamp'],
+    }
+
+
+def _RewriteColspec(col_spec):
+  """Rewrite the given colspec to expand special CSV columns."""
+  new_cols = []
+
+  for col in col_spec.split():
+    # Case-insensitive lookup; unknown columns pass through unchanged.
+    rewriten_cols = _CSV_COLS_TO_REPLACE.get(col.lower(), [col])
+    new_cols.extend(rewriten_cols)
+
+  return ' '.join(new_cols)
diff --git a/appengine/monorail/tracker/issueoptions.py b/appengine/monorail/tracker/issueoptions.py
new file mode 100644
index 0000000..8301900
--- /dev/null
+++ b/appengine/monorail/tracker/issueoptions.py
@@ -0,0 +1,263 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""JSON feed for issue autocomplete options."""
+
+import logging
+from third_party import ezt
+
+from framework import framework_helpers
+from framework import framework_views
+from framework import jsonfeed
+from framework import monorailrequest
+from framework import permissions
+from project import project_helpers
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+# Here are some restriction labels to help people do the most common things
+# that they might want to do with restrictions.
+# Each entry is (action, permission, docstring) used to build a
+# Restrict-<action>-<perm> label in _BuildRestrictionChoices.
+_FREQUENT_ISSUE_RESTRICTIONS = [
+    (permissions.VIEW, permissions.EDIT_ISSUE,
+     'Only users who can edit the issue may access it'),
+    (permissions.ADD_ISSUE_COMMENT, permissions.EDIT_ISSUE,
+     'Only users who can edit the issue may add comments'),
+    ]
+
+
+# These issue restrictions should be offered as examples whenever the project
+# does not have any custom permissions in use already.
+# Same (action, permission, docstring) shape as _FREQUENT_ISSUE_RESTRICTIONS.
+_EXAMPLE_ISSUE_RESTRICTIONS = [
+    (permissions.VIEW, 'CoreTeam',
+     'Custom permission CoreTeam is needed to access'),
+    ]
+
+
+class IssueOptionsJSON(jsonfeed.JsonFeed):
+  """JSON data describing all issue statuses, labels, and members."""
+
+  def HandleRequest(self, mr):
+    """Provide the UI with info used in auto-completion.
+
+    Args:
+      mr: common information parsed from the HTTP request.
+
+    Returns:
+      Results dictionary in JSON format
+    """
+    # Issue options data can be cached separately in each user's browser. When
+    # the project changes, a new cached_content_timestamp is set and it will
+    # cause new requests to use a new URL.
+    self.SetCacheHeaders(self.response)
+
+    member_data = project_helpers.BuildProjectMembers(
+        mr.cnxn, mr.project, self.services.user)
+    owner_views = member_data['owners']
+    committer_views = member_data['committers']
+    contributor_views = member_data['contributors']
+
+    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+    # Partition non-deprecated well-known statuses into open vs. closed.
+    open_statuses = []
+    closed_statuses = []
+    for wks in config.well_known_statuses:
+      if not wks.deprecated:
+        item = dict(name=wks.status, doc=wks.status_docstring)
+        if wks.means_open:
+          open_statuses.append(item)
+        else:
+          closed_statuses.append(item)
+
+    # TODO(jrobbins): restrictions on component definitions?
+    components = [{'name': cd.path, 'doc': cd.docstring}
+                  for cd in config.component_defs if not cd.deprecated]
+
+    # Offer only labels that are not masked by custom fields and not
+    # marked comment-only.
+    labels = []
+    field_names = [
+        fd.field_name for fd in config.field_defs if not fd.is_deleted]
+    non_masked_labels = tracker_helpers.LabelsNotMaskedByFields(
+        config, field_names)
+    for wkl in non_masked_labels:
+      if not wkl.commented:
+        item = dict(name=wkl.name, doc=wkl.docstring)
+        labels.append(item)
+
+    # TODO(jrobbins): omit fields that they don't have permission to view.
+    field_def_views = [
+        tracker_views.FieldDefView(fd, config)
+        for fd in config.field_defs
+        if not fd.is_deleted]
+    fields = [
+        dict(field_name=fdv.field_name, field_type=fdv.field_type,
+             field_id=fdv.field_id, needs_perm=fdv.needs_perm,
+             is_required=fdv.is_required, is_multivalued=fdv.is_multivalued,
+             choices=[dict(name=c.name, doc=c.docstring) for c in fdv.choices],
+             docstring=fdv.docstring)
+        for fdv in field_def_views]
+
+    frequent_restrictions = _FREQUENT_ISSUE_RESTRICTIONS[:]
+    custom_permissions = permissions.GetCustomPermissions(mr.project)
+    if not custom_permissions:
+      frequent_restrictions.extend(
+          _EXAMPLE_ISSUE_RESTRICTIONS)
+
+    labels.extend(_BuildRestrictionChoices(
+        mr.project, frequent_restrictions,
+        permissions.STANDARD_ISSUE_PERMISSIONS))
+
+    group_ids = self.services.usergroup.DetermineWhichUserIDsAreGroups(
+        mr.cnxn, [mem.user_id for mem in member_data['all_members']])
+    logging.info('group_ids is %r', group_ids)
+
+    # TODO(jrobbins): Normally, users will be allowed view the members
+    # of any user group if the project From: email address is listed
+    # as a group member, as well as any group that they are personally
+    # members of.
+    member_ids, owner_ids = self.services.usergroup.LookupVisibleMembers(
+        mr.cnxn, group_ids, mr.perms, mr.auth.effective_ids, self.services)
+    indirect_ids = set()
+    for gid in group_ids:
+      indirect_ids.update(member_ids.get(gid, []))
+      indirect_ids.update(owner_ids.get(gid, []))
+    indirect_user_ids = list(indirect_ids)
+    indirect_member_views = framework_views.MakeAllUserViews(
+        mr.cnxn, self.services.user, indirect_user_ids).values()
+
+    visible_member_views = _FilterMemberData(
+        mr, owner_views, committer_views, contributor_views,
+        indirect_member_views)
+    # Filter out service accounts
+    visible_member_views = [m for m in visible_member_views
+                            if not framework_helpers.IsServiceAccount(m.email)]
+    visible_member_email_list = list({
+        uv.email for uv in visible_member_views})
+    user_indexes = {email: idx
+                    for idx, email in enumerate(visible_member_email_list)}
+    visible_members_dict = {}
+    for uv in visible_member_views:
+      visible_members_dict[uv.email] = uv.user_id
+    group_ids = self.services.usergroup.DetermineWhichUserIDsAreGroups(
+        mr.cnxn, visible_members_dict.values())
+
+    # For each user-type field with a needs_perm restriction, compute
+    # which visible members actually hold that permission.
+    for field_dict in fields:
+      needed_perm = field_dict['needs_perm']
+      if needed_perm:
+        qualified_user_indexes = []
+        for uv in visible_member_views:
+          # TODO(jrobbins): Similar code occurs in field_helpers.py.
+          user = self.services.user.GetUser(mr.cnxn, uv.user_id)
+          auth = monorailrequest.AuthData.FromUserID(
+              mr.cnxn, uv.user_id, self.services)
+          user_perms = permissions.GetPermissions(
+              user, auth.effective_ids, mr.project)
+          has_perm = user_perms.CanUsePerm(
+              needed_perm, auth.effective_ids, mr.project, [])
+          if has_perm:
+            qualified_user_indexes.append(user_indexes[uv.email])
+
+        field_dict['user_indexes'] = sorted(set(qualified_user_indexes))
+
+    excl_prefixes = [prefix.lower() for prefix in
+                     config.exclusive_label_prefixes]
+    members_def_list = [dict(name=email, doc='')
+                        for email in visible_member_email_list]
+    members_def_list = sorted(
+        members_def_list, key=lambda md: md['name'])
+    for md in members_def_list:
+      md_id = visible_members_dict[md['name']]
+      if md_id in group_ids:
+        md['is_group'] = True
+
+    return {
+        'open': open_statuses,
+        'closed': closed_statuses,
+        'statuses_offer_merge': config.statuses_offer_merge,
+        'components': components,
+        'labels': labels,
+        'fields': fields,
+        'excl_prefixes': excl_prefixes,
+        'strict': ezt.boolean(config.restrict_to_known),
+        'members': members_def_list,
+        'custom_permissions': custom_permissions,
+        }
+
+
+def _FilterMemberData(
+    mr, owner_views, committer_views, contributor_views,
+    indirect_member_views):
+  """Return a filtered list of members that the user can view.
+
+  In most projects, everyone can view the entire member list. But,
+  some projects are configured to only allow project owners to see
+  all members. In those projects, committers and contributors do not
+  see any contributors. Regardless of how the project is configured
+  or the role that the user plays in the current project, we include
+  any indirect members through user groups that the user has access
+  to view.
+
+  Args:
+    mr: Commonly used info parsed from the HTTP request.
+    owner_views: list of UserViews for project owners.
+    committer_views: list of UserViews for project committers.
+    contributor_views: list of UserViews for project contributors.
+    indirect_member_views: list of UserViews for users who have
+        an indirect role in the project via a user group, and that the
+        logged in user is allowed to see.
+
+  Returns:
+    A list of owners, committer and visible indirect members if the user is not
+    signed in. If the project is set to display contributors to non-owners or
+    the signed in user has necessary permissions then additionally a list of
+    contributors.
+  """
+  visible_members = []
+
+  # Everyone can view owners and committers
+  visible_members.extend(owner_views)
+  visible_members.extend(committer_views)
+
+  # The list of indirect members is already limited to ones that the user
+  # is allowed to see according to user group settings.
+  visible_members.extend(indirect_member_views)
+
+  # If the user is allowed to view the list of contributors, add those too.
+  if permissions.CanViewContributorList(mr):
+    visible_members.extend(contributor_views)
+
+  return visible_members
+
+
+def _BuildRestrictionChoices(project, freq_restrictions, actions):
+  """Return a list of autocompletion choices for restriction labels.
+
+  Args:
+    project: Project PB for the current project.
+    freq_restrictions: list of (action, perm, doc) tuples for restrictions
+        that are frequently used.
+    actions: list of strings for actions that are relevant to the current
+        artifact.
+
+  Returns:
+    A list of dictionaries [{'name': 'perm name', 'doc': 'docstring'}, ...]
+    suitable for use in a JSON feed to our JS autocompletion functions.
+  """
+  custom_permissions = permissions.GetCustomPermissions(project)
+  choices = []
+
+  # Frequently used restrictions come first, with their curated docstrings.
+  for action, perm, doc in freq_restrictions:
+    choices.append({
+        'name': 'Restrict-%s-%s' % (action, perm),
+        'doc': doc,
+        })
+
+  # Then the cross product of relevant actions and project custom perms.
+  for action in actions:
+    for perm in custom_permissions:
+      choices.append({
+          'name': 'Restrict-%s-%s' % (action, perm),
+          'doc': 'Permission %s needed to use %s' % (perm, action),
+          })
+
+  return choices
diff --git a/appengine/monorail/tracker/issueoriginal.py b/appengine/monorail/tracker/issueoriginal.py
new file mode 100644
index 0000000..e32df4c
--- /dev/null
+++ b/appengine/monorail/tracker/issueoriginal.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Servlet to show the original email that caused an issue comment.
+
+The text of the body of the email is shown in an HTML page with <pre>.
+All the text is automatically escaped by EZT to make it safe to
+include in an HTML page.
+"""
+
+import logging
+from third_party import ezt
+
+from framework import filecontent
+from framework import permissions
+from framework import servlet
+from services import issue_svc
+
+
+class IssueOriginal(servlet.Servlet):
+  """IssueOriginal shows an inbound email that caused an issue comment."""
+
+  _PAGE_TEMPLATE = 'tracker/issue-original-page.ezt'
+
+  def AssertBasePermission(self, mr):
+    """Make sure that the logged in user has permission to view this page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Raises:
+      permissions.PermissionException: if the user may not view the issue,
+          or may neither view inbound messages nor delete the comment.
+    """
+    super(IssueOriginal, self).AssertBasePermission(mr)
+    issue, comment = self._GetIssueAndComment(mr)
+
+    # TODO(jrobbins): take granted perms into account here.
+    if issue and not permissions.CanViewIssue(
+        mr.auth.effective_ids, mr.perms, mr.project, issue,
+        allow_viewing_deleted=True):
+      raise permissions.PermissionException(
+          'User is not allowed to view this issue')
+
+    # Viewing the raw email is allowed either via the inbound-message
+    # permission or because the user could delete the comment anyway.
+    can_view_inbound_message = self.CheckPerm(
+        mr, permissions.VIEW_INBOUND_MESSAGES, art=issue)
+    can_delete = permissions.CanDelete(
+        mr.auth.user_id, mr.auth.effective_ids, mr.perms,
+        comment.deleted_by, comment.user_id,
+        mr.project, permissions.GetRestrictions(issue))
+    if not can_view_inbound_message and not can_delete:
+      raise permissions.PermissionException(
+          'Only project members may view original email text')
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+    """
+    issue, comment = self._GetIssueAndComment(mr)
+    message_body_unicode, is_binary, _is_long = (
+        filecontent.DecodeFileContents(comment.inbound_message))
+
+    # Take out the iso8859-1 non-breaking-space characters that gmail
+    # inserts between consecutive spaces when quoting text in a reply.
+    # You can see this in gmail by sending a plain text reply to a
+    # message that had multiple spaces on some line, then use the
+    # "Show original" menu item to view your reply, you will see "=A0".
+    #message_body_unicode = message_body_unicode.replace(u'\xa0', u' ')
+
+    page_data = {
+        'local_id': issue.local_id,
+        'seq': comment.sequence,
+        'is_binary': ezt.boolean(is_binary),
+        'message_body': message_body_unicode,
+        }
+
+    return page_data
+
+  def _GetIssueAndComment(self, mr):
+    """Wait on retrieving the specified issue and issue comment.
+
+    Returns:
+      A (issue, comment) pair of PBs; aborts the request with a 404 if
+      either is missing or not specified in the request.
+    """
+    if mr.local_id is None or mr.seq is None:
+      self.abort(404, 'issue or comment not specified')
+
+    try:
+      issue = self.services.issue.GetIssueByLocalID(
+          mr.cnxn, mr.project_id, mr.local_id)
+    except issue_svc.NoSuchIssueException:
+      self.abort(404, 'issue not found')
+
+    # mr.seq indexes into the full comment list for the issue.
+    comments = self.services.issue.GetCommentsForIssue(
+        mr.cnxn, issue.issue_id)
+
+    try:
+      comment = comments[mr.seq]
+    except IndexError:
+      self.abort(404, 'comment not found')
+
+    return issue, comment
diff --git a/appengine/monorail/tracker/issuepeek.py b/appengine/monorail/tracker/issuepeek.py
new file mode 100644
index 0000000..24d8a90
--- /dev/null
+++ b/appengine/monorail/tracker/issuepeek.py
@@ -0,0 +1,378 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement the issue peek page and related forms."""
+
+import logging
+import time
+from third_party import ezt
+
+import settings
+from features import commands
+from features import notify
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import monorailrequest
+from framework import paginate
+from framework import permissions
+from framework import servlet
+from framework import sql
+from framework import template_helpers
+from framework import urls
+from framework import xsrf
+from services import issue_svc
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+from tracker import tracker_views
+
+
+class IssuePeek(servlet.Servlet):
+ """IssuePeek is a page that shows the details of one issue."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-peek-ajah.ezt'
+ _ALLOW_VIEWING_DELETED = False
+
+ def AssertBasePermission(self, mr):
+ """Check that the user has permission to even visit this page."""
+ super(IssuePeek, self).AssertBasePermission(mr)
+ try:
+ issue = self._GetIssue(mr)
+ except issue_svc.NoSuchIssueException:
+ return
+ if not issue:
+ return
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+ granted_perms = tracker_bizobj.GetGrantedPerms(
+ issue, mr.auth.effective_ids, config)
+ permit_view = permissions.CanViewIssue(
+ mr.auth.effective_ids, mr.perms, mr.project, issue,
+ allow_viewing_deleted=self._ALLOW_VIEWING_DELETED,
+ granted_perms=granted_perms)
+ if not permit_view:
+ raise permissions.PermissionException(
+ 'User is not allowed to view this issue')
+
+ def _GetIssue(self, mr):
+ """Retrieve the current issue."""
+ if mr.local_id is None:
+ return None # GatherPageData will detect the same condition.
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, mr.project_id, mr.local_id)
+ return issue
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ if mr.local_id is None:
+ self.abort(404, 'no issue specified')
+ with self.profiler.Phase('finishing getting issue'):
+ issue = self._GetIssue(mr)
+ if issue is None:
+ self.abort(404, 'issue not found')
+
+ # We give no explanation of missing issues on the peek page.
+ if issue is None or issue.deleted:
+ self.abort(404, 'issue not found')
+
+ star_cnxn = sql.MonorailConnection()
+ star_promise = framework_helpers.Promise(
+ self.services.issue_star.IsItemStarredBy, star_cnxn,
+ issue.issue_id, mr.auth.user_id)
+
+ with self.profiler.Phase('getting project issue config'):
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+ with self.profiler.Phase('finishing getting comments'):
+ comments = self.services.issue.GetCommentsForIssue(
+ mr.cnxn, issue.issue_id)
+
+ description, visible_comments, cmnt_pagination = PaginateComments(
+ mr, issue, comments, config)
+
+ with self.profiler.Phase('making user proxies'):
+ users_by_id = framework_views.MakeAllUserViews(
+ mr.cnxn, self.services.user,
+ tracker_bizobj.UsersInvolvedInIssues([issue]),
+ tracker_bizobj.UsersInvolvedInCommentList(
+ [description] + visible_comments))
+ framework_views.RevealAllEmailsToMembers(mr, users_by_id)
+
+ (issue_view, description_view,
+ comment_views) = self._MakeIssueAndCommentViews(
+ mr, issue, users_by_id, description, visible_comments, config)
+
+ with self.profiler.Phase('getting starring info'):
+ starred = star_promise.WaitAndGetValue()
+ star_cnxn.Close()
+ permit_edit = permissions.CanEditIssue(
+ mr.auth.effective_ids, mr.perms, mr.project, issue)
+
+ mr.ComputeColSpec(config)
+ restrict_to_known = config.restrict_to_known
+
+ page_perms = self.MakePagePerms(
+ mr, issue,
+ permissions.CREATE_ISSUE,
+ permissions.SET_STAR,
+ permissions.EDIT_ISSUE,
+ permissions.EDIT_ISSUE_SUMMARY,
+ permissions.EDIT_ISSUE_STATUS,
+ permissions.EDIT_ISSUE_OWNER,
+ permissions.EDIT_ISSUE_CC,
+ permissions.DELETE_ISSUE,
+ permissions.ADD_ISSUE_COMMENT,
+ permissions.DELETE_OWN,
+ permissions.DELETE_ANY,
+ permissions.VIEW_INBOUND_MESSAGES)
+ page_perms.EditIssue = ezt.boolean(permit_edit)
+
+ prevent_restriction_removal = (
+ mr.project.only_owners_remove_restrictions and
+ not framework_bizobj.UserOwnsProject(
+ mr.project, mr.auth.effective_ids))
+
+ cmd_slots, default_slot_num = self.services.features.GetRecentCommands(
+ mr.cnxn, mr.auth.user_id, mr.project_id)
+ cmd_slot_views = [
+ template_helpers.EZTItem(
+ slot_num=slot_num, command=command, comment=comment)
+ for slot_num, command, comment in cmd_slots]
+
+ previous_locations = self.GetPreviousLocations(mr, issue)
+
+ return {
+ 'issue_tab_mode': 'issueDetail',
+ 'issue': issue_view,
+ 'description': description_view,
+ 'comments': comment_views,
+ 'labels': issue.labels,
+ 'num_detail_rows': len(comment_views) + 4,
+ 'noisy': ezt.boolean(tracker_helpers.IsNoisy(
+ len(comment_views), issue.star_count)),
+
+ 'cmnt_pagination': cmnt_pagination,
+ 'colspec': mr.col_spec,
+ 'searchtip': 'You can jump to any issue by number',
+ 'starred': ezt.boolean(starred),
+
+ 'pagegen': str(long(time.time() * 1000000)),
+ 'set_star_token': xsrf.GenerateToken(
+ mr.auth.user_id, '/p/%s%s' % ( # Note: no .do suffix.
+ mr.project_name, urls.ISSUE_SETSTAR_JSON)),
+
+ 'restrict_to_known': ezt.boolean(restrict_to_known),
+ 'prevent_restriction_removal': ezt.boolean(
+ prevent_restriction_removal),
+
+ 'statuses_offer_merge': config.statuses_offer_merge,
+ 'page_perms': page_perms,
+ 'cmd_slots': cmd_slot_views,
+ 'default_slot_num': default_slot_num,
+ 'quick_edit_submit_url': tracker_helpers.FormatRelativeIssueURL(
+ issue.project_name, urls.ISSUE_PEEK + '.do', id=issue.local_id),
+ 'previous_locations': previous_locations,
+ }
+
+ def GetPreviousLocations(self, mr, issue):
+ """Return a list of previous locations of the current issue."""
+ previous_location_ids = self.services.issue.GetPreviousLocations(
+ mr.cnxn, issue)
+ previous_locations = []
+ for old_pid, old_id in previous_location_ids:
+ old_project = self.services.project.GetProject(mr.cnxn, old_pid)
+ previous_locations.append(
+ template_helpers.EZTItem(
+ project_name=old_project.project_name, local_id=old_id))
+
+ return previous_locations
+
+ def _MakeIssueAndCommentViews(
+ self, mr, issue, users_by_id, initial_description, comments, config,
+ issue_reporters=None, comment_reporters=None):
+ """Create view objects that help display parts of an issue.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ issue: issue PB for the currently viewed issue.
+ users_by_id: dictionary of {user_id: UserView,...}.
+ initial_description: IssueComment for the initial issue report.
+ comments: list of IssueComment PBs on the current issue.
+ issue_reporters: list of user IDs who have flagged the issue as spam.
+ comment_reporters: map of comment ID to list of flagging user IDs.
+ config: ProjectIssueConfig for the project that contains this issue.
+
+ Returns:
+ (issue_view, description_view, comment_views). One IssueView for
+ the whole issue, one IssueCommentView for the initial description,
+ and then a list of IssueCommentView's for each additional comment.
+ """
+ with self.profiler.Phase('getting related issues'):
+ open_related, closed_related = (
+ tracker_helpers.GetAllowedOpenAndClosedRelatedIssues(
+ self.services, mr, issue))
+ all_related_iids = list(issue.blocked_on_iids) + list(issue.blocking_iids)
+ if issue.merged_into:
+ all_related_iids.append(issue.merged_into)
+ all_related = self.services.issue.GetIssues(mr.cnxn, all_related_iids)
+
+ with self.profiler.Phase('making issue view'):
+ issue_view = tracker_views.IssueView(
+ issue, users_by_id, config,
+ open_related=open_related, closed_related=closed_related,
+ all_related={rel.issue_id: rel for rel in all_related})
+
+ with self.profiler.Phase('autolinker object lookup'):
+ all_ref_artifacts = self.services.autolink.GetAllReferencedArtifacts(
+ mr, [c.content for c in [initial_description] + comments])
+
+ with self.profiler.Phase('making comment views'):
+ reporter_auth = monorailrequest.AuthData.FromUserID(
+ mr.cnxn, initial_description.user_id, self.services)
+ desc_view = tracker_views.IssueCommentView(
+ mr.project_name, initial_description, users_by_id,
+ self.services.autolink, all_ref_artifacts, mr,
+ issue, effective_ids=reporter_auth.effective_ids)
+ # TODO(jrobbins): get effective_ids of each comment author, but
+ # that is too slow right now.
+ comment_views = [
+ tracker_views.IssueCommentView(
+ mr.project_name, c, users_by_id, self.services.autolink,
+ all_ref_artifacts, mr, issue)
+ for c in comments]
+
+ issue_view.flagged_spam = mr.auth.user_id in issue_reporters
+ if comment_reporters is not None:
+ for c in comment_views:
+ c.flagged_spam = mr.auth.user_id in comment_reporters.get(c.id, [])
+
+ return issue_view, desc_view, comment_views
+
+ def ProcessFormData(self, mr, post_data):
+ """Process the posted issue update form.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to, or None if response was already sent.
+ """
+ cmd = post_data.get('cmd', '')
+ send_email = 'send_email' in post_data
+ comment = post_data.get('comment', '')
+ slot_used = int(post_data.get('slot_used', 1))
+ page_generation_time = long(post_data['pagegen'])
+ issue = self._GetIssue(mr)
+ old_owner_id = tracker_bizobj.GetOwnerId(issue)
+ config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
+
+ summary, status, owner_id, cc_ids, labels = commands.ParseQuickEditCommand(
+ mr.cnxn, cmd, issue, config, mr.auth.user_id, self.services)
+ component_ids = issue.component_ids # TODO(jrobbins): component commands
+ field_values = issue.field_values # TODO(jrobbins): edit custom fields
+
+ permit_edit = permissions.CanEditIssue(
+ mr.auth.effective_ids, mr.perms, mr.project, issue)
+ if not permit_edit:
+ raise permissions.PermissionException(
+ 'User is not allowed to edit this issue')
+
+ amendments, _ = self.services.issue.ApplyIssueComment(
+ mr.cnxn, self.services, mr.auth.user_id,
+ mr.project_id, mr.local_id, summary, status, owner_id, cc_ids,
+ labels, field_values, component_ids, issue.blocked_on_iids,
+ issue.blocking_iids, issue.dangling_blocked_on_refs,
+ issue.dangling_blocking_refs, issue.merged_into,
+ page_gen_ts=page_generation_time, comment=comment)
+ self.services.project.UpdateRecentActivity(
+ mr.cnxn, mr.project.project_id)
+
+ if send_email:
+ if amendments or comment.strip():
+ cmnts = self.services.issue.GetCommentsForIssue(
+ mr.cnxn, issue.issue_id)
+ notify.PrepareAndSendIssueChangeNotification(
+ mr.project_id, mr.local_id, mr.request.host,
+ mr.auth.user_id, len(cmnts) - 1,
+ send_email=send_email, old_owner_id=old_owner_id)
+
+ # TODO(jrobbins): allow issue merge via quick-edit.
+
+ self.services.features.StoreRecentCommand(
+ mr.cnxn, mr.auth.user_id, mr.project_id, slot_used, cmd, comment)
+
+ # TODO(jrobbins): this is very similar to a block of code in issuebulkedit.
+ mr.can = int(post_data['can'])
+ mr.query = post_data.get('q', '')
+ mr.col_spec = post_data.get('colspec', '')
+ mr.sort_spec = post_data.get('sort', '')
+ mr.group_by_spec = post_data.get('groupby', '')
+ mr.start = int(post_data['start'])
+ mr.num = int(post_data['num'])
+ preview_issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
+ return tracker_helpers.FormatIssueListURL(
+ mr, config, preview=preview_issue_ref_str, updated=mr.local_id,
+ ts=int(time.time()))
+
+
+def PaginateComments(mr, issue, issuecomment_list, config):
+  """Filter and paginate the IssueComment PBs for the given issue.
+
+  Unlike most pagination, this one starts at the end of the whole
+  list so it shows only the most recent comments. The user can use
+  the "Older" and "Newer" links to page through older comments.
+
+  Args:
+    mr: common info parsed from the HTTP request.
+    issue: Issue PB for the issue being viewed.
+    issuecomment_list: list of IssueComment PBs for the viewed issue,
+        the zeroth item in this list is the initial issue description.
+    config: ProjectIssueConfig for the project that contains this issue.
+
+  Returns:
+    A tuple (description, visible_comments, pagination), where description
+    is the IssueComment for the initial issue description, visible_comments
+    is a list of IssueComment PBs for the comments that should be displayed
+    on the current pagination page, and pagination is a VirtualPagination
+    object that keeps track of the Older and Newer links.
+  """
+  if not issuecomment_list:
+    return None, [], None
+
+  # The initial description is always shown and never paginated.
+  description = issuecomment_list[0]
+  comments = issuecomment_list[1:]
+  # Deleted comments are only visible to users who could delete them.
+  allowed_comments = []
+  restrictions = permissions.GetRestrictions(issue)
+  granted_perms = tracker_bizobj.GetGrantedPerms(
+      issue, mr.auth.effective_ids, config)
+  for c in comments:
+    can_delete = permissions.CanDelete(
+        mr.auth.user_id, mr.auth.effective_ids, mr.perms, c.deleted_by,
+        c.user_id, mr.project, restrictions, granted_perms=granted_perms)
+    if can_delete or not c.deleted_by:
+      allowed_comments.append(c)
+
+  pagination_url = '%s?id=%d' % (urls.ISSUE_DETAIL, issue.local_id)
+  # count_up=False makes the pagination run backwards from the newest
+  # comment; cstart/cnum keep these params distinct from issue-list ones.
+  pagination = paginate.VirtualPagination(
+      mr, len(allowed_comments),
+      framework_constants.DEFAULT_COMMENTS_PER_PAGE,
+      list_page_url=pagination_url,
+      count_up=False, start_param='cstart', num_param='cnum',
+      max_num=settings.max_comments_per_page)
+  # Hide the Older/Newer links entirely when every comment fits on one page.
+  if pagination.last == 1 and pagination.start == len(allowed_comments):
+    pagination.visible = ezt.boolean(False)
+  # In count-down mode, `last` and `start` are 1-based positions from the
+  # oldest comment, so this slice selects the currently visible window.
+  visible_comments = allowed_comments[
+      pagination.last - 1:pagination.start]
+
+  return description, visible_comments, pagination
diff --git a/appengine/monorail/tracker/issuereindex.py b/appengine/monorail/tracker/issuereindex.py
new file mode 100644
index 0000000..5a47ed4
--- /dev/null
+++ b/appengine/monorail/tracker/issuereindex.py
@@ -0,0 +1,84 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement an admin utility to re-index issues in bulk."""
+
+import logging
+import urllib
+
+import settings
+from framework import permissions
+from framework import servlet
+from framework import urls
+from services import tracker_fulltext
+
+
+class IssueReindex(servlet.Servlet):
+ """IssueReindex shows a form to request that issues be indexed."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-reindex-page.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+
+ def AssertBasePermission(self, mr):
+ """Check whether the user has any permission to visit this page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ """
+ super(IssueReindex, self).AssertBasePermission(mr)
+ if not self.CheckPerm(mr, permissions.EDIT_PROJECT):
+ raise permissions.PermissionException(
+ 'You are not allowed to administer this project')
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page.
+
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ Dict of values used by EZT for rendering the page.
+ """
+ return {
+ # start and num are already passed to the template.
+ 'issue_tab_mode': None,
+ 'auto_submit': mr.auto_submit,
+ 'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
+ }
+
+ def ProcessFormData(self, mr, post_data):
+ """Process a posted issue reindex form.
+
+ Args:
+ mr: commonly used info parsed from the request.
+ post_data: HTML form data from the request.
+
+ Returns:
+ String URL to redirect the user to after processing. The URL will contain
+ a new start that is auto-incremented using the specified num value.
+ """
+ start = max(0, int(post_data['start']))
+ num = max(0, min(settings.max_artifact_search_results_per_page,
+ int(post_data['num'])))
+
+ issues = self.services.issue.GetIssuesByLocalIDs(
+ mr.cnxn, mr.project_id, range(start, start + num))
+ logging.info('got %d issues to index', len(issues))
+ if issues:
+ tracker_fulltext.IndexIssues(
+ mr.cnxn, issues, self.services.user, self.services.issue,
+ self.services.config)
+
+ # Make the browser keep submitting the form, if the user wants that,
+ # and we have not run out of issues to process.
+ auto_submit = issues and ('auto_submit' in post_data)
+
+ query_map = {
+ 'start': start + num, # auto-increment start.
+ 'num': num,
+ 'auto_submit': bool(auto_submit),
+ }
+ return '/p/%s%s?%s' % (mr.project_name, urls.ISSUE_REINDEX,
+ urllib.urlencode(query_map))
diff --git a/appengine/monorail/tracker/issuetips.py b/appengine/monorail/tracker/issuetips.py
new file mode 100644
index 0000000..32129c1
--- /dev/null
+++ b/appengine/monorail/tracker/issuetips.py
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""A class to render a page of issue tracker search tips."""
+
+import logging
+
+from framework import servlet
+from framework import permissions
+
+
+class IssueSearchTips(servlet.Servlet):
+ """IssueSearchTips on-line help on how to use issue search."""
+
+ _PAGE_TEMPLATE = 'tracker/issue-search-tips.ezt'
+ _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES
+
+ def GatherPageData(self, mr):
+ """Build up a dictionary of data values to use when rendering the page."""
+
+ return {
+ 'issue_tab_mode': 'issueSearchTips',
+ 'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
+ }
diff --git a/appengine/monorail/tracker/spam.py b/appengine/monorail/tracker/spam.py
new file mode 100644
index 0000000..a1d6b82
--- /dev/null
+++ b/appengine/monorail/tracker/spam.py
@@ -0,0 +1,155 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that implement spam flagging features.
+"""
+
+import httplib
+import logging
+
+from framework import actionlimit
+from framework import framework_helpers
+from framework import paginate
+from framework import permissions
+from framework import urls
+from framework import servlet
+from framework import template_helpers
+from framework import xsrf
+from tracker import spam_helpers
+
+class FlagSpamForm(servlet.Servlet):
+ """Flag or un-flag the specified issue/comment for the logged in user."""
+
+ _CAPTCHA_ACTION_TYPES = [actionlimit.FLAG_SPAM]
+
+ def ProcessFormData(self, mr, post_data):
+ """Process the flagging request.
+ Args:
+ mr: commonly used info parsed from the request.
+
+ Returns:
+ A redirect URL to either the original issue page or to issue list.
+ """
+ comment_id = post_data.get('comment_id', 0) or None
+
+ flagged_spam = post_data['spam'] == 'true'
+
+ flag_count = 1
+ if mr.local_id_list is not None:
+ flag_count = len(mr.local_id_list)
+
+ self.CountRateLimitedActions(mr, {actionlimit.FLAG_SPAM: flag_count})
+ # Has the side effect of checking soft limits and returning an error page
+ # when the user hits the limit.
+ self.GatherCaptchaData(mr)
+
+ # Check perms here for both the single Issue and Comment case.
+ if mr.local_id is not None:
+ issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, mr.project_id, mr.local_id)
+ perms = self.MakePagePerms(
+ mr, issue, permissions.FLAG_SPAM, permissions.VERDICT_SPAM)
+ if not perms.FlagSpam:
+ logging.error('User %d not allowed to flag %d/%r as spam.' % (
+ mr.auth.user_id, mr.local_id, comment_id))
+ raise permissions.PermissionException(
+ 'User lacks permission to flag spam')
+
+ # TODO: Check for exceeding the max number of flags, issue verdict then too.
+
+ issue_list = []
+ # Flag a single comment.
+ if comment_id is not None:
+ comment = self.services.issue.GetComment(mr.cnxn, comment_id)
+ print 'flagged_spam: %s' % flagged_spam
+ if perms.VerdictSpam:
+ self.services.spam.RecordManualCommentVerdict(mr.cnxn,
+ self.services.issue, self.services.user, comment_id,
+ int(post_data['sequence_num']), mr.auth.user_id, flagged_spam)
+
+ self.services.spam.FlagComment(mr.cnxn, issue.issue_id, comment.id,
+ comment.user_id, mr.auth.user_id, flagged_spam)
+
+ elif mr.local_id is not None:
+ issue_list = [issue]
+ elif mr.local_id_list is not None:
+ issue_list = self.services.issue.GetIssuesByLocalIDs(
+ mr.cnxn, mr.project_id, mr.local_id_list)
+ else:
+ self.response.status = httplib.BAD_REQUEST
+ return
+
+ flag_issues = []
+ verdict_issues = []
+ for issue in issue_list:
+ perms = self.MakePagePerms(mr, issue, permissions.FLAG_SPAM,
+ permissions.VERDICT_SPAM)
+ if perms.VerdictSpam:
+ verdict_issues.append(issue)
+ if perms.FlagSpam:
+ flag_issues.append(issue)
+
+ if len(verdict_issues) > 0:
+ self.services.spam.RecordManualIssueVerdicts(mr.cnxn, self.services.issue,
+ verdict_issues, mr.auth.user_id, flagged_spam)
+
+ if len(flag_issues) > 0:
+ self.services.spam.FlagIssues(mr.cnxn, self.services.issue, flag_issues,
+ mr.auth.user_id, flagged_spam)
+
+ # TODO(seanmccullough): Make this an ajax request instead of a redirect.
+ if mr.local_id_list is not None:
+ return framework_helpers.FormatAbsoluteURL(mr, urls.ISSUE_LIST)
+ else:
+ return framework_helpers.FormatAbsoluteURL(
+ mr, urls.ISSUE_DETAIL, id=mr.local_id)
+
+
+class ModerationQueue(servlet.Servlet):
+  """Display and process the spam moderation queue for a project."""
+
+  _PAGE_TEMPLATE = 'tracker/spam-moderation-queue.ezt'
+
+  def ProcessFormData(self, mr, post_data):
+    """Record manual spam verdicts for the issues checked in the form.
+
+    Args:
+      mr: commonly used info parsed from the request.
+      post_data: HTML form data from the request.
+
+    Returns:
+      String URL of the moderation queue page, to redirect back to.
+
+    Raises:
+      permissions.PermissionException: if the user may not moderate spam.
+    """
+    if not self.CheckPerm(mr, permissions.MODERATE_SPAM):
+      raise permissions.PermissionException()
+
+    issue_local_ids = [int(iid) for iid in post_data.getall("issue_local_id")]
+    # The form has one submit button per verdict; its presence decides
+    # whether the checked issues are marked as spam or as ham.
+    mark_spam = "mark_spam" in post_data
+
+    issues = self.services.issue.GetIssuesByLocalIDs(mr.cnxn,
+        mr.project.project_id, issue_local_ids)
+
+    self.services.spam.RecordManualIssueVerdicts(mr.cnxn,
+        self.services.issue, issues, mr.auth.user_id, mark_spam)
+
+    return framework_helpers.FormatAbsoluteURL(mr, urls.SPAM_MODERATION_QUEUE)
+
+  def GatherPageData(self, mr):
+    """Build up a dictionary of data values to use when rendering the page.
+
+    Args:
+      mr: commonly used info parsed from the request.
+
+    Returns:
+      Dict of values used by EZT for rendering the page.
+
+    Raises:
+      permissions.PermissionException: if the user may not moderate spam.
+    """
+    if not self.CheckPerm(mr, permissions.MODERATE_SPAM):
+      raise permissions.PermissionException()
+
+    page_perms = self.MakePagePerms(
+        mr, None, permissions.MODERATE_SPAM,
+        permissions.EDIT_ISSUE, permissions.CREATE_ISSUE,
+        permissions.SET_STAR)
+
+    queue_items, total_count = self.services.spam.GetModerationQueue(mr.cnxn,
+        self.services.issue, mr.project.project_id, mr.start, mr.num)
+
+    decorated_queue = spam_helpers.DecorateModerationQueue(mr.cnxn,
+        self.services.issue, self.services.spam, self.services.user,
+        queue_items)
+
+    # The item list is passed empty because the (already paginated) items
+    # come from GetModerationQueue above; only total_count matters here.
+    p = paginate.ArtifactPagination(mr, [], mr.num, urls.SPAM_MODERATION_QUEUE,
+        total_count)
+
+    return {
+        'spam_queue': decorated_queue,
+        'projectname': mr.project.project_name,
+        'pagination': p,
+        'page_perms': page_perms,
+        'moderate_spam_token': xsrf.GenerateToken(
+            mr.auth.user_id, '/p/%s%s.do' % (
+                mr.project_name, urls.SPAM_MODERATION_QUEUE)),
+        }
diff --git a/appengine/monorail/tracker/spam_helpers.py b/appengine/monorail/tracker/spam_helpers.py
new file mode 100644
index 0000000..e6dfffd
--- /dev/null
+++ b/appengine/monorail/tracker/spam_helpers.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Set of helpers for constructing spam-related pages.
+"""
+from framework import template_helpers
+from third_party import ezt
+
+from datetime import datetime
+
+def DecorateModerationQueue(
+ cnxn, issue_service, spam_service, user_service, moderation_items):
+ issue_ids = [item.issue_id for item in moderation_items]
+ issues = issue_service.GetIssues(cnxn, issue_ids)
+ issue_map = {}
+ for issue in issues:
+ issue_map[issue.issue_id] = issue
+
+ flag_counts = spam_service.LookUpFlagCounts(cnxn, issue_ids)
+
+ reporter_ids = [issue.reporter_id for issue in issues]
+ reporters = user_service.GetUsersByIDs(cnxn, reporter_ids)
+ comments = issue_service.GetCommentsForIssues(cnxn, issue_ids)
+
+ items = []
+ for item in moderation_items:
+ issue=issue_map[item.issue_id]
+ first_comment = comments.get(item.issue_id, ["[Empty]"])[0]
+
+ items.append(template_helpers.EZTItem(
+ issue=issue,
+ summary=template_helpers.FitUnsafeText(issue.summary, 80),
+ comment_text=template_helpers.FitUnsafeText(first_comment.content, 80),
+ reporter=reporters[issue.reporter_id],
+ flag_count=flag_counts.get(issue.issue_id, 0),
+ is_spam=ezt.boolean(item.is_spam),
+ verdict_time=item.verdict_time,
+ classifier_confidence=item.classifier_confidence,
+ reason=item.reason,
+ ))
+
+ return items
diff --git a/appengine/monorail/tracker/tablecell.py b/appengine/monorail/tracker/tablecell.py
new file mode 100644
index 0000000..ad84a89
--- /dev/null
+++ b/appengine/monorail/tracker/tablecell.py
@@ -0,0 +1,422 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Classes that generate value cells in the issue list table."""
+
+import logging
+import time
+from third_party import ezt
+
+from framework import table_view_helpers
+from framework import timestr
+from tracker import tracker_bizobj
+
+# pylint: disable=unused-argument
+
+
+class TableCellID(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue IDs."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    # Local issue IDs are rendered as strings in an ID-typed cell.
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ID, [str(issue.local_id)])
+
+
+class TableCellStatus(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue status values."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    values = []
+    derived_values = []
+    # Explicitly-set status and rule-derived status are passed separately.
+    if issue.status:
+      values = [issue.status]
+    if issue.derived_status:
+      derived_values = [issue.derived_status]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values,
+        derived_values=derived_values)
+
+
+class TableCellOwner(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue owner name."""
+
+  # Make instances of this class render with whitespace:nowrap.
+  NOWRAP = ezt.boolean(True)
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    values = []
+    derived_values = []
+    # A falsy owner_id / derived_owner_id means unset; skip the lookup.
+    if issue.owner_id:
+      values = [users_by_id[issue.owner_id].display_name]
+    if issue.derived_owner_id:
+      derived_values = [users_by_id[issue.derived_owner_id].display_name]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values,
+        derived_values=derived_values)
+
+
+class TableCellReporter(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue reporter name."""
+
+  # Make instances of this class render with whitespace:nowrap.
+  NOWRAP = ezt.boolean(True)
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    try:
+      values = [users_by_id[issue.reporter_id].display_name]
+    except KeyError:
+      # Tolerate an unresolvable reporter account rather than failing
+      # the whole issue list.
+      logging.info('issue reporter %r not found', issue.reporter_id)
+      values = ['deleted?']
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values)
+
+
+class TableCellCc(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue Cc user names."""
+
+  def __init__(
+      self, issue, _col, users_by_id, _non_col_labels,
+      _label_values, _related_issues, _config):
+    # NOTE(review): unlike TableCellReporter, a cc_id missing from
+    # users_by_id raises KeyError here — presumably callers prefetch all
+    # referenced users; confirm.
+    values = [users_by_id[cc_id].display_name
+              for cc_id in issue.cc_ids]
+
+    derived_values = [users_by_id[cc_id].display_name
+                      for cc_id in issue.derived_cc_ids]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values,
+        derived_values=derived_values)
+
+
+class TableCellAttachments(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue attachment count."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    # Counts are numeric, so right-align them in the table.
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, [issue.attachment_count],
+        align='right')
+
+
+class TableCellOpened(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue opened date."""
+
+  # Make instances of this class render with whitespace:nowrap.
+  NOWRAP = ezt.boolean(True)
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    # Prefer a relative date; FormatRelativeDate(recent_only=True) yields
+    # a falsy result for older timestamps, then use an absolute date.
+    date_str = timestr.FormatRelativeDate(
+        issue.opened_timestamp, recent_only=True)
+    if not date_str:
+      date_str = timestr.FormatAbsoluteDate(issue.opened_timestamp)
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, [date_str])
+
+
+class TableCellClosed(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue closed date."""
+
+  # Make instances of this class render with whitespace:nowrap.
+  NOWRAP = ezt.boolean(True)
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    values = []
+    # closed_timestamp may be unset (falsy); leave the cell empty then.
+    if issue.closed_timestamp:
+      date_str = timestr.FormatRelativeDate(
+          issue.closed_timestamp, recent_only=True)
+      if not date_str:
+        date_str = timestr.FormatAbsoluteDate(issue.closed_timestamp)
+      values = [date_str]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, values)
+
+
+class TableCellModified(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue modified date."""
+
+  # Make instances of this class render with whitespace:nowrap.
+  NOWRAP = ezt.boolean(True)
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    values = []
+    # Same relative-then-absolute formatting as TableCellClosed.
+    if issue.modified_timestamp:
+      date_str = timestr.FormatRelativeDate(
+          issue.modified_timestamp, recent_only=True)
+      if not date_str:
+        date_str = timestr.FormatAbsoluteDate(issue.modified_timestamp)
+      values = [date_str]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, values)
+
+
+class TableCellBlockedOn(table_view_helpers.TableCell):
+  """TableCell subclass for listing issues the current issue is blocked on."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      related_issues, _config):
+    # Silently skip referenced issue IDs not prefetched into related_issues.
+    ref_issues = [related_issues[iid] for iid in issue.blocked_on_iids
+                  if iid in related_issues]
+    default_pn = issue.project_name
+    # TODO(jrobbins): in cross-project searches, leave default_pn = None.
+    values = [
+        tracker_bizobj.FormatIssueRef(
+            (ref_issue.project_name, ref_issue.local_id),
+            default_project_name=default_pn)
+        for ref_issue in ref_issues]
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values)
+
+
+class TableCellBlocking(table_view_helpers.TableCell):
+  """TableCell subclass for listing issues the current issue is blocking."""
+
+  # Same logic as TableCellBlockedOn, but driven by blocking_iids.
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      related_issues, _config):
+    ref_issues = [related_issues[iid] for iid in issue.blocking_iids
+                  if iid in related_issues]
+    default_pn = issue.project_name
+    # TODO(jrobbins): in cross-project searches, leave default_pn = None.
+    values = [
+        tracker_bizobj.FormatIssueRef(
+            (ref_issue.project_name, ref_issue.local_id),
+            default_project_name=default_pn)
+        for ref_issue in ref_issues]
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values)
+
+
+class TableCellBlocked(table_view_helpers.TableCell):
+  """TableCell subclass for showing whether an issue is blocked."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related_issues, _config):
+    # Yes/No summary: blocked means the issue has any blocked-on issues.
+    if issue.blocked_on_iids:
+      value = 'Yes'
+    else:
+      value = 'No'
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, [value])
+
+
+class TableCellMergedInto(table_view_helpers.TableCell):
+  """TableCell subclass for showing the issue this issue was merged into."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      related_issues, _config):
+    if issue.merged_into:
+      ref_issue = related_issues[issue.merged_into]
+      ref = ref_issue.project_name, ref_issue.local_id
+      default_pn = issue.project_name
+      # TODO(jrobbins): in cross-project searches, leave default_pn = None.
+      values = [
+          tracker_bizobj.FormatIssueRef(ref, default_project_name=default_pn)]
+    else: # Note: None means not merged into any issue.
+      values = []
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values)
+
+
+class TableCellComponent(table_view_helpers.TableCell):
+  """TableCell subclass for showing components."""
+
+  def __init__(
+      self, issue, _col, _users_by_id, _non_col_labels,
+      _label_values, _related_issues, config):
+    explicit_paths = []
+    for component_id in issue.component_ids:
+      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
+      # FindComponentDefByID may return a falsy result; skip those IDs.
+      if cd:
+        explicit_paths.append(cd.path)
+
+    derived_paths = []
+    for component_id in issue.derived_component_ids:
+      cd = tracker_bizobj.FindComponentDefByID(component_id, config)
+      if cd:
+        derived_paths.append(cd.path)
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, explicit_paths,
+        derived_values=derived_paths)
+
+
+# This maps column names to factories/constructors that make table cells.
+# Subclasses can override this mapping, so any additions to this mapping
+# should also be added to subclasses.
+CELL_FACTORIES = {
+    'id': TableCellID,
+    'project': table_view_helpers.TableCellProject,
+    'component': TableCellComponent,
+    'summary': table_view_helpers.TableCellSummary,
+    'status': TableCellStatus,
+    'owner': TableCellOwner,
+    'reporter': TableCellReporter,
+    'cc': TableCellCc,
+    'stars': table_view_helpers.TableCellStars,
+    'attachments': TableCellAttachments,
+    'opened': TableCellOpened,
+    'closed': TableCellClosed,
+    'modified': TableCellModified,
+    'blockedon': TableCellBlockedOn,
+    'blocking': TableCellBlocking,
+    'blocked': TableCellBlocked,
+    'mergedinto': TableCellMergedInto,
+    }
+
+
+# Time format that spreadsheets seem to understand.
+# E.g.: "May 19, 2008 13:30:23". Tested with MS Excel 2003,
+# OpenOffice.org, NeoOffice, and Google Spreadsheets.
+CSV_DATE_TIME_FMT = '%b %d, %Y %H:%M:%S'
+
+
+def TimeStringForCSV(timestamp):
+  """Return a UTC timestamp in a format that spreadsheets understand."""
+  return time.strftime(CSV_DATE_TIME_FMT, time.gmtime(timestamp))
+
+
+class TableCellSummaryCSV(table_view_helpers.TableCell):
+  """TableCell subclass for showing issue summaries escaped for CSV."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    # CSV escapes embedded double-quotes by doubling them.
+    escaped_summary = issue.summary.replace('"', '""')
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_SUMMARY, [escaped_summary],
+        non_column_labels=non_col_labels)
+
+
+class TableCellAllLabels(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing all labels on an issue."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    values = []
+    derived_values = []
+    # Copy the label lists so the cell does not alias issue state.
+    if issue.labels:
+      values = issue.labels[:]
+    if issue.derived_labels:
+      derived_values = issue.derived_labels[:]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_ATTR, values,
+        derived_values=derived_values)
+
+
+class TableCellOpenedCSV(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue opened date."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    # Spreadsheet-friendly formatting; see TimeStringForCSV.
+    date_str = TimeStringForCSV(issue.opened_timestamp)
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, [date_str])
+
+
+class TableCellOpenedTimestamp(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue opened timestamp."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    # Raw numeric timestamp, not formatted for display.
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE,
+        [issue.opened_timestamp])
+
+
+class TableCellModifiedCSV(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue modified date."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    values = []
+    # An unset (falsy) modified_timestamp leaves the cell empty.
+    if issue.modified_timestamp:
+      values = [TimeStringForCSV(issue.modified_timestamp)]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, values)
+
+
+class TableCellModifiedTimestamp(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue modified timestamp."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE,
+        [issue.modified_timestamp])
+
+
+class TableCellClosedCSV(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue closed date."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    values = []
+    # An unset (falsy) closed_timestamp leaves the cell empty.
+    if issue.closed_timestamp:
+      values = [TimeStringForCSV(issue.closed_timestamp)]
+
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE, values)
+
+
+class TableCellClosedTimestamp(table_view_helpers.TableCell):
+  """TableCell subclass specifically for showing issue closed timestamp."""
+
+  def __init__(
+      self, issue, col, users_by_id, non_col_labels, label_values,
+      _related, _config):
+    table_view_helpers.TableCell.__init__(
+        self, table_view_helpers.CELL_TYPE_UNFILTERABLE,
+        [issue.closed_timestamp])
+
+
+# Maps column names to factories/constructors that make table cells.
+# Uses the defaults in CELL_FACTORIES above but changes the factory for the
+# summary cell to properly escape the data for CSV files.
+CSV_CELL_FACTORIES = CELL_FACTORIES.copy()
+CSV_CELL_FACTORIES.update({
+    'summary': TableCellSummaryCSV,
+    'alllabels': TableCellAllLabels,
+    'opened': TableCellOpenedCSV,
+    'openedtimestamp': TableCellOpenedTimestamp,
+    'closed': TableCellClosedCSV,
+    'closedtimestamp': TableCellClosedTimestamp,
+    'modified': TableCellModifiedCSV,
+    'modifiedtimestamp': TableCellModifiedTimestamp,
+    })
diff --git a/appengine/monorail/tracker/test/__init__.py b/appengine/monorail/tracker/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/monorail/tracker/test/__init__.py
diff --git a/appengine/monorail/tracker/test/component_helpers_test.py b/appengine/monorail/tracker/test/component_helpers_test.py
new file mode 100644
index 0000000..9d15428
--- /dev/null
+++ b/appengine/monorail/tracker/test/component_helpers_test.py
@@ -0,0 +1,109 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the component_helpers module."""
+
+import unittest
+
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from tracker import component_helpers
+from tracker import tracker_bizobj
+
+
+class ComponentHelpersTest(unittest.TestCase):
+
+  def setUp(self):
+    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+    # Component hierarchy: FrontEnd with child FrontEnd>Splash, plus a
+    # separate BackEnd.  The [111L]-style lists are component cc user IDs
+    # (exercised by testGetComponentCcIDs); IDs are Python 2 longs.
+    self.cd1 = tracker_bizobj.MakeComponentDef(
+        1, 789, 'FrontEnd', 'doc', False, [], [111L], 0, 0)
+    self.cd2 = tracker_bizobj.MakeComponentDef(
+        2, 789, 'FrontEnd>Splash', 'doc', False, [], [222L], 0, 0)
+    self.cd3 = tracker_bizobj.MakeComponentDef(
+        3, 789, 'BackEnd', 'doc', True, [], [111L, 333L], 0, 0)
+    self.config.component_defs = [self.cd1, self.cd2, self.cd3]
+    self.services = service_manager.Services(
+        user=fake.UserService())
+    self.services.user.TestAddUser('a@example.com', 111L)
+    self.services.user.TestAddUser('b@example.com', 222L)
+    self.services.user.TestAddUser('c@example.com', 333L)
+    self.mr = fake.MonorailRequest()
+    self.mr.cnxn = fake.MonorailConnection()
+
+  def testParseComponentRequest_Empty(self):
+    post_data = fake.PostData(admins=[''], cc=[''])
+    parsed = component_helpers.ParseComponentRequest(
+        self.mr, post_data, self.services.user)
+    self.assertEqual('', parsed.leaf_name)
+    self.assertEqual('', parsed.docstring)
+    self.assertEqual([], parsed.admin_usernames)
+    self.assertEqual([], parsed.cc_usernames)
+    self.assertEqual([], parsed.admin_ids)
+    self.assertEqual([], parsed.cc_ids)
+    self.assertFalse(self.mr.errors.AnyErrors())
+
+  def testParseComponentRequest_Normal(self):
+    post_data = fake.PostData(
+        leaf_name=['FrontEnd'],
+        docstring=['The server-side app that serves pages'],
+        deprecated=[False],
+        admins=['a@example.com'],
+        cc=['b@example.com, c@example.com'])
+    parsed = component_helpers.ParseComponentRequest(
+        self.mr, post_data, self.services.user)
+    self.assertEqual('FrontEnd', parsed.leaf_name)
+    self.assertEqual('The server-side app that serves pages', parsed.docstring)
+    self.assertEqual(['a@example.com'], parsed.admin_usernames)
+    self.assertEqual(['b@example.com', 'c@example.com'], parsed.cc_usernames)
+    self.assertEqual([111L], parsed.admin_ids)
+    self.assertEqual([222L, 333L], parsed.cc_ids)
+    self.assertFalse(self.mr.errors.AnyErrors())
+
+  def testParseComponentRequest_InvalidUser(self):
+    # An unknown username is kept in the parsed usernames but yields no
+    # user ID, and is reported via mr.errors.member_admins.
+    post_data = fake.PostData(
+        leaf_name=['FrontEnd'],
+        docstring=['The server-side app that serves pages'],
+        deprecated=[False],
+        admins=['a@example.com, invalid_user'],
+        cc=['b@example.com, c@example.com'])
+    parsed = component_helpers.ParseComponentRequest(
+        self.mr, post_data, self.services.user)
+    self.assertEqual('FrontEnd', parsed.leaf_name)
+    self.assertEqual('The server-side app that serves pages', parsed.docstring)
+    self.assertEqual(['a@example.com', 'invalid_user'], parsed.admin_usernames)
+    self.assertEqual(['b@example.com', 'c@example.com'], parsed.cc_usernames)
+    self.assertEqual([111L], parsed.admin_ids)
+    self.assertEqual([222L, 333L], parsed.cc_ids)
+    self.assertTrue(self.mr.errors.AnyErrors())
+    self.assertEqual('invalid_user unrecognized', self.mr.errors.member_admins)
+
+  def testGetComponentCcIDs(self):
+    issue = tracker_pb2.Issue()
+    issues_components_cc_ids = component_helpers.GetComponentCcIDs(
+        issue, self.config)
+    self.assertEqual(set(), issues_components_cc_ids)
+
+    issue.component_ids = [1, 2]
+    issues_components_cc_ids = component_helpers.GetComponentCcIDs(
+        issue, self.config)
+    self.assertEqual({111L, 222L}, issues_components_cc_ids)
+
+  def testGetCcIDsForComponentAndAncestors(self):
+    components_cc_ids = component_helpers.GetCcIDsForComponentAndAncestors(
+        self.config, self.cd1)
+    self.assertEqual({111L}, components_cc_ids)
+
+    # cd2 (FrontEnd>Splash) also inherits ccs from its ancestor FrontEnd.
+    components_cc_ids = component_helpers.GetCcIDsForComponentAndAncestors(
+        self.config, self.cd2)
+    self.assertEqual({111L, 222L}, components_cc_ids)
+
+    components_cc_ids = component_helpers.GetCcIDsForComponentAndAncestors(
+        self.config, self.cd3)
+    self.assertEqual({111L, 333L}, components_cc_ids)
diff --git a/appengine/monorail/tracker/test/componentcreate_test.py b/appengine/monorail/tracker/test/componentcreate_test.py
new file mode 100644
index 0000000..54809f1
--- /dev/null
+++ b/appengine/monorail/tracker/test/componentcreate_test.py
@@ -0,0 +1,177 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the componentcreate servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import componentcreate
+from tracker import tracker_bizobj
+
+import webapp2
+
+
+class ComponentCreateTest(unittest.TestCase):
+
+  def setUp(self):
+    self.services = service_manager.Services(
+        user=fake.UserService(),
+        config=fake.ConfigService(),
+        project=fake.ProjectService())
+    self.servlet = componentcreate.ComponentCreate(
+        'req', 'res', services=self.services)
+    self.project = self.services.project.TestAddProject('proj')
+    self.mr = testing_helpers.MakeMonorailRequest(
+        project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+    # b@example.com is user 122L below, so newly created components are
+    # expected to record creator_id == 122L (testProcessFormData_Normal).
+    self.mr.auth.email = 'b@example.com'
+    self.config = self.services.config.GetProjectConfig(
+        'fake cnxn', self.project.project_id)
+    self.services.config.StoreConfig('fake cnxn', self.config)
+    self.cd = tracker_bizobj.MakeComponentDef(
+        1, self.project.project_id, 'BackEnd', 'doc', False, [], [111L], 0,
+        122L)
+    self.config.component_defs = [self.cd]
+    self.services.user.TestAddUser('a@example.com', 111L)
+    self.services.user.TestAddUser('b@example.com', 122L)
+
+  def testAssertBasePermission(self):
+    # Anon users can never do it
+    self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+    self.assertRaises(
+        permissions.PermissionException,
+        self.servlet.AssertBasePermission, self.mr)
+
+    # Project owner can do it.
+    self.mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+    self.servlet.AssertBasePermission(self.mr)
+
+    # Project member cannot do it
+    self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+    self.assertRaises(
+        permissions.PermissionException,
+        self.servlet.AssertBasePermission, self.mr)
+    self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+    self.assertRaises(
+        permissions.PermissionException,
+        self.servlet.AssertBasePermission, self.mr)
+
+  def testGatherPageData_CreatingAtTopLevel(self):
+    page_data = self.servlet.GatherPageData(self.mr)
+    self.assertEqual(self.servlet.PROCESS_TAB_COMPONENTS,
+                     page_data['admin_tab_mode'])
+    self.assertIsNone(page_data['parent_path'])
+
+  def testGatherPageData_CreatingASubComponent(self):
+    self.mr.component_path = 'BackEnd'
+    page_data = self.servlet.GatherPageData(self.mr)
+    self.assertEqual(self.servlet.PROCESS_TAB_COMPONENTS,
+                     page_data['admin_tab_mode'])
+    self.assertEqual('BackEnd', page_data['parent_path'])
+
+  def testProcessFormData_NotFound(self):
+    # Creating under a nonexistent parent component raises a 404-style
+    # HTTPException.
+    post_data = fake.PostData(
+        parent_path=['Monitoring'],
+        leaf_name=['Rules'],
+        docstring=['Detecting outages'],
+        deprecated=[False],
+        admins=[''],
+        cc=[''])
+    self.assertRaises(
+        webapp2.HTTPException,
+        self.servlet.ProcessFormData, self.mr, post_data)
+
+  def testProcessFormData_Normal(self):
+    post_data = fake.PostData(
+        parent_path=['BackEnd'],
+        leaf_name=['DB'],
+        docstring=['A database'],
+        deprecated=[False],
+        admins=[''],
+        cc=[''])
+    url = self.servlet.ProcessFormData(self.mr, post_data)
+    self.assertTrue('/adminComponents?saved=1&' in url)
+    config = self.services.config.GetProjectConfig(
+        self.mr.cnxn, self.mr.project_id)
+
+    cd = tracker_bizobj.FindComponentDef('BackEnd>DB', config)
+    self.assertEqual('BackEnd>DB', cd.path)
+    self.assertEqual('A database', cd.docstring)
+    self.assertEqual([], cd.admin_ids)
+    self.assertEqual([], cd.cc_ids)
+    self.assertTrue(cd.created > 0)
+    self.assertEqual(122L, cd.creator_id)
+
+class CheckComponentNameJSONTest(unittest.TestCase):
+
+  def setUp(self):
+    self.services = service_manager.Services(
+        config=fake.ConfigService(),
+        project=fake.ProjectService())
+    self.servlet = componentcreate.CheckComponentNameJSON(
+        'req', 'res', services=self.services)
+    self.project = self.services.project.TestAddProject('proj')
+    self.config = self.services.config.GetProjectConfig(
+        'fake cnxn', self.project.project_id)
+    # 'BackEnd' exists up front so name-collision detection can be tested.
+    self.cd = tracker_bizobj.MakeComponentDef(
+        1, self.project.project_id, 'BackEnd', 'doc', False, [], [111L], 0,
+        122L)
+    self.config.component_defs = [self.cd]
+    self.services.config.StoreConfig('fake cnxn', self.config)
+
+  def testHandleRequest_NewComponent(self):
+    mr = testing_helpers.MakeMonorailRequest(
+        project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+        path='/p/proj/components/checkname?leaf_name=DB')
+    page_data = self.servlet.HandleRequest(mr)
+    self.assertItemsEqual(['error_message'], page_data.keys())
+    self.assertIsNone(page_data['error_message'])
+
+  def testHandleRequest_NameAlreadyUsed(self):
+    mr = testing_helpers.MakeMonorailRequest(
+        project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+        path='/p/proj/components/checkname?leaf_name=BackEnd')
+    page_data = self.servlet.HandleRequest(mr)
+    self.assertItemsEqual(['error_message'], page_data.keys())
+    self.assertEqual('That name is already in use.',
+                     page_data['error_message'])
+
+
+class ComponentCreateMethodsTest(unittest.TestCase):
+
+  def setUp(self):
+    self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+    cd1 = tracker_bizobj.MakeComponentDef(
+        1, 789, 'BackEnd', 'doc', False, [], [111L], 0, 122L)
+    # cd2 is created with True (presumably the deprecated flag); its name
+    # must still count as in use (testLeafNameErrorMessage_AlreadyInUse).
+    cd2 = tracker_bizobj.MakeComponentDef(
+        2, 789, 'BackEnd>DB', 'doc', True, [], [111L], 0, 122L)
+    self.config.component_defs = [cd1, cd2]
+
+  def testLeafNameErrorMessage_Invalid(self):
+    self.assertEqual(
+        'Invalid component name',
+        componentcreate.LeafNameErrorMessage('', 'bad name', self.config))
+
+  def testLeafNameErrorMessage_AlreadyInUse(self):
+    self.assertEqual(
+        'That name is already in use.',
+        componentcreate.LeafNameErrorMessage('', 'BackEnd', self.config))
+    self.assertEqual(
+        'That name is already in use.',
+        componentcreate.LeafNameErrorMessage('BackEnd', 'DB', self.config))
+
+  def testLeafNameErrorMessage_OK(self):
+    self.assertIsNone(
+        componentcreate.LeafNameErrorMessage('', 'FrontEnd', self.config))
+    self.assertIsNone(
+        componentcreate.LeafNameErrorMessage('BackEnd', 'Search', self.config))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/componentdetail_test.py b/appengine/monorail/tracker/test/componentdetail_test.py
new file mode 100644
index 0000000..f6f424e
--- /dev/null
+++ b/appengine/monorail/tracker/test/componentdetail_test.py
@@ -0,0 +1,316 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the componentdetail servlet."""
+
+import unittest
+
+import mox
+
+from features import filterrules_helpers
+from framework import permissions
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import componentdetail
+from tracker import tracker_bizobj
+
+import webapp2
+
+
+class ComponentDetailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ issue=fake.IssueService(),
+ config=fake.ConfigService(),
+ project=fake.ProjectService())
+ self.servlet = componentdetail.ComponentDetail(
+ 'req', 'res', services=self.services)
+ self.project = self.services.project.TestAddProject('proj')
+ self.mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.mr.auth.email = 'b@example.com'
+ self.config = self.services.config.GetProjectConfig(
+ 'fake cnxn', self.project.project_id)
+ self.services.config.StoreConfig('fake cnxn', self.config)
+ self.cd = tracker_bizobj.MakeComponentDef(
+ 1, self.project.project_id, 'BackEnd', 'doc', False, [], [111L], 100000,
+ 122L, 10000000, 133L)
+ self.config.component_defs = [self.cd]
+ self.services.user.TestAddUser('a@example.com', 111L)
+ self.services.user.TestAddUser('b@example.com', 122L)
+ self.services.user.TestAddUser('c@example.com', 133L)
+ self.mr.component_path = 'BackEnd'
+
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testGetComponentDef_NotFound(self):
+ self.mr.component_path = 'NeverHeardOfIt'
+ self.assertRaises(
+ webapp2.HTTPException,
+ self.servlet._GetComponentDef, self.mr)
+
+ def testGetComponentDef_Normal(self):
+ actual_config, actual_cd = self.servlet._GetComponentDef(self.mr)
+ self.assertEqual(self.config, actual_config)
+ self.assertEqual(self.cd, actual_cd)
+
+ def testAssertBasePermission_AnyoneCanView(self):
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+
+ def testAssertBasePermission_MembersOnly(self):
+ self.project.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ # The project members can view the component definition.
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ # Non-member is not allowed to view anything in the project.
+ self.mr.perms = permissions.EMPTY_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ def testGatherPageData_ReadWrite(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.PROCESS_TAB_COMPONENTS,
+ page_data['admin_tab_mode'])
+ self.assertTrue(page_data['allow_edit'])
+ self.assertEqual([], page_data['initial_admins'])
+ component_def_view = page_data['component_def']
+ self.assertEqual('BackEnd', component_def_view.path)
+
+ def testGatherPageData_ReadOnly(self):
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.PROCESS_TAB_COMPONENTS,
+ page_data['admin_tab_mode'])
+ self.assertFalse(page_data['allow_edit'])
+ self.assertFalse(page_data['allow_delete'])
+ self.assertEqual([], page_data['initial_admins'])
+ component_def_view = page_data['component_def']
+ self.assertEqual('BackEnd', component_def_view.path)
+
+ def testGatherPageData_ObscuredCreatorModifier(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+
+ self.assertEqual('b...@example.com', page_data['creator'].display_name)
+ self.assertEqual('/u/122/', page_data['creator'].profile_url)
+ self.assertEqual('Jan 1970', page_data['created'])
+ self.assertEqual('c...@example.com', page_data['modifier'].display_name)
+ self.assertEqual('/u/133/', page_data['modifier'].profile_url)
+ self.assertEqual('Apr 1970', page_data['modified'])
+
+ def testGatherPageData_VisibleCreatorModifierForAdmin(self):
+ self.mr.auth.user_pb.is_site_admin = True
+ page_data = self.servlet.GatherPageData(self.mr)
+
+ self.assertEqual('b@example.com', page_data['creator'].display_name)
+ self.assertEqual('/u/b@example.com/', page_data['creator'].profile_url)
+ self.assertEqual('Jan 1970', page_data['created'])
+ self.assertEqual('c@example.com', page_data['modifier'].display_name)
+ self.assertEqual('/u/c@example.com/', page_data['modifier'].profile_url)
+ self.assertEqual('Apr 1970', page_data['modified'])
+
+ def testGatherPageData_VisibleCreatorForSelf(self):
+ self.mr.auth.user_id = 122L
+ page_data = self.servlet.GatherPageData(self.mr)
+
+ self.assertEqual('b@example.com', page_data['creator'].display_name)
+ self.assertEqual('/u/b@example.com/', page_data['creator'].profile_url)
+ self.assertEqual('Jan 1970', page_data['created'])
+ # Modifier should still be obscured.
+ self.assertEqual('c...@example.com', page_data['modifier'].display_name)
+ self.assertEqual('/u/133/', page_data['modifier'].profile_url)
+ self.assertEqual('Apr 1970', page_data['modified'])
+
+ def testGatherPageData_VisibleCreatorModifierForUnobscuredEmail(self):
+ creator = self.services.user.GetUser(self.mr.cnxn, 122L)
+ creator.obscure_email = False
+ modifier = self.services.user.GetUser(self.mr.cnxn, 133L)
+ modifier.obscure_email = False
+ page_data = self.servlet.GatherPageData(self.mr)
+
+ self.assertEqual('b@example.com', page_data['creator'].display_name)
+ self.assertEqual('/u/b@example.com/', page_data['creator'].profile_url)
+ self.assertEqual('Jan 1970', page_data['created'])
+ self.assertEqual('c@example.com', page_data['modifier'].display_name)
+ self.assertEqual('/u/c@example.com/', page_data['modifier'].profile_url)
+ self.assertEqual('Apr 1970', page_data['modified'])
+
+ def testGatherPageData_WithSubComponents(self):
+ subcd = tracker_bizobj.MakeComponentDef(
+ 2, self.project.project_id, 'BackEnd>Worker', 'doc', False, [], [111L],
+ 0, 122L)
+ self.config.component_defs.append(subcd)
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertFalse(page_data['allow_delete'])
+ self.assertEqual([subcd], page_data['subcomponents'])
+
+ def testGatherPageData_WithTemplates(self):
+ self.services.config.component_ids_to_templates[self.cd.component_id] = [
+ 'template']
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertFalse(page_data['allow_delete'])
+ self.assertEqual(['template'], page_data['templates'])
+
+ def testProcessFormData_Permission(self):
+ """Only owners can edit components."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ mr.component_path = 'BackEnd'
+ post_data = fake.PostData(
+ name=['BackEnd'],
+ deletecomponent=['Submit'])
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, post_data)
+
+ self.servlet.ProcessFormData(self.mr, post_data)
+
+ def testProcessFormData_Delete(self):
+ post_data = fake.PostData(
+ name=['BackEnd'],
+ deletecomponent=['Submit'])
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue('/adminComponents?deleted=1&' in url)
+ self.assertIsNone(
+ tracker_bizobj.FindComponentDef('BackEnd', self.config))
+
+ def testProcessFormData_Delete_WithSubComponent(self):
+ subcd = tracker_bizobj.MakeComponentDef(
+ 2, self.project.project_id, 'BackEnd>Worker', 'doc', False, [], [111L],
+ 0, 122L)
+ self.config.component_defs.append(subcd)
+
+ post_data = fake.PostData(
+ name=['BackEnd'],
+ deletecomponent=['Submit'])
+ try:
+ self.servlet.ProcessFormData(self.mr, post_data)
+ self.fail('Expected permissions.PermissionException')
+ except permissions.PermissionException, e:
+ self.assertEquals('User tried to delete component that had subcomponents',
+ e.message)
+
+ def testProcessFormData_Edit(self):
+ post_data = fake.PostData(
+ leaf_name=['BackEnd'],
+ docstring=['This is where the magic happens'],
+ deprecated=[True],
+ admins=['a@example.com'],
+ cc=['a@example.com'])
+
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+
+ self.mox.VerifyAll()
+ self.assertTrue('/components/detail?component=BackEnd&saved=1&' in url)
+ config = self.services.config.GetProjectConfig(
+ self.mr.cnxn, self.mr.project_id)
+
+ cd = tracker_bizobj.FindComponentDef('BackEnd', config)
+ self.assertEqual('BackEnd', cd.path)
+ self.assertEqual(
+ 'This is where the magic happens',
+ cd.docstring)
+ self.assertEqual(True, cd.deprecated)
+ self.assertEqual([111L], cd.admin_ids)
+ self.assertEqual([111L], cd.cc_ids)
+
+ def testProcessDeleteComponent(self):
+ self.servlet._ProcessDeleteComponent(self.mr, self.cd)
+ self.assertIsNone(
+ tracker_bizobj.FindComponentDef('BackEnd', self.config))
+
+ def testProcessEditComponent(self):
+ post_data = fake.PostData(
+ leaf_name=['BackEnd'],
+ docstring=['This is where the magic happens'],
+ deprecated=[True],
+ admins=['a@example.com'],
+ cc=['a@example.com'])
+
+ self.servlet._ProcessEditComponent(
+ self.mr, post_data, self.config, self.cd)
+
+ self.mox.VerifyAll()
+ config = self.services.config.GetProjectConfig(
+ self.mr.cnxn, self.mr.project_id)
+ cd = tracker_bizobj.FindComponentDef('BackEnd', config)
+ self.assertEqual('BackEnd', cd.path)
+ self.assertEqual(
+ 'This is where the magic happens',
+ cd.docstring)
+ self.assertEqual(True, cd.deprecated)
+ self.assertEqual([111L], cd.admin_ids)
+ self.assertEqual([111L], cd.cc_ids)
+ # Assert that creator and created were not updated.
+ self.assertEqual(122L, cd.creator_id)
+ self.assertEqual(100000, cd.created)
+ # Assert that modifier and modified were updated.
+ self.assertEqual(122L, cd.modifier_id)
+ self.assertTrue(cd.modified > 10000000)
+
+ def testProcessEditComponent_RenameWithSubComponents(self):
+ subcd_1 = tracker_bizobj.MakeComponentDef(
+ 2, self.project.project_id, 'BackEnd>Worker1', 'doc', False, [], [111L],
+ 0, 125L, 3, 126L)
+ subcd_2 = tracker_bizobj.MakeComponentDef(
+ 3, self.project.project_id, 'BackEnd>Worker2', 'doc', False, [], [111L],
+ 0, 125L, 4, 127L)
+ self.config.component_defs.extend([subcd_1, subcd_2])
+
+ self.mox.StubOutWithMock(filterrules_helpers, 'RecomputeAllDerivedFields')
+ filterrules_helpers.RecomputeAllDerivedFields(
+ self.mr.cnxn, self.services, self.mr.project, self.config)
+ self.mox.ReplayAll()
+ post_data = fake.PostData(
+ leaf_name=['BackEnds'],
+ docstring=['This is where the magic happens'],
+ deprecated=[True],
+ admins=['a@example.com'],
+ cc=['a@example.com'])
+
+ self.servlet._ProcessEditComponent(
+ self.mr, post_data, self.config, self.cd)
+
+ self.mox.VerifyAll()
+ config = self.services.config.GetProjectConfig(
+ self.mr.cnxn, self.mr.project_id)
+ cd = tracker_bizobj.FindComponentDef('BackEnds', config)
+ self.assertEqual('BackEnds', cd.path)
+ subcd_1 = tracker_bizobj.FindComponentDef('BackEnds>Worker1', config)
+ self.assertEqual('BackEnds>Worker1', subcd_1.path)
+ # Assert that creator and modifier have not changed for subcd_1.
+ self.assertEqual(125L, subcd_1.creator_id)
+ self.assertEqual(0, subcd_1.created)
+ self.assertEqual(126L, subcd_1.modifier_id)
+ self.assertEqual(3, subcd_1.modified)
+
+ subcd_2 = tracker_bizobj.FindComponentDef('BackEnds>Worker2', config)
+ self.assertEqual('BackEnds>Worker2', subcd_2.path)
+ # Assert that creator and modifier have not changed for subcd_2.
+ self.assertEqual(125L, subcd_2.creator_id)
+ self.assertEqual(0, subcd_2.created)
+ self.assertEqual(127L, subcd_2.modifier_id)
+ self.assertEqual(4, subcd_2.modified)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/field_helpers_test.py b/appengine/monorail/tracker/test/field_helpers_test.py
new file mode 100644
index 0000000..41a23a2
--- /dev/null
+++ b/appengine/monorail/tracker/test/field_helpers_test.py
@@ -0,0 +1,322 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the field_helpers module."""
+
+import time
+import unittest
+
+from framework import template_helpers
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import field_helpers
+from tracker import tracker_bizobj
+
+
+class FieldHelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services = service_manager.Services(
+ user=fake.UserService())
+ self.mr = testing_helpers.MakeMonorailRequest(
+ project=fake.Project(), services=self.services)
+ self.mr.cnxn = fake.MonorailConnection()
+ self.errors = template_helpers.EZTError()
+
+ def testParseFieldDefRequest_Empty(self):
+ post_data = fake.PostData()
+ parsed = field_helpers.ParseFieldDefRequest(post_data, self.config)
+ self.assertEqual('', parsed.field_name)
+ self.assertEqual(None, parsed.field_type_str)
+ self.assertEqual(None, parsed.min_value)
+ self.assertEqual(None, parsed.max_value)
+ self.assertEqual(None, parsed.regex)
+ self.assertFalse(parsed.needs_member)
+ self.assertEqual('', parsed.needs_perm)
+ self.assertEqual('', parsed.grants_perm)
+ self.assertEqual(0, parsed.notify_on)
+ self.assertFalse(parsed.is_required)
+ self.assertFalse(parsed.is_multivalued)
+ self.assertEqual('', parsed.field_docstring)
+ self.assertEqual('', parsed.choices_text)
+ self.assertEqual('', parsed.applicable_type)
+ self.assertEqual('', parsed.applicable_predicate)
+ unchanged_labels = [
+ (label_def.label, label_def.label_docstring, False)
+ for label_def in self.config.well_known_labels]
+ self.assertEqual(unchanged_labels, parsed.revised_labels)
+
+ def testParseFieldDefRequest_Normal(self):
+ post_data = fake.PostData(
+ name=['somefield'],
+ field_type=['INT_TYPE'],
+ min_value=['11'],
+ max_value=['99'],
+ regex=['.*'],
+ needs_member=['Yes'],
+ needs_perm=['Commit'],
+ grants_perm=['View'],
+ notify_on=['any_comment'],
+ is_required=['Yes'],
+ is_multivalued=['Yes'],
+ docstring=['It is just some field'],
+ choices=['Hot = Lots of activity\nCold = Not much activity'],
+ applicable_type=['Defect'])
+ parsed = field_helpers.ParseFieldDefRequest(post_data, self.config)
+ self.assertEqual('somefield', parsed.field_name)
+ self.assertEqual('INT_TYPE', parsed.field_type_str)
+ self.assertEqual(11, parsed.min_value)
+ self.assertEqual(99, parsed.max_value)
+ self.assertEqual('.*', parsed.regex)
+ self.assertTrue(parsed.needs_member)
+ self.assertEqual('Commit', parsed.needs_perm)
+ self.assertEqual('View', parsed.grants_perm)
+ self.assertEqual(1, parsed.notify_on)
+ self.assertTrue(parsed.is_required)
+ self.assertTrue(parsed.is_multivalued)
+ self.assertEqual('It is just some field', parsed.field_docstring)
+ self.assertEqual('Hot = Lots of activity\nCold = Not much activity',
+ parsed.choices_text)
+ self.assertEqual('Defect', parsed.applicable_type)
+ self.assertEqual('', parsed.applicable_predicate)
+ unchanged_labels = [
+ (label_def.label, label_def.label_docstring, False)
+ for label_def in self.config.well_known_labels]
+ new_labels = [
+ ('somefield-Hot', 'Lots of activity', False),
+ ('somefield-Cold', 'Not much activity', False)]
+ self.assertEqual(unchanged_labels + new_labels, parsed.revised_labels)
+
+ def testParseChoicesIntoWellKnownLabels_NewFieldDef(self):
+ choices_text = 'Hot = Lots of activity\nCold = Not much activity'
+ field_name = 'somefield'
+ revised_labels = field_helpers._ParseChoicesIntoWellKnownLabels(
+ choices_text, field_name, self.config)
+ unchanged_labels = [
+ (label_def.label, label_def.label_docstring, False)
+ for label_def in self.config.well_known_labels]
+ new_labels = [
+ ('somefield-Hot', 'Lots of activity', False),
+ ('somefield-Cold', 'Not much activity', False)]
+ self.assertEqual(unchanged_labels + new_labels, revised_labels)
+
+ def testParseChoicesIntoWellKnownLabels_ConvertExistingLabel(self):
+ choices_text = 'High = Must be fixed\nMedium = Might slip'
+ field_name = 'Priority'
+ revised_labels = field_helpers._ParseChoicesIntoWellKnownLabels(
+ choices_text, field_name, self.config)
+ kept_labels = [
+ (label_def.label, label_def.label_docstring, False)
+ for label_def in self.config.well_known_labels
+ if not label_def.label.startswith('Priority-')]
+ new_labels = [
+ ('Priority-High', 'Must be fixed', False),
+ ('Priority-Medium', 'Might slip', False)]
+ self.maxDiff = None
+ self.assertEqual(kept_labels + new_labels, revised_labels)
+
+ def testShiftEnumFieldsIntoLabels_Empty(self):
+ labels = []
+ labels_remove = []
+ field_val_strs = {}
+ field_val_strs_remove = {}
+ field_helpers.ShiftEnumFieldsIntoLabels(
+ labels, labels_remove, field_val_strs, field_val_strs_remove,
+ self.config)
+ self.assertEqual([], labels)
+ self.assertEqual([], labels_remove)
+ self.assertEqual({}, field_val_strs)
+ self.assertEqual({}, field_val_strs_remove)
+
+ def testShiftEnumFieldsIntoLabels_NoOp(self):
+ labels = ['Security', 'Performance', 'Pri-1', 'M-2']
+ labels_remove = ['ReleaseBlock']
+ field_val_strs = {123: ['CPU']}
+ field_val_strs_remove = {234: ['Small']}
+ field_helpers.ShiftEnumFieldsIntoLabels(
+ labels, labels_remove, field_val_strs, field_val_strs_remove,
+ self.config)
+ self.assertEqual(['Security', 'Performance', 'Pri-1', 'M-2'], labels)
+ self.assertEqual(['ReleaseBlock'], labels_remove)
+ self.assertEqual({123: ['CPU']}, field_val_strs)
+ self.assertEqual({234: ['Small']}, field_val_strs_remove)
+
+ def testShiftEnumFieldsIntoLabels_FoundSomeEnumFields(self):
+ self.config.field_defs.append(
+ tracker_bizobj.MakeFieldDef(
+ 123, 789, 'Component', tracker_pb2.FieldTypes.ENUM_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'What HW part is affected?',
+ False))
+ self.config.field_defs.append(
+ tracker_bizobj.MakeFieldDef(
+ 234, 789, 'Size', tracker_pb2.FieldTypes.ENUM_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'How big is this work item?',
+ False))
+ labels = ['Security', 'Performance', 'Pri-1', 'M-2']
+ labels_remove = ['ReleaseBlock']
+ field_val_strs = {123: ['CPU']}
+ field_val_strs_remove = {234: ['Small']}
+ field_helpers.ShiftEnumFieldsIntoLabels(
+ labels, labels_remove, field_val_strs, field_val_strs_remove,
+ self.config)
+ self.assertEqual(
+ ['Security', 'Performance', 'Pri-1', 'M-2', 'Component-CPU'],
+ labels)
+ self.assertEqual(['ReleaseBlock', 'Size-Small'], labels_remove)
+ self.assertEqual({}, field_val_strs)
+ self.assertEqual({}, field_val_strs_remove)
+
+ def testParseOneFieldValue_IntType(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'Foo', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = field_helpers._ParseOneFieldValue(
+ self.mr.cnxn, self.services.user, fd, '8675309')
+ self.assertEqual(fv.field_id, 123)
+ self.assertEqual(fv.int_value, 8675309)
+
+ def testParseOneFieldValue_StrType(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'Foo', tracker_pb2.FieldTypes.STR_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = field_helpers._ParseOneFieldValue(
+ self.mr.cnxn, self.services.user, fd, '8675309')
+ self.assertEqual(fv.field_id, 123)
+ self.assertEqual(fv.str_value, '8675309')
+
+ def testParseOneFieldValue_UserType(self):
+ self.services.user.TestAddUser('user@example.com', 111L)
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'Foo', tracker_pb2.FieldTypes.USER_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = field_helpers._ParseOneFieldValue(
+ self.mr.cnxn, self.services.user, fd, 'user@example.com')
+ self.assertEqual(fv.field_id, 123)
+ self.assertEqual(fv.user_id, 111)
+
+ def testParseFieldValues_Empty(self):
+ field_val_strs = {}
+ field_values = field_helpers.ParseFieldValues(
+ self.mr.cnxn, self.services.user, field_val_strs, self.config)
+ self.assertEqual([], field_values)
+
+ def testParseFieldValues_Normal(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ field_val_strs = {
+ 123: ['80386', '68040']}
+ field_values = field_helpers.ParseFieldValues(
+ self.mr.cnxn, self.services.user, field_val_strs, self.config)
+ fv1 = tracker_bizobj.MakeFieldValue(123, 80386, None, None, False)
+ fv2 = tracker_bizobj.MakeFieldValue(123, 68040, None, None, False)
+ self.assertEqual([fv1, fv2], field_values)
+
+ def testValidateOneCustomField_IntType(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = tracker_bizobj.MakeFieldValue(123, 8086, None, None, False)
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertIsNone(msg)
+
+ fd.min_value = 1
+ fd.max_value = 999
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertEqual('Value must be <= 999', msg)
+
+ fv.int_value = 0
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertEqual('Value must be >= 1', msg)
+
+ def testValidateOneCustomField_StrType(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.STR_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = tracker_bizobj.MakeFieldValue(123, None, 'i386', None, False)
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertIsNone(msg)
+
+ fd.regex = r'^\d*$'
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertEqual(r'Value must match regular expression: ^\d*$', msg)
+
+ fv.str_value = '386'
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertIsNone(msg)
+
+ def testValidateOneCustomField_UserType(self):
+ pass # TODO(jrobbins): write this test.
+
+ def testValidateOneCustomField_OtherType(self):
+ # There are currently no validation options for date-type custom fields.
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'Deadline', tracker_pb2.FieldTypes.DATE_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ fv = tracker_bizobj.MakeFieldValue(
+ 123, int(time.time()), None, None, False)
+ msg = field_helpers._ValidateOneCustomField(
+ self.mr, self.services, fd, fv)
+ self.assertIsNone(msg)
+
+ def testValidateCustomFields_NoCustomFieldValues(self):
+ field_helpers.ValidateCustomFields(
+ self.mr, self.services, [], self.config, self.errors)
+ self.assertFalse(self.errors.AnyErrors())
+
+ def testValidateCustomFields_NoErrors(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ fv1 = tracker_bizobj.MakeFieldValue(123, 8086, None, None, False)
+ fv2 = tracker_bizobj.MakeFieldValue(123, 486, None, None, False)
+
+ field_helpers.ValidateCustomFields(
+ self.mr, self.services, [fv1, fv2], self.config, self.errors)
+ self.assertFalse(self.errors.AnyErrors())
+
+ def testValidateCustomFields_SomeErrors(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ fv1 = tracker_bizobj.MakeFieldValue(123, 8086, None, None, False)
+ fv2 = tracker_bizobj.MakeFieldValue(123, 486, None, None, False)
+
+ fd.min_value = 1
+ fd.max_value = 999
+ field_helpers.ValidateCustomFields(
+ self.mr, self.services, [fv1, fv2], self.config, self.errors)
+ self.assertTrue(self.errors.AnyErrors())
+ self.assertEqual(1, len(self.errors.custom_fields))
+ custom_field_error = self.errors.custom_fields[0]
+ self.assertEqual(123, custom_field_error.field_id)
+ self.assertEqual('Value must be <= 999', custom_field_error.message)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/fieldcreate_test.py b/appengine/monorail/tracker/test/fieldcreate_test.py
new file mode 100644
index 0000000..d6f3dc0
--- /dev/null
+++ b/appengine/monorail/tracker/test/fieldcreate_test.py
@@ -0,0 +1,210 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the fieldcreate servlet."""
+
+import unittest
+
+from framework import permissions
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import fieldcreate
+from tracker import tracker_bizobj
+
+
+class FieldCreateTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ config=fake.ConfigService(),
+ project=fake.ProjectService())
+ self.servlet = fieldcreate.FieldCreate(
+ 'req', 'res', services=self.services)
+ self.project = self.services.project.TestAddProject('proj')
+ self.mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+
+ def testAssertBasePermission(self):
+ # Anon users can never do it
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ # Project owner can do it.
+ self.mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+
+ # Project member cannot do it
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ def testGatherPageData(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.PROCESS_TAB_LABELS,
+ page_data['admin_tab_mode'])
+ self.assertItemsEqual(
+ ['Defect', 'Enhancement', 'Task', 'Other'],
+ page_data['well_known_issue_types'])
+
+ def testProcessFormData(self):
+ post_data = fake.PostData(
+ name=['somefield'],
+ field_type=['INT_TYPE'],
+ min_value=['1'],
+ max_value=['99'],
+ notify_on=['any_comment'],
+ is_required=['Yes'],
+ is_multivalued=['Yes'],
+ docstring=['It is just some field'],
+ applicable_type=['Defect'],
+ admin_names=[''])
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue('/adminLabels?saved=1&' in url)
+ config = self.services.config.GetProjectConfig(
+ self.mr.cnxn, self.mr.project_id)
+
+ fd = tracker_bizobj.FindFieldDef('somefield', config)
+ self.assertEqual('somefield', fd.field_name)
+ self.assertEqual(tracker_pb2.FieldTypes.INT_TYPE, fd.field_type)
+ self.assertEqual(1, fd.min_value)
+ self.assertEqual(99, fd.max_value)
+ self.assertEqual(tracker_pb2.NotifyTriggers.ANY_COMMENT, fd.notify_on)
+ self.assertTrue(fd.is_required)
+ self.assertTrue(fd.is_multivalued)
+ self.assertEqual('It is just some field', fd.docstring)
+ self.assertEqual('Defect', fd.applicable_type)
+ self.assertEqual('', fd.applicable_predicate)
+ self.assertEqual([], fd.admin_ids)
+
+
+class CheckFieldNameJSONTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ config=fake.ConfigService(),
+ project=fake.ProjectService())
+ self.servlet = fieldcreate.CheckFieldNameJSON(
+ 'req', 'res', services=self.services)
+ self.project = self.services.project.TestAddProject('proj')
+ self.config = self.services.config.GetProjectConfig(
+ 'fake cnxn', self.project.project_id)
+ self.services.config.StoreConfig('fake cnxn', self.config)
+
+ def testHandleRequest_NewField(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ path='/p/proj/fields/checkname?field=somefield')
+ page_data = self.servlet.HandleRequest(mr)
+ self.assertItemsEqual(
+ ['error_message', 'choices'], page_data.keys())
+ self.assertIsNone(page_data['error_message'])
+ self.assertItemsEqual([], page_data['choices'])
+
+ def testHandleRequest_FieldNameAlreadyUsed(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ path='/p/proj/fields/checkname?field=CPU')
+ page_data = self.servlet.HandleRequest(mr)
+ self.assertItemsEqual(
+ ['error_message', 'choices'], page_data.keys())
+ self.assertEqual('That name is already in use.',
+ page_data['error_message'])
+ self.assertItemsEqual([], page_data['choices'])
+
+ def testHandleRequest_ReservedField(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ path='/p/proj/fields/checkname?field=summary')
+ page_data = self.servlet.HandleRequest(mr)
+ self.assertItemsEqual(
+ ['error_message', 'choices'], page_data.keys())
+ self.assertEqual('That name is reserved.', page_data['error_message'])
+ self.assertItemsEqual([], page_data['choices'])
+
+ def testHandleRequest_LabelsToField(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ path='/p/proj/fields/checkname?field=type')
+ page_data = self.servlet.HandleRequest(mr)
+ self.assertItemsEqual(
+ ['error_message', 'choices'], page_data.keys())
+ self.assertIsNone(page_data['error_message'])
+ self.assertEqual(4, len(page_data['choices']))
+
+
+class FieldCreateMethodsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ def testFieldNameErrorMessage_NoConflict(self):
+ self.assertIsNone(fieldcreate.FieldNameErrorMessage(
+ 'somefield', self.config))
+
+ def testFieldNameErrorMessage_Reserved(self):
+ self.assertEqual(
+ 'That name is reserved.',
+ fieldcreate.FieldNameErrorMessage('owner', self.config))
+
+ def testFieldNameErrorMessage_AlreadyInUse(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ self.assertEqual(
+ 'That name is already in use.',
+ fieldcreate.FieldNameErrorMessage('CPU', self.config))
+
+ def testFieldNameErrorMessage_PrefixOfExisting(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'sign-off', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ self.assertEqual(
+ 'That name is a prefix of an existing field name.',
+ fieldcreate.FieldNameErrorMessage('sign', self.config))
+
+ def testFieldNameErrorMessage_IncludesExisting(self):
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'opt', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+ self.assertEqual(
+ 'An existing field name is a prefix of that name.',
+ fieldcreate.FieldNameErrorMessage('opt-in', self.config))
+
+ def testExistingEnumChoices_NewEnum(self):
+ self.assertItemsEqual(
+ [],
+ fieldcreate.ExistingEnumChoices('Theme', self.config))
+
+ def testExistingEnumChoices_ConvertLabelsToEnum(self):
+ label_doc_list = fieldcreate.ExistingEnumChoices('Priority', self.config)
+ self.assertItemsEqual(
+ ['Critical', 'High', 'Medium', 'Low'],
+ [item.name for item in label_doc_list])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/fielddetail_test.py b/appengine/monorail/tracker/test/fielddetail_test.py
new file mode 100644
index 0000000..7f49e8e
--- /dev/null
+++ b/appengine/monorail/tracker/test/fielddetail_test.py
@@ -0,0 +1,159 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for the fielddetail servlet."""
+
+import unittest
+
+import webapp2
+
+from framework import permissions
+from proto import project_pb2
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import fielddetail
+from tracker import tracker_bizobj
+
+
+class FieldDetailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ user=fake.UserService(),
+ config=fake.ConfigService(),
+ project=fake.ProjectService())
+ self.servlet = fielddetail.FieldDetail(
+ 'req', 'res', services=self.services)
+ self.project = self.services.project.TestAddProject('proj')
+ self.mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.config = self.services.config.GetProjectConfig(
+ 'fake cnxn', self.project.project_id)
+ self.services.config.StoreConfig('fake cnxn', self.config)
+ self.fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(self.fd)
+ self.mr.field_name = 'CPU'
+
+ def testGetFieldDef_NotFound(self):
+ self.mr.field_name = 'NeverHeardOfIt'
+ self.assertRaises(
+ webapp2.HTTPException,
+ self.servlet._GetFieldDef, self.mr)
+
+ def testGetFieldDef_Normal(self):
+ actual_config, actual_fd = self.servlet._GetFieldDef(self.mr)
+ self.assertEqual(self.config, actual_config)
+ self.assertEqual(self.fd, actual_fd)
+
+ def testAssertBasePermission_AnyoneCanView(self):
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+
+ def testAssertBasePermission_MembersOnly(self):
+ self.project.access = project_pb2.ProjectAccess.MEMBERS_ONLY
+ # The project members can view the field definition.
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ self.mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(self.mr)
+ # Non-member is not allowed to view anything in the project.
+ self.mr.perms = permissions.EMPTY_PERMISSIONSET
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.AssertBasePermission, self.mr)
+
+ def testGatherPageData_ReadWrite(self):
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.PROCESS_TAB_LABELS,
+ page_data['admin_tab_mode'])
+ self.assertTrue(page_data['allow_edit'])
+ self.assertEqual('', page_data['initial_admins'])
+ field_def_view = page_data['field_def']
+ self.assertEqual('CPU', field_def_view.field_name)
+
+ def testGatherPageData_ReadOnly(self):
+ self.mr.perms = permissions.READ_ONLY_PERMISSIONSET
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.assertEqual(self.servlet.PROCESS_TAB_LABELS,
+ page_data['admin_tab_mode'])
+ self.assertFalse(page_data['allow_edit'])
+ self.assertEqual('', page_data['initial_admins'])
+ field_def_view = page_data['field_def']
+ self.assertEqual('CPU', field_def_view.field_name)
+
+ def testProcessFormData_Permission(self):
+ """Only owners can edit fields."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ mr.field_name = 'CPU'
+ post_data = fake.PostData(
+ name=['CPU'],
+ deletefield=['Submit'])
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, post_data)
+
+ self.servlet.ProcessFormData(self.mr, post_data)
+
+ def testProcessFormData_Delete(self):
+ post_data = fake.PostData(
+ name=['CPU'],
+ deletefield=['Submit'])
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue('/adminLabels?deleted=1&' in url)
+ fd = tracker_bizobj.FindFieldDef('CPU', self.config)
+ self.assertEqual('CPU', fd.field_name)
+ self.assertTrue(fd.is_deleted)
+
+ def testProcessFormData_Edit(self):
+ post_data = fake.PostData(
+ name=['CPU'],
+ field_type=['INT_TYPE'],
+ min_value=['2'],
+ max_value=['98'],
+ notify_on=['never'],
+ is_required=[],
+ is_multivalued=[],
+ docstring=['It is just some field'],
+ applicable_type=['Defect'],
+ admin_names=[''])
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue('/fields/detail?field=CPU&saved=1&' in url)
+ config = self.services.config.GetProjectConfig(
+ self.mr.cnxn, self.mr.project_id)
+
+ fd = tracker_bizobj.FindFieldDef('CPU', config)
+ self.assertEqual('CPU', fd.field_name)
+ self.assertEqual(2, fd.min_value)
+ self.assertEqual(98, fd.max_value)
+
+ def testProcessDeleteField(self):
+ self.servlet._ProcessDeleteField(self.mr, self.fd)
+ self.assertTrue(self.fd.is_deleted)
+
+ def testProcessEditField(self):
+ post_data = fake.PostData(
+ name=['CPU'], field_type=['INT_TYPE'], min_value=['2'],
+ admin_names=[''])
+ self.servlet._ProcessEditField(
+ self.mr, post_data, self.config, self.fd)
+ fd = tracker_bizobj.FindFieldDef('CPU', self.config)
+ self.assertEqual('CPU', fd.field_name)
+ self.assertEqual(2, fd.min_value)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issueadmin_test.py b/appengine/monorail/tracker/test/issueadmin_test.py
new file mode 100644
index 0000000..2a923e0
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueadmin_test.py
@@ -0,0 +1,400 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the issue admin pages."""
+
+import mox
+import unittest
+
+from framework import permissions
+from framework import urls
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueadmin
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
+class TestBase(unittest.TestCase):
+
+ def setUpServlet(self, servlet_factory):
+ # pylint: disable=attribute-defined-outside-init
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ user=fake.UserService(),
+ issue=fake.IssueService(),
+ features=fake.FeaturesService())
+ self.servlet = servlet_factory('req', 'res', services=self.services)
+ self.project = self.services.project.TestAddProject('proj', project_id=789)
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ self.services.config.StoreConfig(None, self.config)
+ self.cnxn = fake.MonorailConnection()
+ self.mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/admin', project=self.project)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def _mockGetUser(self):
+ self.mox.StubOutWithMock(self.services.user, 'GetUser')
+ user = self.services.user.TestAddUser('user@invalid', 100)
+ self.services.user.GetUser(
+ mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(user)
+
+
+class IssueAdminBaseTest(TestBase):
+
+ def setUp(self):
+ super(IssueAdminBaseTest, self).setUpServlet(issueadmin.IssueAdminBase)
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(['admin_tab_mode', 'config'], page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+
+
+class AdminStatusesTest(TestBase):
+
+ def setUp(self):
+ super(AdminStatusesTest, self).setUpServlet(issueadmin.AdminStatuses)
+
+ def testProcessSubtabForm_MissingInput(self):
+ post_data = fake.PostData()
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_STATUSES, next_url)
+ self.assertEqual([], self.config.well_known_statuses)
+ self.assertEqual([], self.config.statuses_offer_merge)
+
+ def testProcessSubtabForm_EmptyInput(self):
+ post_data = fake.PostData(
+ predefinedopen=[''], predefinedclosed=[''], statuses_offer_merge=[''])
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_STATUSES, next_url)
+ self.assertEqual([], self.config.well_known_statuses)
+ self.assertEqual([], self.config.statuses_offer_merge)
+
+ def testProcessSubtabForm_Normal(self):
+ post_data = fake.PostData(
+ predefinedopen=['New = newly reported'],
+ predefinedclosed=['Fixed\nDuplicate'],
+ statuses_offer_merge=['Duplicate'])
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_STATUSES, next_url)
+ self.assertEqual(3, len(self.config.well_known_statuses))
+ self.assertEqual('New', self.config.well_known_statuses[0].status)
+ self.assertTrue(self.config.well_known_statuses[0].means_open)
+ self.assertEqual('Fixed', self.config.well_known_statuses[1].status)
+ self.assertFalse(self.config.well_known_statuses[1].means_open)
+ self.assertEqual('Duplicate', self.config.well_known_statuses[2].status)
+ self.assertFalse(self.config.well_known_statuses[2].means_open)
+ self.assertEqual(['Duplicate'], self.config.statuses_offer_merge)
+
+
+class AdminLabelsTest(TestBase):
+
+ def setUp(self):
+ super(AdminLabelsTest, self).setUpServlet(issueadmin.AdminLabels)
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(
+ ['admin_tab_mode', 'config', 'field_defs'], page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+ self.assertEqual([], page_data['field_defs'])
+
+ def testProcessSubtabForm_MissingInput(self):
+ post_data = fake.PostData()
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_LABELS, next_url)
+ self.assertEqual([], self.config.well_known_labels)
+ self.assertEqual([], self.config.exclusive_label_prefixes)
+
+ def testProcessSubtabForm_EmptyInput(self):
+ post_data = fake.PostData(
+ predefinedlabels=[''], excl_prefixes=[''])
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_LABELS, next_url)
+ self.assertEqual([], self.config.well_known_labels)
+ self.assertEqual([], self.config.exclusive_label_prefixes)
+
+ def testProcessSubtabForm_Normal(self):
+ post_data = fake.PostData(
+ predefinedlabels=['Pri-0 = Burning issue\nPri-4 = It can wait'],
+ excl_prefixes=['pri'])
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_LABELS, next_url)
+ self.assertEqual(2, len(self.config.well_known_labels))
+ self.assertEqual('Pri-0', self.config.well_known_labels[0].label)
+ self.assertEqual('Pri-4', self.config.well_known_labels[1].label)
+ self.assertEqual(['pri'], self.config.exclusive_label_prefixes)
+
+
+class AdminTemplatesTest(TestBase):
+
+ def setUp(self):
+ super(AdminTemplatesTest, self).setUpServlet(issueadmin.AdminTemplates)
+ self.test_template = tracker_bizobj.MakeIssueTemplate(
+ 'Test Template', 'sum', 'New', 111L, 'content', [], [], [], [])
+ self.test_template.template_id = 12345
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(
+ ['admin_tab_mode', 'config', 'fields'], page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+ self.assertEqual([], page_data['fields'])
+
+ def testProcessSubtabForm_NoEditProjectPerm(self):
+ """If user lacks perms, ignore the attempt to set default templates."""
+ self.config.templates.append(self.test_template)
+ post_data = fake.PostData(
+ default_template_for_developers=['Test Template'],
+ default_template_for_users=['Test Template'])
+ self.servlet._ParseAllTemplates = lambda x, y: self.config.templates
+ self.mr.perms = permissions.EMPTY_PERMISSIONSET
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_TEMPLATES, next_url)
+ self.assertEqual(0, self.config.default_template_for_developers)
+ self.assertEqual(0, self.config.default_template_for_users)
+
+ def testProcessSubtabForm_Normal(self):
+ """If user lacks perms, ignore the attempt to set default templates."""
+ self.config.templates.append(self.test_template)
+ post_data = fake.PostData(
+ default_template_for_developers=['Test Template'],
+ default_template_for_users=['Test Template'])
+ self.servlet._ParseAllTemplates = lambda x, y: self.config.templates
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_TEMPLATES, next_url)
+ self.assertEqual(12345, self.config.default_template_for_developers)
+ self.assertEqual(12345, self.config.default_template_for_users)
+
+ def testParseTemplate(self):
+ pass # TODO(jrobbins): write this
+
+  def testParseDefaultTemplateSelections_NotSpecified(self):
+ self.config.templates.append(self.test_template)
+ post_data = fake.PostData()
+ for_devs, for_users = self.servlet._ParseDefaultTemplateSelections(
+        post_data, self.config.templates)
+ self.assertEqual(None, for_devs)
+ self.assertEqual(None, for_users)
+
+  def testParseDefaultTemplateSelections_BadValue(self):
+ self.config.templates.append(self.test_template)
+ post_data = fake.PostData(
+ default_template_for_developers=['Bad value'],
+ default_template_for_users=['Bad value'])
+ for_devs, for_users = self.servlet._ParseDefaultTemplateSelections(
+ post_data, self.config.templates)
+ self.assertEqual(None, for_devs)
+ self.assertEqual(None, for_users)
+
+  def testParseDefaultTemplateSelections_Normal(self):
+ self.config.templates.append(self.test_template)
+ post_data = fake.PostData(
+ default_template_for_developers=['Test Template'],
+ default_template_for_users=['Test Template'])
+ for_devs, for_users = self.servlet._ParseDefaultTemplateSelections(
+ post_data, self.config.templates)
+ self.assertEqual(12345, for_devs)
+ self.assertEqual(12345, for_users)
+
+
+
+class AdminComponentsTest(TestBase):
+
+ def setUp(self):
+ super(AdminComponentsTest, self).setUpServlet(issueadmin.AdminComponents)
+ self.cd_clean = tracker_bizobj.MakeComponentDef(
+ 1, self.project.project_id, 'BackEnd', 'doc', False, [], [111L], 100000,
+ 122L, 10000000, 133L)
+ self.cd_with_subcomp = tracker_bizobj.MakeComponentDef(
+ 2, self.project.project_id, 'FrontEnd', 'doc', False, [], [111L],
+ 100000, 122L, 10000000, 133L)
+ self.subcd = tracker_bizobj.MakeComponentDef(
+ 3, self.project.project_id, 'FrontEnd>Worker', 'doc', False, [], [111L],
+ 100000, 122L, 10000000, 133L)
+ self.cd_with_template = tracker_bizobj.MakeComponentDef(
+ 4, self.project.project_id, 'Middle', 'doc', False, [], [111L],
+ 100000, 122L, 10000000, 133L)
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(
+ ['admin_tab_mode', 'config', 'component_defs',
+ 'failed_perm', 'failed_subcomp', 'failed_templ'], page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+ self.assertEqual([], page_data['component_defs'])
+
+ def testProcessFormData_NoErrors(self):
+ self.config.component_defs = [
+ self.cd_clean, self.cd_with_subcomp, self.subcd, self.cd_with_template]
+ post_data = {
+ 'delete_components' : '%s,%s,%s' % (
+ self.cd_clean.path, self.cd_with_subcomp.path, self.subcd.path)}
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue(
+ url.startswith('http://127.0.0.1/p/proj/adminComponents?deleted='
+ 'FrontEnd%3EWorker%2CFrontEnd%2CBackEnd&failed_perm=&'
+ 'failed_subcomp=&failed_templ=&ts='))
+
+ def testProcessFormData_SubCompError(self):
+ self.config.component_defs = [
+ self.cd_clean, self.cd_with_subcomp, self.subcd, self.cd_with_template]
+ post_data = {
+ 'delete_components' : '%s,%s' % (
+ self.cd_clean.path, self.cd_with_subcomp.path)}
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue(
+ url.startswith('http://127.0.0.1/p/proj/adminComponents?deleted='
+ 'BackEnd&failed_perm=&failed_subcomp=FrontEnd&'
+ 'failed_templ=&ts='))
+
+ def testProcessFormData_TemplateError(self):
+ self.config.component_defs = [
+ self.cd_clean, self.cd_with_subcomp, self.subcd, self.cd_with_template]
+ self.services.config.component_ids_to_templates[4] = 'Test Template'
+ post_data = {
+ 'delete_components' : '%s,%s,%s,%s' % (
+ self.cd_clean.path, self.cd_with_subcomp.path, self.subcd.path,
+ self.cd_with_template.path)}
+ url = self.servlet.ProcessFormData(self.mr, post_data)
+ self.assertTrue(
+ url.startswith('http://127.0.0.1/p/proj/adminComponents?deleted='
+ 'FrontEnd%3EWorker%2CFrontEnd%2CBackEnd&failed_perm=&'
+ 'failed_subcomp=&failed_templ=Middle&ts='))
+
+
+class AdminViewsTest(TestBase):
+
+ def setUp(self):
+ super(AdminViewsTest, self).setUpServlet(issueadmin.AdminViews)
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(
+ ['admin_tab_mode', 'config', 'issue_notify',
+ 'new_query_indexes', 'max_queries'],
+ page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+
+ def testProcessSubtabForm(self):
+ post_data = fake.PostData(
+ default_col_spec=['id pri mstone owner status summary'],
+ default_sort_spec=['mstone pri'],
+ default_x_attr=['owner'], default_y_attr=['mstone'])
+ next_url = self.servlet.ProcessSubtabForm(post_data, self.mr)
+ self.assertEqual(urls.ADMIN_VIEWS, next_url)
+ self.assertEqual(
+ 'id pri mstone owner status summary', self.config.default_col_spec)
+ self.assertEqual('mstone pri', self.config.default_sort_spec)
+ self.assertEqual('owner', self.config.default_x_attr)
+ self.assertEqual('mstone', self.config.default_y_attr)
+
+
+class AdminViewsFunctionsTest(unittest.TestCase):
+
+ def testParseListPreferences(self):
+ # If no input, col_spec will be default column spec.
+    # For other fields empty strings should be returned.
+ col_spec, sort_spec, x_attr, y_attr = issueadmin._ParseListPreferences(
+ {})
+ self.assertEqual(tracker_constants.DEFAULT_COL_SPEC, col_spec)
+ self.assertEqual('', sort_spec)
+ self.assertEqual('', x_attr)
+ self.assertEqual('', y_attr)
+
+ # Test how hyphens in input are treated.
+ spec = 'label1-sub1 label2 label3-sub3'
+ col_spec, sort_spec, x_attr, y_attr = issueadmin._ParseListPreferences(
+ fake.PostData(default_col_spec=[spec],
+ default_sort_spec=[spec],
+ default_x_attr=[spec],
+ default_y_attr=[spec]),
+ )
+
+    # Hyphens (and anything following) should be preserved in each term.
+ self.assertEqual('label1-sub1 label2 label3-sub3', col_spec)
+
+ # The sort spec should be as given (except with whitespace condensed).
+ self.assertEqual(' '.join(spec.split()), sort_spec)
+
+ # Only the first term (up to the first hyphen) should be used for x- or
+ # y-attr.
+ self.assertEqual('label1-sub1', x_attr)
+ self.assertEqual('label1-sub1', y_attr)
+
+ # Test that multibyte strings are not mangled.
+ spec = ('\xe7\xaa\xbf\xe8\x8b\xa5-\xe7\xb9\xb9 '
+ '\xe5\x9c\xb0\xe3\x81\xa6-\xe5\xbd\x93-\xe3\x81\xbe\xe3\x81\x99')
+ spec = spec.decode('utf-8')
+ col_spec, sort_spec, x_attr, y_attr = issueadmin._ParseListPreferences(
+ fake.PostData(default_col_spec=[spec],
+ default_sort_spec=[spec],
+ default_x_attr=[spec],
+ default_y_attr=[spec]),
+ )
+ self.assertEqual(spec, col_spec)
+ self.assertEqual(' '.join(spec.split()), sort_spec)
+ self.assertEqual('\xe7\xaa\xbf\xe8\x8b\xa5-\xe7\xb9\xb9'.decode('utf-8'),
+ x_attr)
+ self.assertEqual('\xe7\xaa\xbf\xe8\x8b\xa5-\xe7\xb9\xb9'.decode('utf-8'),
+ y_attr)
+
+
+class AdminRulesTest(TestBase):
+
+ def setUp(self):
+ super(AdminRulesTest, self).setUpServlet(issueadmin.AdminRules)
+
+ def testGatherPageData(self):
+ self._mockGetUser()
+ self.mox.ReplayAll()
+ page_data = self.servlet.GatherPageData(self.mr)
+ self.mox.VerifyAll()
+
+ self.assertItemsEqual(
+ ['admin_tab_mode', 'config', 'rules', 'new_rule_indexes', 'max_rules'],
+ page_data.keys())
+ config_view = page_data['config']
+ self.assertEqual(789, config_view.project_id)
+ self.assertEqual([], page_data['rules'])
+
+ def testProcessSubtabForm(self):
+ pass # TODO(jrobbins): write this test
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issueadvsearch_test.py b/appengine/monorail/tracker/test/issueadvsearch_test.py
new file mode 100644
index 0000000..a5439a5
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueadvsearch_test.py
@@ -0,0 +1,71 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for monorail.tracker.issueadvsearch."""
+
+import unittest
+
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueadvsearch
+
+class IssueAdvSearchTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService())
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.servlet = issueadvsearch.IssueAdvancedSearch(
+ 'req', 'res', services=self.services)
+
+ def testGatherData(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/advsearch')
+ page_data = self.servlet.GatherPageData(mr)
+
+ self.assertTrue('issue_tab_mode' in page_data)
+ self.assertTrue('page_perms' in page_data)
+
+ def testProcessFormData(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/advsearch')
+ post_data = {}
+ url = self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue('can=2' in url)
+
+ post_data['can'] = 42
+ url = self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue('can=42' in url)
+
+ post_data['starcount'] = 42
+ url = self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue('starcount%3A42' in url)
+
+ def _testAND(self, operator, field, post_data, query):
+ self.servlet._AccumulateANDTerm(operator, field, post_data, query)
+ return query
+
+ def test_AccumulateANDTerm(self):
+ query = self._testAND('', 'foo', {'foo':'bar'}, [])
+ self.assertEquals(['bar'], query)
+
+ query = self._testAND('', 'bar', {'bar':'baz=zippy'}, query)
+ self.assertEquals(['bar', 'baz', 'zippy'], query)
+
+ def _testOR(self, operator, field, post_data, query):
+ self.servlet._AccumulateORTerm(operator, field, post_data, query)
+ return query
+
+ def test_AccumulateORTerm(self):
+ query = self._testOR('', 'foo', {'foo':'bar'}, [])
+ self.assertEquals(['bar'], query)
+
+ query = self._testOR('', 'bar', {'bar':'baz=zippy'}, query)
+ self.assertEquals(['bar', 'baz,zippy'], query)
+
diff --git a/appengine/monorail/tracker/test/issueattachment_test.py b/appengine/monorail/tracker/test/issueattachment_test.py
new file mode 100644
index 0000000..7fbc2802
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueattachment_test.py
@@ -0,0 +1,146 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for monorail.tracker.issueattachment."""
+
+import unittest
+
+from google.appengine.api import images
+from google.appengine.ext import testbed
+
+import mox
+import webapp2
+
+from framework import permissions
+from framework import servlet
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueattachment
+
+from third_party import cloudstorage
+
+def MockResize(_self, image_data, width=None, height=None):
+ """Mock of images.resize() used to test AttachmentPage."""
+ _image_data = image_data
+ _width = width
+ _height = height
+ return 'this is a thumbnail'
+
+class IssueattachmentTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mox = mox.Mox()
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_app_identity_stub()
+ self.testbed.init_urlfetch_stub()
+ self.attachment_data = ""
+
+ self._old_gcs_open = cloudstorage.open
+ cloudstorage.open = fake.gcs_open
+
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ self.project = services.project.TestAddProject('proj')
+ self.servlet = issueattachment.AttachmentPage(
+ 'req', webapp2.Response(), services=services)
+ self.issue = fake.MakeTestIssue(
+ self.project.project_id, 1, 'summary', 'New', 111L)
+ services.issue.TestAddIssue(self.issue)
+ self.comment = tracker_pb2.IssueComment(
+ id=123, issue_id=self.issue.issue_id,
+ project_id=self.project.project_id, user_id=111L,
+ content='this is a comment')
+ services.issue.TestAddComment(self.comment, self.issue.local_id)
+ self.attachment = tracker_pb2.Attachment(
+ attachment_id=54321, filename='hello.txt', filesize=23432,
+ mimetype='text/plain', gcs_object_id='/pid/attachments/hello.txt')
+ services.issue.TestAddAttachment(
+ self.attachment, self.comment.id, self.issue.issue_id)
+
+ images.resize = MockResize
+
+ def tearDown(self):
+ cloudstorage.open = self._old_gcs_open
+
+ def testGatherPageData_NotFound(self):
+ aid = 12345
+ # But, no such attachment is in the database.
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=%s' % aid,
+ perms=permissions.EMPTY_PERMISSIONSET)
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ # TODO(jrobbins): test cases for missing comment and missing issue.
+
+ def testGatherPageData_PermissionDenied(self):
+ aid = self.attachment.attachment_id
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=%s' % aid,
+ perms=permissions.EMPTY_PERMISSIONSET) # not even VIEW
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=%s' % aid,
+ perms=permissions.READ_ONLY_PERMISSIONSET) # includes VIEW
+
+ # issue is now deleted
+ self.issue.deleted = True
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+ self.issue.deleted = False
+
+ # issue is now restricted
+ self.issue.labels.extend(['Restrict-View-PermYouLack'])
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_Download(self):
+ aid = self.attachment.attachment_id
+ self.mox.StubOutWithMock(self.servlet, 'redirect')
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=%s' % aid,
+ perms=permissions.READ_ONLY_PERMISSIONSET) # includes VIEW
+ self.servlet.redirect(mox.StrContains(self.attachment.filename), abort=True)
+ self.mox.ReplayAll()
+ self.servlet.GatherPageData(mr)
+ self.mox.VerifyAll()
+
+ def testGatherPageData_DownloadBadFilename(self):
+ aid = self.attachment.attachment_id
+    self.attachment.filename = '<script>alert("xsrf")</script>.txt'
+ self.mox.StubOutWithMock(self.servlet, 'redirect')
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=%s' % aid,
+ perms=permissions.READ_ONLY_PERMISSIONSET) # includes VIEW
+ self.servlet.redirect(mox.And(
+ mox.Not(mox.StrContains(self.attachment.filename)),
+ mox.StrContains('attachment-%d.dat' % aid)), abort=True)
+ self.mox.ReplayAll()
+ self.servlet.GatherPageData(mr)
+ self.mox.VerifyAll()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issueattachmenttext_test.py b/appengine/monorail/tracker/test/issueattachmenttext_test.py
new file mode 100644
index 0000000..3319d13
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueattachmenttext_test.py
@@ -0,0 +1,196 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for issueattachmenttext."""
+
+import logging
+import unittest
+
+from google.appengine.ext import testbed
+
+from third_party import cloudstorage
+from third_party import ezt
+
+import webapp2
+
+from framework import filecontent
+from framework import permissions
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueattachmenttext
+
+
+class IssueAttachmentTextTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_app_identity_stub()
+
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ self.project = services.project.TestAddProject('proj')
+ self.servlet = issueattachmenttext.AttachmentText(
+ 'req', 'res', services=services)
+
+ self.issue = tracker_pb2.Issue()
+ self.issue.local_id = 1
+ self.issue.issue_id = 1
+ self.issue.summary = 'sum'
+ self.issue.project_name = 'proj'
+ self.issue.project_id = self.project.project_id
+ services.issue.TestAddIssue(self.issue)
+
+ self.comment0 = tracker_pb2.IssueComment()
+ self.comment0.content = 'this is the description'
+ self.comment1 = tracker_pb2.IssueComment()
+ self.comment1.content = 'this is a comment'
+
+ self.attach0 = tracker_pb2.Attachment(
+ attachment_id=4567, filename='b.txt', mimetype='text/plain',
+ gcs_object_id='/pid/attachments/abcd')
+ self.comment0.attachments.append(self.attach0)
+
+ self.attach1 = tracker_pb2.Attachment(
+ attachment_id=1234, filename='a.txt', mimetype='text/plain',
+ gcs_object_id='/pid/attachments/abcdefg')
+ self.comment0.attachments.append(self.attach1)
+
+ self.bin_attach = tracker_pb2.Attachment(
+ attachment_id=2468, mimetype='application/octets',
+ gcs_object_id='/pid/attachments/\0\0\0\0\0\1\2\3')
+ self.comment1.attachments.append(self.bin_attach)
+
+ self.comment0.project_id = self.project.project_id
+ services.issue.TestAddComment(self.comment0, self.issue.local_id)
+ self.comment1.project_id = self.project.project_id
+ services.issue.TestAddComment(self.comment1, self.issue.local_id)
+ services.issue.TestAddAttachment(
+ self.attach0, self.comment0.id, self.issue.issue_id)
+ services.issue.TestAddAttachment(
+ self.attach1, self.comment1.id, self.issue.issue_id)
+ # TODO(jrobbins): add tests for binary content
+ self._old_gcs_open = cloudstorage.open
+ cloudstorage.open = fake.gcs_open
+
+ def tearDown(self):
+ cloudstorage.open = self._old_gcs_open
+
+  def testGatherPageData_CommentDeleted(self):
+ """If the attachment's comment was deleted, give a 403."""
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/a/d.com/p/proj/issues/attachmentText?aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ self.servlet.GatherPageData(mr) # OK
+ self.comment1.deleted_by = 111L
+ self.assertRaises( # 403
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_IssueNotViewable(self):
+ """If the attachment's issue is not viewable, give a 403."""
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=1234',
+ perms=permissions.EMPTY_PERMISSIONSET) # No VIEW
+ self.assertRaises(
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_IssueDeleted(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ self.issue.deleted = True
+ self.assertRaises( # Issue was deleted
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_IssueRestricted(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachment?aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ self.issue.labels.append('Restrict-View-Nobody')
+ self.assertRaises( # Issue is restricted
+ permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_NoSuchAttachment(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachmentText?aid=9999',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testGatherPageData_AttachmentDeleted(self):
+ """If the attachment was deleted, give a 404."""
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachmentText?aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ self.attach1.deleted = True
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testGatherPageData_Normal(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachmentText?id=1&aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['local_id'])
+ self.assertEqual('a.txt', page_data['filename'])
+ self.assertEqual('43 bytes', page_data['filesize'])
+ self.assertEqual(ezt.boolean(False), page_data['should_prettify'])
+ self.assertEqual(ezt.boolean(False), page_data['is_binary'])
+ self.assertEqual(ezt.boolean(False), page_data['too_large'])
+
+ file_lines = page_data['file_lines']
+ self.assertEqual(1, len(file_lines))
+ self.assertEqual(1, file_lines[0].num)
+ self.assertEqual('/app_default_bucket/pid/attachments/abcdefg',
+ file_lines[0].line)
+
+ self.assertEqual(None, page_data['code_reviews'])
+
+ def testGatherPageData_HugeFile(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ path='/p/proj/issues/attachmentText?id=1&aid=1234',
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+
+ def _MockDecodeFileContents(_content):
+ return 'too large text', False, True
+
+ orig_decode = filecontent.DecodeFileContents
+ filecontent.DecodeFileContents = _MockDecodeFileContents
+ try:
+ page_data = self.servlet.GatherPageData(mr)
+ finally:
+ filecontent.DecodeFileContents = orig_decode
+
+    # Note: restore already performed by the finally block above.
+ self.assertEqual(ezt.boolean(False), page_data['should_prettify'])
+ self.assertEqual(ezt.boolean(False), page_data['is_binary'])
+ self.assertEqual(ezt.boolean(True), page_data['too_large'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issuebulkedit_test.py b/appengine/monorail/tracker/test/issuebulkedit_test.py
new file mode 100644
index 0000000..aa722de
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuebulkedit_test.py
@@ -0,0 +1,462 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.tracker.issuebulkedit."""
+
+import os
+import unittest
+import webapp2
+
+from google.appengine.api import memcache
+from google.appengine.api import taskqueue
+from google.appengine.ext import testbed
+
+from framework import monorailrequest
+from framework import permissions
+from proto import tracker_pb2
+from services import service_manager
+from services import tracker_fulltext
+from testing import fake
+from testing import testing_helpers
+from tracker import issuebulkedit
+from tracker import tracker_bizobj
+
+
+class Response(object):
+
+ def __init__(self):
+ self.status = None
+
+
+class IssueBulkEditTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ features=fake.FeaturesService(),
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ issue_star=fake.IssueStarService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ self.servlet = issuebulkedit.IssueBulkEdit(
+ 'req', 'res', services=self.services)
+ self.mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.project = self.services.project.TestAddProject(
+ name='proj', project_id=789, owner_ids=[111])
+ self.cnxn = 'fake connection'
+ self.config = self.services.config.GetProjectConfig(
+ self.cnxn, self.project.project_id)
+ self.services.config.StoreConfig(self.cnxn, self.config)
+ self.owner = self.services.user.TestAddUser('owner@example.com', 111)
+
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_taskqueue_stub()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_datastore_v3_stub()
+ self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
+ self.taskqueue_stub._root_path = os.path.dirname(
+ os.path.dirname(os.path.dirname( __file__ )))
+
+ self.mocked_methods = {}
+
+ def tearDown(self):
+ """Restore mocked objects of other modules."""
+ for obj, items in self.mocked_methods.iteritems():
+ for member, previous_value in items.iteritems():
+ setattr(obj, member, previous_value)
+
+ def testAssertBasePermission(self):
+ """Permit users with EDIT_ISSUE and ADD_ISSUE_COMMENT permissions."""
+ mr = testing_helpers.MakeMonorailRequest(
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+
+ self.servlet.AssertBasePermission(self.mr)
+
+ def testGatherPageData(self):
+ """Test GPD works in a normal no-corner-cases case."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', None,
+ [], [], [], [], 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project)
+ mr.local_id_list = [local_id_1]
+
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['num_issues'])
+
+ def testGatherPageData_NoIssues(self):
+ """Test GPD when no issues are specified in the mr."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project)
+ self.assertRaises(monorailrequest.InputException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_FilteredIssues(self):
+ """Test GPD when all specified issues get filtered out."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', None, [],
+ ['restrict-view-Googler'], [], [],
+ 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project)
+ mr.local_id_list = [local_id_1]
+
+ self.assertRaises(webapp2.HTTPException,
+ self.servlet.GatherPageData, mr)
+
+ def testGatherPageData_TypeLabels(self):
+ """Test that GPD displays a custom field for appropriate issues."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', None, [],
+ ['type-customlabels'], [], [],
+ 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project)
+ mr.local_id_list = [local_id_1]
+
+ fd = tracker_bizobj.MakeFieldDef(
+ 123, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, len(page_data['fields']))
+
+ def testProcessFormData(self):
+ """Test that PFD works in a normal no-corner-cases case."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', 111L,
+ [], [], [], [], 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1]
+
+ post_data = fake.PostData(
+ owner=['owner@example.com'], can=[1],
+ q=[''], colspec=[''], sort=[''], groupby=[''], start=[0], num=[100])
+ self._MockMethods()
+ url = self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue('list?can=1&saved=1' in url)
+
+ def testProcessFormData_NoIssues(self):
+ """Test PFD when no issues are specified."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ post_data = fake.PostData()
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+ # 400 == bad request
+ self.assertEqual(400, self.servlet.response.status)
+
+ def testProcessFormData_NoUser(self):
+ """Test PDF when the user is not logged in."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project)
+ mr.local_id_list = [99999]
+ post_data = fake.PostData()
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+ # 400 == bad request
+ self.assertEqual(400, self.servlet.response.status)
+
+ def testProcessFormData_CantComment(self):
+ """Test PFD when the user can't comment on any of the issues."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.EMPTY_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [99999]
+ post_data = fake.PostData()
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+ # 400 == bad request
+ self.assertEqual(400, self.servlet.response.status)
+
+ def testProcessFormData_CantEdit(self):
+ """Test PFD when the user can't edit any issue metadata."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [99999]
+ post_data = fake.PostData()
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+ # 400 == bad request
+ self.assertEqual(400, self.servlet.response.status)
+
+ def testProcessFormData_CantMove(self):
+ """Test PFD when the user can't move issues."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [99999]
+ post_data = fake.PostData(move_to=['proj'])
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+ # 400 == bad request
+ self.assertEqual(400, self.servlet.response.status)
+
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', 111L,
+ [], [], [], [], 111L, 'test issue')
+ mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ mr.local_id_list = [local_id_1]
+ mr.project_name = 'proj'
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertEqual(
+ 'The issues are already in project proj', mr.errors.move_to)
+
+ post_data = fake.PostData(move_to=['notexist'])
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertEqual('No such project: notexist', mr.errors.move_to)
+
+ def _MockMethods(self):
+ # Mock methods of other modules to avoid unnecessary testing
+ self.mocked_methods[tracker_fulltext] = {
+ 'IndexIssues': tracker_fulltext.IndexIssues,
+ 'UnindexIssues': tracker_fulltext.UnindexIssues}
+ def DoNothing(*_args, **_kwargs):
+ pass
+ self.servlet.PleaseCorrect = DoNothing
+ tracker_fulltext.IndexIssues = DoNothing
+ tracker_fulltext.UnindexIssues = DoNothing
+
+ def VerifyIssueUpdated(self, project_id, local_id):
+ issue = self.services.issue.GetIssueByLocalID(
+ self.cnxn, project_id, local_id)
+ issue_id = issue.issue_id
+ comments = self.services.issue.GetCommentsForIssue(self.cnxn, issue_id)
+ last_comment = comments[-1]
+ if last_comment.amendments[0].newvalue == 'Updated':
+ return True
+ else:
+ return False
+
+ def testProcessFormData_CustomFields(self):
+ """Test PFD processes edits to custom fields."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', 111L,
+ [], [], [], [], 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1]
+
+ fd = tracker_bizobj.MakeFieldDef(
+ 12345, 789, 'CPU', tracker_pb2.FieldTypes.INT_TYPE, None,
+ '', False, False, None, None, '', False, '', '',
+ tracker_pb2.NotifyTriggers.NEVER, 'doc', False)
+ self.config.field_defs.append(fd)
+
+ post_data = fake.PostData(
+ custom_12345=['111'], owner=['owner@example.com'], can=[1],
+ q=[''], colspec=[''], sort=[''], groupby=[''], start=[0], num=[100])
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue(self.VerifyIssueUpdated(789, local_id_1))
+
+ def testProcessFormData_DuplicateStatus_MergeSameIssue(self):
+ """Test PFD processes null/cleared status values."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary',
+ 'New', 111L, [], [], [], [], 111L, 'test issue')
+ merge_into_local_id_2 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary2',
+ 'New', 112L, [], [], [], [], 112L, 'test issue2')
+
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1, merge_into_local_id_2]
+ mr.project_name = 'proj'
+
+ # Add required project_name to merge_into_issue.
+ merge_into_issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, self.project.project_id, merge_into_local_id_2)
+ merge_into_issue.project_name = 'proj'
+
+ post_data = fake.PostData(status=['Duplicate'],
+ merge_into=[str(merge_into_local_id_2)], owner=['owner@example.com'],
+ can=[1], q=[''], colspec=[''], sort=[''], groupby=[''], start=[0],
+ num=[100])
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertEquals('Cannot merge issue into itself', mr.errors.merge_into_id)
+
+ def testProcessFormData_DuplicateStatus_MergeMissingIssue(self):
+ """Test PFD processes null/cleared status values."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary',
+ 'New', 111L, [], [], [], [], 111L, 'test issue')
+ local_id_2 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary2',
+ 'New', 112L, [], [], [], [], 112L, 'test issue2')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1, local_id_2]
+ mr.project_name = 'proj'
+
+ post_data = fake.PostData(status=['Duplicate'],
+ merge_into=['non existant id'], owner=['owner@example.com'],
+ can=[1], q=[''], colspec=[''], sort=[''], groupby=[''], start=[0],
+ num=[100])
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertEquals('Please enter an issue ID',
+ mr.errors.merge_into_id)
+
+ def testProcessFormData_DuplicateStatus_Success(self):
+ """Test PFD processes null/cleared status values."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary',
+ 'New', 111L, [], [], [], [], 111L, 'test issue')
+ local_id_2 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary2',
+ 'New', 111L, [], [], [], [], 111L, 'test issue2')
+ merge_into_local_id_3 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id, 'issue summary3',
+ 'New', 112L, [], [], [], [], 112L, 'test issue3')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1, local_id_2]
+ mr.project_name = 'proj'
+
+ post_data = fake.PostData(status=['Duplicate'],
+ merge_into=[str(merge_into_local_id_3)], owner=['owner@example.com'],
+ can=[1], q=[''], colspec=[''], sort=[''], groupby=[''], start=[0],
+ num=[100])
+ self._MockMethods()
+
+ # Add project_name, CCs and starrers to the merge_into_issue.
+ merge_into_issue = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, self.project.project_id, merge_into_local_id_3)
+ merge_into_issue.project_name = 'proj'
+ merge_into_issue.cc_ids = [113L, 120L]
+ self.services.issue_star.SetStar(
+ mr.cnxn, None, None, merge_into_issue.issue_id, 120L, True)
+
+ # Add project_name, CCs and starrers to the source issues.
+ # Issue 1
+ issue_1 = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, self.project.project_id, local_id_1)
+ issue_1.project_name = 'proj'
+ issue_1.cc_ids = [113L, 114L]
+ self.services.issue_star.SetStar(
+ mr.cnxn, None, None, issue_1.issue_id, 113L, True)
+ # Issue 2
+ issue_2 = self.services.issue.GetIssueByLocalID(
+ mr.cnxn, self.project.project_id, local_id_2)
+ issue_2.project_name = 'proj'
+ issue_2.cc_ids = [113L, 115L, 118L]
+ self.services.issue_star.SetStar(
+ mr.cnxn, None, None, issue_2.issue_id, 114L, True)
+ self.services.issue_star.SetStar(
+ mr.cnxn, None, None, issue_2.issue_id, 115L, True)
+
+ self.servlet.ProcessFormData(mr, post_data)
+
+ # Verify both source issues were updated.
+ self.assertTrue(
+ self.VerifyIssueUpdated(self.project.project_id, local_id_1))
+ self.assertTrue(
+ self.VerifyIssueUpdated(self.project.project_id, local_id_2))
+
+ # Verify that the merge into issue was updated with a comment.
+ comments = self.services.issue.GetCommentsForIssue(
+ self.cnxn, merge_into_issue.issue_id)
+ self.assertEquals(
+ 'Issue 1 has been merged into this issue.\n'
+ 'Issue 2 has been merged into this issue.', comments[-1].content)
+
+ # Verify CC lists and owner were merged to the merge_into issue.
+ self.assertEquals(
+ [113L, 120L, 114L, 115L, 118L, 111L], merge_into_issue.cc_ids)
+ # Verify new starrers were added to the merge_into issue.
+ self.assertEquals(4,
+ self.services.issue_star.CountItemStars(
+ self.cnxn, merge_into_issue.issue_id))
+ self.assertEquals([120L, 113L, 114L, 115L],
+ self.services.issue_star.LookupItemStarrers(
+ self.cnxn, merge_into_issue.issue_id))
+
+ def testProcessFormData_ClearStatus(self):
+ """Test PFD processes null/cleared status values."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', 111L,
+ [], [], [], [], 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1]
+
+ post_data = fake.PostData(
+ op_statusenter=['clear'], owner=['owner@example.com'], can=[1],
+ q=[''], colspec=[''], sort=[''], groupby=[''], start=[0], num=[100])
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue(self.VerifyIssueUpdated(789, local_id_1))
+
+ def testProcessFormData_InvalidOwner(self):
+ """Test PFD rejects invalid owner emails."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue summary', 'New', None,
+ [], [], [], [], 111L, 'test issue')
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.local_id_list = [local_id_1]
+ post_data = fake.PostData(
+ owner=['invalid'])
+ self.servlet.response = Response()
+ self._MockMethods()
+ self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue(mr.errors.AnyErrors())
+
+ def testProcessFormData_MoveTo(self):
+ """Test PFD processes move_to values."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, 789, 'issue to move', 'New', 111L,
+ [], [], [], [], 111L, 'test issue')
+ move_to_project = self.services.project.TestAddProject(
+ name='proj2', project_id=790, owner_ids=[111])
+
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET,
+ user_info={'user_id': 111})
+ mr.project_name = 'proj'
+ mr.local_id_list = [local_id_1]
+
+ self._MockMethods()
+ post_data = fake.PostData(
+ move_to=['proj2'], can=[1], q=[''],
+ colspec=[''], sort=[''], groupby=[''], start=[0], num=[100])
+ self.servlet.response = Response()
+ self.servlet.ProcessFormData(mr, post_data)
+
+ issue = self.services.issue.GetIssueByLocalID(
+ self.cnxn, move_to_project.project_id, local_id_1)
+ self.assertIsNotNone(issue)
diff --git a/appengine/monorail/tracker/test/issuedetail_test.py b/appengine/monorail/tracker/test/issuedetail_test.py
new file mode 100644
index 0000000..b001c7d
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuedetail_test.py
@@ -0,0 +1,565 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.tracker.issuedetail."""
+
+import logging
+import mox
+import time
+import unittest
+
+import settings
+from features import notify
+from framework import permissions
+from framework import template_helpers
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from services import issue_svc
+from testing import fake
+from testing import testing_helpers
+from tracker import issuedetail
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
+class IssueDetailTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue_star=fake.IssueStarService(),
+ spam=fake.SpamService())
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.config = tracker_pb2.ProjectIssueConfig()
+ self.config.statuses_offer_merge.append('Duplicate')
+ self.services.config.StoreConfig(self.cnxn, self.config)
+
+ def testChooseNextPage(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&q=term')
+ mr.col_spec = ''
+ config = tracker_pb2.ProjectIssueConfig()
+ issue = fake.MakeTestIssue(987, 123, 'summary', 'New', 111L)
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, None,
+ user_pb2.IssueUpdateNav.UP_TO_LIST, '124')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?cursor=proj%3A123&q=term'))
+ self.assertTrue(url.endswith('&updated=123'))
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, None,
+ user_pb2.IssueUpdateNav.STAY_SAME_ISSUE, '124')
+ self.assertEqual('http://127.0.0.1/p/proj/issues/detail?id=123&q=term',
+ url)
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, None,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '124')
+ self.assertEqual('http://127.0.0.1/p/proj/issues/detail?id=124&q=term',
+ url)
+
+ # If this is the last in the list, the next_id from the form will be ''.
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, None,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?cursor=proj%3A123&q=term'))
+ self.assertTrue(url.endswith('&updated=123'))
+
+ def testChooseNextPage_ForMoveRequest(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&q=term')
+ mr.col_spec = ''
+ config = tracker_pb2.ProjectIssueConfig()
+ issue = fake.MakeTestIssue(987, 123, 'summary', 'New', 111L)
+ moved_to_project_name = 'projB'
+ moved_to_project_local_id = 543
+ moved_to_project_name_and_local_id = (moved_to_project_name,
+ moved_to_project_local_id)
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, moved_to_project_name_and_local_id, None,
+ user_pb2.IssueUpdateNav.UP_TO_LIST, '124')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?cursor=proj%3A123&moved_to_id=' +
+ str(moved_to_project_local_id) + '&moved_to_project=' +
+ moved_to_project_name + '&q=term'))
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, moved_to_project_name_and_local_id, None,
+ user_pb2.IssueUpdateNav.STAY_SAME_ISSUE, '124')
+ self.assertEqual(
+ 'http://127.0.0.1/p/%s/issues/detail?id=123&q=term' % (
+ moved_to_project_name),
+ url)
+ mr.project_name = 'proj' # reset project name back.
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, moved_to_project_name_and_local_id, None,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '124')
+ self.assertEqual('http://127.0.0.1/p/proj/issues/detail?id=124&q=term',
+ url)
+
+ # If this is the last in the list, the next_id from the form will be ''.
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, moved_to_project_name_and_local_id, None,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?cursor=proj%3A123&moved_to_id=' +
+ str(moved_to_project_local_id) + '&moved_to_project=' +
+ moved_to_project_name + '&q=term'))
+
+ def testChooseNextPage_ForCopyRequest(self):
+ mr = testing_helpers.MakeMonorailRequest(
+ path='/p/proj/issues/detail?id=123&q=term')
+ mr.col_spec = ''
+ config = tracker_pb2.ProjectIssueConfig()
+ issue = fake.MakeTestIssue(987, 123, 'summary', 'New', 111L)
+ copied_to_project_name = 'projB'
+ copied_to_project_local_id = 543
+ copied_to_project_name_and_local_id = (copied_to_project_name,
+ copied_to_project_local_id)
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, copied_to_project_name_and_local_id,
+ user_pb2.IssueUpdateNav.UP_TO_LIST, '124')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?copied_from_id=123'
+ '&copied_to_id=' + str(copied_to_project_local_id) +
+ '&copied_to_project=' + copied_to_project_name +
+ '&cursor=proj%3A123&q=term'))
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, copied_to_project_name_and_local_id,
+ user_pb2.IssueUpdateNav.STAY_SAME_ISSUE, '124')
+ self.assertEqual('http://127.0.0.1/p/proj/issues/detail?id=123&q=term', url)
+ mr.project_name = 'proj' # reset project name back.
+
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, copied_to_project_name_and_local_id,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '124')
+ self.assertEqual('http://127.0.0.1/p/proj/issues/detail?id=124&q=term',
+ url)
+
+ # If this is the last in the list, the next_id from the form will be ''.
+ url = issuedetail._ChooseNextPage(
+ mr, issue.local_id, config, None, copied_to_project_name_and_local_id,
+ user_pb2.IssueUpdateNav.NEXT_IN_LIST, '')
+ self.assertTrue(url.startswith(
+ 'http://127.0.0.1/p/proj/issues/list?copied_from_id=123'
+ '&copied_to_id=' + str(copied_to_project_local_id) +
+ '&copied_to_project=' + copied_to_project_name +
+ '&cursor=proj%3A123&q=term'))
+
+ def testGatherHelpData(self):
+ servlet = issuedetail.IssueDetail('req', 'res', services=self.services)
+ mr = testing_helpers.MakeMonorailRequest()
+
+ # User did not jump to an issue, no query at all.
+ help_data = servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue'])
+
+ # User did not jump to an issue, query was not a local ID number.
+ mr.query = 'memory leak'
+ help_data = servlet.GatherHelpData(mr, {})
+ self.assertEqual(None, help_data['cue'])
+
+ # User jumped directly to an issue, maybe they meant to search instead.
+ mr.query = '123'
+ help_data = servlet.GatherHelpData(mr, {})
+ self.assertEqual('search_for_numbers', help_data['cue'])
+ self.assertEqual(123, help_data['jump_local_id'])
+
+
+class IssueDetailFunctionsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.project_name = 'proj'
+ self.project_id = 987
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ issue_star=fake.IssueStarService(),
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ self.project = self.services.project.TestAddProject(
+ 'proj', project_id=987, committer_ids=[111L])
+ self.servlet = issuedetail.IssueDetail(
+ 'req', 'res', services=self.services)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def VerifyShouldShowFlipper(
+ self, expected, query, sort_spec, can, create_issues=0):
+ """Instantiate a _Flipper and check if makes a pipeline or not."""
+ services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ project=fake.ProjectService(),
+ user=fake.UserService())
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ mr.query = query
+ mr.sort_spec = sort_spec
+ mr.can = can
+ mr.project_name = self.project.project_name
+ mr.project = self.project
+
+ for idx in range(create_issues):
+ _local_id = services.issue.CreateIssue(
+ self.cnxn, services, self.project.project_id,
+ 'summary_%d' % idx, 'status', 111L, [], [], [], [], 111L,
+ 'description_%d' % idx)
+
+ self.assertEqual(
+ expected,
+ issuedetail._ShouldShowFlipper(mr, services))
+
+ def testShouldShowFlipper_RegularSizedProject(self):
+ # If the user is looking for a specific issue, no flipper.
+ self.VerifyShouldShowFlipper(
+ False, '123', '', tracker_constants.OPEN_ISSUES_CAN)
+ self.VerifyShouldShowFlipper(False, '123', '', 5)
+ self.VerifyShouldShowFlipper(
+ False, '123', 'priority', tracker_constants.OPEN_ISSUES_CAN)
+
+ # If the user did a search or sort or all in a small can, show flipper.
+ self.VerifyShouldShowFlipper(
+ True, 'memory leak', '', tracker_constants.OPEN_ISSUES_CAN)
+ self.VerifyShouldShowFlipper(
+ True, 'id=1,2,3', '', tracker_constants.OPEN_ISSUES_CAN)
+ # Any can other than 1 or 2 is doing a query and so it should have a
+ # failry narrow result set size. 5 is issues starred by me.
+ self.VerifyShouldShowFlipper(True, '', '', 5)
+ self.VerifyShouldShowFlipper(
+ True, '', 'status', tracker_constants.OPEN_ISSUES_CAN)
+
+ # In a project without a huge number of issues, still show the flipper even
+ # if there was no specific query.
+ self.VerifyShouldShowFlipper(
+ True, '', '', tracker_constants.OPEN_ISSUES_CAN)
+
+ def testShouldShowFlipper_LargeSizedProject(self):
+ settings.threshold_to_suppress_prev_next = 1
+
+ # In a project that has tons of issues, save time by not showing the
+ # flipper unless there was a specific query, sort, or can.
+ self.VerifyShouldShowFlipper(
+ False, '', '', tracker_constants.ALL_ISSUES_CAN, create_issues=3)
+ self.VerifyShouldShowFlipper(
+ False, '', '', tracker_constants.OPEN_ISSUES_CAN, create_issues=3)
+
+ def testFieldEditPermitted_NoEdit(self):
+ page_perms = testing_helpers.Blank(
+ EditIssueSummary=False, EditIssueStatus=False, EditIssueOwner=False,
+ EditIssueCc=False) # no perms are needed.
+ self.assertTrue(issuedetail._FieldEditPermitted(
+ [], '', '', '', '', 0, [], page_perms))
+
+ def testFieldEditPermitted_AllNeededPerms(self):
+ page_perms = testing_helpers.Blank(
+ EditIssueSummary=True, EditIssueStatus=True, EditIssueOwner=True,
+ EditIssueCc=True)
+ self.assertTrue(issuedetail._FieldEditPermitted(
+ [], '', '', 'new sum', 'new status', 111L, [222L], page_perms))
+
+ def testFieldEditPermitted_MissingPerms(self):
+ page_perms = testing_helpers.Blank(
+ EditIssueSummary=False, EditIssueStatus=False, EditIssueOwner=False,
+ EditIssueCc=False) # no perms.
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], '', '', 'new sum', '', 0, [], page_perms))
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], '', '', '', 'new status', 0, [], page_perms))
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], '', '', '', '', 111L, [], page_perms))
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], '', '', '', '', 0, [222L], page_perms))
+
+ def testFieldEditPermitted_NeededPermsNotOffered(self):
+ """Even if user has all the field-level perms, they still can't do this."""
+ page_perms = testing_helpers.Blank(
+ EditIssueSummary=True, EditIssueStatus=True, EditIssueOwner=True,
+ EditIssueCc=True)
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ ['NewLabel'], '', '', '', '', 0, [], page_perms))
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], 'new blocked on', '', '', '', 0, [], page_perms))
+ self.assertFalse(issuedetail._FieldEditPermitted(
+ [], '', 'new blocking', '', '', 0, [], page_perms))
+
+ def testValidateOwner_ChangedToValidOwner(self):
+ post_data_owner = 'superman@krypton.com'
+ parsed_owner_id = 111
+ original_issue_owner_id = 111
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+
+ self.mox.StubOutWithMock(tracker_helpers, 'IsValidIssueOwner')
+ tracker_helpers.IsValidIssueOwner(
+ mr.cnxn, mr.project, parsed_owner_id, self.services).AndReturn(
+ (True, ''))
+ self.mox.ReplayAll()
+
+ ret = self.servlet._ValidateOwner(
+ mr, post_data_owner, parsed_owner_id, original_issue_owner_id)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testValidateOwner_UnchangedInvalidOwner(self):
+ post_data_owner = 'superman@krypton.com'
+ parsed_owner_id = 111
+ original_issue_owner_id = 111
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ self.services.user.TestAddUser(post_data_owner, original_issue_owner_id)
+
+ self.mox.StubOutWithMock(tracker_helpers, 'IsValidIssueOwner')
+ tracker_helpers.IsValidIssueOwner(
+ mr.cnxn, mr.project, parsed_owner_id, self.services).AndReturn(
+ (False, 'invalid owner'))
+ self.mox.ReplayAll()
+
+ ret = self.servlet._ValidateOwner(
+ mr, post_data_owner, parsed_owner_id, original_issue_owner_id)
+ self.mox.VerifyAll()
+ self.assertIsNone(ret)
+
+ def testValidateOwner_ChangedFromValidToInvalidOwner(self):
+ post_data_owner = 'lexluthor'
+ parsed_owner_id = 111
+ original_issue_owner_id = 111
+ original_issue_owner = 'superman@krypton.com'
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ self.services.user.TestAddUser(original_issue_owner,
+ original_issue_owner_id)
+
+ self.mox.StubOutWithMock(tracker_helpers, 'IsValidIssueOwner')
+ tracker_helpers.IsValidIssueOwner(
+ mr.cnxn, mr.project, parsed_owner_id, self.services).AndReturn(
+ (False, 'invalid owner'))
+ self.mox.ReplayAll()
+
+ ret = self.servlet._ValidateOwner(
+ mr, post_data_owner, parsed_owner_id, original_issue_owner_id)
+ self.mox.VerifyAll()
+ self.assertEquals('invalid owner', ret)
+
+ def testProcessFormData_NoPermission(self):
+ """Anonymous users and users without ADD_ISSUE_COMMENT cannot comment."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_INACTIVE_PERMISSIONSET)
+ mr.auth.user_id = 0
+ mr.local_id = local_id_1
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, {})
+ mr.auth.user_id = 111L
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, {})
+
+ def testProcessFormData_NonMembersCantEdit(self):
+ """Non-members can comment, but never affect issue fields."""
+ orig_prepsend = notify.PrepareAndSendIssueChangeNotification
+ notify.PrepareAndSendIssueChangeNotification = lambda *args, **kwargs: None
+
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+ local_id_2 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_2', 'status', 111L, [], [], [], [], 111L, 'description_2')
+
+ _amendments, _cmnt_pb = self.services.issue.ApplyIssueComment(
+ self.cnxn, self.services, 111L,
+ self.project.project_id, local_id_2, 'summary', 'Duplicate', 111L,
+ [], [], [], [], [], [], [], [], local_id_1,
+ comment='closing as a dup of 1')
+
+ non_member_user_id = 999L
+ post_data = fake.PostData({
+ 'merge_into': [''], # non-member tries to remove merged_into
+ 'comment': ['thanks!'],
+ 'can': ['1'],
+ 'q': ['foo'],
+ 'colspec': ['bar'],
+ 'sort': 'baz',
+ 'groupby': 'qux',
+ 'start': ['0'],
+ 'num': ['100'],
+ 'pagegen': [str(int(time.time()) + 1)],
+ })
+
+ _, mr = testing_helpers.GetRequestObjects(
+ user_info={'user_id': non_member_user_id},
+ path='/p/proj/issues/detail.do?id=%d' % local_id_2,
+ project=self.project, method='POST',
+ perms=permissions.USER_PERMISSIONSET)
+ mr.project_name = self.project.project_name
+ mr.project = self.project
+
+ # The form should be processed and redirect back to viewing the issue.
+ redirect_url = self.servlet.ProcessFormData(mr, post_data)
+ self.assertTrue(redirect_url.startswith(
+ 'http://127.0.0.1/p/proj/issues/detail?id=%d' % local_id_2))
+
+ # BUT, issue should not have been edited because user lacked permission.
+ updated_issue_2 = self.services.issue.GetIssueByLocalID(
+ self.cnxn, self.project.project_id, local_id_2)
+ self.assertEqual(local_id_1, updated_issue_2.merged_into)
+
+ notify.PrepareAndSendIssueChangeNotification = orig_prepsend
+
+ def testProcessFormData_DuplicateAddsACommentToTarget(self):
+ """Marking issue 2 as dup of 1 adds a comment to 1."""
+ orig_prepsend = notify.PrepareAndSendIssueChangeNotification
+ notify.PrepareAndSendIssueChangeNotification = lambda *args, **kwargs: None
+ orig_get_starrers = tracker_helpers.GetNewIssueStarrers
+ tracker_helpers.GetNewIssueStarrers = lambda *args, **kwargs: []
+
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'New', 111L, [], [], [], [], 111L, 'description_1')
+ issue_1 = self.services.issue.GetIssueByLocalID(
+ self.cnxn, self.project.project_id, local_id_1)
+ issue_1.project_name = 'proj'
+ local_id_2 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_2', 'New', 111L, [], [], [], [], 111L, 'description_2')
+ issue_2 = self.services.issue.GetIssueByLocalID(
+ self.cnxn, self.project.project_id, local_id_2)
+ issue_2.project_name = 'proj'
+
+ post_data = fake.PostData({
+ 'status': ['Duplicate'],
+ 'merge_into': [str(local_id_1)],
+ 'comment': ['marking as dup'],
+ 'can': ['1'],
+ 'q': ['foo'],
+ 'colspec': ['bar'],
+ 'sort': 'baz',
+ 'groupby': 'qux',
+ 'start': ['0'],
+ 'num': ['100'],
+ 'pagegen': [str(int(time.time()) + 1)],
+ })
+
+ member_user_id = 111L
+ _, mr = testing_helpers.GetRequestObjects(
+ user_info={'user_id': member_user_id},
+ path='/p/proj/issues/detail.do?id=%d' % local_id_2,
+ project=self.project, method='POST',
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ mr.project_name = self.project.project_name
+ mr.project = self.project
+
+ # The form should be processed and redirect back to viewing the issue.
+ self.servlet.ProcessFormData(mr, post_data)
+
+ self.assertEqual('Duplicate', issue_2.status)
+ self.assertEqual(issue_1.issue_id, issue_2.merged_into)
+ comments_1 = self.services.issue.GetCommentsForIssue(
+ self.cnxn, issue_1.issue_id)
+ self.assertEqual(2, len(comments_1))
+ self.assertEqual(
+ 'Issue 2 has been merged into this issue.',
+ comments_1[1].content)
+
+ # Making another comment on issue 2 does not affect issue 1.
+ self.servlet.ProcessFormData(mr, post_data)
+ comments_1 = self.services.issue.GetCommentsForIssue(
+ self.cnxn, issue_1.issue_id)
+ self.assertEqual(2, len(comments_1))
+
+ notify.PrepareAndSendIssueChangeNotification = orig_prepsend
+ tracker_helpers.GetNewIssueStarrers = orig_get_starrers
+
+ # TODO(jrobbins): add more unit tests for other aspects of ProcessForm.
+
+
+class SetStarFormTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue_star=fake.IssueStarService())
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.servlet = issuedetail.SetStarForm(
+ 'req', 'res', services=self.services)
+
+ def testAssertBasePermission(self):
+ """Only users with SET_STAR could set star."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ mr.local_id = local_id_1
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ self.servlet.AssertBasePermission(mr)
+
+
+class IssueCommentDeletionTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue_star=fake.IssueStarService())
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.servlet = issuedetail.IssueCommentDeletion(
+ 'req', 'res', services=self.services)
+
+ def testProcessFormData_Permission(self):
+ """Permit users who can delete."""
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth.user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'sequence_num': 0,
+ 'mode': 0}
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.ProcessFormData, mr, post_data)
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth.user_id = 222L
+ self.servlet.ProcessFormData(mr, post_data)
+
+
if __name__ == '__main__':  # Allow running this test module directly.
  unittest.main()
diff --git a/appengine/monorail/tracker/test/issueentry_test.py b/appengine/monorail/tracker/test/issueentry_test.py
new file mode 100644
index 0000000..b2a7536
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueentry_test.py
@@ -0,0 +1,145 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the issueentry servlet."""
+
+import mox
+import unittest
+
+from framework import framework_views
+from framework import permissions
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueentry
+
+from google.appengine.ext import testbed
+
class IssueEntryTest(unittest.TestCase):
  """Tests for the IssueEntry servlet."""

  def setUp(self):
    # The testbed provides GAE API stubs (taskqueue, memcache, datastore)
    # so servlet code that touches them can run under unit test.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_taskqueue_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_datastore_v3_stub()
    self.services = service_manager.Services(
        config=fake.ConfigService(),
        issue=fake.IssueService(),
        user=fake.UserService(),
        project=fake.ProjectService())
    self.project = self.services.project.TestAddProject('proj', project_id=987)
    self.servlet = issueentry.IssueEntry(
        'req', 'res', services=self.services)
    self.mox = mox.Mox()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testAssertBasePermission(self):
    """Permit users with CREATE_ISSUE."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry', services=self.services,
        perms=permissions.EMPTY_PERMISSIONSET)
    self.assertRaises(permissions.PermissionException,
                      self.servlet.AssertBasePermission, mr)
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry', services=self.services,
        perms=permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
    self.servlet.AssertBasePermission(mr)

  def testDiscardUnusedTemplateLabelPrefixes(self):
    """Labels with unused 'prefix-?' placeholders are dropped."""
    labels = ['pre-val', 'other-value', 'oneword', 'x', '-y', '-w-z', '', '-']
    self.assertEqual(labels,
                     issueentry._DiscardUnusedTemplateLabelPrefixes(labels))

    labels = ['prefix-value', 'other-?', 'third-', '', '-', '-?']
    self.assertEqual(['prefix-value', 'third-', '', '-'],
                     issueentry._DiscardUnusedTemplateLabelPrefixes(labels))

  def testGatherPageData(self):
    """The default template requires the summary to be edited."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry', services=self.services)
    mr.auth.user_view = framework_views.UserView(100, 'user@invalid', True)
    user = self.services.user.TestAddUser('user@invalid', 100)

    self.mox.StubOutWithMock(self.services.user, 'GetUser')
    self.services.user.GetUser(
        mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(user)
    self.mox.ReplayAll()

    page_data = self.servlet.GatherPageData(mr)
    self.mox.VerifyAll()
    self.assertEqual(page_data['initial_owner'], 'user@invalid')
    self.assertEqual(page_data['initial_status'], 'New')
    self.assertTrue(page_data['clear_summary_on_click'])
    self.assertTrue(page_data['must_edit_summary'])

  def testGatherPageData_TemplateAllowsKeepingSummary(self):
    """A template may allow the prefilled summary to be kept as-is."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry', services=self.services)
    mr.auth.user_view = framework_views.UserView(100, 'user@invalid', True)
    user = self.services.user.TestAddUser('user@invalid', 100)

    self.mox.StubOutWithMock(self.services.user, 'GetUser')
    self.services.user.GetUser(
        mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(user)
    self.mox.ReplayAll()
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)
    self.services.config.StoreConfig(mr.cnxn, config)
    config.templates[1].summary_must_be_edited = False

    page_data = self.servlet.GatherPageData(mr)
    self.mox.VerifyAll()
    self.assertEqual(page_data['initial_owner'], 'user@invalid')
    self.assertEqual(page_data['initial_status'], 'New')
    self.assertFalse(page_data['clear_summary_on_click'])
    self.assertFalse(page_data['must_edit_summary'])

  def testGatherPageData_DeepLinkSetsSummary(self):
    """A summary passed in the URL prefills the form without clearing."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry?summary=foo', services=self.services)
    mr.auth.user_view = framework_views.UserView(100, 'user@invalid', True)
    user = self.services.user.TestAddUser('user@invalid', 100)

    self.mox.StubOutWithMock(self.services.user, 'GetUser')
    self.services.user.GetUser(
        mox.IgnoreArg(), mox.IgnoreArg()).MultipleTimes().AndReturn(user)
    self.mox.ReplayAll()

    page_data = self.servlet.GatherPageData(mr)
    self.mox.VerifyAll()
    self.assertEqual(page_data['initial_owner'], 'user@invalid')
    self.assertEqual(page_data['initial_status'], 'New')
    self.assertFalse(page_data['clear_summary_on_click'])
    self.assertTrue(page_data['must_edit_summary'])

  def testProcessFormData(self):
    """Submitting the form redirects to the new issue's detail page."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry')
    mr.auth.user_view = framework_views.UserView(100, 'user@invalid', True)
    mr.perms = []
    post_data = fake.PostData(
        summary=['fake summary'],
        comment=['fake comment'],
        status=['New'])
    url = self.servlet.ProcessFormData(mr, post_data)
    self.assertTrue('/p/proj/issues/detail?id=' in url)

  def test_SelectTemplate(self):
    """The default template depends on whether the user is a member."""
    mr = testing_helpers.MakeMonorailRequest(
        path='/p/proj/issues/entry')
    config = self.services.config.GetProjectConfig(mr.cnxn, mr.project_id)

    # assertEquals is a deprecated alias; use assertEqual.
    templ = issueentry._SelectTemplate(None, config, False)
    self.assertEqual('Defect report from user', templ.name)

    templ = issueentry._SelectTemplate(None, config, True)
    self.assertEqual('Defect report from developer', templ.name)
+
+
if __name__ == '__main__':  # Allow running this test module directly.
  unittest.main()
diff --git a/appengine/monorail/tracker/test/issueexport_test.py b/appengine/monorail/tracker/test/issueexport_test.py
new file mode 100644
index 0000000..cf5cd94
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueexport_test.py
@@ -0,0 +1,29 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the issueexport servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from testing import testing_helpers
+from tracker import issueexport
+
+
class IssueExportTest(unittest.TestCase):
  """Tests for the IssueExport servlet."""

  def setUp(self):
    self.services = service_manager.Services()
    self.servlet = issueexport.IssueExport(
        'req', 'res', services=self.services)

  def testAssertBasePermission(self):
    """Only site admins can export issues."""
    mr = testing_helpers.MakeMonorailRequest(
        perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
    self.assertRaises(permissions.PermissionException,
                      self.servlet.AssertBasePermission, mr)
    mr.auth.user_pb.is_site_admin = True
    self.servlet.AssertBasePermission(mr)
\ No newline at end of file
diff --git a/appengine/monorail/tracker/test/issueimport_test.py b/appengine/monorail/tracker/test/issueimport_test.py
new file mode 100644
index 0000000..656e76d
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueimport_test.py
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the issueimport servlet."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from testing import testing_helpers
+from tracker import issueimport
+
+
# NOTE: this class lives in issueimport_test.py and exercises IssueImport;
# the original name "IssueExportTest" was a copy-paste error from the
# sibling export test and would collide in combined test listings.
class IssueImportTest(unittest.TestCase):
  """Tests for the IssueImport servlet."""

  def setUp(self):
    self.services = service_manager.Services()
    self.servlet = issueimport.IssueImport(
        'req', 'res', services=self.services)

  def testAssertBasePermission(self):
    """Only site admins can import issues."""
    mr = testing_helpers.MakeMonorailRequest(
        perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
    self.assertRaises(permissions.PermissionException,
                      self.servlet.AssertBasePermission, mr)
    mr.auth.user_pb.is_site_admin = True
    self.servlet.AssertBasePermission(mr)
\ No newline at end of file
diff --git a/appengine/monorail/tracker/test/issuelist_test.py b/appengine/monorail/tracker/test/issuelist_test.py
new file mode 100644
index 0000000..07770c3
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuelist_test.py
@@ -0,0 +1,338 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for issuelist module."""
+
+import unittest
+
+import settings
+from framework import permissions
+from framework import table_view_helpers
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issuelist
+from tracker import tablecell
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
class DisplayNameMock(object):
  """Minimal stand-in for any object that exposes a display_name attribute."""

  def __init__(self, name):
    self.display_name = name
+
+
class IssueListUnitTest(unittest.TestCase):
  """Tests for the IssueList servlet's help-cue logic."""

  def _MakeServletAndRequest(self):
    # Helper: build a servlet with empty services and a bare request.
    servlet = issuelist.IssueList(
        'req', 'res', services=service_manager.Services())
    return servlet, testing_helpers.MakeMonorailRequest()

  def testGatherPageData(self):
    # TODO(jrobbins): write tests for this method.
    pass

  def testGetGridViewData(self):
    # TODO(jrobbins): write tests for this method.
    pass

  def testGetTableViewData(self):
    # TODO(jrobbins): write tests for this method.
    pass

  def testGatherHelpData_GridSwitchesToIDs(self):
    """The tiles-to-IDs cue appears only for oversized grids of tiles."""
    servlet, mr = self._MakeServletAndRequest()
    cue_name = 'showing_ids_instead_of_tiles'
    small_results = {'results': [1, 2, 3]}

    # Issue list mode (the default): no cue.
    self.assertNotEqual(
        cue_name, servlet.GatherHelpData(mr, small_results)['cue'])

    mr.mode = 'grid'
    # Grid mode, but already showing IDs (the default): no cue.
    self.assertNotEqual(
        cue_name, servlet.GatherHelpData(mr, small_results)['cue'])

    mr.cells = 'counts'
    # Showing counts: no cue.
    self.assertNotEqual(
        cue_name, servlet.GatherHelpData(mr, small_results)['cue'])

    mr.cells = 'tiles'
    # Few enough results (<= 1000) to show actual tiles: no cue.
    self.assertNotEqual(
        cue_name, servlet.GatherHelpData(mr, small_results)['cue'])

    # More results than the grid can show as tiles: cue appears.
    huge_results = {'results': [1] * (settings.max_tiles_in_grid + 1)}
    self.assertEqual(
        cue_name, servlet.GatherHelpData(mr, huge_results)['cue'])

  def testGatherHelpData_KeystrokeHelp(self):
    """Owners and members see the keyboard-shortcuts cue; others don't."""
    servlet, mr = self._MakeServletAndRequest()
    empty_table = {'table_data': []}

    # Owners and members see a cue to try "?" to see keyboard shortcuts.
    for member_perms in (permissions.OWNER_ACTIVE_PERMISSIONSET,
                         permissions.COMMITTER_ACTIVE_PERMISSIONSET):
      mr.perms = member_perms
      self.assertEqual(
          'dit_keystrokes', servlet.GatherHelpData(mr, empty_table)['cue'])

    # Non-members do not see the cue.
    mr.perms = permissions.USER_PERMISSIONSET
    self.assertIsNone(servlet.GatherHelpData(mr, empty_table)['cue'])

  def testGatherHelpData_ItalicsMeanDerived(self):
    """Members see the italics cue iff derived values appear in the list."""
    servlet, mr = self._MakeServletAndRequest()
    plain_page = {'table_data': []}

    derived_cell = table_view_helpers.TableCell(
        table_view_helpers.CELL_TYPE_ATTR, [1, 2, 3],
        derived_values=[4, 5, 6])
    derived_page = {
        'table_data': [table_view_helpers.TableRow([derived_cell], True)]
        }

    # Owners and members see a cue about italics, iff there are any
    # derived values shown in the list.
    for member_perms in (permissions.OWNER_ACTIVE_PERMISSIONSET,
                         permissions.COMMITTER_ACTIVE_PERMISSIONSET):
      mr.perms = member_perms
      self.assertEqual(
          'italics_mean_derived',
          servlet.GatherHelpData(mr, derived_page)['cue'])
      self.assertNotEqual(
          'italics_mean_derived',
          servlet.GatherHelpData(mr, plain_page)['cue'])

    # Non-members do not see the cue.
    mr.perms = permissions.USER_PERMISSIONSET
    self.assertNotEqual(
        'italics_mean_derived',
        servlet.GatherHelpData(mr, derived_page)['cue'])
+
+
# Maps a lowercased column name to the TableCell factory used to render it
# in the _MakeTableData tests below.
CELL_FACTORIES = {
    'id': tablecell.TableCellID,
    'summary': table_view_helpers.TableCellSummary,
    'status': tablecell.TableCellStatus,
    'owner': tablecell.TableCellOwner,
    }
+
+
+class IssueListFunctionsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+
+ def testAnyDerivedValues(self):
+ cell1 = table_view_helpers.TableCell(
+ table_view_helpers.CELL_TYPE_SUMMARY, ['this is a summary'])
+ cell2 = table_view_helpers.TableCell(
+ table_view_helpers.CELL_TYPE_ATTR, ['value'],
+ derived_values=['derived'])
+
+ table_data = [
+ table_view_helpers.TableRow([cell1], False),
+ table_view_helpers.TableRow([], False)]
+ self.assertFalse(issuelist._AnyDerivedValues(table_data))
+
+ table_data = [
+ table_view_helpers.TableRow([cell1, cell2], False),
+ table_view_helpers.TableRow([], False)]
+ self.assertTrue(issuelist._AnyDerivedValues(table_data))
+
+ def testMakeTableData_Normal(self):
+ issue = fake.MakeTestIssue(
+ 789, 123, 'summary', 'New', 0,
+ labels=['Type-Defect', 'Priority-Medium'])
+ issue.project_name = 'proj'
+ visible_results = [issue]
+
+ # Standard columns
+ lower_columns = _GetColumns()
+ table_data = issuelist._MakeTableData(
+ visible_results, None, [], lower_columns, [], {}, CELL_FACTORIES, {},
+ self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(len(lower_columns), len(row.cells))
+ self.assertEqual([], row.group.cells)
+
+ # Also test row info that we pass to JS code.
+ self.assertEqual(123, row.local_id)
+ self.assertEqual('proj', row.project_name)
+ self.assertEqual('proj:123', row.issue_ref)
+ self.assertEqual('/p/proj/issues/detail?id=123', row.issue_url)
+
+ # 2 columns -> 2 cells with 1 value in each cell.
+ lower_columns = ['type', 'priority']
+ table_data = issuelist._MakeTableData(
+ visible_results, None, [], lower_columns, [], {}, CELL_FACTORIES, {},
+ self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(len(lower_columns), len(row.cells))
+ self.assertEqual(0, row.cells[0].col_index)
+ self.assertEqual(1, len(row.cells[0].values))
+ self.assertEqual('Defect', row.cells[0].values[0].item)
+ self.assertEqual(1, row.cells[1].col_index)
+ self.assertEqual(1, len(row.cells[1].values))
+ self.assertEqual('Medium', row.cells[1].values[0].item)
+ self.assertEqual([], row.group.cells)
+
+ def testMakeTableData_Combined(self):
+ issue = fake.MakeTestIssue(
+ 789, 1, 'summary', 'New', 0, labels=['Type-Defect', 'Priority-Medium'])
+ visible_results = [issue]
+
+ # A combined column -> 1 cell with 2 values in it.
+ lower_columns = ['type/priority']
+ table_data = issuelist._MakeTableData(
+ visible_results, None, [], lower_columns, [], {}, CELL_FACTORIES, {},
+ self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(len(lower_columns), len(row.cells))
+ self.assertEqual(0, row.cells[0].col_index)
+ self.assertEqual(2, len(row.cells[0].values))
+ self.assertEqual('Defect', row.cells[0].values[0].item)
+ self.assertEqual('Medium', row.cells[0].values[1].item)
+ self.assertEqual([], row.group.cells)
+
+ def testMakeTableData_GroupBy(self):
+ issue = fake.MakeTestIssue(
+ 789, 1, 'summary', 'New', 0, labels=['Type-Defect', 'Priority-Medium'])
+ visible_results = [issue]
+
+ # 2 columns -> 2 cells with 1 value in each cell, row is part of a 1-row
+ # group of issues with type=defect.
+ lower_columns = ['type', 'priority']
+ table_data = issuelist._MakeTableData(
+ visible_results, None, [], lower_columns, ['type'], {}, CELL_FACTORIES,
+ {}, self.config)
+ self.assertEqual(1, len(table_data))
+ row = table_data[0]
+ self.assertEqual(len(lower_columns), len(row.cells))
+ self.assertEqual(0, row.cells[0].col_index)
+ self.assertEqual(1, len(row.cells[0].values))
+ self.assertEqual('Defect', row.cells[0].values[0].item)
+ self.assertEqual(1, row.cells[1].col_index)
+ self.assertEqual(1, len(row.cells[1].values))
+ self.assertEqual('Medium', row.cells[1].values[0].item)
+ self.assertEqual(1, len(row.group.cells))
+ self.assertEqual('Defect', row.group.cells[0].values[0].item)
+
+ def testGetStarredIssues_Anon(self):
+ services = service_manager.Services(issue_star=fake.IssueStarService())
+ mr = testing_helpers.MakeMonorailRequest()
+
+ self.assertEqual(set(), issuelist._GetStarredIssues(
+ mr.cnxn, mr.auth.user_id, services))
+
+ def testGetStarredIssues_SignedIn(self):
+ services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ issue_star=fake.IssueStarService())
+ project = fake.Project(project_name='proj', project_id=789)
+ mr = testing_helpers.MakeMonorailRequest(
+ project=project, user_info={'user_id': 111L})
+
+ # User has not starred anything yet.
+ self.assertEqual(set(), issuelist._GetStarredIssues(
+ mr.cnxn, mr.auth.user_id, services))
+
+ # User starred 2 issues in 1 project. Other users have also starred stuff.
+ cnxn = 'fake connection'
+ config = services.config.GetProjectConfig(cnxn, project.project_id)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100001, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100002, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100002, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100099, 999L, True)
+ self.assertEqual(
+ {100001, 100002},
+ issuelist._GetStarredIssues(mr.cnxn, mr.auth.user_id, services))
+
+ def testGetStarredIssues_CrossProject(self):
+ services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ issue_star=fake.IssueStarService())
+ project = fake.Project(project_name='proj', project_id=789)
+ mr = testing_helpers.MakeMonorailRequest(
+ project=project, user_info={'user_id': 111L})
+ mr.query = 'project=proj,otherproj'
+
+ # User has not starred anything yet.
+ self.assertEqual(set(), issuelist._GetStarredIssues(
+ mr.cnxn, mr.auth.user_id, services))
+
+ # User starred 2 issues in 1 project, and 1 in another project.
+ # Other users have also starred stuff.
+ cnxn = 'fake connection'
+ config = services.config.GetProjectConfig(cnxn, project.project_id)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100001, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100002, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100002, 999L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 100099, 999L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 200001, 111L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 200001, 999L, True)
+ services.issue_star.SetStar(
+ cnxn, services, config, 200099, 999L, True)
+ self.assertEqual(
+ {100001, 100002, 200001},
+ issuelist._GetStarredIssues(mr.cnxn, mr.auth.user_id, services))
+
+ def testShouldPreviewOnHover(self):
+ saved_flag = settings.enable_quick_edit
+ user = user_pb2.User()
+
+ settings.enable_quick_edit = True
+ user.preview_on_hover = True
+ self.assertTrue(issuelist._ShouldPreviewOnHover(user))
+ user.preview_on_hover = False
+ self.assertFalse(issuelist._ShouldPreviewOnHover(user))
+
+ settings.enable_quick_edit = False
+ user.preview_on_hover = True
+ self.assertFalse(issuelist._ShouldPreviewOnHover(user))
+ user.preview_on_hover = False
+ self.assertFalse(issuelist._ShouldPreviewOnHover(user))
+
+ settings.enable_quick_edit = saved_flag
+
+
def _GetColumns():
  """Return the lowercased names of all well-known issue list columns."""
  well_known = (tracker_constants.DEFAULT_COL_SPEC.split() +
                list(tracker_constants.OTHER_BUILT_IN_COLS))
  return [col.lower() for col in well_known]
+
+
if __name__ == '__main__':  # Allow running this test module directly.
  unittest.main()
diff --git a/appengine/monorail/tracker/test/issuelistcsv_test.py b/appengine/monorail/tracker/test/issuelistcsv_test.py
new file mode 100644
index 0000000..94062bc
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuelistcsv_test.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for issuelistcsv module."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from testing import testing_helpers
+from tracker import issuelistcsv
+
+
+class IssueListCSVTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services()
+ self.servlet = issuelistcsv.IssueListCsv(
+ 'req', 'res', services=self.services)
+
+ def testRewriteColspec(self):
+ self.assertEqual('', issuelistcsv._RewriteColspec(''))
+
+ self.assertEqual('a B c', issuelistcsv._RewriteColspec('a B c'))
+
+ self.assertEqual('a Summary AllLabels B Opened OpenedTimestamp c',
+ issuelistcsv._RewriteColspec('a summary B opened c'))
+
+ self.assertEqual('Closed ClosedTimestamp Modified ModifiedTimestamp',
+ issuelistcsv._RewriteColspec('Closed Modified'))
+
+
+ def testGatherPageData_AnonUsers(self):
+ """Anonymous users cannot download the issue list."""
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.auth.user_id = 0
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.GatherPageData, mr)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issueoptions_test.py b/appengine/monorail/tracker/test/issueoptions_test.py
new file mode 100644
index 0000000..3995bb8
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueoptions_test.py
@@ -0,0 +1,273 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for the issueoptions JSON feed."""
+
+import unittest
+
+import webapp2
+
+from framework import permissions
+from proto import project_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueoptions
+
+
+class IssueOptionsJSONTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+ services.user.TestAddUser('user_111@domain.com', 111L)
+ services.user.TestAddUser('user_222@domain.com', 222L)
+ services.user.TestAddUser('user_333@domain.com', 333L)
+
+ # User group 888 has members: user_555 and proj@monorail.com
+ services.user.TestAddUser('group888@googlegroups.com', 888L)
+ services.usergroup.TestAddGroupSettings(888L, 'group888@googlegroups.com')
+ services.usergroup.TestAddMembers(888L, [555L, 1001L])
+
+ # User group 999 has members: user_111 and user_444
+ services.user.TestAddUser('group999@googlegroups.com', 999L)
+ services.usergroup.TestAddGroupSettings(999L, 'group999@googlegroups.com')
+ services.usergroup.TestAddMembers(999L, [111L, 444L])
+
+ self.project = services.project.TestAddProject('proj')
+ self.project.owner_ids.extend([111L])
+ self.project.committer_ids.extend([222L])
+ self.project.contributor_ids.extend([333L])
+ self.servlet = issueoptions.IssueOptionsJSON(
+ 'req', webapp2.Response(), services=services)
+
+ def RunHandleRequest(self, logged_in_user_id, perms, effective_ids=None):
+ mr = testing_helpers.MakeMonorailRequest(project=self.project, perms=perms)
+ mr.auth.user_id = logged_in_user_id
+ if effective_ids:
+ mr.auth.effective_ids = effective_ids
+ json_data = self.servlet.HandleRequest(mr)
+ return json_data
+
+ def RunAndGetMemberEmails(
+ self, logged_in_user_id, perms, effective_ids=None):
+ json_data = self.RunHandleRequest(
+ logged_in_user_id, perms, effective_ids=effective_ids)
+ member_emails = [member['name'] for member in json_data['members']]
+ return member_emails
+
+ def VerifyMembersInFeeds(self, logged_in_user_id, perms, expected_visible):
+ member_emails = self.RunAndGetMemberEmails(logged_in_user_id, perms)
+ if expected_visible:
+ self.assertEqual(
+ ['user_111@domain.com', 'user_222@domain.com',
+ 'user_333@domain.com'],
+ member_emails)
+ else:
+ self.assertEqual(
+ ['user_111@domain.com', 'user_222@domain.com'],
+ member_emails)
+
+ def testHandleRequest_Normal(self):
+ # Everyone can see everyone
+ self.VerifyMembersInFeeds(
+ 111L, permissions.OWNER_ACTIVE_PERMISSIONSET, True)
+ self.VerifyMembersInFeeds(
+ 222L, permissions.COMMITTER_ACTIVE_PERMISSIONSET, True)
+ self.VerifyMembersInFeeds(
+ 333L, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET, True)
+
+ def testHandleRequest_HideMembers(self):
+ self.project.only_owners_see_contributors = True
+ # Only project owners and committers can see everyone.
+ self.VerifyMembersInFeeds(
+ 111L, permissions.OWNER_ACTIVE_PERMISSIONSET, True)
+ self.VerifyMembersInFeeds(
+ 222L, permissions.COMMITTER_ACTIVE_PERMISSIONSET, True)
+ self.VerifyMembersInFeeds(
+ 333L, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET, False)
+
+ def testHandleRequest_MemberIsGroup(self):
+ self.project.contributor_ids.extend([999L])
+ json_data = self.RunHandleRequest(
+ 999L, permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ for member in json_data['members']:
+ if member['name'] == 'group999@googlegroups.com':
+ self.assertTrue(member['is_group'])
+ else:
+ self.assertNotIn('is_group', member)
+
+ @unittest.skip('TODO(jrobbins): reimplement')
+ def skip_testHandleRequest_Groups(self):
+ self.project.contributor_ids.extend([888L, 999L])
+
+ # User 111 can see 444 because they are both in the same user group,
+ # and he can see 555 because of the project-is-a-member-of-group rule.
+ member_emails = self.RunAndGetMemberEmails(
+ 111L, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ effective_ids={111L, 999L})
+ self.assertIn('user_444@domain.com', member_emails)
+ self.assertIn('user_555@domain.com', member_emails)
+
+ # User 333 can see 555 because 555 is in a user group that includes
+ # proj@monorail.com.
+ member_emails = self.RunAndGetMemberEmails(
+ 333L, permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.assertTrue('user_555@domain.com' in member_emails)
+
+ self.project.only_owners_see_contributors = True
+
+ # User 111 can see 444 and 555, hub-and-spoke does not limit
+ # project owners.
+ member_emails = self.RunAndGetMemberEmails(
+ 111L, permissions.OWNER_ACTIVE_PERMISSIONSET,
+ effective_ids={111L, 999L})
+ self.assertTrue('user_444@domain.com' in member_emails)
+ self.assertTrue('user_555@domain.com' in member_emails)
+
+ # User 333 can no longer see 555 because the project-is-a-
+ # member-of-group rule does not extend to contributors when
+ # hub-and-spoke is set. In that mode, contributors are not
+ # supposed to know about all the other users.
+ member_emails = self.RunAndGetMemberEmails(
+ 333L, permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.assertTrue('user_555@domain.com' in member_emails)
+
+ def testHandleRequest_RestrictionLabels(self):
+ json_data = self.RunHandleRequest(
+ 111L, permissions.OWNER_ACTIVE_PERMISSIONSET)
+ labels = [lab['name'] for lab in json_data['labels']]
+ self.assertIn('Restrict-View-EditIssue', labels)
+ self.assertIn('Restrict-AddIssueComment-EditIssue', labels)
+ self.assertIn('Restrict-View-CoreTeam', labels)
+
+
+class FilterMemberDataTest(unittest.TestCase):
+
+ def setUp(self):
+ services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ self.owner_email = 'owner@dom.com'
+ self.committer_email = 'commit@dom.com'
+ self.contributor_email = 'contrib@dom.com'
+ self.indirect_member_email = 'ind@dom.com'
+ self.all_emails = [self.owner_email, self.committer_email,
+ self.contributor_email, self.indirect_member_email]
+ self.project = services.project.TestAddProject('proj')
+
+ def DoFiltering(self, perms, unsigned_user=False):
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.project, perms=perms)
+ if not unsigned_user:
+ mr.auth.user_id = 111L
+ mr.auth.user_view = testing_helpers.Blank(domain='jrobbins.org')
+ return issueoptions._FilterMemberData(
+ mr, [self.owner_email], [self.committer_email],
+ [self.contributor_email], [self.indirect_member_email])
+
+ def testUnsignedUser_NormalProject(self):
+ visible_members = self.DoFiltering(
+ permissions.READ_ONLY_PERMISSIONSET, unsigned_user=True)
+ self.assertItemsEqual(
+ [self.owner_email, self.committer_email, self.contributor_email,
+ self.indirect_member_email],
+ visible_members)
+
+ def testUnsignedUser_RestrictedProject(self):
+ self.project.only_owners_see_contributors = True
+ visible_members = self.DoFiltering(
+ permissions.READ_ONLY_PERMISSIONSET, unsigned_user=True)
+ self.assertItemsEqual(
+ [self.owner_email, self.committer_email, self.indirect_member_email],
+ visible_members)
+
+ def testOwnersAndAdminsCanSeeAll_NormalProject(self):
+ visible_members = self.DoFiltering(
+ permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ visible_members = self.DoFiltering(
+ permissions.ADMIN_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ def testOwnersAndAdminsCanSeeAll_HubAndSpoke(self):
+ self.project.only_owners_see_contributors = True
+
+ visible_members = self.DoFiltering(
+ permissions.OWNER_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ visible_members = self.DoFiltering(
+ permissions.ADMIN_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ visible_members = self.DoFiltering(
+ permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ def testNonOwnersCanSeeAll_NormalProject(self):
+ visible_members = self.DoFiltering(
+ permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ visible_members = self.DoFiltering(
+ permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(self.all_emails, visible_members)
+
+ def testCommittersSeeOnlySameDomain_HubAndSpoke(self):
+ self.project.only_owners_see_contributors = True
+
+ visible_members = self.DoFiltering(
+ permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET)
+ self.assertItemsEqual(
+ [self.owner_email, self.committer_email, self.indirect_member_email],
+ visible_members)
+
+
+class BuildRestrictionChoicesTest(unittest.TestCase):
+
+ def testBuildRestrictionChoices(self):
+ project = project_pb2.Project()
+ choices = issueoptions._BuildRestrictionChoices(project, [], [])
+ self.assertEquals([], choices)
+
+ choices = issueoptions._BuildRestrictionChoices(
+ project, [], ['Hop', 'Jump'])
+ self.assertEquals([], choices)
+
+ freq = [('View', 'B', 'You need permission B to do anything'),
+ ('A', 'B', 'You need B to use A')]
+ choices = issueoptions._BuildRestrictionChoices(project, freq, [])
+ expected = [dict(name='Restrict-View-B',
+ doc='You need permission B to do anything'),
+ dict(name='Restrict-A-B',
+ doc='You need B to use A')]
+ self.assertListEqual(expected, choices)
+
+ extra_perms = project_pb2.Project.ExtraPerms(
+ perms=['Over18', 'Over21'])
+ project.extra_perms.append(extra_perms)
+ choices = issueoptions._BuildRestrictionChoices(
+ project, [], ['Drink', 'Smoke'])
+ expected = [dict(name='Restrict-Drink-Over18',
+ doc='Permission Over18 needed to use Drink'),
+ dict(name='Restrict-Drink-Over21',
+ doc='Permission Over21 needed to use Drink'),
+ dict(name='Restrict-Smoke-Over18',
+ doc='Permission Over18 needed to use Smoke'),
+ dict(name='Restrict-Smoke-Over21',
+ doc='Permission Over21 needed to use Smoke')]
+ self.assertListEqual(expected, choices)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issueoriginal_test.py b/appengine/monorail/tracker/test/issueoriginal_test.py
new file mode 100644
index 0000000..aa17954
--- /dev/null
+++ b/appengine/monorail/tracker/test/issueoriginal_test.py
@@ -0,0 +1,220 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for the issueoriginal module."""
+
+import unittest
+
+import webapp2
+
+from framework import framework_helpers
+from framework import permissions
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issueoriginal
+
+
+STRIPPED_MSG = 'Are you sure that it is plugged in?\n'
+ORIG_MSG = ('Are you sure that it is plugged in?\n'
+ '\n'
+ '> Issue 1 entered by user foo:\n'
+ '> http://blah blah\n'
+ '> The screen is just dark when I press power on\n')
+XXX_GOOD_UNICODE_MSG = u'Thanks,\n\342\230\206*username*'.encode('utf-8')
+GOOD_UNICODE_MSG = u'Thanks,\n XXX *username*'
+XXX_BAD_UNICODE_MSG = ORIG_MSG + ('\xff' * 1000)
+BAD_UNICODE_MSG = ORIG_MSG + 'XXX'
+GMAIL_CRUFT_MSG = ORIG_MSG # XXX .replace(' ', ' \xa0 ')
+
+
+class IssueOriginalTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService())
+ self.servlet = issueoriginal.IssueOriginal(
+ 'req', 'res', services=self.services)
+
+ self.proj = self.services.project.TestAddProject('proj', project_id=789)
+ summary = 'System wont boot'
+ status = 'New'
+ cnxn = 'fake connection'
+ self.local_id_1 = self.services.issue.CreateIssue(
+ cnxn, self.services,
+ 789, summary, status, 111L, [], [], [], [], 111L,
+ 'The screen is just dark when I press power on')
+ _amendments, comment_0 = self.services.issue.ApplyIssueComment(
+ cnxn, self.services, 222L, 789, 1,
+ summary, status, 222L, [], [], [], [], [], [], [], [], 0,
+ comment=STRIPPED_MSG, inbound_message=ORIG_MSG)
+ _amendments, comment_1 = self.services.issue.ApplyIssueComment(
+ cnxn, self.services, 222L, 789, 1,
+ summary, status, 222L, [], [], [], [], [], [], [], [], None,
+ comment=STRIPPED_MSG, inbound_message=BAD_UNICODE_MSG)
+ _amendments, comment_2 = self.services.issue.ApplyIssueComment(
+ cnxn, self.services, 222L, 789, 1,
+ summary, status, 222L, [], [], [], [], [], [], [], [], 0,
+ comment=STRIPPED_MSG, inbound_message=GMAIL_CRUFT_MSG)
+ _amendments, comment_3 = self.services.issue.ApplyIssueComment(
+ cnxn, self.services, 222L, 789, 1,
+ summary, status, 222L, [], [], [], [], [], [], [], [], 0,
+ comment=STRIPPED_MSG, inbound_message=GOOD_UNICODE_MSG)
+ self.issue_1 = self.services.issue.GetIssueByLocalID(
+ cnxn, 789, self.local_id_1)
+ self.comments = [comment_0, comment_1, comment_2, comment_3]
+
+ def testAssertBasePermission(self):
+ """Permit users who can view issue, view inbound message and delete."""
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=1',
+ project=self.proj)
+ mr.perms = permissions.EMPTY_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ mr.perms = permissions.CONTRIBUTOR_ACTIVE_PERMISSIONSET
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ mr.perms = permissions.OWNER_ACTIVE_PERMISSIONSET
+ self.servlet.AssertBasePermission(mr)
+
+ def testGatherPageData_Normal(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=1',
+ project=self.proj)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['local_id'])
+ self.assertEqual(1, page_data['seq'])
+ self.assertFalse(page_data['is_binary'])
+ self.assertEqual(ORIG_MSG, page_data['message_body'])
+
+ def testGatherPageData_GoodUnicode(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=4',
+ project=self.proj)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['local_id'])
+ self.assertEqual(4, page_data['seq'])
+ self.assertEqual(GOOD_UNICODE_MSG, page_data['message_body'])
+ self.assertFalse(page_data['is_binary'])
+
+ def testGatherPageData_BadUnicode(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=2',
+ project=self.proj)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['local_id'])
+ self.assertEqual(2, page_data['seq'])
+ # xxx: should be true if cruft was there.
+ # self.assertTrue(page_data['is_binary'])
+
+ def testGatherPageData_GmailCruft(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=3',
+ project=self.proj)
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual(1, page_data['local_id'])
+ self.assertEqual(3, page_data['seq'])
+ self.assertFalse(page_data['is_binary'])
+ self.assertEqual(ORIG_MSG, page_data['message_body'])
+
+ def testGatherPageData_404(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original',
+ project=self.proj)
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=999',
+ project=self.proj)
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=999&seq=1',
+ project=self.proj)
+ try:
+ self.servlet.GatherPageData(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testGetIssueAndComment_Normal(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=1',
+ project=self.proj)
+ issue, comment = self.servlet._GetIssueAndComment(mr)
+ self.assertEqual(self.issue_1, issue)
+ self.assertEqual(self.comments[1].content, comment.content)
+
+ def testGetIssueAndComment_NoSuchComment(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1&seq=99',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ def testGetIssueAndComment_Malformed(self):
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=1',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?seq=1',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?id=abc',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+ _request, mr = testing_helpers.GetRequestObjects(
+ path='/p/proj/issues/original?seq=abc',
+ project=self.proj)
+ try:
+ self.servlet._GetIssueAndComment(mr)
+ self.fail()
+ except webapp2.HTTPException as e:
+ self.assertEquals(404, e.code)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issuepeek_test.py b/appengine/monorail/tracker/test/issuepeek_test.py
new file mode 100644
index 0000000..b4dc3d4
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuepeek_test.py
@@ -0,0 +1,119 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.tracker.issuepeek."""
+
+import unittest
+
+from google.appengine.ext import testbed
+
+from framework import permissions
+from proto import tracker_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issuepeek
+from tracker import tracker_bizobj
+
+
+class IssuePeekTest(unittest.TestCase):
+
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_user_stub()
+ self.testbed.init_memcache_stub()
+ self.testbed.init_datastore_v3_stub()
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ spam=fake.SpamService())
+ self.proj = self.services.project.TestAddProject('proj', project_id=789)
+ self.cnxn = 'fake cnxn'
+ self.servlet = issuepeek.IssuePeek(
+ 'req', 'res', services=self.services)
+ self.local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services,
+ 789, 'summary', 'status', 111L, [], [], [], [], 111L,
+ 'The screen is just dark when I press power on')
+
+ def testAssertBasePermission(self):
+ """Permit users who can view issues."""
+ mr = testing_helpers.MakeMonorailRequest(
+ project=self.proj,
+ perms=permissions.EMPTY_PERMISSIONSET)
+ mr.local_id = self.local_id_1
+ self.assertRaises(permissions.PermissionException,
+ self.servlet.AssertBasePermission, mr)
+ mr.perms = permissions.USER_PERMISSIONSET
+ self.servlet.AssertBasePermission(mr)
+
+ def testPaginateComments_NotVisible(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ issue = fake.MakeTestIssue(789, 1, 'summary', 'New', 111L)
+ issuecomment_list = [tracker_pb2.IssueComment()]
+
+ # No comments yet.
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[1:], visible_comments)
+ self.assertFalse(pagination.visible)
+
+ # 5 comments, none deleted.
+ for _ in range(5):
+ issuecomment_list.append(tracker_pb2.IssueComment())
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[1:], visible_comments)
+ self.assertFalse(pagination.visible)
+
+ # 5 comments, 1 of them deleted.
+ issuecomment_list[1].deleted_by = 123
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[2:], visible_comments)
+ self.assertFalse(pagination.visible)
+
+ def testPaginateComments_Visible(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ issue = fake.MakeTestIssue(789, 1, 'summary', 'New', 111L)
+ issuecomment_list = [tracker_pb2.IssueComment()]
+ # 500 comments, none deleted.
+ for _ in range(500):
+ issuecomment_list.append(tracker_pb2.IssueComment())
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[1:], visible_comments)
+ self.assertFalse(pagination.visible)
+
+ # 501 comments, none deleted.
+ issuecomment_list.append(tracker_pb2.IssueComment())
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[2:], visible_comments)
+ self.assertTrue(pagination.visible)
+ self.assertEqual(2, pagination.last)
+ self.assertEqual(501, pagination.start)
+
+ # 501 comments, 1 of them deleted.
+ issuecomment_list[1].deleted_by = 123
+ description, visible_comments, pagination = issuepeek.PaginateComments(
+ mr, issue, issuecomment_list, config)
+ self.assertEqual(issuecomment_list[0], description)
+ self.assertEqual(issuecomment_list[2:], visible_comments)
+ self.assertFalse(pagination.visible)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issuereindex_test.py b/appengine/monorail/tracker/test/issuereindex_test.py
new file mode 100644
index 0000000..5474f5d
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuereindex_test.py
@@ -0,0 +1,128 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.tracker.issuereindex."""
+
+import unittest
+
+import mox
+
+import settings
+from framework import permissions
+from framework import template_helpers
+from services import service_manager
+from services import tracker_fulltext
+from testing import fake
+from testing import testing_helpers
+from tracker import issuereindex
+
+
+class IssueReindexTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService())
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.mox = mox.Mox()
+
+ def tearDown(self):
+ self.mox.UnsetStubs()
+ self.mox.ResetAll()
+
+ def testAssertBasePermission_NoAccess(self):
+ # Non-members and contributors do not have permission to view this page.
+ for permission in (permissions.USER_PERMISSIONSET,
+ permissions.COMMITTER_ACTIVE_PERMISSIONSET):
+ request, mr = testing_helpers.GetRequestObjects(
+ project=self.project, perms=permission)
+ servlet = issuereindex.IssueReindex(
+ request, 'res', services=self.services)
+ try:
+ servlet.AssertBasePermission(mr)
+ self.fail('Expected PermissisonException not thrown.')
+ except permissions.PermissionException, e:
+ self.assertEqual('You are not allowed to administer this project',
+ e.message)
+
+ def testAssertBasePermission_WithAccess(self):
+ # Owners and admins have permission to view this page.
+ for permission in (permissions.OWNER_ACTIVE_PERMISSIONSET,
+ permissions.ADMIN_PERMISSIONSET):
+ request, mr = testing_helpers.GetRequestObjects(
+ project=self.project, perms=permission)
+ servlet = issuereindex.IssueReindex(
+ request, 'res', services=self.services)
+ servlet.AssertBasePermission(mr)
+
+ def testGatherPageData(self):
+ servlet = issuereindex.IssueReindex('req', 'res', services=self.services)
+
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.auto_submit = True
+ ret = servlet.GatherPageData(mr)
+
+ self.assertTrue(ret['auto_submit'])
+ self.assertIsNone(ret['issue_tab_mode'])
+ self.assertTrue(ret['page_perms'].CreateIssue)
+
+ def _callProcessFormData(self, post_data, index_issue_1=True):
+ servlet = issuereindex.IssueReindex('req', 'res', services=self.services)
+
+ mr = testing_helpers.MakeMonorailRequest(project=self.project)
+ mr.cnxn = self.cnxn
+
+ issue1 = fake.MakeTestIssue(
+ project_id=self.project.project_id, local_id=1, summary='sum',
+ status='New', owner_id=111L)
+ issue1.project_name = self.project.project_name
+ self.services.issue.TestAddIssue(issue1)
+
+ self.mox.StubOutWithMock(tracker_fulltext, 'IndexIssues')
+ if index_issue_1:
+ tracker_fulltext.IndexIssues(
+ self.cnxn, [issue1], self.services.user, self.services.issue,
+ self.services.config)
+
+ self.mox.ReplayAll()
+
+ ret = servlet.ProcessFormData(mr, post_data)
+ self.mox.VerifyAll()
+ return ret
+
+ def testProcessFormData_NormalInputs(self):
+ post_data = {'start': 1, 'num': 5}
+ ret = self._callProcessFormData(post_data)
+ self.assertEquals(
+ '/p/None/issues/reindex?start=6&auto_submit=False&num=5', ret)
+
+ def testProcessFormData_LargeInputs(self):
+ post_data = {'start': 0, 'num': 10000000}
+ ret = self._callProcessFormData(post_data)
+ self.assertEquals(
+ '/p/None/issues/reindex?start=%s&auto_submit=False&num=%s' % (
+ settings.max_artifact_search_results_per_page,
+ settings.max_artifact_search_results_per_page),
+ ret)
+
+ def testProcessFormData_WithAutoSubmit(self):
+ post_data = {'start': 1, 'num': 5, 'auto_submit': 1}
+ ret = self._callProcessFormData(post_data)
+ self.assertEquals(
+ '/p/None/issues/reindex?start=6&auto_submit=True&num=5', ret)
+
+ def testProcessFormData_WithAutoSubmitButNoMoreIssues(self):
+ """This project has no issues 6-10, so stop autosubmitting."""
+ post_data = {'start': 6, 'num': 5, 'auto_submit': 1}
+ ret = self._callProcessFormData(post_data, index_issue_1=False)
+ self.assertEquals(
+ '/p/None/issues/reindex?start=11&auto_submit=False&num=5', ret)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/issuetips_test.py b/appengine/monorail/tracker/test/issuetips_test.py
new file mode 100644
index 0000000..fed2d92
--- /dev/null
+++ b/appengine/monorail/tracker/test/issuetips_test.py
@@ -0,0 +1,34 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for issuetips module."""
+
+import unittest
+
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import issuetips
+
+
+class IssueTipsTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService())
+ self.servlet = issuetips.IssueSearchTips(
+ 'req', 'res', services=self.services)
+
+ def testGatherPageData(self):
+ mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/tips')
+ page_data = self.servlet.GatherPageData(mr)
+ self.assertEqual('issueSearchTips', page_data['issue_tab_mode'])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/spam_test.py b/appengine/monorail/tracker/test/spam_test.py
new file mode 100644
index 0000000..2aeb535
--- /dev/null
+++ b/appengine/monorail/tracker/test/spam_test.py
@@ -0,0 +1,114 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittests for monorail.tracker.spam."""
+
+import unittest
+
+from framework import permissions
+from services import service_manager
+from services import issue_svc
+from testing import fake
+from testing import testing_helpers
+from tracker import spam
+
+
+class FlagSpamFormTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ spam=fake.SpamService()
+ )
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.servlet = spam.FlagSpamForm(
+ 'req', 'res', services=self.services)
+
+ def testProcessFormData_Permission(self):
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+
+ # test owner case.
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth_user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'spam': 'true'
+ }
+ res = self.servlet.ProcessFormData(mr, post_data)
+ self.assertEqual('http://127.0.0.1/p/None/issues/detail?id=1', res)
+
+ # test member case.
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.COMMITTER_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth_user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'spam': 'true'
+ }
+ res = self.servlet.ProcessFormData(mr, post_data)
+ self.assertEqual('http://127.0.0.1/p/None/issues/detail?id=1', res)
+
+ # test non-member case.
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.READ_ONLY_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth_user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'spam': 'true'
+ }
+
+ with self.assertRaises(permissions.PermissionException):
+ res = self.servlet.ProcessFormData(mr, post_data)
+
+
+ def testProcessFormData_Comment(self):
+ local_id_1 = self.services.issue.CreateIssue(
+ self.cnxn, self.services, self.project.project_id,
+ 'summary_1', 'status', 111L, [], [], [], [], 111L, 'description_1')
+
+ # test owner case, non-existent comment.
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth_user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'comment_id': 123,
+ 'spam': 'true'
+ }
+ with self.assertRaises(issue_svc.NoSuchCommentException):
+ res = self.servlet.ProcessFormData(mr, post_data)
+
+ comment = self.services.issue.CreateIssueComment(
+ self.cnxn, self.project.project_id, local_id_1, 111L, "Test comment")
+
+ _, mr = testing_helpers.GetRequestObjects(
+ project=self.project,
+ perms=permissions.OWNER_ACTIVE_PERMISSIONSET)
+ mr.local_id = local_id_1
+ mr.auth_user_id = 222L
+ post_data = {
+ 'id': local_id_1,
+ 'comment_id': comment.id,
+ 'sequence_num': 2,
+ 'spam': 'true'
+ }
+
+ res = self.servlet.ProcessFormData(mr, post_data)
+ self.assertEqual('http://127.0.0.1/p/None/issues/detail?id=1', res)
diff --git a/appengine/monorail/tracker/test/tablecell_test.py b/appengine/monorail/tracker/test/tablecell_test.py
new file mode 100644
index 0000000..31617d0
--- /dev/null
+++ b/appengine/monorail/tracker/test/tablecell_test.py
@@ -0,0 +1,290 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unit tests for issuelist module."""
+
+import unittest
+
+from framework import framework_constants
+from framework import table_view_helpers
+from proto import tracker_pb2
+from testing import fake
+from tracker import tablecell
+
+
+class DisplayNameMock(object):
+
+ def __init__(self, name):
+ self.display_name = name
+
+
+def MakeTestIssue(local_id, issue_id, summary):
+ issue = tracker_pb2.Issue()
+ issue.local_id = local_id
+ issue.issue_id = issue_id
+ issue.summary = summary
+ return issue
+
+
+class TableCellUnitTest(unittest.TestCase):
+
+ USERS_BY_ID = {
+ 23456: DisplayNameMock('Jason'),
+ 34567: DisplayNameMock('Nathan'),
+ }
+
+ def setUp(self):
+ self.issue1 = MakeTestIssue(
+ local_id=1, issue_id=100001, summary='One')
+ self.issue2 = MakeTestIssue(
+ local_id=2, issue_id=100002, summary='Two')
+ self.issue3 = MakeTestIssue(
+ local_id=3, issue_id=100003, summary='Three')
+
+ def testTableCellID(self):
+ cell = tablecell.TableCellID(
+ MakeTestIssue(4, 4, 'Four'), None, self.USERS_BY_ID, [], {}, {},
+ 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ID)
+ # Note that the ID itself is accessed from the row, not the cell.
+
+ def testTableCellOwner(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.owner_id=23456
+
+ cell = tablecell.TableCellOwner(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 'Jason')
+
+ def testTableCellOwnerNoOwner(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.owner_id=framework_constants.NO_USER_SPECIFIED
+
+ cell = tablecell.TableCellOwner(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values, [])
+
+ def testTableCellReporter(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.reporter_id=34567
+
+ cell = tablecell.TableCellReporter(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 'Nathan')
+
+ def testTableCellCc(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.cc_ids = [23456, 34567]
+
+ cell = tablecell.TableCellCc(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 'Jason')
+ self.assertEqual(cell.values[1].item, 'Nathan')
+
+ def testTableCellCcNoCcs(self):
+ cell = tablecell.TableCellCc(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values, [])
+
+ def testTableCellAttachmentsNone(self):
+ cell = tablecell.TableCellAttachments(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 0)
+
+ def testTableCellAttachments(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.attachment_count = 2
+
+ cell = tablecell.TableCellAttachments(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 2)
+
+ def testTableCellOpened(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.opened_timestamp = 1200000000
+
+ cell = tablecell.TableCellOpened(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 'Jan 2008')
+
+ def testTableCellClosed(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.closed_timestamp = None
+
+ cell = tablecell.TableCellClosed(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values, [])
+
+ test_issue.closed_timestamp = 1200000000
+ cell = tablecell.TableCellClosed(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 'Jan 2008')
+
+ def testTableCellModified(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.modified_timestamp = None
+
+ cell = tablecell.TableCellModified(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values, [])
+
+ test_issue.modified_timestamp = 1200000000
+ cell = tablecell.TableCellModified(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 'Jan 2008')
+
+ def testTableCellBlockedOn(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.blocked_on_iids = [
+ self.issue1.issue_id, self.issue2.issue_id, self.issue3.issue_id]
+
+ cell = tablecell.TableCellBlockedOn(
+ test_issue,
+ None, self.USERS_BY_ID, [], {},
+ {self.issue1.issue_id: self.issue1, self.issue2.issue_id: self.issue2,
+ self.issue3.issue_id: self.issue3}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, '1')
+ self.assertEqual(cell.values[1].item, '2')
+ self.assertEqual(cell.values[2].item, '3')
+
+ def testTableCellBlockedOnNone(self):
+ cell = tablecell.TableCellBlockedOn(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values, [])
+
+ def testTableCellBlocking(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.blocking_iids = [
+ self.issue1.issue_id, self.issue2.issue_id, self.issue3.issue_id]
+
+ cell = tablecell.TableCellBlocking(
+ test_issue,
+ None, self.USERS_BY_ID, [], {},
+ {self.issue1.issue_id: self.issue1, self.issue2.issue_id: self.issue2,
+ self.issue3.issue_id: self.issue3}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, '1')
+ self.assertEqual(cell.values[1].item, '2')
+ self.assertEqual(cell.values[2].item, '3')
+
+ def testTableCellBlockingNone(self):
+ cell = tablecell.TableCellBlocking(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values, [])
+
+ def testTableCellBlocked(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.blocked_on_iids = [1, 2, 3]
+
+ cell = tablecell.TableCellBlocked(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 'Yes')
+
+ def testTableCellBlockedNotBlocked(self):
+ cell = tablecell.TableCellBlocked(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, 'No')
+
+ def testTableCellMergedInto(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.merged_into = self.issue3.issue_id
+
+ cell = tablecell.TableCellMergedInto(
+ test_issue, None, self.USERS_BY_ID, [], {},
+ {self.issue3.issue_id: self.issue3}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values[0].item, '3')
+
+ def testTableCellMergedIntoNotMerged(self):
+ cell = tablecell.TableCellMergedInto(
+ MakeTestIssue(4, 4, 'Four'),
+ None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual(cell.values, [])
+
+
+class TableCellCSVTest(unittest.TestCase):
+
+ USERS_BY_ID = {
+ 23456: DisplayNameMock('Jason'),
+ 34567: DisplayNameMock('Nathan'),
+ }
+
+ def testTableCellOpenedTimestamp(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.opened_timestamp = 1200000000
+
+ cell = tablecell.TableCellOpenedTimestamp(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 1200000000)
+
+ def testTableCellClosedTimestamp(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.closed_timestamp = None
+
+ cell = tablecell.TableCellClosedTimestamp(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 0)
+
+ test_issue.closed_timestamp = 1200000000
+ cell = tablecell.TableCellClosedTimestamp(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 1200000000)
+
+ def testTableCellModifiedTimestamp(self):
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.modified_timestamp = 0
+
+ cell = tablecell.TableCellModifiedTimestamp(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 0)
+
+ test_issue.modified_timestamp = 1200000000
+ cell = tablecell.TableCellModifiedTimestamp(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_UNFILTERABLE)
+ self.assertEqual(cell.values[0].item, 1200000000)
+
+ def testTableCellAllLabels(self):
+ labels = ['A', 'B', 'C', 'D-E', 'F-G']
+ derived_labels = ['W', 'X', 'Y-Z']
+
+ test_issue = MakeTestIssue(4, 4, 'Four')
+ test_issue.labels = labels
+ test_issue.derived_labels = derived_labels
+
+ cell = tablecell.TableCellAllLabels(
+ test_issue, None, self.USERS_BY_ID, [], {}, {}, 'fake config')
+ self.assertEqual(cell.type, table_view_helpers.CELL_TYPE_ATTR)
+ self.assertEqual([v.item for v in cell.values], labels + derived_labels)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/tracker_bizobj_test.py b/appengine/monorail/tracker/test/tracker_bizobj_test.py
new file mode 100644
index 0000000..437ff01
--- /dev/null
+++ b/appengine/monorail/tracker/test/tracker_bizobj_test.py
@@ -0,0 +1,562 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Tests for issue tracker bizobj functions."""
+
+import unittest
+
+from framework import framework_constants
+from framework import framework_views
+from proto import tracker_pb2
+from tracker import tracker_bizobj
+
+
+class BizobjTest(unittest.TestCase):
+
+ def testGetOwnerId(self):
+ issue = tracker_pb2.Issue()
+ self.assertEquals(
+ tracker_bizobj.GetOwnerId(issue), framework_constants.NO_USER_SPECIFIED)
+
+ issue.derived_owner_id = 123
+ self.assertEquals(tracker_bizobj.GetOwnerId(issue), 123)
+
+ issue.owner_id = 456
+ self.assertEquals(tracker_bizobj.GetOwnerId(issue), 456)
+
+ def testGetStatus(self):
+ issue = tracker_pb2.Issue()
+ self.assertEquals(tracker_bizobj.GetStatus(issue), '')
+
+ issue.derived_status = 'InReview'
+ self.assertEquals(tracker_bizobj.GetStatus(issue), 'InReview')
+
+ issue.status = 'Forgotten'
+ self.assertEquals(tracker_bizobj.GetStatus(issue), 'Forgotten')
+
+ def testGetCcIds(self):
+ issue = tracker_pb2.Issue()
+ self.assertEquals(tracker_bizobj.GetCcIds(issue), [])
+
+ issue.derived_cc_ids.extend([1, 2, 3])
+ self.assertEquals(tracker_bizobj.GetCcIds(issue), [1, 2, 3])
+
+ issue.cc_ids.extend([4, 5, 6])
+ self.assertEquals(tracker_bizobj.GetCcIds(issue), [4, 5, 6, 1, 2, 3])
+
+ def testGetLabels(self):
+ issue = tracker_pb2.Issue()
+ self.assertEquals(tracker_bizobj.GetLabels(issue), [])
+
+ issue.derived_labels.extend(['a', 'b', 'c'])
+ self.assertEquals(tracker_bizobj.GetLabels(issue), ['a', 'b', 'c'])
+
+ issue.labels.extend(['d', 'e', 'f'])
+ self.assertEquals(tracker_bizobj.GetLabels(issue),
+ ['d', 'e', 'f', 'a', 'b', 'c'])
+
+ def CheckDefaultConfig(self, config):
+ self.assertTrue(len(config.well_known_statuses) > 0)
+ self.assertTrue(config.statuses_offer_merge > 0)
+ self.assertTrue(len(config.well_known_labels) > 0)
+ self.assertTrue(len(config.templates) > 0)
+ self.assertTrue(len(config.exclusive_label_prefixes) > 0)
+ # TODO(jrobbins): test actual values from default config
+
+ def testMakeDefaultProjectIssueConfig(self):
+ config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ config.default_template_for_developers = 1
+ config.default_template_for_users = 2
+ self.CheckDefaultConfig(config)
+
+ def testConvertDictToTemplate(self):
+ template = tracker_bizobj.ConvertDictToTemplate(
+ dict(name='name', content='content', summary='summary',
+ status='status', owner_id=111L))
+ self.assertEqual('name', template.name)
+ self.assertEqual('content', template.content)
+ self.assertEqual('summary', template.summary)
+ self.assertEqual('status', template.status)
+ self.assertEqual(111L, template.owner_id)
+ self.assertFalse(template.summary_must_be_edited)
+ self.assertTrue(template.owner_defaults_to_member)
+ self.assertFalse(template.component_required)
+
+ template = tracker_bizobj.ConvertDictToTemplate(
+ dict(name='name', content='content', labels=['a', 'b', 'c']))
+ self.assertListEqual(
+ ['a', 'b', 'c'], list(template.labels))
+
+ template = tracker_bizobj.ConvertDictToTemplate(
+ dict(name='name', content='content', summary_must_be_edited=True,
+ owner_defaults_to_member=True, component_required=True))
+ self.assertTrue(template.summary_must_be_edited)
+ self.assertTrue(template.owner_defaults_to_member)
+ self.assertTrue(template.component_required)
+
+ template = tracker_bizobj.ConvertDictToTemplate(
+ dict(name='name', content='content', summary_must_be_edited=False,
+ owner_defaults_to_member=False, component_required=False))
+ self.assertFalse(template.summary_must_be_edited)
+ self.assertFalse(template.owner_defaults_to_member)
+ self.assertFalse(template.component_required)
+
+ def testHarmonizeConfigs_Empty(self):
+ harmonized = tracker_bizobj.HarmonizeConfigs([])
+ self.CheckDefaultConfig(harmonized)
+
+ def testHarmonizeConfigs(self):
+ c1 = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
+ harmonized = tracker_bizobj.HarmonizeConfigs([c1])
+ self.assertListEqual(
+ [stat.status for stat in c1.well_known_statuses],
+ [stat.status for stat in harmonized.well_known_statuses])
+ self.assertListEqual(
+ [lab.label for lab in c1.well_known_labels],
+ [lab.label for lab in harmonized.well_known_labels])
+ self.assertEqual('', harmonized.default_sort_spec)
+
+ c2 = tracker_bizobj.MakeDefaultProjectIssueConfig(678)
+ tracker_bizobj.SetConfigStatuses(c2, [
+ ('Unconfirmed', '', True, False),
+ ('New', '', True, True),
+ ('Accepted', '', True, False),
+ ('Begun', '', True, False),
+ ('Fixed', '', False, False),
+ ('Obsolete', '', False, False)])
+ tracker_bizobj.SetConfigLabels(c2, [
+ ('Pri-0', '', False),
+ ('Priority-High', '', True),
+ ('Pri-1', '', False),
+ ('Priority-Medium', '', True),
+ ('Pri-2', '', False),
+ ('Priority-Low', '', True),
+ ('Pri-3', '', False),
+ ('Pri-4', '', False)])
+ c2.default_sort_spec = 'Pri -status'
+
+ harmonized = tracker_bizobj.HarmonizeConfigs([c1, c2])
+ result_statuses = [stat.status
+ for stat in harmonized.well_known_statuses]
+ result_labels = [lab.label
+ for lab in harmonized.well_known_labels]
+ self.assertListEqual(
+ ['Unconfirmed', 'New', 'Accepted', 'Begun', 'Started', 'Fixed',
+ 'Obsolete', 'Verified', 'Invalid', 'Duplicate', 'WontFix', 'Done'],
+ result_statuses)
+ self.assertListEqual(
+ ['Pri-0', 'Type-Defect', 'Type-Enhancement', 'Type-Task',
+ 'Type-Other', 'Priority-Critical', 'Priority-High',
+ 'Pri-1', 'Priority-Medium', 'Pri-2', 'Priority-Low', 'Pri-3',
+ 'Pri-4'],
+ result_labels[:result_labels.index('OpSys-All')])
+ self.assertEqual('Pri -status', harmonized.default_sort_spec.strip())
+
+ def testCombineOrderedLists_Empty(self):
+ self.assertEqual([], tracker_bizobj._CombineOrderedLists([]))
+
+ def testCombineOrderedLists_Normal(self):
+ a = ['Mon', 'Wed', 'Fri']
+ b = ['Mon', 'Tue']
+ c = ['Wed', 'Thu']
+ self.assertEqual(['Mon', 'Tue', 'Wed', 'Thu', 'Fri'],
+ tracker_bizobj._CombineOrderedLists([a, b, c]))
+
+ d = ['Mon', 'StartOfWeek', 'Wed', 'MidWeek', 'Fri', 'EndOfWeek']
+ self.assertEqual(['Mon', 'StartOfWeek', 'Tue', 'Wed', 'MidWeek', 'Thu',
+ 'Fri', 'EndOfWeek'],
+ tracker_bizobj._CombineOrderedLists([a, b, c, d]))
+
+ def testUsersInvolvedInComment(self):
+ comment = tracker_pb2.IssueComment()
+ self.assertEqual({0}, tracker_bizobj.UsersInvolvedInComment(comment))
+
+ comment.user_id = 111L
+ self.assertEqual(
+ {111L}, tracker_bizobj.UsersInvolvedInComment(comment))
+
+ amendment = tracker_pb2.Amendment(newvalue='foo')
+ comment.amendments.append(amendment)
+ self.assertEqual(
+ {111L}, tracker_bizobj.UsersInvolvedInComment(comment))
+
+ amendment.added_user_ids.append(222L)
+ amendment.removed_user_ids.append(333L)
+ self.assertEqual({111L, 222L, 333L},
+ tracker_bizobj.UsersInvolvedInComment(comment))
+
+ def testUsersInvolvedInCommentList(self):
+ self.assertEqual(set(), tracker_bizobj.UsersInvolvedInCommentList([]))
+
+ c1 = tracker_pb2.IssueComment()
+ c1.user_id = 111L
+ c1.amendments.append(tracker_pb2.Amendment(newvalue='foo'))
+
+ c2 = tracker_pb2.IssueComment()
+ c2.user_id = 111L
+ c2.amendments.append(tracker_pb2.Amendment(
+ added_user_ids=[222L], removed_user_ids=[333L]))
+
+ self.assertEqual({111L},
+ tracker_bizobj.UsersInvolvedInCommentList([c1]))
+
+ self.assertEqual({111L, 222L, 333L},
+ tracker_bizobj.UsersInvolvedInCommentList([c2]))
+
+ self.assertEqual({111L, 222L, 333L},
+ tracker_bizobj.UsersInvolvedInCommentList([c1, c2]))
+
+ def testMakeAmendment(self):
+ amendment = tracker_bizobj.MakeAmendment(
+ tracker_pb2.FieldID.STATUS, 'new', [111L], [222L])
+ self.assertEqual(tracker_pb2.FieldID.STATUS, amendment.field)
+ self.assertEqual('new', amendment.newvalue)
+ self.assertEqual([111L], amendment.added_user_ids)
+ self.assertEqual([222L], amendment.removed_user_ids)
+
+ def testPlusMinusString(self):
+ self.assertEqual('', tracker_bizobj._PlusMinusString([], []))
+ self.assertEqual('-a -b c d',
+ tracker_bizobj._PlusMinusString(['c', 'd'], ['a', 'b']))
+
+ def testPlusMinusAmendment(self):
+ amendment = tracker_bizobj._PlusMinusAmendment(
+ tracker_pb2.FieldID.STATUS, ['add1', 'add2'], ['remove1'])
+ self.assertEqual(tracker_pb2.FieldID.STATUS, amendment.field)
+ self.assertEqual('-remove1 add1 add2', amendment.newvalue)
+
+ def testPlusMinusRefsAmendment(self):
+ ref1 = (None, 1)
+ ref2 = ('other-proj', 2)
+ amendment = tracker_bizobj._PlusMinusRefsAmendment(
+ tracker_pb2.FieldID.STATUS, [ref1], [ref2])
+ self.assertEqual(tracker_pb2.FieldID.STATUS, amendment.field)
+ self.assertEqual('-other-proj:2 1', amendment.newvalue)
+
+ def testMakeSummaryAmendment(self):
+ amendment = tracker_bizobj.MakeSummaryAmendment('', None)
+ self.assertEqual(tracker_pb2.FieldID.SUMMARY, amendment.field)
+ self.assertEqual('', amendment.newvalue)
+ self.assertEqual(None, amendment.oldvalue)
+
+ amendment = tracker_bizobj.MakeSummaryAmendment('new summary', '')
+ self.assertEqual(tracker_pb2.FieldID.SUMMARY, amendment.field)
+ self.assertEqual('new summary', amendment.newvalue)
+ self.assertEqual('', amendment.oldvalue)
+
+ def testMakeStatusAmendment(self):
+ amendment = tracker_bizobj.MakeStatusAmendment('', None)
+ self.assertEqual(tracker_pb2.FieldID.STATUS, amendment.field)
+ self.assertEqual('', amendment.newvalue)
+ self.assertEqual(None, amendment.oldvalue)
+
+ amendment = tracker_bizobj.MakeStatusAmendment('New', '')
+ self.assertEqual(tracker_pb2.FieldID.STATUS, amendment.field)
+ self.assertEqual('New', amendment.newvalue)
+ self.assertEqual('', amendment.oldvalue)
+
+ def testMakeOwnerAmendment(self):
+ amendment = tracker_bizobj.MakeOwnerAmendment(111L, 0)
+ self.assertEqual(tracker_pb2.FieldID.OWNER, amendment.field)
+ self.assertEqual('', amendment.newvalue)
+ self.assertEqual([111L], amendment.added_user_ids)
+ self.assertEqual([0], amendment.removed_user_ids)
+
+ def testMakeCcAmendment(self):
+ amendment = tracker_bizobj.MakeCcAmendment([111L], [222L])
+ self.assertEqual(tracker_pb2.FieldID.CC, amendment.field)
+ self.assertEqual('', amendment.newvalue)
+ self.assertEqual([111L], amendment.added_user_ids)
+ self.assertEqual([222L], amendment.removed_user_ids)
+
+ def testMakeLabelsAmendment(self):
+ amendment = tracker_bizobj.MakeLabelsAmendment(['added1'], ['removed1'])
+ self.assertEqual(tracker_pb2.FieldID.LABELS, amendment.field)
+ self.assertEqual('-removed1 added1', amendment.newvalue)
+
+ def testMakeBlockedOnAmendment(self):
+ ref1 = (None, 1)
+ ref2 = ('other-proj', 2)
+ amendment = tracker_bizobj.MakeBlockedOnAmendment([ref1], [ref2])
+ self.assertEqual(tracker_pb2.FieldID.BLOCKEDON, amendment.field)
+ self.assertEqual('-other-proj:2 1', amendment.newvalue)
+
+ def testMakeBlockingAmendment(self):
+ ref1 = (None, 1)
+ ref2 = ('other-proj', 2)
+ amendment = tracker_bizobj.MakeBlockingAmendment([ref1], [ref2])
+ self.assertEqual(tracker_pb2.FieldID.BLOCKING, amendment.field)
+ self.assertEqual('-other-proj:2 1', amendment.newvalue)
+
+ def testMakeMergedIntoAmendment(self):
+ ref1 = (None, 1)
+ ref2 = ('other-proj', 2)
+ amendment = tracker_bizobj.MakeMergedIntoAmendment(ref1, ref2)
+ self.assertEqual(tracker_pb2.FieldID.MERGEDINTO, amendment.field)
+ self.assertEqual('-other-proj:2 1', amendment.newvalue)
+
+ def testAmendmentString(self):
+ users_by_id = {
+ 111L: framework_views.UserView(111L, 'username@gmail.com', True)
+ }
+ summary_amendment = tracker_bizobj.MakeSummaryAmendment('new summary', None)
+ self.assertEqual(
+ 'new summary',
+ tracker_bizobj.AmendmentString(summary_amendment, users_by_id))
+
+ status_amendment = tracker_bizobj.MakeStatusAmendment('', None)
+ self.assertEqual(
+ '', tracker_bizobj.AmendmentString(status_amendment, users_by_id))
+ status_amendment = tracker_bizobj.MakeStatusAmendment('Assigned', 'New')
+ self.assertEqual(
+ 'Assigned',
+ tracker_bizobj.AmendmentString(status_amendment, users_by_id))
+
+ owner_amendment = tracker_bizobj.MakeOwnerAmendment(0, 0)
+ self.assertEqual(
+ '----', tracker_bizobj.AmendmentString(owner_amendment, users_by_id))
+ owner_amendment = tracker_bizobj.MakeOwnerAmendment(111L, 0)
+ self.assertEqual(
+ 'usern...@gmail.com',
+ tracker_bizobj.AmendmentString(owner_amendment, users_by_id))
+
+ def testAmendmentLinks(self):
+ users_by_id = {
+ 111L: framework_views.UserView(111L, 'foo@gmail.com', False),
+ 222L: framework_views.UserView(222L, 'bar@gmail.com', False),
+ 333L: framework_views.UserView(333L, 'baz@gmail.com', False)
+ }
+ # SUMMARY
+ summary_amendment = tracker_bizobj.MakeSummaryAmendment('new summary', None)
+ self.assertEqual(
+ [{'value': 'new summary', 'url': None}],
+ tracker_bizobj.AmendmentLinks(summary_amendment, users_by_id, 'proj'))
+
+ # OWNER
+ owner_amendment = tracker_bizobj.MakeOwnerAmendment(0, 0)
+ self.assertEqual(
+ [{'value': '----', 'url': None}],
+ tracker_bizobj.AmendmentLinks(owner_amendment, users_by_id, 'proj'))
+ owner_amendment = tracker_bizobj.MakeOwnerAmendment(111L, 0)
+ self.assertEqual(
+ [{'value': 'foo@gmail.com', 'url': None}],
+ tracker_bizobj.AmendmentLinks(owner_amendment, users_by_id, 'proj'))
+
+ # BLOCKEDON, BLOCKING, MERGEDINTO
+ blocking_amendment = tracker_bizobj.MakeBlockingAmendment(
+ [(None, 123), ('blah', 234)], [(None, 345), ('blah', 456)])
+ self.assertEqual([
+ {'value': '-345', 'url': '/p/proj/issues/detail?id=345'},
+ {'value': '-blah:456', 'url': '/p/blah/issues/detail?id=456'},
+ {'value': '123', 'url': '/p/proj/issues/detail?id=123'},
+ {'value': 'blah:234', 'url': '/p/blah/issues/detail?id=234'}],
+ tracker_bizobj.AmendmentLinks(blocking_amendment, users_by_id, 'proj'))
+
+ # newvalue catchall
+ label_amendment = tracker_bizobj.MakeLabelsAmendment(
+ ['My-Label', 'Your-Label'], ['Their-Label'])
+ self.assertEqual([
+ {'value': '-Their-Label', 'url': None},
+ {'value': 'My-Label', 'url': None},
+ {'value': 'Your-Label', 'url': None}],
+ tracker_bizobj.AmendmentLinks(label_amendment, users_by_id, 'proj'))
+
+ # CC, or CUSTOM with user type
+ cc_amendment = tracker_bizobj.MakeCcAmendment([222L, 333L], [111L])
+ self.assertEqual([
+ {'value': '-foo@gmail.com', 'url': None},
+ {'value': 'bar@gmail.com', 'url': None},
+ {'value': 'baz@gmail.com', 'url': None}],
+ tracker_bizobj.AmendmentLinks(cc_amendment, users_by_id, 'proj'))
+ user_amendment = tracker_bizobj.MakeAmendment(
+ tracker_pb2.FieldID.CUSTOM, None, [222L, 333L], [111L], 'ultracc')
+ self.assertEqual([
+ {'value': '-foo@gmail.com', 'url': None},
+ {'value': 'bar@gmail.com', 'url': None},
+ {'value': 'baz@gmail.com', 'url': None}],
+ tracker_bizobj.AmendmentLinks(user_amendment, users_by_id, 'proj'))
+
+
+ def testDiffValueLists(self):
+ added, removed = tracker_bizobj.DiffValueLists([], [])
+ self.assertItemsEqual([], added)
+ self.assertItemsEqual([], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([], None)
+ self.assertItemsEqual([], added)
+ self.assertItemsEqual([], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([1, 2], [])
+ self.assertItemsEqual([1, 2], added)
+ self.assertItemsEqual([], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([], [8, 9])
+ self.assertItemsEqual([], added)
+ self.assertItemsEqual([8, 9], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([1, 2], [8, 9])
+ self.assertItemsEqual([1, 2], added)
+ self.assertItemsEqual([8, 9], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([1, 2, 5, 6], [5, 6, 8, 9])
+ self.assertItemsEqual([1, 2], added)
+ self.assertItemsEqual([8, 9], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([5, 6], [5, 6, 8, 9])
+ self.assertItemsEqual([], added)
+ self.assertItemsEqual([8, 9], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists([1, 2, 5, 6], [5, 6])
+ self.assertItemsEqual([1, 2], added)
+ self.assertItemsEqual([], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists(
+ [1, 2, 2, 5, 6], [5, 6, 8, 9])
+ self.assertItemsEqual([1, 2, 2], added)
+ self.assertItemsEqual([8, 9], removed)
+
+ added, removed = tracker_bizobj.DiffValueLists(
+ [1, 2, 5, 6], [5, 6, 8, 8, 9])
+ self.assertItemsEqual([1, 2], added)
+ self.assertItemsEqual([8, 8, 9], removed)
+
+ def testFormatIssueRef(self):
+ self.assertEqual('', tracker_bizobj.FormatIssueRef(None))
+
+ self.assertEqual(
+ 'p:1', tracker_bizobj.FormatIssueRef(('p', 1)))
+
+ self.assertEqual(
+ '1', tracker_bizobj.FormatIssueRef((None, 1)))
+
+ def testParseIssueRef(self):
+ self.assertEqual(None, tracker_bizobj.ParseIssueRef(''))
+ self.assertEqual(None, tracker_bizobj.ParseIssueRef(' \t '))
+
+ ref_pn, ref_id = tracker_bizobj.ParseIssueRef('1')
+ self.assertEqual(None, ref_pn)
+ self.assertEqual(1, ref_id)
+
+ ref_pn, ref_id = tracker_bizobj.ParseIssueRef('-1')
+ self.assertEqual(None, ref_pn)
+ self.assertEqual(1, ref_id)
+
+ ref_pn, ref_id = tracker_bizobj.ParseIssueRef('p:2')
+ self.assertEqual('p', ref_pn)
+ self.assertEqual(2, ref_id)
+
+ ref_pn, ref_id = tracker_bizobj.ParseIssueRef('-p:2')
+ self.assertEqual('p', ref_pn)
+ self.assertEqual(2, ref_id)
+
+ def testSafeParseIssueRef(self):
+ self.assertEqual(None, tracker_bizobj._SafeParseIssueRef('-'))
+ self.assertEqual(None, tracker_bizobj._SafeParseIssueRef('test:'))
+ ref_pn, ref_id = tracker_bizobj.ParseIssueRef('p:2')
+ self.assertEqual('p', ref_pn)
+ self.assertEqual(2, ref_id)
+
+ def testGetFieldValueWithRawValue(self):
+ class MockUser(object):
+ def __init__(self):
+ self.email = 'test@example.com'
+ users_by_id = {111: MockUser()}
+
+ class MockFieldValue(object):
+ def __init__(self, int_value=None, str_value=None, user_id=None):
+ self.int_value = int_value
+ self.str_value = str_value
+ self.user_id = user_id
+
+ # Test user types.
+ # Use user_id from the field_value and get user from users_by_id.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.USER_TYPE,
+ users_by_id=users_by_id,
+ field_value=MockFieldValue(user_id=111),
+ raw_value=113,
+ )
+ self.assertEqual('test@example.com', val)
+ # Specify user_id that does not exist in users_by_id.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.USER_TYPE,
+ users_by_id=users_by_id,
+ field_value=MockFieldValue(user_id=112),
+ raw_value=113,
+ )
+ self.assertEqual(112, val)
+ # Pass in empty users_by_id.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.USER_TYPE,
+ users_by_id={},
+ field_value=MockFieldValue(user_id=111),
+ raw_value=113,
+ )
+ self.assertEqual(111, val)
+ # Test different raw_values.
+ raw_value_tests = (
+ (111, 'test@example.com'),
+ (112, 112),
+ (framework_constants.NO_USER_NAME, framework_constants.NO_USER_NAME))
+ for (raw_value, expected_output) in raw_value_tests:
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.USER_TYPE,
+ users_by_id=users_by_id,
+ field_value=None,
+ raw_value=raw_value,
+ )
+ self.assertEqual(expected_output, val)
+
+ # Test enum types.
+ # The returned value should be the raw_value regardless of field_value being
+ # specified.
+ for field_value in (MockFieldValue(), None):
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.ENUM_TYPE,
+ users_by_id=users_by_id,
+ field_value=field_value,
+ raw_value='abc',
+ )
+ self.assertEqual('abc', val)
+
+ # Test int type.
+ # Use int_type from the field_value.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.INT_TYPE,
+ users_by_id=users_by_id,
+ field_value=MockFieldValue(int_value=100),
+ raw_value=101,
+ )
+ self.assertEqual(100, val)
+ # Use the raw_value when field_value is not specified.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.INT_TYPE,
+ users_by_id=users_by_id,
+ field_value=None,
+ raw_value=101,
+ )
+ self.assertEqual(101, val)
+
+ # Test str type.
+ # Use str_type from the field_value.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.STR_TYPE,
+ users_by_id=users_by_id,
+ field_value=MockFieldValue(str_value='testing'),
+ raw_value='test',
+ )
+ self.assertEqual('testing', val)
+ # Use the raw_value when field_value is not specified.
+ val = tracker_bizobj.GetFieldValueWithRawValue(
+ field_type=tracker_pb2.FieldTypes.STR_TYPE,
+ users_by_id=users_by_id,
+ field_value=None,
+ raw_value='test',
+ )
+ self.assertEqual('test', val)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/appengine/monorail/tracker/test/tracker_helpers_test.py b/appengine/monorail/tracker/test/tracker_helpers_test.py
new file mode 100644
index 0000000..012bd5e
--- /dev/null
+++ b/appengine/monorail/tracker/test/tracker_helpers_test.py
@@ -0,0 +1,1064 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for the tracker helpers module."""
+
+import unittest
+
+import settings
+
+from framework import framework_constants
+from framework import framework_helpers
+from framework import permissions
+from framework import template_helpers
+from framework import urls
+from proto import project_pb2
+from proto import tracker_pb2
+from proto import user_pb2
+from services import service_manager
+from testing import fake
+from testing import testing_helpers
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+TEST_ID_MAP = {
+ 'a@example.com': 1,
+ 'b@example.com': 2,
+ 'c@example.com': 3,
+ 'd@example.com': 4,
+ }
+
+
+def _Issue(project_name, local_id, summary, status):
+ issue = tracker_pb2.Issue()
+ issue.project_name = project_name
+ issue.project_id = 789
+ issue.local_id = local_id
+ issue.issue_id = 100000 + local_id
+ issue.summary = summary
+ issue.status = status
+ return issue
+
+
+def _MakeConfig():
+ config = tracker_pb2.ProjectIssueConfig()
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ means_open=True, status='New', deprecated=False))
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='Old', means_open=False, deprecated=False))
+ config.well_known_statuses.append(tracker_pb2.StatusDef(
+ status='StatusThatWeDontUseAnymore', means_open=False, deprecated=True))
+
+ return config
+
+
+class HelpersTest(unittest.TestCase):
+
+ def setUp(self):
+ self.services = service_manager.Services(
+ project=fake.ProjectService(),
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ usergroup=fake.UserGroupService())
+
+ for email, user_id in TEST_ID_MAP.iteritems():
+ self.services.user.TestAddUser(email, user_id)
+
+ self.services.project.TestAddProject('testproj', project_id=789)
+ self.issue1 = fake.MakeTestIssue(789, 1, 'one', 'New', 111L)
+ self.issue1.project_name = 'testproj'
+ self.services.issue.TestAddIssue(self.issue1)
+ self.issue2 = fake.MakeTestIssue(789, 2, 'two', 'New', 111L)
+ self.issue2.project_name = 'testproj'
+ self.services.issue.TestAddIssue(self.issue2)
+ self.issue3 = fake.MakeTestIssue(789, 3, 'three', 'New', 111L)
+ self.issue3.project_name = 'testproj'
+ self.services.issue.TestAddIssue(self.issue3)
+ self.cnxn = 'fake connextion'
+ self.errors = template_helpers.EZTError()
+ self.default_colspec_param = 'colspec=%s' % (
+ tracker_constants.DEFAULT_COL_SPEC.replace(' ', '%20'))
+ self.services.usergroup.TestAddGroupSettings(999L, 'group@example.com')
+
+ def testParseIssueRequest_Empty(self):
+ post_data = fake.PostData()
+ errors = template_helpers.EZTError()
+ parsed = tracker_helpers.ParseIssueRequest(
+ 'fake cnxn', post_data, self.services, errors, 'proj')
+ self.assertEqual('', parsed.summary)
+ self.assertEqual('', parsed.comment)
+ self.assertEqual('', parsed.status)
+ self.assertEqual('', parsed.users.owner_username)
+ self.assertEqual(0, parsed.users.owner_id)
+ self.assertEqual([], parsed.users.cc_usernames)
+ self.assertEqual([], parsed.users.cc_usernames_remove)
+ self.assertEqual([], parsed.users.cc_ids)
+ self.assertEqual([], parsed.users.cc_ids_remove)
+ self.assertEqual('', parsed.template_name)
+ self.assertEqual([], parsed.labels)
+ self.assertEqual([], parsed.labels_remove)
+ self.assertEqual({}, parsed.fields.vals)
+ self.assertEqual({}, parsed.fields.vals_remove)
+ self.assertEqual([], parsed.fields.fields_clear)
+ self.assertEqual('', parsed.blocked_on.entered_str)
+ self.assertEqual([], parsed.blocked_on.iids)
+
+ def testParseIssueRequest_Normal(self):
+ post_data = fake.PostData({
+ 'summary': ['some summary'],
+ 'comment': ['some comment'],
+ 'status': ['SomeStatus'],
+ 'template_name': ['some template'],
+ 'label': ['lab1', '-lab2'],
+ 'custom_123': ['field1123a', 'field1123b'],
+ })
+ errors = template_helpers.EZTError()
+ parsed = tracker_helpers.ParseIssueRequest(
+ 'fake cnxn', post_data, self.services, errors, 'proj')
+ self.assertEqual('some summary', parsed.summary)
+ self.assertEqual('some comment', parsed.comment)
+ self.assertEqual('SomeStatus', parsed.status)
+ self.assertEqual('', parsed.users.owner_username)
+ self.assertEqual(0, parsed.users.owner_id)
+ self.assertEqual([], parsed.users.cc_usernames)
+ self.assertEqual([], parsed.users.cc_usernames_remove)
+ self.assertEqual([], parsed.users.cc_ids)
+ self.assertEqual([], parsed.users.cc_ids_remove)
+ self.assertEqual('some template', parsed.template_name)
+ self.assertEqual(['lab1'], parsed.labels)
+ self.assertEqual(['lab2'], parsed.labels_remove)
+ self.assertEqual({123: ['field1123a', 'field1123b']}, parsed.fields.vals)
+ self.assertEqual({}, parsed.fields.vals_remove)
+ self.assertEqual([], parsed.fields.fields_clear)
+
+ def testParseBlockers_BlockedOnNothing(self):
+ """Was blocked on nothing, still nothing."""
+ post_data = {tracker_helpers.BLOCKED_ON: ''}
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKED_ON)
+
+ self.assertEqual('', parsed_blockers.entered_str)
+ self.assertEqual([], parsed_blockers.iids)
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKING))
+
+ def testParseBlockers_BlockedOnAdded(self):
+ """Was blocked on nothing; now 1, 2, 3."""
+ post_data = {tracker_helpers.BLOCKED_ON: '1, 2, 3'}
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKED_ON)
+
+ self.assertEqual('1, 2, 3', parsed_blockers.entered_str)
+ self.assertEqual([100001, 100002, 100003], parsed_blockers.iids)
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKING))
+
+ def testParseBlockers_BlockedOnDuplicateRef(self):
+ """Was blocked on nothing; now just 2, but repeated in input."""
+ post_data = {tracker_helpers.BLOCKED_ON: '2, 2, 2'}
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKED_ON)
+
+ self.assertEqual('2, 2, 2', parsed_blockers.entered_str)
+ self.assertEqual([100002], parsed_blockers.iids)
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKING))
+
+ def testParseBlockers_Missing(self):
+ """Parsing an input field that was not in the POST."""
+ post_data = {}
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKED_ON)
+
+ self.assertEqual('', parsed_blockers.entered_str)
+ self.assertEqual([], parsed_blockers.iids)
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKING))
+
+ def testParseBlockers_SameIssueNoProject(self):
+ """Adding same issue as blocker should modify the errors object."""
+ post_data = {'id': '2', tracker_helpers.BLOCKING: '2, 3'}
+
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKING)
+ self.assertEqual('2, 3', parsed_blockers.entered_str)
+ self.assertEqual([], parsed_blockers.iids)
+ self.assertEqual(
+ getattr(self.errors, tracker_helpers.BLOCKING),
+ 'Cannot be blocking the same issue')
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+
+ def testParseBlockers_SameIssueSameProject(self):
+ """Adding same issue as blocker should modify the errors object."""
+ post_data = {'id': '2', tracker_helpers.BLOCKING: 'testproj:2, 3'}
+
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKING)
+ self.assertEqual('testproj:2, 3', parsed_blockers.entered_str)
+ self.assertEqual([], parsed_blockers.iids)
+ self.assertEqual(
+ getattr(self.errors, tracker_helpers.BLOCKING),
+ 'Cannot be blocking the same issue')
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+
+ def testParseBlockers_SameIssueDifferentProject(self):
+ """Adding different blocker issue should not modify the errors object."""
+ post_data = {'id': '2', tracker_helpers.BLOCKING: 'testproj:2'}
+
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testprojB',
+ tracker_helpers.BLOCKING)
+ self.assertEqual('testproj:2', parsed_blockers.entered_str)
+ self.assertEqual([100002], parsed_blockers.iids)
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKING))
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+
+ def testParseBlockers_Invalid(self):
+ """Input fields with invalid values should modify the errors object."""
+ post_data = {tracker_helpers.BLOCKING: '2, foo',
+ tracker_helpers.BLOCKED_ON: '3, bar'}
+
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKING)
+ self.assertEqual('2, foo', parsed_blockers.entered_str)
+ self.assertEqual([100002], parsed_blockers.iids)
+ self.assertEqual(
+ getattr(self.errors, tracker_helpers.BLOCKING), 'Invalid issue ID foo')
+ self.assertIsNone(getattr(self.errors, tracker_helpers.BLOCKED_ON))
+
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKED_ON)
+ self.assertEqual('3, bar', parsed_blockers.entered_str)
+ self.assertEqual([100003], parsed_blockers.iids)
+ self.assertEqual(
+ getattr(self.errors, tracker_helpers.BLOCKED_ON),
+ 'Invalid issue ID bar')
+
+ def testParseBlockers_Dangling(self):
+    """A ref to a sanctioned project should be allowed."""
+ post_data = {'id': '2', tracker_helpers.BLOCKING: 'otherproj:2'}
+ real_codesite_projects = settings.recognized_codesite_projects
+ settings.recognized_codesite_projects = ['otherproj']
+ parsed_blockers = tracker_helpers._ParseBlockers(
+ self.cnxn, post_data, self.services, self.errors, 'testproj',
+ tracker_helpers.BLOCKING)
+ self.assertEqual('otherproj:2', parsed_blockers.entered_str)
+ self.assertEqual([('otherproj', 2)], parsed_blockers.dangling_refs)
+ settings.recognized_codesite_projects = real_codesite_projects
+
+ def testMeansOpenInProject(self):
+ config = _MakeConfig()
+
+ # ensure open means open
+ self.assertTrue(tracker_helpers.MeansOpenInProject('New', config))
+ self.assertTrue(tracker_helpers.MeansOpenInProject('new', config))
+
+ # ensure an unrecognized status means open
+ self.assertTrue(tracker_helpers.MeansOpenInProject(
+ '_undefined_status_', config))
+
+ # ensure closed means closed
+ self.assertFalse(tracker_helpers.MeansOpenInProject('Old', config))
+ self.assertFalse(tracker_helpers.MeansOpenInProject('old', config))
+ self.assertFalse(tracker_helpers.MeansOpenInProject(
+ 'StatusThatWeDontUseAnymore', config))
+
+ def testIsNoisy(self):
+ self.assertTrue(tracker_helpers.IsNoisy(778, 320))
+ self.assertFalse(tracker_helpers.IsNoisy(20, 500))
+ self.assertFalse(tracker_helpers.IsNoisy(500, 20))
+ self.assertFalse(tracker_helpers.IsNoisy(1, 1))
+
+ def testClassifyPlusMinusItems(self):
+ add, remove = tracker_helpers._ClassifyPlusMinusItems([])
+ self.assertEquals([], add)
+ self.assertEquals([], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['', ' ', ' \t', '-'])
+ self.assertItemsEqual([], add)
+ self.assertItemsEqual([], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['a', 'b', 'c'])
+ self.assertItemsEqual(['a', 'b', 'c'], add)
+ self.assertItemsEqual([], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['a-a-a', 'b-b', 'c-'])
+ self.assertItemsEqual(['a-a-a', 'b-b', 'c-'], add)
+ self.assertItemsEqual([], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['-a'])
+ self.assertItemsEqual([], add)
+ self.assertItemsEqual(['a'], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['-a', 'b', 'c-c'])
+ self.assertItemsEqual(['b', 'c-c'], add)
+ self.assertItemsEqual(['a'], remove)
+
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['-a', '-b-b', '-c-'])
+ self.assertItemsEqual([], add)
+ self.assertItemsEqual(['a', 'b-b', 'c-'], remove)
+
+ # We dedup, but we don't cancel out items that are both added and removed.
+ add, remove = tracker_helpers._ClassifyPlusMinusItems(
+ ['a', 'a', '-a'])
+ self.assertItemsEqual(['a'], add)
+ self.assertItemsEqual(['a'], remove)
+
+ def testParseIssueRequestAttachments(self):
+ file1 = testing_helpers.Blank(
+ filename='hello.c',
+ value='hello world')
+
+ file2 = testing_helpers.Blank(
+ filename='README',
+ value='Welcome to our project')
+
+ file3 = testing_helpers.Blank(
+ filename='c:\\dir\\subdir\\FILENAME.EXT',
+ value='Abort, Retry, or Fail?')
+
+ # Browsers send this if FILE field was not filled in.
+ file4 = testing_helpers.Blank(
+ filename='',
+ value='')
+
+ attachments = tracker_helpers._ParseIssueRequestAttachments({})
+ self.assertEquals([], attachments)
+
+ attachments = tracker_helpers._ParseIssueRequestAttachments(fake.PostData({
+ 'file1': [file1],
+ }))
+ self.assertEquals(
+ [('hello.c', 'hello world', 'text/plain')],
+ attachments)
+
+ attachments = tracker_helpers._ParseIssueRequestAttachments(fake.PostData({
+ 'file1': [file1],
+ 'file2': [file2],
+ }))
+ self.assertEquals(
+ [('hello.c', 'hello world', 'text/plain'),
+ ('README', 'Welcome to our project', 'text/plain')],
+ attachments)
+
+ attachments = tracker_helpers._ParseIssueRequestAttachments(fake.PostData({
+ 'file3': [file3],
+ }))
+ self.assertEquals(
+ [('FILENAME.EXT', 'Abort, Retry, or Fail?',
+ 'application/octet-stream')],
+ attachments)
+
+ attachments = tracker_helpers._ParseIssueRequestAttachments(fake.PostData({
+ 'file1': [file4], # Does not appear in result
+ 'file3': [file3],
+ 'file4': [file4], # Does not appear in result
+ }))
+ self.assertEquals(
+ [('FILENAME.EXT', 'Abort, Retry, or Fail?',
+ 'application/octet-stream')],
+ attachments)
+
+ def testParseIssueRequestUsers(self):
+ post_data = {}
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('', parsed_users.owner_username)
+ self.assertEquals(
+ framework_constants.NO_USER_SPECIFIED, parsed_users.owner_id)
+ self.assertEquals([], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'owner': [''],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('', parsed_users.owner_username)
+ self.assertEquals(
+ framework_constants.NO_USER_SPECIFIED, parsed_users.owner_id)
+ self.assertEquals([], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'owner': [' \t'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('', parsed_users.owner_username)
+ self.assertEquals(
+ framework_constants.NO_USER_SPECIFIED, parsed_users.owner_id)
+ self.assertEquals([], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'owner': ['b@example.com'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('b@example.com', parsed_users.owner_username)
+ self.assertEquals(TEST_ID_MAP['b@example.com'], parsed_users.owner_id)
+ self.assertEquals([], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'owner': ['b@example.com'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('b@example.com', parsed_users.owner_username)
+ self.assertEquals(TEST_ID_MAP['b@example.com'], parsed_users.owner_id)
+ self.assertEquals([], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'cc': ['b@example.com'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('', parsed_users.owner_username)
+ self.assertEquals(
+ framework_constants.NO_USER_SPECIFIED, parsed_users.owner_id)
+ self.assertEquals(['b@example.com'], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertEquals([TEST_ID_MAP['b@example.com']], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'cc': ['-b@example.com, c@example.com,,'
+ 'a@example.com,'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('', parsed_users.owner_username)
+ self.assertEquals(
+ framework_constants.NO_USER_SPECIFIED, parsed_users.owner_id)
+ self.assertItemsEqual(['c@example.com', 'a@example.com'],
+ parsed_users.cc_usernames)
+ self.assertEquals(['b@example.com'], parsed_users.cc_usernames_remove)
+ self.assertItemsEqual([TEST_ID_MAP['c@example.com'],
+ TEST_ID_MAP['a@example.com']],
+ parsed_users.cc_ids)
+ self.assertEquals([TEST_ID_MAP['b@example.com']],
+ parsed_users.cc_ids_remove)
+
+ post_data = fake.PostData({
+ 'owner': ['fuhqwhgads@example.com'],
+ 'cc': ['c@example.com, fuhqwhgads@example.com'],
+ })
+ parsed_users = tracker_helpers._ParseIssueRequestUsers(
+ 'fake connection', post_data, self.services)
+ self.assertEquals('fuhqwhgads@example.com', parsed_users.owner_username)
+ gen_uid = framework_helpers.MurmurHash3_x86_32(parsed_users.owner_username)
+ self.assertEquals(gen_uid, parsed_users.owner_id) # autocreated user
+ self.assertItemsEqual(
+ ['c@example.com', 'fuhqwhgads@example.com'], parsed_users.cc_usernames)
+ self.assertEquals([], parsed_users.cc_usernames_remove)
+ self.assertItemsEqual(
+ [TEST_ID_MAP['c@example.com'], gen_uid], parsed_users.cc_ids)
+ self.assertEquals([], parsed_users.cc_ids_remove)
+
+ def testIsValidIssueOwner(self):
+ project = project_pb2.Project()
+ project.owner_ids.extend([1L, 2L])
+ project.committer_ids.extend([3L])
+ project.contributor_ids.extend([4L, 999L])
+
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, framework_constants.NO_USER_SPECIFIED,
+ self.services)
+ self.assertTrue(valid)
+
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 1L,
+ self.services)
+ self.assertTrue(valid)
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 2L,
+ self.services)
+ self.assertTrue(valid)
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 3L,
+ self.services)
+ self.assertTrue(valid)
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 4L,
+ self.services)
+ self.assertTrue(valid)
+
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 7L,
+ self.services)
+ self.assertFalse(valid)
+
+ valid, _ = tracker_helpers.IsValidIssueOwner(
+ 'fake cnxn', project, 999L,
+ self.services)
+ self.assertFalse(valid)
+
+ def testGetAllowedOpenAndClosedRelatedIssues(self):
+ gaoacri = tracker_helpers.GetAllowedOpenAndClosedRelatedIssues
+ opened = {
+ 100001: _Issue('proj', 1, 'summary 1', 'New'),
+ 100002: _Issue('proj', 2, 'summary 2', 'Accepted'),
+ }
+ closed = {
+ 100003: _Issue('proj', 3, 'summary 3', 'Accepted'),
+ 100004: _Issue('proj', 4, 'summary 4', 'Invalid'),
+ }
+ project = project_pb2.Project()
+ project.project_id = 789
+ project.project_name = 'proj'
+ project.state = project_pb2.ProjectState.LIVE
+ mr = testing_helpers.MakeMonorailRequest(project=project)
+ fake_issue_service = testing_helpers.Blank(
+ GetOpenAndClosedIssues=lambda _cnxn, iids: (
+ [opened[iid] for iid in iids if iid in opened],
+ [closed[iid] for iid in iids if iid in closed]))
+ fake_config_service = testing_helpers.Blank(
+ GetProjectConfigs=lambda _cnxn, pids: (
+ {pid: tracker_bizobj.MakeDefaultProjectIssueConfig(pid)
+ for pid in pids}))
+ fake_project_service = testing_helpers.Blank(
+ GetProjects=lambda _, project_ids: {project.project_id: project})
+ services = service_manager.Services(
+ issue=fake_issue_service, config=fake_config_service,
+ project=fake_project_service)
+
+ issue = tracker_pb2.Issue()
+ issue.project_id = 789
+ # No merged into, no blocking, no blocked on.
+ open_dict, closed_dict = gaoacri(services, mr, issue)
+ self.assertEqual({}, open_dict)
+ self.assertEqual({}, closed_dict)
+
+ # An open "merged into"
+ issue.merged_into = 100001
+ open_dict, closed_dict = gaoacri(services, mr, issue)
+ self.assertEqual({100001: opened[100001]}, open_dict)
+ self.assertEqual({}, closed_dict)
+
+ # A closed "merged into"
+ issue.merged_into = 100003
+ open_dict, closed_dict = gaoacri(services, mr, issue)
+ self.assertEqual({}, open_dict)
+ self.assertEqual({100003: closed[100003]}, closed_dict)
+
+ # Some blocking and blocked on
+ issue.blocking_iids.append(100001)
+ issue.blocked_on_iids.append(100004)
+ open_dict, closed_dict = gaoacri(services, mr, issue)
+ self.assertEqual({100001: opened[100001]}, open_dict)
+ self.assertEqual({100003: closed[100003],
+ 100004: closed[100004]}, closed_dict)
+
+ def testMergeCCsAndAddComment(self):
+ target_issue = fake.MakeTestIssue(
+ 789, 10, 'Target issue', 'New', 111L)
+ source_issue = fake.MakeTestIssue(
+ 789, 100, 'Source issue', 'New', 222L)
+ source_issue.cc_ids.append(111L)
+ # Issue without owner
+ source_issue_2 = fake.MakeTestIssue(
+ 789, 101, 'Source issue 2', 'New', 0L)
+
+ project = self.services.project.TestAddProject(
+ 'testproj', owner_ids=[222L], project_id=789)
+ self.services.issue.TestAddIssue(target_issue)
+ self.services.issue.TestAddIssue(source_issue)
+ self.services.issue.TestAddIssue(source_issue_2)
+
+ # We copy this list so that it isn't updated by the test framework
+ initial_issue_comments = (
+ self.services.issue.GetCommentsForIssue(
+ 'fake cnxn', target_issue.issue_id)[:])
+ mr = testing_helpers.MakeMonorailRequest(user_info={'user_id': 111L})
+
+ # Merging source into target should create a comment.
+ self.assertIsNotNone(
+ tracker_helpers.MergeCCsAndAddComment(
+ self.services, mr, source_issue, project, target_issue))
+ updated_issue_comments = self.services.issue.GetCommentsForIssue(
+ 'fake cnxn', target_issue.issue_id)
+ for comment in initial_issue_comments:
+ self.assertIn(comment, updated_issue_comments)
+ self.assertEqual(
+ len(initial_issue_comments) + 1, len(updated_issue_comments))
+
+ # Merging source into target should add source's owner to target's CCs.
+ updated_target_issue = self.services.issue.GetIssueByLocalID(
+ 'fake cnxn', 789, 10)
+ self.assertIn(111L, updated_target_issue.cc_ids)
+ self.assertIn(222L, updated_target_issue.cc_ids)
+
+ # Merging source 2 into target should make a comment, but not update CCs.
+ self.assertIsNotNone(
+ tracker_helpers.MergeCCsAndAddComment(
+ self.services, mr, source_issue_2, project, updated_target_issue))
+ updated_target_issue = self.services.issue.GetIssueByLocalID(
+ 'fake cnxn', 789, 10)
+ self.assertNotIn(0L, updated_target_issue.cc_ids)
+
+ def testMergeCCsAndAddCommentRestrictedSourceIssue(self):
+ target_issue = fake.MakeTestIssue(
+ 789, 10, 'Target issue', 'New', 222L)
+ target_issue_2 = fake.MakeTestIssue(
+ 789, 11, 'Target issue 2', 'New', 222L)
+ source_issue = fake.MakeTestIssue(
+ 789, 100, 'Source issue', 'New', 111L)
+ source_issue.cc_ids.append(111L)
+ source_issue.labels.append('Restrict-View-Commit')
+ target_issue_2.labels.append('Restrict-View-Commit')
+
+ project = self.services.project.TestAddProject(
+ 'testproj', owner_ids=[222L], project_id=789)
+ self.services.issue.TestAddIssue(source_issue)
+ self.services.issue.TestAddIssue(target_issue)
+ self.services.issue.TestAddIssue(target_issue_2)
+
+ # We copy this list so that it isn't updated by the test framework
+ initial_issue_comments = self.services.issue.GetCommentsForIssue(
+ 'fake cnxn', target_issue.issue_id)[:]
+ mr = testing_helpers.MakeMonorailRequest(user_info={'user_id': 111L})
+ self.assertIsNotNone(
+ tracker_helpers.MergeCCsAndAddComment(
+ self.services, mr, source_issue, project, target_issue))
+
+ # When the source is restricted, we update the target comments...
+ updated_issue_comments = self.services.issue.GetCommentsForIssue(
+ 'fake cnxn', target_issue.issue_id)
+ for comment in initial_issue_comments:
+ self.assertIn(comment, updated_issue_comments)
+ self.assertEqual(
+ len(initial_issue_comments) + 1, len(updated_issue_comments))
+ # ...but not the target CCs...
+ updated_target_issue = self.services.issue.GetIssueByLocalID(
+ 'fake cnxn', 789, 10)
+ self.assertNotIn(111L, updated_target_issue.cc_ids)
+ # ...unless both issues have the same restrictions.
+ self.assertIsNotNone(
+ tracker_helpers.MergeCCsAndAddComment(
+ self.services, mr, source_issue, project, target_issue_2))
+ updated_target_issue_2 = self.services.issue.GetIssueByLocalID(
+ 'fake cnxn', 789, 11)
+ self.assertIn(111L, updated_target_issue_2.cc_ids)
+
+ def testFormatIssueListURLNoCurrentState(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ path = '/p/proj/issues/detail?id=123'
+ mr = testing_helpers.MakeMonorailRequest(
+ path=path, headers={'Host': 'code.google.com'})
+ mr.ComputeColSpec(config)
+
+ absolute_base_url = 'http://code.google.com'
+
+ url_1 = tracker_helpers.FormatIssueListURL(mr, config)
+ self.assertEquals(
+ '%s/p/proj/issues/list?%s' % (
+ absolute_base_url, self.default_colspec_param),
+ url_1)
+
+ url_2 = tracker_helpers.FormatIssueListURL(
+ mr, config, foo=123)
+ self.assertEquals(
+ '%s/p/proj/issues/list?%s&foo=123' % (
+ absolute_base_url, self.default_colspec_param),
+ url_2)
+
+ url_3 = tracker_helpers.FormatIssueListURL(
+ mr, config, foo=123, bar='abc')
+ self.assertEquals(
+ '%s/p/proj/issues/list?bar=abc&%s&foo=123' % (
+ absolute_base_url, self.default_colspec_param),
+ url_3)
+
+ url_4 = tracker_helpers.FormatIssueListURL(
+ mr, config, baz='escaped+encoded&and100% "safe"')
+ self.assertEquals(
+ '%s/p/proj/issues/list?'
+ 'baz=escaped%%2Bencoded%%26and100%%25%%20%%22safe%%22&%s' % (
+ absolute_base_url, self.default_colspec_param),
+ url_4)
+
+ def testFormatIssueListURLKeepCurrentState(self):
+ config = tracker_pb2.ProjectIssueConfig()
+ path = '/p/proj/issues/detail?id=123&sort=aa&colspec=a b c&groupby=d'
+ mr = testing_helpers.MakeMonorailRequest(
+ path=path, headers={'Host': 'localhost:8080'})
+ mr.ComputeColSpec(config)
+
+ absolute_base_url = 'http://localhost:8080'
+
+ url_1 = tracker_helpers.FormatIssueListURL(mr, config)
+ self.assertEquals(
+ '%s/p/proj/issues/list?colspec=a%%20b%%20c'
+ '&groupby=d&sort=aa' % absolute_base_url,
+ url_1)
+
+ url_2 = tracker_helpers.FormatIssueListURL(
+ mr, config, foo=123)
+ self.assertEquals(
+ '%s/p/proj/issues/list?'
+ 'colspec=a%%20b%%20c&foo=123&groupby=d&sort=aa' % absolute_base_url,
+ url_2)
+
+ url_3 = tracker_helpers.FormatIssueListURL(
+ mr, config, colspec='X Y Z')
+ self.assertEquals(
+ '%s/p/proj/issues/list?colspec=a%%20b%%20c'
+ '&groupby=d&sort=aa' % absolute_base_url,
+ url_3)
+
+ def testFormatRelativeIssueURL(self):
+ self.assertEquals(
+ '/p/proj/issues/attachment',
+ tracker_helpers.FormatRelativeIssueURL(
+ 'proj', urls.ISSUE_ATTACHMENT))
+
+ self.assertEquals(
+ '/p/proj/issues/detail?id=123',
+ tracker_helpers.FormatRelativeIssueURL(
+ 'proj', urls.ISSUE_DETAIL, id=123))
+
+
+class MakeViewsForUsersInIssuesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.issue1 = _Issue('proj', 1, 'summary 1', 'New')
+ self.issue1.owner_id = 1001
+ self.issue1.reporter_id = 1002
+
+ self.issue2 = _Issue('proj', 2, 'summary 2', 'New')
+ self.issue2.owner_id = 2001
+ self.issue2.reporter_id = 2002
+ self.issue2.cc_ids.extend([1, 1001, 1002, 1003])
+
+ self.issue3 = _Issue('proj', 3, 'summary 3', 'New')
+ self.issue3.owner_id = 1001
+ self.issue3.reporter_id = 3002
+
+ self.user = fake.UserService()
+ for user_id in [1, 1001, 1002, 1003, 2001, 2002, 3002]:
+ self.user.TestAddUser(
+ 'test%d' % user_id, user_id, add_user=True)
+
+ def testMakeViewsForUsersInIssues(self):
+ issue_list = [self.issue1, self.issue2, self.issue3]
+ users_by_id = tracker_helpers.MakeViewsForUsersInIssues(
+ 'fake cnxn', issue_list, self.user)
+ self.assertItemsEqual([1, 1001, 1002, 1003, 2001, 2002, 3002],
+ users_by_id.keys())
+ for user_id in [1001, 1002, 1003, 2001]:
+ self.assertEqual(users_by_id[user_id].user_id, user_id)
+
+ def testMakeViewsForUsersInIssuesOmittingSome(self):
+ issue_list = [self.issue1, self.issue2, self.issue3]
+ users_by_id = tracker_helpers.MakeViewsForUsersInIssues(
+ 'fake cnxn', issue_list, self.user, omit_ids=[1001, 1003])
+ self.assertItemsEqual([1, 1002, 2001, 2002, 3002], users_by_id.keys())
+ for user_id in [1002, 2001, 2002, 3002]:
+ self.assertEqual(users_by_id[user_id].user_id, user_id)
+
+ def testMakeViewsForUsersInIssuesEmpty(self):
+ issue_list = []
+ users_by_id = tracker_helpers.MakeViewsForUsersInIssues(
+ 'fake cnxn', issue_list, self.user)
+ self.assertItemsEqual([], users_by_id.keys())
+
+
+class GetAllIssueProjectsTest(unittest.TestCase):
+ issue_x_1 = tracker_pb2.Issue()
+ issue_x_1.project_id = 789
+ issue_x_1.local_id = 1
+ issue_x_1.reporter_id = 1002
+
+ issue_x_2 = tracker_pb2.Issue()
+ issue_x_2.project_id = 789
+ issue_x_2.local_id = 2
+ issue_x_2.reporter_id = 2002
+
+ issue_y_1 = tracker_pb2.Issue()
+ issue_y_1.project_id = 678
+ issue_y_1.local_id = 1
+ issue_y_1.reporter_id = 2002
+
+ def setUp(self):
+ self.project_service = fake.ProjectService()
+ self.project_service.TestAddProject('proj-x', project_id=789)
+ self.project_service.TestAddProject('proj-y', project_id=678)
+ self.cnxn = 'fake connection'
+
+ def testGetAllIssueProjects_Empty(self):
+ self.assertEqual(
+ {}, tracker_helpers.GetAllIssueProjects(
+ self.cnxn, [], self.project_service))
+
+ def testGetAllIssueProjects_Normal(self):
+ self.assertEqual(
+ {789: self.project_service.GetProjectByName(self.cnxn, 'proj-x')},
+ tracker_helpers.GetAllIssueProjects(
+ self.cnxn, [self.issue_x_1, self.issue_x_2], self.project_service))
+ self.assertEqual(
+ {789: self.project_service.GetProjectByName(self.cnxn, 'proj-x'),
+ 678: self.project_service.GetProjectByName(self.cnxn, 'proj-y')},
+ tracker_helpers.GetAllIssueProjects(
+ self.cnxn, [self.issue_x_1, self.issue_x_2, self.issue_y_1],
+ self.project_service))
+
+
+class FilterOutNonViewableIssuesTest(unittest.TestCase):
+ owner_id = 111L
+ committer_id = 222L
+ nonmember_1_id = 1002L
+ nonmember_2_id = 2002L
+ nonmember_3_id = 3002L
+
+ issue1 = tracker_pb2.Issue()
+ issue1.project_name = 'proj'
+ issue1.project_id = 789
+ issue1.local_id = 1
+ issue1.reporter_id = nonmember_1_id
+
+ issue2 = tracker_pb2.Issue()
+ issue2.project_name = 'proj'
+ issue2.project_id = 789
+ issue2.local_id = 2
+ issue2.reporter_id = nonmember_2_id
+ issue2.labels.extend(['foo', 'bar'])
+
+ issue3 = tracker_pb2.Issue()
+ issue3.project_name = 'proj'
+ issue3.project_id = 789
+ issue3.local_id = 3
+ issue3.reporter_id = nonmember_3_id
+ issue3.labels.extend(['restrict-view-commit'])
+
+ issue4 = tracker_pb2.Issue()
+ issue4.project_name = 'proj'
+ issue4.project_id = 789
+ issue4.local_id = 4
+ issue4.reporter_id = nonmember_3_id
+ issue4.labels.extend(['Foo', 'Restrict-View-Commit'])
+
+ def setUp(self):
+ self.user = user_pb2.User()
+ self.project = self.MakeProject(project_pb2.ProjectState.LIVE)
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(
+ self.project.project_id)
+ self.project_dict = {self.project.project_id: self.project}
+ self.config_dict = {self.config.project_id: self.config}
+
+ def MakeProject(self, state):
+ p = project_pb2.Project(
+ project_id=789, project_name='proj', state=state,
+ owner_ids=[self.owner_id], committer_ids=[self.committer_id])
+ return p
+
+ def testFilterOutNonViewableIssues_Member(self):
+ # perms will be permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ filtered_issues = tracker_helpers.FilterOutNonViewableIssues(
+ {self.committer_id}, self.user, self.project_dict,
+ self.config_dict,
+ [self.issue1, self.issue2, self.issue3, self.issue4])
+ self.assertListEqual([1, 2, 3, 4],
+ [issue.local_id for issue in filtered_issues])
+
+ def testFilterOutNonViewableIssues_Owner(self):
+ # perms will be permissions.OWNER_ACTIVE_PERMISSIONSET
+ filtered_issues = tracker_helpers.FilterOutNonViewableIssues(
+ {self.owner_id}, self.user, self.project_dict, self.config_dict,
+ [self.issue1, self.issue2, self.issue3, self.issue4])
+ self.assertListEqual([1, 2, 3, 4],
+ [issue.local_id for issue in filtered_issues])
+
+ def testFilterOutNonViewableIssues_Empty(self):
+ # perms will be permissions.COMMITTER_ACTIVE_PERMISSIONSET
+ filtered_issues = tracker_helpers.FilterOutNonViewableIssues(
+ {self.committer_id}, self.user, self.project_dict,
+ self.config_dict, [])
+ self.assertListEqual([], filtered_issues)
+
+ def testFilterOutNonViewableIssues_NonMember(self):
+ # perms will be permissions.READ_ONLY_PERMISSIONSET
+ filtered_issues = tracker_helpers.FilterOutNonViewableIssues(
+ {self.nonmember_1_id}, self.user, self.project_dict,
+ self.config_dict, [self.issue1, self.issue2, self.issue3, self.issue4])
+ self.assertListEqual([1, 2],
+ [issue.local_id for issue in filtered_issues])
+
+ def testFilterOutNonViewableIssues_Reporter(self):
+ # perms will be permissions.READ_ONLY_PERMISSIONSET
+ filtered_issues = tracker_helpers.FilterOutNonViewableIssues(
+ {self.nonmember_3_id}, self.user, self.project_dict,
+ self.config_dict, [self.issue1, self.issue2, self.issue3, self.issue4])
+ self.assertListEqual([1, 2, 3, 4],
+ [issue.local_id for issue in filtered_issues])
+
+
+class IssueMergeTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cnxn = 'fake cnxn'
+ self.services = service_manager.Services(
+ config=fake.ConfigService(),
+ issue=fake.IssueService(),
+ user=fake.UserService(),
+ project=fake.ProjectService(),
+ issue_star=fake.IssueStarService(),
+ spam=fake.SpamService()
+ )
+ self.project = self.services.project.TestAddProject('proj', project_id=987)
+ self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(
+ self.project.project_id)
+ self.project_dict = {self.project.project_id: self.project}
+ self.config_dict = {self.config.project_id: self.config}
+
+ def testParseMergeFields_NotSpecified(self):
+ issue = fake.MakeTestIssue(987, 1, 'summary', 'New', 111L)
+ errors = template_helpers.EZTError()
+ post_data = {}
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, None, 'proj', post_data, 'New', self.config, issue, errors)
+ self.assertEqual('', text)
+ self.assertEqual(None, merge_into_issue)
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, None, 'proj', post_data, 'Duplicate', self.config, issue,
+ errors)
+ self.assertEqual('', text)
+ self.assertTrue(errors.merge_into_id)
+ self.assertEqual(None, merge_into_issue)
+
+ def testParseMergeFields_WrongStatus(self):
+ issue = fake.MakeTestIssue(987, 1, 'summary', 'New', 111L)
+ errors = template_helpers.EZTError()
+ post_data = {'merge_into': '12'}
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, None, 'proj', post_data, 'New', self.config, issue, errors)
+ self.assertEqual('', text)
+ self.assertEqual(None, merge_into_issue)
+
+ def testParseMergeFields_NoSuchIssue(self):
+ issue = fake.MakeTestIssue(987, 1, 'summary', 'New', 111L)
+ issue.merged_into = 12
+ errors = template_helpers.EZTError()
+ post_data = {'merge_into': '12'}
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, self.services, 'proj', post_data, 'Duplicate',
+ self.config, issue, errors)
+ self.assertEqual('12', text)
+ self.assertEqual(None, merge_into_issue)
+
+ def testParseMergeFields_DontSelfMerge(self):
+ issue = fake.MakeTestIssue(987, 1, 'summary', 'New', 111L)
+ errors = template_helpers.EZTError()
+ post_data = {'merge_into': '1'}
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, self.services, 'proj', post_data, 'Duplicate', self.config,
+ issue, errors)
+ self.assertEqual('1', text)
+ self.assertEqual(None, merge_into_issue)
+ self.assertEqual('Cannot merge issue into itself', errors.merge_into_id)
+
+ def testParseMergeFields_NewIssueToMerge(self):
+ merged_local_id = self.services.issue.CreateIssue(
+ self.cnxn, self.services,
+ self.project.project_id, 'unused_summary', 'unused_status', 111L,
+ [], [], [], [], 111L, 'unused_marked_description')
+ mergee_local_id = self.services.issue.CreateIssue(
+ self.cnxn, self.services,
+ self.project.project_id, 'unused_summary', 'unused_status', 111L,
+ [], [], [], [], 111L, 'unused_marked_description')
+ merged_issue = self.services.issue.GetIssueByLocalID(
+ self.cnxn, self.project.project_id, merged_local_id)
+ mergee_issue = self.services.issue.GetIssueByLocalID(
+ self.cnxn, self.project.project_id, mergee_local_id)
+
+ errors = template_helpers.EZTError()
+ post_data = {'merge_into': str(mergee_issue.local_id)}
+
+ text, merge_into_issue = tracker_helpers.ParseMergeFields(
+ self.cnxn, self.services, 'proj', post_data, 'Duplicate', self.config,
+ merged_issue, errors)
+ self.assertEqual(str(mergee_issue.local_id), text)
+ self.assertEqual(mergee_issue, merge_into_issue)
+
+ def testIsMergeAllowed(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ issue = fake.MakeTestIssue(987, 1, 'summary', 'New', 111L)
+ issue.project_name = self.project.project_name
+
+ for (perm_set, expected_merge_allowed) in (
+ (permissions.READ_ONLY_PERMISSIONSET, False),
+ (permissions.COMMITTER_INACTIVE_PERMISSIONSET, False),
+ (permissions.COMMITTER_ACTIVE_PERMISSIONSET, True),
+ (permissions.OWNER_ACTIVE_PERMISSIONSET, True)):
+ mr.perms = perm_set
+ merge_allowed = tracker_helpers.IsMergeAllowed(issue, mr, self.services)
+ self.assertEquals(expected_merge_allowed, merge_allowed)
+
+ def testMergeIssueStars(self):
+ mr = testing_helpers.MakeMonorailRequest()
+ mr.project_name = self.project.project_name
+ mr.project = self.project
+
+ config = self.services.config.GetProjectConfig(
+ self.cnxn, self.project.project_id)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 1, 1, True)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 1, 2, True)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 1, 3, True)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 2, 3, True)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 2, 4, True)
+ self.services.issue_star.SetStar(
+ self.cnxn, self.services, config, 2, 5, True)
+
+ new_starrers = tracker_helpers.GetNewIssueStarrers(
+ self.cnxn, self.services, 1, 2)
+ self.assertItemsEqual(new_starrers, [1, 2])
+ tracker_helpers.AddIssueStarrers(
+ self.cnxn, self.services, mr, 2, self.project, new_starrers)
+ issue_2_starrers = self.services.issue_star.LookupItemStarrers(
+ self.cnxn, 2)
+ # XXX(jrobbins): these tests incorrectly mix local IDs with IIDs.
+ self.assertItemsEqual([1, 2, 3, 4, 5], issue_2_starrers)
+
+
if __name__ == '__main__':
  # Allow running this test module directly.
  unittest.main()
diff --git a/appengine/monorail/tracker/test/tracker_views_test.py b/appengine/monorail/tracker/test/tracker_views_test.py
new file mode 100644
index 0000000..124aa7f
--- /dev/null
+++ b/appengine/monorail/tracker/test/tracker_views_test.py
@@ -0,0 +1,444 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Unittest for issue tracker views."""
+
+import unittest
+
+import mox
+
+from google.appengine.api import app_identity
+from third_party import ezt
+
+from framework import gcs_helpers
+from framework import urls
+from proto import project_pb2
+from proto import tracker_pb2
+from testing import testing_helpers
+from tracker import tracker_views
+
+
def _Issue(project_name, local_id, summary, status):
  """Return an Issue PB with the given fields and a synthesized issue_id."""
  issue = tracker_pb2.Issue()
  issue.project_name, issue.local_id = project_name, local_id
  # Fake global issue IDs are offset so they never collide with local IDs.
  issue.issue_id = 100000 + local_id
  issue.summary, issue.status = summary, status
  return issue
+
+
def _MakeConfig():
  """Return a config with one open ('New') and one closed ('Old') status."""
  config = tracker_pb2.ProjectIssueConfig()
  for status, is_open in [('New', True), ('Old', False)]:
    config.well_known_statuses.append(tracker_pb2.StatusDef(
        status=status, means_open=is_open))
  return config
+
+
class IssueViewTest(unittest.TestCase):
  """Tests for the IssueView presentation object in tracker_views."""

  def setUp(self):
    # Four issues in project 'proj'.  issue1 has both explicit and
    # derived labels; issue2 has blocking relationships; issue3/issue4
    # exist mainly to be referenced by issue2.
    self.issue1 = _Issue('proj', 1, 'not too long summary', 'New')
    self.issue2 = _Issue('proj', 2, 'sum 2', '')
    self.issue3 = _Issue('proj', 3, 'sum 3', '')
    self.issue4 = _Issue('proj', 4, 'sum 4', '')

    self.issue1.reporter_id = 1002
    self.issue1.owner_id = 2002
    self.issue1.labels.extend(['A', 'B'])
    self.issue1.derived_labels.extend(['C', 'D'])

    self.issue2.reporter_id = 2002
    self.issue2.labels.extend(['foo', 'bar'])
    # issue2 is blocked on issues 1 and 3, and blocks issues 1 and 4.
    self.issue2.blocked_on_iids.extend(
        [self.issue1.issue_id, self.issue3.issue_id])
    self.issue2.blocking_iids.extend(
        [self.issue1.issue_id, self.issue4.issue_id])
    # A dangling ref points at an issue in the old codesite tracker.
    dref = tracker_pb2.DanglingIssueRef()
    dref.project = 'codesite'
    dref.issue_id = 5001
    self.issue2.dangling_blocking_refs.append(dref)

    self.issue3.reporter_id = 3002
    self.issue3.labels.extend(['Hot'])

    self.issue4.reporter_id = 3002
    self.issue4.labels.extend(['Foo', 'Bar'])

    # An issue with both explicit and derived restriction labels.
    self.restricted = _Issue('proj', 7, 'summary 7', '')
    self.restricted.labels.extend([
        'Restrict-View-Commit', 'Restrict-View-MyCustomPerm'])
    self.restricted.derived_labels.extend([
        'Restrict-AddIssueComment-Commit', 'Restrict-EditIssue-Commit',
        'Restrict-Action-NeededPerm'])

    # Maps user IDs to display strings used by the views.
    self.users_by_id = {
        0: 'user 0',
        1002: 'user 1002',
        2002: 'user 2002',
        3002: 'user 3002',
        4002: 'user 4002',
        }

  def CheckSimpleIssueView(self, config):
    # Helper: build a view of issue1, verify the fields shared by both
    # simple-view tests, and return the view for further checks.
    view1 = tracker_views.IssueView(
        self.issue1, self.users_by_id, config)
    self.assertEqual('not too long summary', view1.summary)
    self.assertEqual('New', view1.status.name)
    self.assertEqual('user 2002', view1.owner)
    self.assertEqual('A', view1.labels[0].name)
    self.assertEqual('B', view1.labels[1].name)
    self.assertEqual('C', view1.derived_labels[0].name)
    self.assertEqual('D', view1.derived_labels[1].name)
    self.assertEqual([], view1.blocked_on)
    self.assertEqual([], view1.blocking)
    detail_url = '/p/%s%s?id=%d' % (
        self.issue1.project_name, urls.ISSUE_DETAIL,
        self.issue1.local_id)
    self.assertEqual(detail_url, view1.detail_relative_url)
    return view1

  def testSimpleIssueView(self):
    config = tracker_pb2.ProjectIssueConfig()
    view1 = self.CheckSimpleIssueView(config)
    self.assertEqual('', view1.status.docstring)

    # Once the status is defined in the config, its docstring appears.
    config.well_known_statuses.append(tracker_pb2.StatusDef(
        status='New', status_docstring='Issue has not had review yet'))
    view1 = self.CheckSimpleIssueView(config)
    self.assertEqual('Issue has not had review yet',
                     view1.status.docstring)
    self.assertIsNone(view1.restrictions.has_restrictions)
    self.assertEqual('', view1.restrictions.view)
    self.assertEqual('', view1.restrictions.add_comment)
    self.assertEqual('', view1.restrictions.edit)

  def testIsOpen(self):
    # is_open is driven by the means_open flag of the well-known status.
    config = _MakeConfig()
    view1 = tracker_views.IssueView(
        self.issue1, self.users_by_id, config)
    self.assertEqual(ezt.boolean(True), view1.is_open)

    self.issue1.status = 'Old'
    view1 = tracker_views.IssueView(
        self.issue1, self.users_by_id, config)
    self.assertEqual(ezt.boolean(False), view1.is_open)

  def testIssueViewWithBlocking(self):
    # Treat issues 3 and 4 as visible to the current user.
    view2 = tracker_views.IssueView(
        self.issue2, self.users_by_id, _MakeConfig(),
        open_related={self.issue1.issue_id: self.issue1,
                      self.issue3.issue_id: self.issue3},
        closed_related={self.issue4.issue_id: self.issue4})
    self.assertEqual(['not too long summary', 'sum 3'],
                     [ref.summary for ref in view2.blocked_on])
    self.assertEqual(['not too long summary', 'sum 4',
                      'Issue 5001 in codesite.'],
                     [ref.summary for ref in view2.blocking])

    # Now, treat issues 3 and 4 as not visible to the current user.
    view2 = tracker_views.IssueView(
        self.issue2, self.users_by_id, _MakeConfig(),
        open_related={self.issue1.issue_id: self.issue1}, closed_related={})
    self.assertEqual(['not too long summary'],
                     [ref.summary for ref in view2.blocked_on])
    self.assertEqual(['not too long summary', 'Issue 5001 in codesite.'],
                     [ref.summary for ref in view2.blocking])

    # Treat nothing as visible to the current user. Can still see dangling ref.
    view2 = tracker_views.IssueView(
        self.issue2, self.users_by_id, _MakeConfig(),
        open_related={}, closed_related={})
    self.assertEqual([], view2.blocked_on)
    self.assertEqual(['Issue 5001 in codesite.'],
                     [ref.summary for ref in view2.blocking])

  def testIssueViewWithRestrictions(self):
    # Restriction labels are parsed into the structured restrictions view
    # and still shown among the ordinary labels.
    view = tracker_views.IssueView(
        self.restricted, self.users_by_id, _MakeConfig())
    self.assertTrue(view.restrictions.has_restrictions)
    self.assertEqual('Commit and MyCustomPerm', view.restrictions.view)
    self.assertEqual('Commit', view.restrictions.add_comment)
    self.assertEqual('Commit', view.restrictions.edit)
    self.assertEqual(['Restrict-Action-NeededPerm'], view.restrictions.other)
    self.assertEqual('Restrict-View-Commit', view.labels[0].name)
    self.assertTrue(view.labels[0].is_restrict)
+
+
class RestrictionsViewTest(unittest.TestCase):
  # Placeholder suite: RestrictionsView has no direct coverage yet.
  pass  # TODO(jrobbins): write tests
+
+
class IssueRefViewTest(unittest.TestCase):
  """Tests for tracker_views.IssueRefView.

  Fix: use assertEqual rather than the deprecated assertEquals alias.
  """

  # Three issues in project 'foo' and one in 'bar' for cross-project refs.
  issue1 = testing_helpers.Blank(
      local_id=1,
      project_name='foo',
      summary='blue screen')
  issue2 = testing_helpers.Blank(
      local_id=2,
      project_name='foo',
      summary='hissing noise')
  issue3 = testing_helpers.Blank(
      local_id=3,
      project_name='foo',
      summary='sinking feeling')
  issue4 = testing_helpers.Blank(
      local_id=4,
      project_name='bar',
      summary='aliens among us')

  def testNormalCase(self):
    open_list = {1: self.issue1,
                 2: self.issue2}
    closed_list = {3: self.issue3}

    # A known open issue in the same project gets a short relative URL.
    ref_iid = 1
    irv = tracker_views.IssueRefView('foo', ref_iid, open_list, closed_list)
    self.assertEqual(irv.visible, ezt.boolean(True))
    self.assertEqual(irv.is_open, ezt.boolean(True))
    self.assertEqual(irv.url, 'detail?id=1')
    self.assertEqual(irv.display_name, 'issue 1')
    self.assertEqual(irv.summary, 'blue screen')

    # A known closed issue is visible but marked not open.
    ref_iid = 3
    irv = tracker_views.IssueRefView('foo', ref_iid, open_list, closed_list)
    self.assertEqual(irv.visible, ezt.boolean(True))
    self.assertEqual(irv.is_open, ezt.boolean(False))
    self.assertEqual(irv.url, 'detail?id=3')
    self.assertEqual(irv.display_name, 'issue 3')
    self.assertEqual(irv.summary, 'sinking feeling')

  def testMissingIssueShouldNotBeVisible(self):
    open_list = {1: self.issue1,
                 2: self.issue2}
    closed_list = {3: self.issue3}

    # An IID not in either dict means the user may not see that issue.
    ref_iid = 99
    irv = tracker_views.IssueRefView('foo', ref_iid, open_list, closed_list)
    self.assertEqual(irv.visible, ezt.boolean(False))

  def testCrossProjectReference(self):
    open_list = {1: self.issue1,
                 2: self.issue2}
    closed_list = {3: self.issue3,
                   4: self.issue4}

    # An issue in another project gets a fully qualified URL and name.
    ref_iid = 4
    irv = tracker_views.IssueRefView('foo', ref_iid, open_list, closed_list)
    self.assertEqual(irv.visible, ezt.boolean(True))
    self.assertEqual(irv.is_open, ezt.boolean(False))
    self.assertEqual(
        irv.url, '/p/bar%s?id=4' % urls.ISSUE_DETAIL)
    self.assertEqual(irv.display_name, 'issue bar:4')
    self.assertEqual(irv.summary, 'aliens among us')
+
+
class DanglingIssueRefViewTest(unittest.TestCase):
  # Placeholder suite: DanglingIssueRefView has no direct coverage yet.
  pass  # TODO(jrobbins): write tests
+
+
class AttachmentViewTest(unittest.TestCase):
  """Tests for tracker_views.AttachmentView."""

  def MakeViewAndVerifyFields(
      self, size, name, mimetype, expected_size_str, expect_viewable):
    """Build an AttachmentView and check its derived URL and size fields."""
    attach_pb = tracker_pb2.Attachment()
    attach_pb.filesize = size
    attach_pb.attachment_id = 12345
    attach_pb.filename = name
    attach_pb.mimetype = mimetype

    view = tracker_views.AttachmentView(attach_pb, 'proj')
    self.assertEqual('/images/paperclip.png', view.iconurl)
    self.assertEqual(expected_size_str, view.filesizestr)
    download_url = 'attachment?aid=12345'
    self.assertEqual(download_url, view.downloadurl)
    if expect_viewable:
      # Viewable attachments also get inline and thumbnail URLs.
      self.assertEqual(download_url + '&inline=1', view.url)
      self.assertEqual(download_url + '&inline=1&thumb=1', view.thumbnail_url)
    else:
      self.assertEqual(None, view.url)
      self.assertEqual(None, view.thumbnail_url)

  def testNonImage(self):
    self.MakeViewAndVerifyFields(
        123, 'file.ext', 'funky/bits', '123 bytes', False)

  def testViewableImage(self):
    self.MakeViewAndVerifyFields(
        123, 'logo.gif', 'image/gif', '123 bytes', True)

    self.MakeViewAndVerifyFields(
        123, 'screenshot.jpg', 'image/jpeg', '123 bytes', True)

  def testHugeImage(self):
    # Images over the inline-size limit get no viewing URLs.
    self.MakeViewAndVerifyFields(
        18 * 1024 * 1024, 'panorama.png', 'image/jpeg', '18.0 MB', False)

  def testViewableText(self):
    attach_pb = tracker_pb2.Attachment()
    attach_pb.filesize = 1234
    attach_pb.attachment_id = 12345
    attach_pb.filename = 'hello.c'
    attach_pb.mimetype = 'text/plain'
    view = tracker_views.AttachmentView(attach_pb, 'proj')

    # Text attachments are shown through the attachmentText viewer page.
    self.assertEqual('/p/proj/issues/attachmentText?aid=12345', view.url)
+
+
class LogoViewTest(unittest.TestCase):
  """Tests for tracker_views.LogoView.

  Fix: use assertEqual rather than the deprecated assertEquals alias.
  """

  def setUp(self):
    self.mox = mox.Mox()

  def tearDown(self):
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testProjectWithLogo(self):
    bucket_name = 'testbucket'
    logo_gcs_id = '123'
    logo_file_name = 'logo.png'
    project_pb = project_pb2.MakeProject(
        'testProject', logo_gcs_id=logo_gcs_id, logo_file_name=logo_file_name)
    object_path = '/' + bucket_name + logo_gcs_id

    # LogoView should request signed GCS URLs for both the thumbnail
    # and the full-size logo object.
    self.mox.StubOutWithMock(app_identity, 'get_default_gcs_bucket_name')
    app_identity.get_default_gcs_bucket_name().AndReturn(bucket_name)

    self.mox.StubOutWithMock(gcs_helpers, 'SignUrl')
    gcs_helpers.SignUrl(object_path + '-thumbnail').AndReturn('signed/url')
    gcs_helpers.SignUrl(object_path).AndReturn('signed/url')

    self.mox.ReplayAll()

    view = tracker_views.LogoView(project_pb)
    self.mox.VerifyAll()
    self.assertEqual('logo.png', view.filename)
    self.assertEqual('image/png', view.mimetype)
    self.assertEqual('signed/url', view.thumbnail_url)
    self.assertEqual('signed/url&response-content-displacement=attachment%3B'
                     '+filename%3Dlogo.png', view.viewurl)

  def testProjectWithNoLogo(self):
    project_pb = project_pb2.MakeProject('testProject')
    view = tracker_views.LogoView(project_pb)
    # Without a stored logo there is nothing to link to.
    self.assertEqual('', view.thumbnail_url)
    self.assertEqual('', view.viewurl)
+
+
class IsViewableImageTest(unittest.TestCase):
  """Tests for tracker_views.IsViewableImage."""

  def testIsViewableImage(self):
    # Supported image types under the size limit are viewable, even when
    # the mimetype carries a charset suffix.
    viewable = [
        ('image/gif', 123),
        ('image/gif; charset=binary', 123),
        ('image/png', 123),
        ('image/png; charset=binary', 123),
        ('image/x-png', 123),
        ('image/jpeg', 123),
        ('image/jpeg; charset=binary', 123),
        ('image/jpeg', 3 * 1024 * 1024),
        ]
    for mimetype, filesize in viewable:
      self.assertTrue(tracker_views.IsViewableImage(mimetype, filesize))

    # Unknown types and over-limit files are not viewable.
    not_viewable = [
        ('junk/bits', 123),
        ('junk/bits; charset=binary', 123),
        ('image/jpeg', 13 * 1024 * 1024),
        ]
    for mimetype, filesize in not_viewable:
      self.assertFalse(tracker_views.IsViewableImage(mimetype, filesize))
+
+
class IsViewableTextTest(unittest.TestCase):
  # Placeholder suite: IsViewableText has no coverage yet.
  pass  # TODO(jrobbins): write tests


class AmendmentViewTest(unittest.TestCase):
  # Placeholder suite: AmendmentView has no coverage yet.
  pass  # TODO(jrobbins): write tests


class ComponentDefViewTest(unittest.TestCase):
  # Placeholder suite: ComponentDefView has no coverage yet.
  pass  # TODO(jrobbins): write tests


class ComponentValueTest(unittest.TestCase):
  # Placeholder suite: ComponentValue has no coverage yet.
  pass  # TODO(jrobbins): write tests


class FieldValueViewTest(unittest.TestCase):
  # Placeholder suite: FieldValueView has no coverage yet.
  pass  # TODO(jrobbins): write tests

class FieldValueViewTest_Applicability(unittest.TestCase):
  # Placeholder suite: field applicability logic has no coverage yet.
  # NOTE(review): PEP 8 wants two blank lines before this class.
  pass  # TODO(jrobbins): write tests


class MakeFieldValueViewTest(unittest.TestCase):
  # Placeholder suite: MakeFieldValueView has no coverage yet.
  pass  # TODO(jrobbins): write tests


class FindFieldValuesTest(unittest.TestCase):
  # Placeholder suite: FindFieldValues has no coverage yet.
  pass  # TODO(jrobbins): write tests


class MakeBounceFieldValueViewsTest(unittest.TestCase):
  # Placeholder suite: MakeBounceFieldValueViews has no coverage yet.
  pass  # TODO(jrobbins): write tests
+
+
class ConvertLabelsToFieldValuesTest(unittest.TestCase):
  """Tests for tracker_views._ConvertLabelsToFieldValues."""

  def testConvertLabelsToFieldValues_NoLabels(self):
    self.assertEqual(
        [], tracker_views._ConvertLabelsToFieldValues([], 'opsys', {}))

  def testConvertLabelsToFieldValues_NoMatch(self):
    # None of these labels has the 'OpSys-' prefix.
    self.assertEqual(
        [], tracker_views._ConvertLabelsToFieldValues(
            ['Pri-3', 'M-44', 'Security', 'Via-Wizard'], 'opsys', {}))

  def testConvertLabelsToFieldValues_HasMatch(self):
    # Exactly one label matches; the prefix is stripped off the value.
    (osx_fv,) = tracker_views._ConvertLabelsToFieldValues(
        ['Pri-3', 'M-44', 'Security', 'OpSys-OSX'], 'opsys', {})
    self.assertEqual('OSX', osx_fv.val)
    self.assertEqual('OSX', osx_fv.val_short)
    self.assertEqual('', osx_fv.docstring)

    # Two matches, in label order; docstrings come from the given dict.
    osx_fv, all_fv = tracker_views._ConvertLabelsToFieldValues(
        ['Pri-3', 'M-44', 'Security', 'OpSys-OSX', 'OpSys-All'],
        'opsys', {'OpSys-All': 'Happens everywhere'})
    self.assertEqual('OSX', osx_fv.val)
    self.assertEqual('OSX', osx_fv.val_short)
    self.assertEqual('', osx_fv.docstring)
    self.assertEqual('All', all_fv.val)
    self.assertEqual('All', all_fv.val_short)
    self.assertEqual('Happens everywhere', all_fv.docstring)
+
+
class FieldDefViewTest(unittest.TestCase):
  # Placeholder suite: FieldDefView has no coverage yet.
  pass  # TODO(jrobbins): write tests


class IssueTemplateViewTest(unittest.TestCase):
  # Placeholder suite: IssueTemplateView has no coverage yet.
  pass  # TODO(jrobbins): write tests


class MakeFieldUserViewsTest(unittest.TestCase):
  # Placeholder suite: MakeFieldUserViews has no coverage yet.
  pass  # TODO(jrobbins): write tests


class ConfigViewTest(unittest.TestCase):
  # Placeholder suite: ConfigView has no coverage yet.
  pass  # TODO(jrobbins): write tests
+
+
if __name__ == '__main__':
  # Allow running this test module directly.
  unittest.main()
diff --git a/appengine/monorail/tracker/tracker_bizobj.py b/appengine/monorail/tracker/tracker_bizobj.py
new file mode 100644
index 0000000..4c434e8
--- /dev/null
+++ b/appengine/monorail/tracker/tracker_bizobj.py
@@ -0,0 +1,1032 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Business objects for the Monorail issue tracker.
+
+These are classes and functions that operate on the objects that
+users care about in the issue tracker: e.g., issues, and the issue
+tracker configuration.
+"""
+
+import logging
+
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import urls
+from proto import tracker_pb2
+from tracker import tracker_constants
+
+
def GetOwnerId(issue):
  """Get the owner of an issue, whether it is explicit or derived."""
  if issue.owner_id:
    return issue.owner_id
  if issue.derived_owner_id:
    return issue.derived_owner_id
  return framework_constants.NO_USER_SPECIFIED
+
+
def GetStatus(issue):
  """Get the status of an issue, whether it is explicit or derived."""
  if issue.status:
    return issue.status
  return issue.derived_status or ''
+
+
def GetCcIds(issue):
  """Get the Cc's of an issue, whether they are explicit or derived."""
  # Explicit Cc's come first, then the rule-derived ones.
  return list(issue.cc_ids) + list(issue.derived_cc_ids)
+
+
def GetLabels(issue):
  """Get the labels of an issue, whether explicit or derived."""
  # Explicit labels come first, then the rule-derived ones.
  return list(issue.labels) + list(issue.derived_labels)
+
+
def MakeProjectIssueConfig(
    project_id, well_known_statuses, statuses_offer_merge, well_known_labels,
    excl_label_prefixes, templates, col_spec):
  """Return a ProjectIssueConfig with the given values."""
  # Normalize missing/falsy arguments to safe defaults.
  well_known_statuses = well_known_statuses or []
  statuses_offer_merge = statuses_offer_merge or []
  well_known_labels = well_known_labels or []
  excl_label_prefixes = excl_label_prefixes or []
  templates = templates or []
  col_spec = col_spec or ' '

  project_config = tracker_pb2.ProjectIssueConfig()
  # Harmonized (cross-project) configs have no project ID.
  if project_id:
    project_config.project_id = project_id

  SetConfigStatuses(project_config, well_known_statuses)
  project_config.statuses_offer_merge = statuses_offer_merge
  SetConfigLabels(project_config, well_known_labels)
  SetConfigTemplates(project_config, templates)
  project_config.exclusive_label_prefixes = excl_label_prefixes

  # Template ID 0 means "unspecified", so hard-coded defaults are used.
  project_config.default_template_for_developers = 0
  project_config.default_template_for_users = 0

  project_config.default_col_spec = col_spec

  # Note: default project issue config has no filter rules.

  return project_config
+
+
def UsersInvolvedInConfig(config):
  """Return a set of all user IDs referenced in the ProjectIssueConfig."""
  owners = {tmpl.owner_id for tmpl in config.templates}
  template_admins = set()
  for tmpl in config.templates:
    template_admins.update(tmpl.admin_ids)
  field_admins = set()
  for fd in config.field_defs:
    field_admins.update(fd.admin_ids)
  return owners | template_admins | field_admins
+
+
def FindFieldDef(field_name, config):
  """Find the field def with the given name (case-insensitive), or None."""
  wanted = field_name.lower()
  return next(
      (fd for fd in config.field_defs if fd.field_name.lower() == wanted),
      None)
+
+
def FindFieldDefByID(field_id, config):
  """Find the field def with the given ID, or return None."""
  return next(
      (fd for fd in config.field_defs if fd.field_id == field_id), None)
+
+
def GetGrantedPerms(issue, effective_ids, config):
  """Return a set of permissions granted by user-valued fields in an issue."""
  granted_perms = set()
  for field_value in issue.field_values:
    # Only user-valued fields naming one of the user's IDs can grant perms.
    if field_value.user_id not in effective_ids:
      continue
    field_def = FindFieldDefByID(field_value.field_id, config)
    # TODO(jrobbins): allow comma-separated list in grants_perm
    if field_def and field_def.grants_perm:
      granted_perms.add(field_def.grants_perm.lower())

  return granted_perms
+
+
def LabelIsMaskedByField(label, field_names):
  """If the label should be displayed as a field, return the field name.

  Args:
    label: string label to consider.
    field_names: a list of field names in lowercase.

  Returns:
    If masked, the lowercase name of the field, otherwise None. A label is
    masked by a custom field if the field name "Foo" matches the key part of
    a key-value label "Foo-Bar".
  """
  # Only key-value labels can be masked by a field.
  if '-' not in label:
    return None

  label_lower = label.lower()
  for field_name_lower in field_names:
    if label_lower.startswith(field_name_lower + '-'):
      return field_name_lower

  return None


def NonMaskedLabels(labels, field_names):
  """Return only those labels that are not masked by custom fields."""
  return [label for label in labels
          if not LabelIsMaskedByField(label, field_names)]
+
+
def MakeFieldDef(
    field_id, project_id, field_name, field_type_int, applic_type, applic_pred,
    is_required, is_multivalued, min_value, max_value, regex, needs_member,
    needs_perm, grants_perm, notify_on, docstring, is_deleted):
  """Make a FieldDef PB for the given FieldDef table row tuple."""
  fd = tracker_pb2.FieldDef(
      field_id=field_id, project_id=project_id, field_name=field_name,
      field_type=field_type_int, is_required=bool(is_required),
      is_multivalued=bool(is_multivalued), docstring=docstring,
      is_deleted=bool(is_deleted), applicable_type=applic_type or '',
      applicable_predicate=applic_pred or '',
      needs_member=bool(needs_member), grants_perm=grants_perm or '',
      notify_on=tracker_pb2.NotifyTriggers(notify_on or 0))
  # Optional columns may be NULL in the DB row; only set the PB field
  # when an actual value was given.
  for attr, value in (('min_value', min_value), ('max_value', max_value),
                      ('regex', regex), ('needs_perm', needs_perm)):
    if value is not None:
      setattr(fd, attr, value)
  return fd
+
+
def MakeFieldValue(field_id, int_value, str_value, user_id, derived):
  """Make a FieldValue holding whichever one of the given values is set."""
  fv = tracker_pb2.FieldValue(field_id=field_id, derived=derived)
  # At most one value is stored: int wins over str, which wins over user.
  if int_value is not None:
    fv.int_value = int_value
    return fv
  if str_value is not None:
    fv.str_value = str_value
    return fv
  if user_id is not None:
    fv.user_id = user_id
  return fv
+
+
def GetFieldValueWithRawValue(field_type, field_value, users_by_id, raw_value):
  """Find and return the field value of the specified field type.

  If the specified field_value is None or is empty then the raw_value is
  returned. When the field type is USER_TYPE the raw_value is used as a key
  to look up users_by_id.

  Args:
    field_type: tracker_pb2.FieldTypes type.
    field_value: tracker_pb2.FieldValue type.
    users_by_id: Dict mapping user_ids to UserViews.
    raw_value: String to use if field_value is not specified.

  Returns:
    Value of the specified field type.
  """
  value = GetFieldValue(field_value, users_by_id)
  if value:
    return value
  # Fall back on the raw value.  For user fields, try to resolve it
  # to a known user's email first.
  if (field_type == tracker_pb2.FieldTypes.USER_TYPE
      and raw_value in users_by_id):
    return users_by_id[raw_value].email
  return raw_value
+
+
def GetFieldValue(fv, users_by_id):
  """Return the value of this field. Give emails for users in users_by_id."""
  if fv is None:
    return None
  if fv.int_value is not None:
    return fv.int_value
  if fv.str_value is not None:
    return fv.str_value
  if fv.user_id is not None:
    try:
      return users_by_id[fv.user_id].email
    except KeyError:
      # Fall back on the bare ID when the user was not pre-fetched.
      logging.info('Failed to lookup user %d when getting field', fv.user_id)
      return fv.user_id
  return None
+
+
def FindComponentDef(path, config):
  """Find the component def with the given path (case-insensitive), or None."""
  wanted = path.lower()
  return next(
      (cd for cd in config.component_defs if cd.path.lower() == wanted),
      None)
+
+
def FindMatchingComponentIDs(path, config, exact=True):
  """Return a list of component IDs that match the given path.

  With exact=False, the path matches itself and any component nested
  under it ('UI' matches 'UI' and 'UI>Skin', but never 'UIX').
  """
  path_lower = path.lower()

  if exact:
    return [cd.component_id for cd in config.component_defs
            if cd.path.lower() == path_lower]

  # The trailing '>' delimiter prevents sibling prefix collisions.
  prefix = path_lower + '>'
  return [cd.component_id for cd in config.component_defs
          if (cd.path.lower() + '>').startswith(prefix)]
+
+
def FindComponentDefByID(component_id, config):
  """Find the component def with the given ID, or return None."""
  return next(
      (cd for cd in config.component_defs
       if cd.component_id == component_id),
      None)
+
+
def FindAncestorComponents(config, component_def):
  """Return a list of all components the given component is nested under."""
  descendant_path = component_def.path.lower()
  # An ancestor's path plus '>' is always a prefix of the descendant's path.
  return [cd for cd in config.component_defs
          if descendant_path.startswith(cd.path.lower() + '>')]
+
+
def FindDescendantComponents(config, component_def):
  """Return a list of all components nested under the given component."""
  # A descendant's path always starts with the ancestor's path plus '>'.
  ancestor_prefix = component_def.path.lower() + '>'
  return [cd for cd in config.component_defs
          if cd.path.lower().startswith(ancestor_prefix)]
+
+
def MakeComponentDef(
    component_id, project_id, path, docstring, deprecated, admin_ids, cc_ids,
    created, creator_id, modified=None, modifier_id=None):
  """Make a ComponentDef PB for the given ComponentDef table row tuple."""
  return tracker_pb2.ComponentDef(
      component_id=component_id, project_id=project_id, path=path,
      docstring=docstring, deprecated=bool(deprecated),
      admin_ids=admin_ids, cc_ids=cc_ids, created=created,
      creator_id=creator_id, modified=modified, modifier_id=modifier_id)
+
+
def MakeSavedQuery(
    query_id, name, base_query_id, query, subscription_mode=None,
    executes_in_project_ids=None):
  """Make a SavedQuery PB, setting only the optional fields that were given."""
  saved_query = tracker_pb2.SavedQuery(
      name=name, base_query_id=base_query_id, query=query)
  # Optional fields are left unset (not set to None) when not provided.
  optional_fields = (
      ('query_id', query_id),
      ('subscription_mode', subscription_mode),
      ('executes_in_project_ids', executes_in_project_ids))
  for attr, value in optional_fields:
    if value is not None:
      setattr(saved_query, attr, value)
  return saved_query
+
+
def SetConfigStatuses(project_config, well_known_statuses):
  """Internal method to set the well-known statuses of ProjectIssueConfig."""
  # Each input row is (status, docstring, means_open, deprecated).
  # Status names are canonicalized before being stored.
  project_config.well_known_statuses = [
      tracker_pb2.StatusDef(
          status=framework_bizobj.CanonicalizeLabel(status),
          status_docstring=docstring, means_open=means_open,
          deprecated=deprecated)
      for status, docstring, means_open, deprecated in well_known_statuses]
+
+
def SetConfigLabels(project_config, well_known_labels):
  """Internal method to set the well-known labels of a ProjectIssueConfig."""
  # Each input row is (label, docstring, deprecated).  Label names are
  # canonicalized before being stored.
  project_config.well_known_labels = [
      tracker_pb2.LabelDef(
          label=framework_bizobj.CanonicalizeLabel(label),
          label_docstring=docstring, deprecated=deprecated)
      for label, docstring, deprecated in well_known_labels]
+
+
def SetConfigTemplates(project_config, template_dict_list):
  """Internal method to set the templates of a ProjectIssueConfig."""
  project_config.templates = [
      ConvertDictToTemplate(template_dict)
      for template_dict in template_dict_list]
+
+
def ConvertDictToTemplate(template_dict):
  """Construct a Template PB with the values from template_dict.

  Args:
    template_dict: dictionary with fields corresponding to the Template
        PB fields.

  Returns:
    A Template protocol buffer that can be stored in the
    project's ProjectIssueConfig PB.
  """
  get = template_dict.get
  # field_values and admin_ids are always empty for templates built from
  # dicts; only the listed keys are consulted.
  return MakeIssueTemplate(
      get('name'), get('summary'), get('status'), get('owner_id'),
      get('content'), get('labels'), [], [], get('components'),
      summary_must_be_edited=get('summary_must_be_edited'),
      owner_defaults_to_member=get('owner_defaults_to_member'),
      component_required=get('component_required'),
      members_only=get('members_only'))
+
+
def MakeIssueTemplate(
    name, summary, status, owner_id, content, labels, field_values, admin_ids,
    component_ids, summary_must_be_edited=None, owner_defaults_to_member=None,
    component_required=None, members_only=None):
  """Make an issue template PB."""
  template = tracker_pb2.TemplateDef()
  template.name = name
  # Empty/zero values are simply not stored for these fields.
  for attr, value in (('summary', summary), ('status', status),
                      ('owner_id', owner_id)):
    if value:
      setattr(template, attr, value)
  template.content = content
  template.field_values = field_values
  template.labels = labels or []
  template.admin_ids = admin_ids
  template.component_ids = component_ids or []

  # Boolean options are stored only when explicitly provided, so that an
  # unspecified option stays unset rather than becoming False.
  for attr, value in (('summary_must_be_edited', summary_must_be_edited),
                      ('owner_defaults_to_member', owner_defaults_to_member),
                      ('component_required', component_required),
                      ('members_only', members_only)):
    if value is not None:
      setattr(template, attr, value)

  return template
+
+
def MakeDefaultProjectIssueConfig(project_id):
  """Return a ProjectIssueConfig for use by projects that don't have one."""
  return MakeProjectIssueConfig(
      project_id,
      tracker_constants.DEFAULT_WELL_KNOWN_STATUSES,
      tracker_constants.DEFAULT_STATUSES_OFFER_MERGE,
      tracker_constants.DEFAULT_WELL_KNOWN_LABELS,
      tracker_constants.DEFAULT_EXCL_LABEL_PREFIXES,
      tracker_constants.DEFAULT_TEMPLATES,
      tracker_constants.DEFAULT_COL_SPEC)
+
+
def HarmonizeConfigs(config_list):
  """Combine several ProjectIssueConfigs into one for cross-project sorting.

  Args:
    config_list: a list of ProjectIssueConfig PBs with labels and statuses
        among other fields.

  Returns:
    A new ProjectIssueConfig with just the labels and status values filled
    in to be a logical union of the given configs.  Specifically, the order
    of the combined status and label lists should be maintained.
  """
  if not config_list:
    return MakeDefaultProjectIssueConfig(None)

  # Merge each project's ordered status/label names into one combined
  # ordering that preserves each project's relative order where possible.
  harmonized_status_names = _CombineOrderedLists(
      [[stat.status for stat in config.well_known_statuses]
       for config in config_list])
  harmonized_label_names = _CombineOrderedLists(
      [[lab.label for lab in config.well_known_labels]
       for config in config_list])
  harmonized_default_sort_spec = ' '.join(
      config.default_sort_spec for config in config_list)
  # This col_spec is probably not what the user wants to view because it is
  # too much information. We join all the col_specs here so that we are sure
  # to lookup all users needed for sorting, even if it is more than needed.
  # TODO(jrobbins): look up users based on the col_spec rather than the
  # sort_spec?
  harmonized_default_col_spec = ' '.join(
      config.default_col_spec for config in config_list)

  result_config = tracker_pb2.ProjectIssueConfig()
  # The combined config is only used during sorting, never stored.
  result_config.default_col_spec = harmonized_default_col_spec
  result_config.default_sort_spec = harmonized_default_sort_spec

  # NOTE(review): all combined statuses are marked means_open=True here;
  # presumably open/closed state is not needed for sorting — confirm.
  for status_name in harmonized_status_names:
    result_config.well_known_statuses.append(tracker_pb2.StatusDef(
        status=status_name, means_open=True))

  for label_name in harmonized_label_names:
    result_config.well_known_labels.append(tracker_pb2.LabelDef(
        label=label_name))

  # Field and component definitions are concatenated from all projects.
  for config in config_list:
    result_config.field_defs.extend(config.field_defs)
    result_config.component_defs.extend(config.component_defs)

  return result_config
+
+
def HarmonizeLabelOrStatusRows(def_rows):
  """Put the given label defs into a logical global order.

  Args:
    def_rows: rows shaped like (def_id, project_id, rank, label, ...).

  Returns:
    A list of (def_id, rank, label) tuples: the unranked ("oddball") defs
    followed by the ranked defs merged across all projects.
  """
  ranked_defs_by_project = {}
  oddball_defs = []  # Defs with no explicit rank.
  for row in def_rows:
    def_id, project_id, rank, label = row[0], row[1], row[2], row[3]
    if rank is not None:
      ranked_defs_by_project.setdefault(project_id, []).append(
          (def_id, rank, label))
    else:
      oddball_defs.append((def_id, rank, label))

  # Sort unranked defs by label, case-insensitively.
  # NOTE(review): reverse=True here — presumably intentional so that the
  # final display order comes out right; confirm against callers.
  oddball_defs.sort(reverse=True, key=lambda def_tuple: def_tuple[2].lower())
  # Compose the list-of-lists in a consistent order by project_id.
  list_of_lists = [ranked_defs_by_project[pid]
                   for pid in sorted(ranked_defs_by_project.keys())]
  # Merge per-project orderings; duplicates are kept so every row survives.
  harmonized_ranked_defs = _CombineOrderedLists(
      list_of_lists, include_duplicate_keys=True,
      key=lambda def_tuple: def_tuple[2])

  return oddball_defs + harmonized_ranked_defs
+
+
def _CombineOrderedLists(
    list_of_lists, include_duplicate_keys=False, key=lambda x: x):
  """Combine lists of items while maintaining their desired order.

  Args:
    list_of_lists: a list of lists of strings.
    include_duplicate_keys: Pass True to make the combined list have the
        same total number of elements as the sum of the input lists.
    key: optional function that extracts the comparison string from each
        item.  The result holds the whole items.

  Returns:
    A single list containing items from all the input lists, ordered so
    that each input list's ordering is maintained as much as possible.
  """
  merged_items = []
  merged_keys = []  # Parallel list of lowercased comparison keys.
  seen = set()
  for current_list in list_of_lists:
    _AccumulateCombinedList(
        current_list, merged_items, merged_keys, seen,
        include_duplicate_keys=include_duplicate_keys, key=key)
  return merged_items
+
+
def _AccumulateCombinedList(
    one_list, combined_items, combined_keys, seen_keys_set,
    include_duplicate_keys=False, key=lambda x: x):
  """Merge one_list into the accumulated combined list, maintaining order.

  Args:
    one_list: list of items in a desired order.
    combined_items: accumulated list of items in the desired order.
    combined_keys: parallel list of lowercased key strings.
    seen_keys_set: set of key strings already present in combined_keys.
    include_duplicate_keys: Pass True to insert an item even when an item
        with the same key has already been accumulated.
    key: optional function that extracts the comparison string from each
        item.  Comparison is case-insensitive.

  Returns:
    Nothing, but mutates combined_items, combined_keys, and seen_keys_set.
    Each item of one_list is spliced in after any already-present item
    with the same key, so nothing already in combined_items is reordered
    and the ordering of items from one_list is maintained as much as
    possible.
  """
  pos = 0
  for element in one_list:
    element_key = key(element).lower()
    already_known = element_key in seen_keys_set
    if already_known:
      # Never insert before an existing occurrence of the same key.
      pos = max(pos, combined_keys.index(element_key) + 1)

    if include_duplicate_keys or not already_known:
      combined_items.insert(pos, element)
      combined_keys.insert(pos, element_key)
      pos += 1

    seen_keys_set.add(element_key)
+
+
def GetBuiltInQuery(query_id):
  """If the given query ID is for a built-in query, return that string.

  Returns '' for query IDs that are not built-in canned queries.
  """
  return tracker_constants.DEFAULT_CANNED_QUERY_CONDS.get(query_id, '')
+
+
def UsersInvolvedInAmendments(amendments):
  """Return a set of all user IDs mentioned in the given Amendments."""
  involved = set()
  for amendment in amendments:
    involved.update(amendment.added_user_ids, amendment.removed_user_ids)
  return involved
+
+
def _AccumulateUsersInvolvedInComment(comment, user_id_set):
  """Build up a set of all users involved in an IssueComment.

  Args:
    comment: an IssueComment PB.
    user_id_set: a set of user IDs to build up.

  Returns:
    The same set, modified to include the commenter's user ID and all
    users mentioned in any of the comment's amendments.
  """
  user_id_set.add(comment.user_id)
  user_id_set |= UsersInvolvedInAmendments(comment.amendments)
  return user_id_set
+
+
def UsersInvolvedInComment(comment):
  """Return a set of all users involved in an IssueComment.

  Args:
    comment: an IssueComment PB.

  Returns:
    A set with the user ID of the user who entered the comment, and all
    the users mentioned in any amendments.
  """
  # Delegates to the accumulator with a fresh, empty set.
  return _AccumulateUsersInvolvedInComment(comment, set())
+
+
def UsersInvolvedInCommentList(comments):
  """Return a set of all users involved in a list of IssueComments.

  Args:
    comments: a list of IssueComment PBs.

  Returns:
    A set with the user IDs of users who entered the comments, and all
    the users mentioned in any amendments.
  """
  involved = set()
  for comment in comments:
    _AccumulateUsersInvolvedInComment(comment, involved)
  return involved
+
+
def UsersInvolvedInIssues(issues):
  """Return a set of all user IDs referenced in the issues' metadata."""
  involved = set()
  for issue in issues:
    involved.add(issue.reporter_id)
    involved.add(issue.owner_id)
    involved.add(issue.derived_owner_id)
    involved.update(issue.cc_ids, issue.derived_cc_ids)
    # Only truthy (non-zero) user IDs from custom user fields count.
    involved.update(fv.user_id for fv in issue.field_values if fv.user_id)
  return involved
+
+
def MakeAmendment(
    field, new_value, added_ids, removed_ids, custom_field_name=None,
    old_value=None):
  """Utility function to populate an Amendment PB.

  Args:
    field: enum for the field being updated.
    new_value: new string value of that field.
    added_ids: list of user IDs being added.
    removed_ids: list of user IDs being removed.
    custom_field_name: optional name of a custom field.
    old_value: old string value of that field.

  Returns:
    An instance of Amendment.
  """
  result = tracker_pb2.Amendment()
  result.field = field
  result.newvalue = new_value
  result.added_user_ids.extend(added_ids)
  result.removed_user_ids.extend(removed_ids)

  # Optional fields are only stored when explicitly provided.
  if custom_field_name is not None:
    result.custom_field_name = custom_field_name
  if old_value is not None:
    result.oldvalue = old_value

  return result
+
+
def _PlusMinusString(added_items, removed_items):
  """Return a concatenation of the items, with a minus on removed items.

  Args:
    added_items: list of string items added.
    removed_items: list of string items removed.

  Returns:
    A unicode string with all the removed items first (preceded by minus
    signs) and then the added items.  Empty items are skipped.
  """
  assert all(isinstance(item, basestring)
             for item in added_items + removed_items)
  # TODO(jrobbins): this is not good when values can be negative ints.
  parts = ['-%s' % item.strip() for item in removed_items if item]
  parts.extend('%s' % item for item in added_items if item)
  return ' '.join(parts)
+
+
def _PlusMinusAmendment(
    field, added_items, removed_items, custom_field_name=None):
  """Make an Amendment PB with the given added/removed items.

  The newvalue is a display string with removed items preceded by '-'.
  """
  return MakeAmendment(
      field, _PlusMinusString(added_items, removed_items), [], [],
      custom_field_name=custom_field_name)
+
+
def _PlusMinusRefsAmendment(
    field, added_refs, removed_refs, default_project_name=None):
  """Make an Amendment PB with the given added/removed issue refs.

  Refs are formatted as '123' or 'projectname:123'; falsy refs are skipped.
  """
  return _PlusMinusAmendment(
      field,
      [FormatIssueRef(r, default_project_name=default_project_name)
       for r in added_refs if r],
      [FormatIssueRef(r, default_project_name=default_project_name)
       for r in removed_refs if r])
+
+
def MakeSummaryAmendment(new_summary, old_summary):
  """Return an Amendment PB recording a change to the issue summary."""
  return MakeAmendment(
      tracker_pb2.FieldID.SUMMARY, new_summary, [], [],
      old_value=old_summary)
+
+
def MakeStatusAmendment(new_status, old_status):
  """Return an Amendment PB recording a change to the issue status."""
  return MakeAmendment(
      tracker_pb2.FieldID.STATUS, new_status, [], [],
      old_value=old_status)
+
+
def MakeOwnerAmendment(new_owner_id, old_owner_id):
  """Return an Amendment PB recording a change to the issue owner."""
  # Owner changes are recorded as user-ID lists, not as a display string.
  return MakeAmendment(
      tracker_pb2.FieldID.OWNER, '', [new_owner_id], [old_owner_id])
+
+
def MakeCcAmendment(added_cc_ids, removed_cc_ids):
  """Return an Amendment PB recording a change to the Cc list."""
  return MakeAmendment(
      tracker_pb2.FieldID.CC, '', added_cc_ids, removed_cc_ids)
+
+
def MakeLabelsAmendment(added_labels, removed_labels):
  """Return an Amendment PB recording a change to the labels."""
  return _PlusMinusAmendment(
      tracker_pb2.FieldID.LABELS, added_labels, removed_labels)
+
+
def DiffValueLists(new_list, old_list):
  """Given an old list and a new list, return the added and removed items."""
  if not old_list:
    return new_list, []
  if not new_list:
    return [], old_list

  added = []
  # Assume everything was removed, then narrow that down as new values
  # are matched against old ones.  Duplicates are matched one-for-one.
  removed = list(old_list)
  for value in new_list:
    try:
      removed.remove(value)
    except ValueError:
      added.append(value)

  return added, removed
+
+
def MakeFieldAmendment(field_id, config, new_values, old_values=None):
  """Return an amendment showing how an issue's field changed.

  Args:
    field_id: int field ID of a built-in or custom issue field.
    config: config info for the current project, including field_defs.
    new_values: list of strings representing new values of field.
    old_values: list of strings representing old values of field.

  Returns:
    A new Amendment object.

  Raises:
    ValueError: if the specified field was not found.
  """
  fd = FindFieldDefByID(field_id, config)

  if fd is None:
    # Bug fix: use %-formatting.  Passing field_id as a second argument to
    # ValueError left the message unformatted (args tuple), so the error
    # never showed the actual field ID.
    raise ValueError('field %r vanished mid-request' % field_id)

  if fd.is_multivalued:
    old_values = old_values or []
    added, removed = DiffValueLists(new_values, old_values)
    if fd.field_type == tracker_pb2.FieldTypes.USER_TYPE:
      # User-valued fields record user IDs in the added/removed lists.
      return MakeAmendment(
          tracker_pb2.FieldID.CUSTOM, '', added, removed,
          custom_field_name=fd.field_name)
    else:
      return _PlusMinusAmendment(
          tracker_pb2.FieldID.CUSTOM,
          ['%s' % item for item in added],
          ['%s' % item for item in removed],
          custom_field_name=fd.field_name)

  else:
    if fd.field_type == tracker_pb2.FieldTypes.USER_TYPE:
      return MakeAmendment(
          tracker_pb2.FieldID.CUSTOM, '', new_values, [],
          custom_field_name=fd.field_name)

    if new_values:
      new_str = ', '.join('%s' % item for item in new_values)
    else:
      # '----' is the display convention for a cleared field value.
      new_str = '----'

    return MakeAmendment(
        tracker_pb2.FieldID.CUSTOM, new_str, [], [],
        custom_field_name=fd.field_name)
+
+
def MakeFieldClearedAmendment(field_id, config):
  """Return an Amendment showing that a custom field was cleared.

  Args:
    field_id: int field ID of the custom field that was cleared.
    config: config info for the current project, including field_defs.

  Returns:
    A new Amendment object with the '----' cleared-value display string.

  Raises:
    ValueError: if the specified field was not found.
  """
  fd = FindFieldDefByID(field_id, config)

  if fd is None:
    # Bug fix: use %-formatting so the message actually includes field_id;
    # ValueError does not format its extra arguments.
    raise ValueError('field %r vanished mid-request' % field_id)

  return MakeAmendment(
      tracker_pb2.FieldID.CUSTOM, '----', [], [],
      custom_field_name=fd.field_name)
+
+
def MakeComponentsAmendment(added_comp_ids, removed_comp_ids, config):
  """Make an Amendment PB for a change to the components."""
  # TODO(jrobbins): record component IDs as ints and display them with
  # lookups (and maybe permission checks in the future). But, what
  # about history that references deleted components?
  def _LookupPaths(comp_ids):
    """Map component IDs to their paths, silently skipping unknown IDs."""
    paths = []
    for comp_id in comp_ids:
      cd = FindComponentDefByID(comp_id, config)
      if cd:
        paths.append(cd.path)
    return paths

  return _PlusMinusAmendment(
      tracker_pb2.FieldID.COMPONENTS,
      _LookupPaths(added_comp_ids), _LookupPaths(removed_comp_ids))
+
+
def MakeBlockedOnAmendment(
    added_refs, removed_refs, default_project_name=None):
  """Return an Amendment PB recording changes to the blocked-on issues."""
  return _PlusMinusRefsAmendment(
      tracker_pb2.FieldID.BLOCKEDON, added_refs, removed_refs,
      default_project_name=default_project_name)
+
+
def MakeBlockingAmendment(added_refs, removed_refs, default_project_name=None):
  """Return an Amendment PB recording changes to the blocking issues."""
  return _PlusMinusRefsAmendment(
      tracker_pb2.FieldID.BLOCKING, added_refs, removed_refs,
      default_project_name=default_project_name)
+
+
def MakeMergedIntoAmendment(added_ref, removed_ref, default_project_name=None):
  """Return an Amendment PB recording a change to the merged-into issue."""
  # Single refs are wrapped in lists for the shared refs-amendment helper.
  return _PlusMinusRefsAmendment(
      tracker_pb2.FieldID.MERGEDINTO, [added_ref], [removed_ref],
      default_project_name=default_project_name)
+
+
def MakeProjectAmendment(new_project_name):
  """Return an Amendment PB recording a change to an issue's project."""
  return MakeAmendment(
      tracker_pb2.FieldID.PROJECT, new_project_name, [], [])
+
+
def AmendmentString(amendment, users_by_id):
  """Produce a displayable string for an Amendment PB.

  Args:
    amendment: Amendment PB to display.
    users_by_id: dict {user_id: user_view, ...} including all users
        mentioned in amendment.

  Returns:
    A string that could be displayed on a web page or sent in email.
  """
  if amendment.newvalue:
    return amendment.newvalue

  if amendment.field == tracker_pb2.FieldID.OWNER:
    # Display the new owner only.
    added = amendment.added_user_ids
    if added and added[0] > 0:
      return users_by_id[added[0]].display_name
    return framework_constants.NO_USER_NAME

  # For other user-valued fields, show removed users with '-' prefixes
  # followed by the added users.  Unknown user IDs are skipped.
  return _PlusMinusString(
      [users_by_id[uid].display_name for uid in amendment.added_user_ids
       if uid in users_by_id],
      [users_by_id[uid].display_name for uid in amendment.removed_user_ids
       if uid in users_by_id])
+
+
def AmendmentLinks(amendment, users_by_id, project_name):
  """Produce a list of value/url pairs for an Amendment PB.

  Args:
    amendment: Amendment PB to display.
    users_by_id: dict {user_id: user_view, ...} including all users
        mentioned in amendment.
    project_name: Name of project the issue/comment/amendment is in.

  Returns:
    A list of dicts with 'value' and 'url' keys. 'url' may be None.
  """
  # Display both old and new summary
  if amendment.field == tracker_pb2.FieldID.SUMMARY:
    result = amendment.newvalue
    if amendment.oldvalue:
      result += ' (was: %s)' % amendment.oldvalue
    return [{'value': result, 'url': None}]
  # Display new owner only
  elif amendment.field == tracker_pb2.FieldID.OWNER:
    if amendment.added_user_ids and amendment.added_user_ids[0] > 0:
      uid = amendment.added_user_ids[0]
      return [{'value': users_by_id[uid].display_name, 'url': None}]
    else:
      return [{'value': framework_constants.NO_USER_NAME, 'url': None}]
  # Issue-reference fields link each ref to the referenced issue.
  elif amendment.field in (tracker_pb2.FieldID.BLOCKEDON,
                           tracker_pb2.FieldID.BLOCKING,
                           tracker_pb2.FieldID.MERGEDINTO):
    values = amendment.newvalue.split()
    # Unparseable refs yield None, which FormatIssueUrl turns into ''.
    bug_refs = [_SafeParseIssueRef(v.strip()) for v in values]
    issue_urls = [FormatIssueUrl(ref, default_project_name=project_name)
                  for ref in bug_refs]
    # TODO(jrobbins): Permission checks on referenced issues to allow
    # showing summary on hover.
    return [{'value': v, 'url': u} for (v, u) in zip(values, issue_urls)]
  elif amendment.newvalue:
    # Catchall for everything except user-valued fields.
    return [{'value': v, 'url': None} for v in amendment.newvalue.split()]
  else:
    # Applies to field==CC or CUSTOM with user type.
    values = _PlusMinusString(
        [users_by_id[uid].display_name for uid in amendment.added_user_ids
         if uid in users_by_id],
        [users_by_id[uid].display_name for uid in amendment.removed_user_ids
         if uid in users_by_id])
    return [{'value': v.strip(), 'url': None} for v in values.split()]
+
+
def GetAmendmentFieldName(amendment):
  """Get user-visible name for an amendment to a built-in or custom field."""
  custom_name = amendment.custom_field_name
  if custom_name:
    return custom_name
  # Built-in fields display as the capitalized enum name.
  return str(amendment.field).capitalize()
+
+
def MakeDanglingIssueRef(project_name, issue_id):
  """Create a DanglingIssueRef PB with the given project name and issue id."""
  ref = tracker_pb2.DanglingIssueRef()
  ref.project = project_name
  ref.issue_id = issue_id
  return ref
+
+
def FormatIssueUrl(issue_ref_tuple, default_project_name=None):
  """Format an issue url from an issue ref, or '' for a None ref."""
  if issue_ref_tuple is None:
    return ''
  project_name, local_id = issue_ref_tuple
  # Fall back to the default project when the ref has no project name.
  path = '/p/%s%s' % (project_name or default_project_name, urls.ISSUE_DETAIL)
  return framework_helpers.FormatURL(None, path, id=local_id)
+
+
def FormatIssueRef(issue_ref_tuple, default_project_name=None):
  """Format an issue reference for users: e.g., 123, or projectname:123."""
  if issue_ref_tuple is None:
    return ''
  project_name, local_id = issue_ref_tuple
  # Same-project (or project-less) refs show just the local issue number.
  if not project_name or project_name == default_project_name:
    return str(local_id)
  return '%s:%d' % (project_name, local_id)
+
+
def ParseIssueRef(ref_str):
  """Parse an issue ref string: e.g., 123, or projectname:123 into a tuple.

  Returns None for a blank/whitespace-only ref string.

  Raises ValueError if the ref string exists but can't be parsed.
  """
  if not ref_str.strip():
    return None

  project_part, colon, id_part = ref_str.partition(':')
  if colon:
    # Leading '-' on the project name (e.g. from "-proj:123") is dropped.
    project_name = project_part.strip().lstrip('-')
  else:
    project_name = None
    id_part = ref_str

  # Leading '-' on the issue number is also dropped.
  return project_name, int(id_part.lstrip('-'))
+
+
def _SafeParseIssueRef(ref_str):
  """Same as ParseIssueRef, but catches ValueError and returns None instead."""
  try:
    return ParseIssueRef(ref_str)
  except ValueError:
    # Unparseable refs (e.g. non-numeric IDs) are treated as no ref at all.
    return None
+
+
def MergeFields(field_values, fields_add, fields_remove, field_defs):
  """Merge the fields to add/remove into the current field values.

  Args:
    field_values: list of current FieldValue PBs.
    fields_add: list of FieldValue PBs to add to field_values. If any of these
        is for a single-valued field, it replaces all previous values for the
        same field_id in field_values.
    fields_remove: list of FieldValues to remove from field_values, if found.
    field_defs: list of FieldDef PBs from the issue's project's config.

  Returns:
    A 3-tuple with the merged field values, the specific values that were
    added, and the specific values that were removed.  The actual added or
    removed values might be fewer than the requested ones if the issue
    already had one of the values-to-add or lacked one of the
    values-to-remove.
  """
  is_multi = {fd.field_id: fd.is_multivalued for fd in field_defs}
  merged_fvs = list(field_values)
  fvs_added = []
  for fv_consider in fields_add:
    consider_value = GetFieldValue(fv_consider, {})
    # The for/else adds fv_consider only when no existing value with the
    # same field_id and value was found (the loop never hit `break`).
    for old_fv in field_values:
      if (fv_consider.field_id == old_fv.field_id and
          GetFieldValue(old_fv, {}) == consider_value):
        break
    else:
      # Drop any existing values for non-multi fields.
      if not is_multi.get(fv_consider.field_id):
        merged_fvs = [fv for fv in merged_fvs
                      if fv.field_id != fv_consider.field_id]
      fvs_added.append(fv_consider)
      merged_fvs.append(fv_consider)

  fvs_removed = []
  for fv_consider in fields_remove:
    consider_value = GetFieldValue(fv_consider, {})
    # Every matching old value is removed (no break), so duplicates of the
    # same value are all taken out.
    for old_fv in field_values:
      if (fv_consider.field_id == old_fv.field_id and
          GetFieldValue(old_fv, {}) == consider_value):
        fvs_removed.append(fv_consider)
        merged_fvs.remove(old_fv)

  return merged_fvs, fvs_added, fvs_removed
diff --git a/appengine/monorail/tracker/tracker_constants.py b/appengine/monorail/tracker/tracker_constants.py
new file mode 100644
index 0000000..2cfa583
--- /dev/null
+++ b/appengine/monorail/tracker/tracker_constants.py
@@ -0,0 +1,222 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Some constants used in Monorail issue tracker pages."""
+
+import re
+
+from proto import user_pb2
+
+
# Default columns shown on issue list page, and other built-in cols.
DEFAULT_COL_SPEC = 'ID Type Status Priority Milestone Owner Summary'
OTHER_BUILT_IN_COLS = ['Attachments', 'Stars', 'Opened', 'Closed', 'Modified',
                       'BlockedOn', 'Blocking', 'Blocked', 'MergedInto',
                       'Reporter', 'Cc', 'Project', 'Component']

# These are label prefixes that would conflict with built-in column names.
# E.g., no issue should have a *label* id-1234 or status-foo because any
# search for "id:1234" or "status:foo" would not look at labels.
RESERVED_PREFIXES = [
    'id', 'project', 'reporter', 'summary', 'status', 'owner', 'cc',
    'attachments', 'attachment', 'component', 'opened', 'closed',
    'modified', 'is', 'has', 'blockedon', 'blocking', 'blocked', 'mergedinto',
    'stars', 'starredby', 'description', 'comment', 'commentby', 'label',
    'hotlist', 'rank', 'explicit_status', 'derived_status', 'explicit_owner',
    'derived_owner', 'explicit_cc', 'derived_cc', 'explicit_label',
    'derived_label', 'last_comment_by', 'exact_component',
    'explicit_component', 'derived_component']

# These columns are sorted as user names.
USERNAME_COLS = ['owner', 'reporter', 'cc']

# These columns are not useful as grid view axes, so don't offer them.
NOT_USED_IN_GRID_AXES = ['Summary', 'ID', 'Opened', 'Closed', 'Modified']

# Issues per page in the issue list.
DEFAULT_RESULTS_PER_PAGE = 100

# Search field input indicating that the user wants to
# jump to the specified issue.
JUMP_RE = re.compile(r'^\d+$')

# Regular expression defining a single search term.
# Used when parsing the contents of the issue search field.
TERM_RE = re.compile(r'[-a-zA-Z0-9._]+')

# Regular expression used to validate new component leaf names.
# This should never match any string with a ">" in it.
COMPONENT_NAME_RE = re.compile(r'^[a-zA-Z]([-_]?[a-zA-Z0-9])+$')

# Regular expression used to validate new field names.
FIELD_NAME_RE = re.compile(r'^[a-zA-Z]([-_]?[a-zA-Z0-9])*$')

# The next few items are specifications of the defaults for project
# issue configurations.  These are used for projects that do not have
# their own config.
DEFAULT_CANNED_QUERIES = [
    # Query ID, Name, Base query ID (not used for built-in queries), conditions
    (1, 'All issues', 0, ''),
    (2, 'Open issues', 0, 'is:open'),
    (3, 'Open and owned by me', 0, 'is:open owner:me'),
    (4, 'Open and reported by me', 0, 'is:open reporter:me'),
    (5, 'Open and starred by me', 0, 'is:open is:starred'),
    (6, 'New issues', 0, 'status:new'),
    (7, 'Issues to verify', 0, 'status=fixed,done'),
    (8, 'Open with comment by me', 0, 'is:open commentby:me'),
    ]

# Mapping of built-in query ID to its conditions string.
DEFAULT_CANNED_QUERY_CONDS = {
    query_id: cond
    for (query_id, _name, _base, cond) in DEFAULT_CANNED_QUERIES}

ALL_ISSUES_CAN = 1
OPEN_ISSUES_CAN = 2

# Define well-known issue statuses.  Each status has 4 parts: a name, a
# description, True if the status means that an issue should be
# considered open (False means closed), and True if the status is
# deprecated.
DEFAULT_WELL_KNOWN_STATUSES = [
    # Name, docstring, means_open, deprecated
    ('New', 'Issue has not had initial review yet', True, False),
    ('Accepted', 'Problem reproduced / Need acknowledged', True, False),
    ('Started', 'Work on this issue has begun', True, False),
    ('Fixed', 'Developer made source code changes, QA should verify', False,
     False),
    ('Verified', 'QA has verified that the fix worked', False, False),
    ('Invalid', 'This was not a valid issue report', False, False),
    ('Duplicate', 'This report duplicates an existing issue', False, False),
    ('WontFix', 'We decided to not take action on this issue', False, False),
    ('Done', 'The requested non-coding task was completed', False, False),
    ]

DEFAULT_WELL_KNOWN_LABELS = [
    # Name, docstring, deprecated
    ('Type-Defect', 'Report of a software defect', False),
    ('Type-Enhancement', 'Request for enhancement', False),
    ('Type-Task', 'Work item that doesn\'t change the code or docs', False),
    ('Type-Other', 'Some other kind of issue', False),
    ('Priority-Critical', 'Must resolve in the specified milestone', False),
    ('Priority-High', 'Strongly want to resolve in the specified milestone',
     False),
    ('Priority-Medium', 'Normal priority', False),
    ('Priority-Low', 'Might slip to later milestone', False),
    ('OpSys-All', 'Affects all operating systems', False),
    ('OpSys-Windows', 'Affects Windows users', False),
    ('OpSys-Linux', 'Affects Linux users', False),
    ('OpSys-OSX', 'Affects Mac OS X users', False),
    ('Milestone-Release1.0', 'All essential functionality working', False),
    ('Security', 'Security risk to users', False),
    ('Performance', 'Performance issue', False),
    ('Usability', 'Affects program usability', False),
    ('Maintainability', 'Hinders future changes', False),
    ]

# Exclusive label prefixes are ones that can only be used once per issue.
# For example, an issue would normally have only one Priority-* label, whereas
# an issue might have many OpSys-* labels.
DEFAULT_EXCL_LABEL_PREFIXES = ['Type', 'Priority', 'Milestone']

DEFAULT_USER_DEFECT_REPORT_TEMPLATE = {
    'name': 'Defect report from user',
    'summary': 'Enter one-line summary',
    'summary_must_be_edited': True,
    'content': (
        'What steps will reproduce the problem?\n'
        '1. \n'
        '2. \n'
        '3. \n'
        '\n'
        'What is the expected output?\n'
        '\n'
        '\n'
        'What do you see instead?\n'
        '\n'
        '\n'
        'What version of the product are you using? '
        'On what operating system?\n'
        '\n'
        '\n'
        'Please provide any additional information below.\n'),
    'status': 'New',
    'labels': ['Type-Defect', 'Priority-Medium'],
    }

DEFAULT_DEVELOPER_DEFECT_REPORT_TEMPLATE = {
    'name': 'Defect report from developer',
    'summary': 'Enter one-line summary',
    'summary_must_be_edited': True,
    'content': (
        'What steps will reproduce the problem?\n'
        '1. \n'
        '2. \n'
        '3. \n'
        '\n'
        'What is the expected output?\n'
        '\n'
        '\n'
        'What do you see instead?\n'
        '\n'
        '\n'
        'Please use labels and text to provide additional information.\n'),
    'status': 'Accepted',
    'labels': ['Type-Defect', 'Priority-Medium'],
    'members_only': True,
    }


DEFAULT_TEMPLATES = [
    DEFAULT_DEVELOPER_DEFECT_REPORT_TEMPLATE,
    DEFAULT_USER_DEFECT_REPORT_TEMPLATE,
    ]

# Statuses that cause the UI to offer merging into another issue.
DEFAULT_STATUSES_OFFER_MERGE = ['Duplicate']


# This is used by JS on the issue admin page to indicate that the user deleted
# this template, so it should not be considered when updating the project's
# issue config.
DELETED_TEMPLATE_NAME = '<DELETED>'


# This is the default maximum total bytes of files attached
# to all the issues in a project.
ISSUE_ATTACHMENTS_QUOTA_HARD = 50 * 1024 * 1024L
ISSUE_ATTACHMENTS_QUOTA_SOFT = ISSUE_ATTACHMENTS_QUOTA_HARD - 1 * 1024 * 1024L

# Default value for nav action after updating an issue.
DEFAULT_AFTER_ISSUE_UPDATE = user_pb2.IssueUpdateNav.STAY_SAME_ISSUE

# Maximum comment length to mitigate spammy comments.
MAX_COMMENT_CHARS = 50 * 1024
MAX_SUMMARY_CHARS = 500

SHORT_SUMMARY_LENGTH = 45

# Number of recent commands to offer the user on the quick edit form.
MAX_RECENT_COMMANDS = 5

# These recent commands are shown if the user has no history of their own.
DEFAULT_RECENT_COMMANDS = [
    ('owner=me status=Accepted', "I'll handle this one."),
    ('owner=me Priority=High status=Accepted', "I'll look into it soon."),
    ('status=Fixed', 'The change for this is done now.'),
    ('Type=Enhancement', 'This is an enhancement, not a defect.'),
    ('status=Invalid', 'Please report this in a more appropriate place.'),
    ]

# Consider an issue to be a "noisy" issue if it has more than these:
NOISY_ISSUE_COMMENT_COUNT = 100
NOISY_ISSUE_STARRER_COUNT = 100

# After a project owner edits the filter rules, we recompute the
# derived field values in work items that each handle a chunk
# of this many items.
RECOMPUTE_DERIVED_FIELDS_BLOCK_SIZE = 1000

# This is the number of issues listed in the ReindexQueue table that will
# be processed each minute.
MAX_ISSUES_TO_REINDEX_PER_MINUTE = 500
+
diff --git a/appengine/monorail/tracker/tracker_helpers.py b/appengine/monorail/tracker/tracker_helpers.py
new file mode 100644
index 0000000..ec74994
--- /dev/null
+++ b/appengine/monorail/tracker/tracker_helpers.py
@@ -0,0 +1,851 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is govered by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""Helper functions and classes used by the Monorail Issue Tracker pages.
+
+This module has functions that are reused in multiple servlets or
+other modules.
+"""
+
+import collections
+import logging
+import re
+import urllib
+
+import settings
+
+from framework import filecontent
+from framework import framework_bizobj
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import monorailrequest
+from framework import permissions
+from framework import sorting
+from framework import template_helpers
+from framework import urls
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+
+
# HTML input field names for blocked on and blocking issue refs.
BLOCKED_ON = 'blocked_on'
BLOCKING = 'blocking'

# This string is used in HTML form element names to identify custom fields.
# E.g., a value for a custom field with field_id 12 would be specified in
# an HTML form element with name="custom_12".
_CUSTOM_FIELD_NAME_PREFIX = 'custom_'

# When the attachment quota gets within 1MB of the limit, stop offering
# users the option to attach files.
_SOFT_QUOTA_LEEWAY = 1024 * 1024

# Accessors for sorting built-in fields.  Maps a sortable column name to a
# one-argument function that extracts the sort key from an Issue PB.
SORTABLE_FIELDS = {
    'project': lambda issue: issue.project_name,
    'id': lambda issue: issue.local_id,
    'owner': tracker_bizobj.GetOwnerId,
    'reporter': lambda issue: issue.reporter_id,
    'component': lambda issue: issue.component_ids,
    'cc': tracker_bizobj.GetCcIds,
    'summary': lambda issue: issue.summary.lower(),
    'stars': lambda issue: issue.star_count,
    'attachments': lambda issue: issue.attachment_count,
    'opened': lambda issue: issue.opened_timestamp,
    'closed': lambda issue: issue.closed_timestamp,
    'modified': lambda issue: issue.modified_timestamp,
    'status': tracker_bizobj.GetStatus,
    'blocked': lambda issue: bool(issue.blocked_on_iids),
    # Issues with no blocking relationship sort after all that have one.
    'blockedon': lambda issue: issue.blocked_on_iids or sorting.MAX_STRING,
    'blocking': lambda issue: issue.blocking_iids or sorting.MAX_STRING,
    }


# Namedtuples that hold data parsed from post_data.
# Components entered with a +/- prefix to be added/removed.
ParsedComponents = collections.namedtuple(
    'ParsedComponents', 'entered_str, paths, paths_remove')
# Custom field values to set, remove, or clear entirely.
ParsedFields = collections.namedtuple(
    'ParsedFields', 'vals, vals_remove, fields_clear')
# Owner and CC user names as entered, plus their looked-up user IDs.
ParsedUsers = collections.namedtuple(
    'ParsedUsers', 'owner_username, owner_id, cc_usernames, '
    'cc_usernames_remove, cc_ids, cc_ids_remove')
# Blocked-on/blocking issue refs: resolved IIDs plus dangling codesite refs.
ParsedBlockers = collections.namedtuple(
    'ParsedBlockers', 'entered_str, iids, dangling_refs')
# Top-level result of parsing an issue entry/update form.
ParsedIssue = collections.namedtuple(
    'ParsedIssue', 'summary, comment, status, users, labels, '
    'labels_remove, components, fields, template_name, attachments, '
    'blocked_on, blocking')
+
+
def ParseIssueRequest(cnxn, post_data, services, errors, default_project_name):
  """Parse all the possible arguments out of the request.

  Args:
    cnxn: connection to SQL database.
    post_data: HTML form information.
    services: Connections to persistence layer.
    errors: object to accumulate validation error info.
    default_project_name: name of the project that contains the issue.

  Returns:
    A ParsedIssue namedtuple with all parsed information.  User IDs are
    looked up, but the entered strings are also returned so that the user
    can be bounced back to the form to correct any errors.
  """
  # Simple single-valued form fields.
  summary = post_data.get('summary', '')
  comment = post_data.get('comment', '')
  status = post_data.get('status', '')
  template_name = post_data.get('template_name', '')

  # Components and labels may carry a leading '-' to request removal.
  component_str = post_data.get('components', '')
  comp_paths, comp_paths_remove = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', component_str))
  parsed_components = ParsedComponents(
      component_str, comp_paths, comp_paths_remove)
  labels, labels_remove = _ClassifyPlusMinusItems(post_data.getall('label'))

  parsed_fields = _ParseIssueRequestFields(post_data)
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = _ParseIssueRequestAttachments(post_data)
  parsed_users = _ParseIssueRequestUsers(cnxn, post_data, services)
  parsed_blocked_on = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKED_ON)
  parsed_blocking = _ParseBlockers(
      cnxn, post_data, services, errors, default_project_name, BLOCKING)

  return ParsedIssue(
      summary, comment, status, parsed_users, labels, labels_remove,
      parsed_components, parsed_fields, template_name, attachments,
      parsed_blocked_on, parsed_blocking)
+
+
+def _ClassifyPlusMinusItems(add_remove_list):
+ """Classify the given plus-or-minus items into add and remove lists."""
+ add_remove_set = {s.strip() for s in add_remove_list}
+ add_strs = [s for s in add_remove_set if s and not s.startswith('-')]
+ remove_strs = [s[1:] for s in add_remove_set if s[1:] and s.startswith('-')]
+ return add_strs, remove_strs
+
+
def _ParseIssueRequestFields(post_data):
  """Iterate over post_data and return custom field values found in it.

  Custom field values arrive in form elements named 'custom_<field_id>'.
  A companion element named 'op_custom_<field_id>' may request that the
  values be removed ('remove') or that the whole field be cleared ('clear').

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    A ParsedFields namedtuple (vals, vals_remove, fields_clear).
  """
  vals = {}
  vals_remove = {}
  fields_clear = []
  op_prefix = 'op_' + _CUSTOM_FIELD_NAME_PREFIX
  for key in post_data.keys():
    if key.startswith(op_prefix):
      # Operation keys: only 'clear' matters here; 'remove' is handled
      # when the matching value key is seen.
      if post_data.get(key) == 'clear':
        fields_clear.append(int(key[len(op_prefix):]))
      continue
    if not key.startswith(_CUSTOM_FIELD_NAME_PREFIX):
      continue
    entered_vals = [v for v in post_data.getall(key) if v]
    if not entered_vals:
      continue
    field_id = int(key[len(_CUSTOM_FIELD_NAME_PREFIX):])
    if post_data.get('op_' + key) == 'remove':
      vals_remove[field_id] = entered_vals
    else:
      vals[field_id] = entered_vals

  return ParsedFields(vals, vals_remove, fields_clear)
+
+
def _ParseIssueRequestAttachments(post_data):
  """Extract and clean-up any attached files from the post data.

  Args:
    post_data: dict w/ values from the user's HTTP POST form data.

  Returns:
    [(filename, filecontents, mimetype), ...] with items for each attachment.
  """
  # TODO(jrobbins): change from numbered fields to a multi-valued field.
  attachments = []
  for field_num in xrange(1, 16):
    field_name = 'file%s' % field_num
    if field_name not in post_data:
      continue
    item = post_data[field_name]
    # A plain string means no file was uploaded in this form field.
    if isinstance(item, basestring):
      continue
    if '\\' in item.filename:  # IE insists on giving us the whole path.
      item.filename = item.filename[item.filename.rindex('\\') + 1:]
    if not item.filename:
      continue  # Skip any FILE fields that were not filled in.
    attachments.append(
        (item.filename, item.value,
         filecontent.GuessContentTypeFromFilename(item.filename)))

  return attachments
+
+
def _ParseIssueRequestUsers(cnxn, post_data, services):
  """Extract usernames from the POST data, categorize them, and look up IDs.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ data from the HTTP POST.
    services: Services.

  Returns:
    A ParsedUsers namedtuple (owner_username, owner_id, cc_usernames,
    cc_usernames_remove, cc_ids, cc_ids_remove).  Any of the user IDs may
    be None if the corresponding username or email address is invalid.
  """
  cc_username_str = post_data.get('cc', '')
  owner_email = post_data.get('owner', '').strip()

  cc_usernames, cc_usernames_remove = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', cc_username_str))

  # Batch all the email addresses into a single lookup call.
  emails_to_lookup = cc_usernames + cc_usernames_remove
  if owner_email:
    emails_to_lookup.append(owner_email)
  all_user_ids = services.user.LookupUserIDs(
      cnxn, emails_to_lookup, autocreate=True)

  if owner_email:
    owner_id = all_user_ids.get(owner_email)
  else:
    owner_id = framework_constants.NO_USER_SPECIFIED

  cc_ids = [all_user_ids.get(name) for name in cc_usernames]
  cc_ids_remove = [all_user_ids.get(name) for name in cc_usernames_remove]

  return ParsedUsers(
      owner_email, owner_id, cc_usernames, cc_usernames_remove,
      cc_ids, cc_ids_remove)
+
+
def _ParseBlockers(cnxn, post_data, services, errors, default_project_name,
                   field_name):
  """Parse input for issues that the current issue is blocking/blocked on.

  Args:
    cnxn: connection to SQL database.
    post_data: dict w/ values from the user's HTTP POST.
    services: connections to backend services.
    errors: object to accumulate validation error info.  On bad input, an
        error message string is stored on the attribute named field_name.
    default_project_name: name of the project that contains the issue.
    field_name: string HTML input field name, e.g., BLOCKED_ON or BLOCKING.

  Returns:
    A ParsedBlockers namedtuple with the user input string, a sorted list
    of resolved issue IDs, and a sorted list of dangling (codesite)
    issue ref tuples.
  """
  entered_str = post_data.get(field_name, '').strip()
  blocker_iids = []
  dangling_ref_tuples = []

  # The ID of the issue being edited is loop-invariant: compute it once
  # instead of re-parsing post_data on every entered ref.
  current_issue_id = int(post_data.get('id')) if post_data.get('id') else -1

  for ref_str in re.split('[,;\s]+', entered_str):
    try:
      issue_ref = tracker_bizobj.ParseIssueRef(ref_str)
    except ValueError:
      setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
      break

    if not issue_ref:
      continue  # Skip empty tokens produced by the split.

    blocker_project_name, blocker_issue_id = issue_ref
    if not blocker_project_name:
      blocker_project_name = default_project_name

    # Detect and report if the same issue was specified.
    if (blocker_issue_id == current_issue_id and
        blocker_project_name == default_project_name):
      setattr(errors, field_name, 'Cannot be %s the same issue' % field_name)
      break

    ref_projects = services.project.GetProjectsByName(
        cnxn, set([blocker_project_name]))
    blocker_iid = services.issue.ResolveIssueRefs(
        cnxn, ref_projects, default_project_name, [issue_ref])
    if not blocker_iid:
      if blocker_project_name in settings.recognized_codesite_projects:
        # We didn't find the issue, but it had an explicitly-specified project
        # which we know is on Codesite.  Allow it as a dangling reference.
        dangling_ref_tuples.append(issue_ref)
        continue
      else:
        # Otherwise, it doesn't exist, so report it.
        setattr(errors, field_name, 'Invalid issue ID %s' % ref_str.strip())
        break
    # De-dupe each resolved IID individually; the old code only checked the
    # first element before extending with the whole list.
    for iid in blocker_iid:
      if iid not in blocker_iids:
        blocker_iids.append(iid)

  blocker_iids.sort()
  dangling_ref_tuples.sort()
  return ParsedBlockers(entered_str, blocker_iids, dangling_ref_tuples)
+
+
def IsValidIssueOwner(cnxn, project, owner_id, services):
  """Return whether the given user ID may be an issue owner.

  An owner_id of NO_USER_SPECIFIED (0) is always valid: it simply means
  that the issue is unassigned.

  Args:
    cnxn: connection to SQL database.
    project: the current Project PB.
    owner_id: the user ID of the proposed issue owner.
    services: connections to backends.

  Returns:
    A pair (valid, err_msg).  valid is True if the given user ID can be an
    issue owner.  err_msg is an error message string to display to the user
    if valid == False, and is None if valid == True.
  """
  if owner_id != framework_constants.NO_USER_SPECIFIED:
    # The owner must be a member of the project.
    auth = monorailrequest.AuthData.FromUserID(cnxn, owner_id, services)
    if not framework_bizobj.UserIsInProject(project, auth.effective_ids):
      return False, 'Issue owner must be a project member'

    # A user group account cannot own an issue.
    group_ids = services.usergroup.DetermineWhichUserIDsAreGroups(
        cnxn, [owner_id])
    if owner_id in group_ids:
      return False, 'Issue owner cannot be a user group'

  return True, None
+
+
def GetAllowedOpenedAndClosedIssues(mr, issue_ids, services):
  """Get filtered lists of open and closed issues identified by issue_ids.

  Results are filtered down to only the issues that the requesting user is
  allowed to view.  E.g., we only auto-link to issues that the user would
  be able to view if he/she clicked the link.

  Args:
    mr: commonly used info parsed from the request.
    issue_ids: list of int issue IDs for the target issues.
    services: connection to issue, config, and project persistence layers.

  Returns:
    Two lists of issues that the user is allowed to view: one for open
    issues and one for closed issues.
  """
  open_issues, closed_issues = services.issue.GetOpenAndClosedIssues(
      mr.cnxn, issue_ids)
  project_dict = GetAllIssueProjects(
      mr.cnxn, open_issues + closed_issues, services.project)
  config_dict = services.config.GetProjectConfigs(mr.cnxn, project_dict.keys())

  def _OnlyViewable(issues):
    # Drop any issues that the requesting user may not view.
    return FilterOutNonViewableIssues(
        mr.auth.effective_ids, mr.auth.user_pb, project_dict, config_dict,
        issues)

  return _OnlyViewable(open_issues), _OnlyViewable(closed_issues)
+
+
def GetAllowedOpenAndClosedRelatedIssues(services, mr, issue):
  """Retrieve the issues that the given issue references.

  Related issues are the blocked on, blocking, and merged-into issues.
  This function also filters the results to only the issues that the
  user is allowed to view.

  Args:
    services: connection to issue, config, and project persistence layers.
    mr: commonly used info parsed from the request.
    issue: the Issue PB being viewed.

  Returns:
    Two dictionaries {issue_id: issue} of issues that the user is allowed
    to view: one for open issues and one for closed issues.
  """
  related_iids = list(issue.blocked_on_iids) + list(issue.blocking_iids)
  if issue.merged_into:
    related_iids.append(issue.merged_into)
  open_issues, closed_issues = GetAllowedOpenedAndClosedIssues(
      mr, related_iids, services)
  # Use a distinct loop variable to avoid shadowing the issue argument.
  open_dict = {related.issue_id: related for related in open_issues}
  closed_dict = {related.issue_id: related for related in closed_issues}
  return open_dict, closed_dict
+
+
def MakeViewsForUsersInIssues(cnxn, issue_list, user_service, omit_ids=None):
  """Lookup all the users involved in any of the given issues.

  Args:
    cnxn: connection to SQL database.
    issue_list: list of Issue PBs from a result query.
    user_service: Connection to User backend storage.
    omit_ids: a list of user_ids to omit, e.g., because we already have them.

  Returns:
    A dictionary {user_id: user_view, ...} for all the users involved
    in the given issues.
  """
  participant_ids = tracker_bizobj.UsersInvolvedInIssues(issue_list)
  if omit_ids:
    # Skip users that the caller already has views for.
    participant_ids.difference_update(omit_ids)

  # TODO(jrobbins): consider caching View objects as well.
  return framework_views.MakeAllUserViews(cnxn, user_service, participant_ids)
+
+
def FormatIssueListURL(
    mr, config, absolute=True, project_names=None, **kwargs):
  """Format a link back to list view as configured by user.

  Args:
    mr: commonly used info parsed from the request.
    config: ProjectIssueConfig for the current project.
    absolute: if True, return a fully qualified URL with scheme and host.
    project_names: list of project names; defaults to the current project.
    **kwargs: additional query string parameters to include.

  Returns:
    A URL string for the issue list view.
  """
  if project_names is None:
    project_names = [mr.project_name]

  # Carry over the user's current search and view settings, but only the
  # ones that differ from the defaults.
  if not tracker_constants.JUMP_RE.match(mr.query):
    if mr.query:
      kwargs['q'] = mr.query
    if mr.can and mr.can != 2:
      kwargs['can'] = mr.can
  if mr.col_spec and mr.col_spec != config.default_col_spec:
    kwargs['colspec'] = mr.col_spec
  if mr.sort_spec:
    kwargs['sort'] = mr.sort_spec
  if mr.group_by_spec:
    kwargs['groupby'] = mr.group_by_spec
  if mr.start:
    kwargs['start'] = mr.start
  if mr.num != tracker_constants.DEFAULT_RESULTS_PER_PAGE:
    kwargs['num'] = mr.num

  if len(project_names) == 1:
    url = '/p/%s%s' % (project_names[0], urls.ISSUE_LIST)
  else:
    # Cross-project list view: pass the projects as a query parameter.
    url = urls.ISSUE_LIST
    kwargs['projects'] = ','.join(sorted(project_names))

  param_strings = sorted(
      '%s=%s' % (k, urllib.quote((u'%s' % v).encode('utf-8')))
      for k, v in kwargs.iteritems())
  if param_strings:
    url += '?' + '&'.join(param_strings)
  if absolute:
    url = '%s://%s%s' % (mr.request.scheme, mr.request.host, url)

  return url
+
+
def FormatRelativeIssueURL(project_name, path, **kwargs):
  """Format a URL to get to an issue in the named project.

  Args:
    project_name: string name of the project containing the issue.
    path: string servlet path, e.g., from framework/urls.py.
    **kwargs: additional query-string parameters to include in the URL.

  Returns:
    A URL string.
  """
  servlet_path = '/p/%s%s' % (project_name, path)
  return framework_helpers.FormatURL(None, servlet_path, **kwargs)
+
+
def ComputeNewQuotaBytesUsed(project, attachments):
  """Add the given attachments to the project's attachment quota usage.

  Args:
    project: Project PB for the project being updated.
    attachments: list of (filename, content, mimetype) tuples being added
        to an issue.

  Returns:
    The new number of bytes used.

  Raises:
    OverAttachmentQuota: If project would go over quota.
  """
  # Accumulate with sum() rather than a Python 2 long literal (0L):
  # ints auto-promote to long as needed, and this keeps the code
  # Python 3 compatible.
  total_attach_size = sum(
      len(content) for _filename, content, _mimetype in attachments)

  new_bytes_used = project.attachment_bytes_used + total_attach_size
  quota = (project.attachment_quota or
           tracker_constants.ISSUE_ATTACHMENTS_QUOTA_HARD)
  if new_bytes_used > quota:
    raise OverAttachmentQuota(new_bytes_used - quota)
  return new_bytes_used
+
+
def IsUnderSoftAttachmentQuota(project):
  """Check the project's attachment quota against the soft quota limit.

  If there is a custom quota on the project, this checks against that
  (minus a fixed leeway) instead of the system-wide default soft quota.

  Args:
    project: Project PB for the project to examine.

  Returns:
    True if the project is under quota, False otherwise.
  """
  if project.attachment_quota:
    limit = project.attachment_quota - _SOFT_QUOTA_LEEWAY
  else:
    limit = tracker_constants.ISSUE_ATTACHMENTS_QUOTA_SOFT

  return project.attachment_bytes_used < limit
+
+
def GetAllIssueProjects(cnxn, issues, project_service):
  """Get all the projects that the given issues belong to.

  Args:
    cnxn: connection to SQL database.
    issues: list of issues, which may come from different projects.
    project_service: connection to project persistence layer.

  Returns:
    A dictionary {project_id: project} of all the projects that
    any of the given issues belongs to.
  """
  distinct_project_ids = {an_issue.project_id for an_issue in issues}
  return project_service.GetProjects(cnxn, distinct_project_ids)
+
+
def GetPermissionsInAllProjects(user, effective_ids, projects):
  """Look up the permissions for the given user in each project.

  Args:
    user: User PB for the signed-in user.
    effective_ids: set of user IDs for the user and their user groups.
    projects: list of Project PBs to consider.

  Returns:
    A dict {project_id: PermissionSet} with one entry per given project.
  """
  perms_by_project = {}
  for proj in projects:
    perms_by_project[proj.project_id] = permissions.GetPermissions(
        user, effective_ids, proj)
  return perms_by_project
+
+
def FilterOutNonViewableIssues(
    effective_ids, user, project_dict, config_dict, issues):
  """Return a filtered list of issues that the user can view.

  Args:
    effective_ids: set of user IDs for the requesting user and their groups.
    user: User PB for the requesting user.
    project_dict: dict {project_id: Project} covering the given issues.
    config_dict: dict of project configs; may contain a 'harmonized' entry
        that is used as a fallback when a project's config is absent.
    issues: list of Issue PBs to filter.

  Returns:
    A new list with the given issues in the same order, omitting deleted
    issues and any the user is not permitted to view.
  """
  perms_dict = GetPermissionsInAllProjects(
      user, effective_ids, project_dict.values())

  # Precompute the projects in which the user cannot view anything at all.
  denied_project_ids = {
      pid for pid, p in project_dict.iteritems()
      if not permissions.CanView(effective_ids, perms_dict[pid], p, [])}

  results = []
  for issue in issues:
    if issue.deleted or issue.project_id in denied_project_ids:
      continue

    if not permissions.HasRestrictions(issue):
      may_view = True
    else:
      # Restricted issues need a per-issue check that also considers any
      # extra permissions granted for this specific issue.
      perms = perms_dict[issue.project_id]
      project = project_dict[issue.project_id]
      config = config_dict.get(issue.project_id, config_dict.get('harmonized'))
      granted_perms = tracker_bizobj.GetGrantedPerms(
          issue, effective_ids, config)
      may_view = permissions.CanViewRestrictedIssueInVisibleProject(
          effective_ids, perms, project, issue, granted_perms=granted_perms)

    if may_view:
      results.append(issue)

  return results
+
+
def MeansOpenInProject(status, config):
  """Return true if this status means that the issue is still open.

  Args:
    status: issue status string. E.g., 'New'.
    config: the config of the current project.

  Returns:
    Boolean True if the status means that the issue is open.
  """
  target = status.lower()

  # Scan the project's known statuses for a case-insensitive match and
  # report whether that status declares itself to be open.
  for known_status in config.well_known_statuses:
    if known_status.status.lower() == target:
      return known_status.means_open

  # An unknown status is considered to be open.
  return True
+
+
def IsNoisy(num_comments, num_starrers):
  """Return True if this is a "noisy" issue that would send a ton of emails.

  The rule is that a very active issue with a large number of comments
  and starrers will only send notification when a comment (or change)
  is made by a project member.

  Args:
    num_comments: int number of comments on issue so far.
    num_starrers: int number of users who starred the issue.

  Returns:
    True if we will not bother starrers with an email notification for
    changes made by non-members.
  """
  has_many_comments = num_comments >= tracker_constants.NOISY_ISSUE_COMMENT_COUNT
  has_many_starrers = num_starrers >= tracker_constants.NOISY_ISSUE_STARRER_COUNT
  return has_many_comments and has_many_starrers
+
+
def MergeCCsAndAddComment(
    services, mr, issue, merge_into_project, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it.

  Convenience wrapper around MergeCCsAndAddCommentMultipleIssues() for the
  common case of merging a single issue.

  Args:
    services: connections to backend services.
    mr: commonly used info parsed from the request.
    issue: the Issue PB being merged into the target issue.
    merge_into_project: Project PB of the target issue's project.
    merge_into_issue: the target Issue PB.

  Returns:
    The comment string that was added to the target issue.
  """
  return MergeCCsAndAddCommentMultipleIssues(
      services, mr, [issue], merge_into_project, merge_into_issue)
+
+
def MergeCCsAndAddCommentMultipleIssues(
    services, mr, issues, merge_into_project, merge_into_issue):
  """Modify the CC field of the target issue and add a comment to it.

  For each merged issue, a "has been merged" line is added to a single
  comment, and the issue's CC list and owner are copied onto the target
  issue -- except when doing so could leak metadata from a view-restricted
  issue with different restrictions.

  Args:
    services: connections to backend services.
    mr: commonly used info parsed from the request.
    issues: list of Issue PBs being merged into the target issue.
    merge_into_project: Project PB of the target issue's project.
    merge_into_issue: the target Issue PB that absorbs the merged issues.

  Returns:
    The comment string that was added to the target issue.
  """
  merge_into_restricts = permissions.GetRestrictions(merge_into_issue)
  merge_comment = ''
  source_cc = set()
  for issue in issues:
    # Reference same-project issues by local ID, others as project:id.
    if issue.project_name == merge_into_issue.project_name:
      issue_ref_str = '%d' % issue.local_id
    else:
      issue_ref_str = '%s:%d' % (issue.project_name, issue.local_id)
    if merge_comment:
      merge_comment += '\n'
    merge_comment += 'Issue %s has been merged into this issue.' % issue_ref_str

    if permissions.HasRestrictions(issue, perm='View'):
      restricts = permissions.GetRestrictions(issue)
      # Don't leak metadata from a restricted issue.
      if (issue.project_id != merge_into_issue.project_id or
          set(restricts) != set(merge_into_restricts)):
        # TODO(jrobbins): user option to choose to merge CC or not.
        # TODO(jrobbins): add a private comment rather than nothing
        continue

    source_cc.update(issue.cc_ids)
    if issue.owner_id:  # owner_id == 0 means no owner
      source_cc.update([issue.owner_id])

  # Only CC users who are not already CC'd on the target issue.
  target_cc = merge_into_issue.cc_ids
  add_cc = [user_id for user_id in source_cc if user_id not in target_cc]

  services.issue.ApplyIssueComment(
      mr.cnxn, services, mr.auth.user_id,
      merge_into_project.project_id, merge_into_issue.local_id,
      merge_into_issue.summary, merge_into_issue.status,
      merge_into_issue.owner_id, list(target_cc) + list(add_cc),
      merge_into_issue.labels, merge_into_issue.field_values,
      merge_into_issue.component_ids, merge_into_issue.blocked_on_iids,
      merge_into_issue.blocking_iids, merge_into_issue.dangling_blocked_on_refs,
      merge_into_issue.dangling_blocking_refs, merge_into_issue.merged_into,
      index_now=False, comment=merge_comment)

  return merge_comment
+
+
def GetAttachmentIfAllowed(mr, services):
  """Retrieve the requested attachment, or raise an appropriate exception.

  The attachment ID to fetch is taken from mr.aid.

  Args:
    mr: commonly used info parsed from the request.
    services: connections to backend services.

  Returns:
    The requested Attachment PB, and the Issue that it belongs to.

  Raises:
    NoSuchAttachmentException: attachment was not found or was marked deleted.
    NoSuchIssueException: issue that contains attachment was not found.
    PermissionException: the user is not allowed to view the attachment.
  """
  attachment, cid, issue_id = services.issue.GetAttachmentAndContext(
      mr.cnxn, mr.aid)

  # The user must be able to view the issue that the attachment belongs to.
  issue = services.issue.GetIssue(mr.cnxn, issue_id)
  config = services.config.GetProjectConfig(mr.cnxn, issue.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      issue, mr.auth.effective_ids, config)
  permit_view = permissions.CanViewIssue(
      mr.auth.effective_ids, mr.perms, mr.project, issue,
      granted_perms=granted_perms)
  if not permit_view:
    raise permissions.PermissionException('Cannot view attachment\'s issue')

  # If the comment holding the attachment was deleted, the user must also
  # have permission to delete (and thus see) that comment.
  comment = services.issue.GetComment(mr.cnxn, cid)
  can_delete = False
  if mr.auth.user_id and mr.project:
    can_delete = permissions.CanDelete(
        mr.auth.user_id, mr.auth.effective_ids, mr.perms,
        comment.deleted_by, comment.user_id, mr.project,
        permissions.GetRestrictions(issue), granted_perms=granted_perms)
  if comment.deleted_by and not can_delete:
    raise permissions.PermissionException('Cannot view attachment\'s comment')

  return attachment, issue
+
+
def LabelsMaskedByFields(config, field_names, trim_prefix=False):
  """Return a list of EZTItems for labels that would be masked by fields.

  Args:
    config: ProjectIssueConfig containing the well-known labels.
    field_names: list of custom field names to check against.
    trim_prefix: if True, trim the masking field prefix from each name.

  Returns:
    A list of EZTItems, one for each masked well-known label.
  """
  return _LabelsMaskedOrNot(config, field_names, trim_prefix=trim_prefix)
+
+
def LabelsNotMaskedByFields(config, field_names, trim_prefix=False):
  """Return a list of EZTItems for labels that would not be masked.

  Args:
    config: ProjectIssueConfig containing the well-known labels.
    field_names: list of custom field names to check against.
    trim_prefix: if True, trim the masking field prefix from each name.

  Returns:
    A list of EZTItems, one for each non-masked well-known label.
  """
  return _LabelsMaskedOrNot(
      config, field_names, invert=True, trim_prefix=trim_prefix)
+
+
def _LabelsMaskedOrNot(config, field_names, invert=False, trim_prefix=False):
  """Return EZTItems for labels that'd be masked. Or not, when invert=True.

  Args:
    config: ProjectIssueConfig containing the well-known labels.
    field_names: list of custom field names to check against.
    invert: if True, select the labels that are NOT masked instead.
    trim_prefix: if True, trim the masking field prefix from each name.

  Returns:
    A list of EZTItems, one per selected well-known label, carrying the
    attributes used by the EZT templates.
  """
  field_names = [fn.lower() for fn in field_names]
  result = []
  for wkl in config.well_known_labels:
    masked_by = tracker_bizobj.LabelIsMaskedByField(wkl.label, field_names)
    if (masked_by and not invert) or (not masked_by and invert):
      display_name = wkl.label
      # Only trim when there actually is a masking field: with invert=True,
      # masked_by is falsy and len(masked_by) would raise or mis-trim.
      if trim_prefix and masked_by:
        display_name = display_name[len(masked_by) + 1:]
      result.append(template_helpers.EZTItem(
          name=display_name,
          name_padded=display_name.ljust(20),
          commented='#' if wkl.deprecated else '',
          docstring=wkl.label_docstring,
          docstring_short=template_helpers.FitUnsafeText(
              wkl.label_docstring, 40),
          idx=len(result)))

  return result
+
+
def LookupComponentIDs(component_paths, config, errors):
  """Look up the IDs of the specified components in the given config.

  Args:
    component_paths: list of component path strings entered by the user.
    config: ProjectIssueConfig for the current project.
    errors: object to accumulate validation error info; errors.components
        is set if any path is unknown.

  Returns:
    A list of component IDs for the paths that were found.
  """
  component_ids = []
  for comp_path in component_paths:
    if not comp_path:
      continue
    component_def = tracker_bizobj.FindComponentDef(comp_path, config)
    if component_def:
      component_ids.append(component_def.component_id)
    else:
      errors.components = 'Unknown component %s' % comp_path

  return component_ids
+
+
def ParseAdminUsers(cnxn, admins_str, user_service):
  """Parse all the usernames of component, field, or template admins.

  Args:
    cnxn: connection to SQL database.
    admins_str: comma/semicolon/space-separated string of admin usernames.
    user_service: connection to User backend storage.

  Returns:
    A pair (admin_ids, admins_str) of the looked-up user IDs and the
    original user-entered string.
  """
  admin_names, _removed = _ClassifyPlusMinusItems(
      re.split('[,;\s]+', admins_str))
  name_to_id = user_service.LookupUserIDs(cnxn, admin_names, autocreate=True)
  admin_ids = [name_to_id[name] for name in admin_names]
  return admin_ids, admins_str
+
+
def FilterIssueTypes(config):
  """Return a list of well-known issue types.

  Issue types are derived from well-known labels of the form 'Type-Xyz';
  the part after the first dash is the type name.

  Args:
    config: ProjectIssueConfig containing the well-known labels.

  Returns:
    A list of issue type name strings.
  """
  return [
      wkl.label.split('-', 1)[1]
      for wkl in config.well_known_labels
      if wkl.label.lower().startswith('type-')]
+
+
def ParseMergeFields(
    cnxn, services, project_name, post_data, status, config, issue, errors):
  """Parse info that identifies the issue to merge into, if any.

  Args:
    cnxn: connection to SQL database.
    services: connections to backend services.
    project_name: name of the project containing the current issue.
    post_data: dict w/ values from the user's HTTP POST.
    status: issue status string entered by the user.
    config: ProjectIssueConfig for the current project.
    issue: the current Issue PB.
    errors: object to accumulate validation error info; errors.merge_into_id
        is set on any parse or lookup failure.

  Returns:
    A pair (merge_into_text, merge_into_issue) of the user-entered text and
    the target Issue PB, or None when merging does not apply or the target
    could not be determined.
  """
  merge_into_text = ''
  merge_into_ref = None
  merge_into_issue = None

  # Only statuses configured to offer merging (e.g. Duplicate) have a
  # merge-into field at all.
  if status not in config.statuses_offer_merge:
    return '', None

  merge_into_text = post_data.get('merge_into', '')
  if merge_into_text:
    try:
      merge_into_ref = tracker_bizobj.ParseIssueRef(merge_into_text)
    except ValueError:
      logging.info('merge_into not an int: %r', merge_into_text)
      errors.merge_into_id = 'Please enter a valid issue ID'

  if not merge_into_ref:
    errors.merge_into_id = 'Please enter an issue ID'
    return merge_into_text, None

  # An issue may not be merged into itself.
  merge_into_project_name, merge_into_id = merge_into_ref
  if (merge_into_id == issue.local_id and
      (merge_into_project_name == project_name or
       not merge_into_project_name)):
    logging.info('user tried to merge issue into itself: %r', merge_into_ref)
    errors.merge_into_id = 'Cannot merge issue into itself'
    return merge_into_text, None

  project = services.project.GetProjectByName(
      cnxn, merge_into_project_name or project_name)
  try:
    merge_into_issue = services.issue.GetIssueByLocalID(
        cnxn, project.project_id, merge_into_id)
  except Exception:
    # NOTE(review): broad except -- presumably meant to catch the issue
    # service's no-such-issue exception; consider narrowing.
    logging.info('merge_into issue not found: %r', merge_into_ref)
    errors.merge_into_id = 'No such issue'
    return merge_into_text, None

  return merge_into_text, merge_into_issue
+
+
def GetNewIssueStarrers(cnxn, services, issue_id, merge_into_iid):
  """Get starrers of current issue who have not starred the target issue.

  Args:
    cnxn: connection to SQL database.
    services: connections to backend services.
    issue_id: int ID of the issue being merged away.
    merge_into_iid: int ID of the target issue being merged into.

  Returns:
    A set of user IDs that star the source issue but not the target.
  """
  source_starrers = set(
      services.issue_star.LookupItemStarrers(cnxn, issue_id))
  target_starrers = set(
      services.issue_star.LookupItemStarrers(cnxn, merge_into_iid))
  return source_starrers - target_starrers
+
+
def AddIssueStarrers(
    cnxn, services, mr, merge_into_iid, merge_into_project, new_starrers):
  """Merge all the starrers for the current issue into the target issue.

  Args:
    cnxn: connection to SQL database.
    services: connections to backend services.
    mr: commonly used info parsed from the request.
    merge_into_iid: int ID of the target issue.
    merge_into_project: Project PB of the target issue's project, or a
        falsy value to fall back to the project in mr.
    new_starrers: iterable of user IDs to add as starrers of the target.
  """
  project = merge_into_project or mr.project
  # NOTE(review): the config lookup uses mr.cnxn while SetStar below uses
  # the cnxn argument -- confirm whether these can ever differ.
  config = services.config.GetProjectConfig(mr.cnxn, project.project_id)
  for starrer_id in new_starrers:
    services.issue_star.SetStar(
        cnxn, services, config, merge_into_iid, starrer_id, True)
+
+
def IsMergeAllowed(merge_into_issue, mr, services):
  """Check to see if user has permission to merge with specified issue.

  The user must be permitted to both view and edit the target issue.

  Args:
    merge_into_issue: Issue PB of the proposed merge target.
    mr: commonly used info parsed from the request.
    services: connections to backend services.

  Returns:
    True if the user may both view and edit the merge-into issue.
  """
  target_project = services.project.GetProjectByName(
      mr.cnxn, merge_into_issue.project_name)
  target_config = services.config.GetProjectConfig(
      mr.cnxn, target_project.project_id)
  granted_perms = tracker_bizobj.GetGrantedPerms(
      merge_into_issue, mr.auth.effective_ids, target_config)
  restrictions = permissions.GetRestrictions(merge_into_issue)

  def _CanUse(perm):
    # Check one permission against the target issue's restriction labels.
    return mr.perms.CanUsePerm(
        perm, mr.auth.effective_ids, target_project, restrictions,
        granted_perms=granted_perms)

  return _CanUse(permissions.VIEW) and _CanUse(permissions.EDIT_ISSUE)
+
+
class Error(Exception):
  """Base class for errors raised by this module's helper functions."""
+
+
class OverAttachmentQuota(Error):
  """Project will exceed quota if the current operation is allowed.

  The exception argument is the number of bytes by which the project
  would exceed its attachment quota.
  """
diff --git a/appengine/monorail/tracker/tracker_views.py b/appengine/monorail/tracker/tracker_views.py
new file mode 100644
index 0000000..402f92c
--- /dev/null
+++ b/appengine/monorail/tracker/tracker_views.py
@@ -0,0 +1,839 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file or at
+# https://developers.google.com/open-source/licenses/bsd
+
+"""View objects to help display tracker business objects in templates."""
+
+import collections
+import logging
+import re
+import time
+import urllib
+
+from google.appengine.api import app_identity
+from third_party import ezt
+
+from framework import filecontent
+from framework import framework_constants
+from framework import framework_helpers
+from framework import framework_views
+from framework import gcs_helpers
+from framework import permissions
+from framework import template_helpers
+from framework import timestr
+from framework import urls
+from proto import tracker_pb2
+from services import user_svc
+from tracker import tracker_bizobj
+from tracker import tracker_constants
+from tracker import tracker_helpers
+
+
class IssueView(template_helpers.PBProxy):
  """Wrapper class that makes it easier to display an Issue via EZT."""

  def __init__(
      self, issue, users_by_id, config, open_related=None,
      closed_related=None, all_related=None):
    """Store relevant values for later display by EZT.

    Args:
      issue: An Issue protocol buffer.
      users_by_id: dict {user_id: UserViews} for all users mentioned in issue.
      config: ProjectIssueConfig for this issue.
      open_related: dict of visible open issues that are related to this issue.
      closed_related: dict {issue_id: issue} of visible closed issues that
          are related to this issue.
      all_related: dict {issue_id: issue} of all blocked-on, blocking,
          or merged-into issues referenced from this issue, regardless of
          perms.
    """
    super(IssueView, self).__init__(issue)

    # The users involved in this issue must be present in users_by_id if
    # this IssueView is to be used on the issue detail or peek pages. But,
    # they can be absent from users_by_id if the IssueView is used as a
    # tile in the grid view.
    self.owner = users_by_id.get(issue.owner_id)
    self.derived_owner = users_by_id.get(issue.derived_owner_id)
    self.cc = [users_by_id.get(cc_id) for cc_id in issue.cc_ids
               if cc_id]
    self.derived_cc = [users_by_id.get(cc_id)
                       for cc_id in issue.derived_cc_ids
                       if cc_id]
    self.status = framework_views.StatusView(issue.status, config)
    self.derived_status = framework_views.StatusView(
        issue.derived_status, config)
    # If we don't have a config available, we don't need to access is_open, so
    # let it be True.
    self.is_open = ezt.boolean(
        not config or
        tracker_helpers.MeansOpenInProject(
            tracker_bizobj.GetStatus(issue), config))

    # Explicit and derived components are merged into one list sorted by
    # path; components whose definitions were deleted are silently skipped.
    self.components = sorted(
        [ComponentValueView(component_id, config, False)
         for component_id in issue.component_ids
         if tracker_bizobj.FindComponentDefByID(component_id, config)] +
        [ComponentValueView(component_id, config, True)
         for component_id in issue.derived_component_ids
         if tracker_bizobj.FindComponentDefByID(component_id, config)],
        key=lambda cvv: cvv.path)

    # One FieldValueView per non-deleted field definition, sorted so that
    # fields group by applicable issue type, then by field name.
    self.fields = [
        MakeFieldValueView(
            fd, config, issue.labels, issue.derived_labels, issue.field_values,
            users_by_id)
        # TODO(jrobbins): field-level view restrictions, display options
        for fd in config.field_defs
        if not fd.is_deleted]
    self.fields = sorted(
        self.fields, key=lambda f: (f.applicable_type, f.field_name))

    # Labels that encode enum-type custom field values (FieldName-Value) are
    # masked out of the plain label lists below; they show as fields instead.
    field_names = [fd.field_name.lower() for fd in config.field_defs
                   if not fd.is_deleted]  # TODO(jrobbins): restricts
    self.labels = [
        framework_views.LabelView(label, config)
        for label in tracker_bizobj.NonMaskedLabels(issue.labels, field_names)]
    self.derived_labels = [
        framework_views.LabelView(label, config)
        for label in issue.derived_labels
        if not tracker_bizobj.LabelIsMaskedByField(label, field_names)]
    self.restrictions = _RestrictionsView(issue)

    # TODO(jrobbins): sort by order of labels in project config

    self.short_summary = issue.summary[:tracker_constants.SHORT_SUMMARY_LENGTH]

    if issue.closed_timestamp:
      self.closed = timestr.FormatAbsoluteDate(issue.closed_timestamp)
    else:
      self.closed = ''

    blocked_on_iids = issue.blocked_on_iids
    blocking_iids = issue.blocking_iids

    # Note that merged_into_str and blocked_on_str includes all issue
    # references, even those referring to issues that the user can't view,
    # so open_related and closed_related cannot be used.
    if all_related is not None:
      all_blocked_on_refs = [
          (all_related[ref_iid].project_name, all_related[ref_iid].local_id)
          for ref_iid in issue.blocked_on_iids]
      # Dangling refs point at issues still hosted on Codesite.
      all_blocked_on_refs.extend([
          (r.project, r.issue_id) for r in issue.dangling_blocked_on_refs])
      self.blocked_on_str = ', '.join(
          tracker_bizobj.FormatIssueRef(
              ref, default_project_name=issue.project_name)
          for ref in all_blocked_on_refs)
      all_blocking_refs = [
          (all_related[ref_iid].project_name, all_related[ref_iid].local_id)
          for ref_iid in issue.blocking_iids]
      all_blocking_refs.extend([
          (r.project, r.issue_id) for r in issue.dangling_blocking_refs])
      self.blocking_str = ', '.join(
          tracker_bizobj.FormatIssueRef(
              ref, default_project_name=issue.project_name)
          for ref in all_blocking_refs)
      if issue.merged_into:
        merged_issue = all_related[issue.merged_into]
        merged_into_ref = merged_issue.project_name, merged_issue.local_id
      else:
        merged_into_ref = None
      self.merged_into_str = tracker_bizobj.FormatIssueRef(
          merged_into_ref, default_project_name=issue.project_name)

    self.blocked_on = []
    self.blocking = []
    current_project_name = issue.project_name

    # Linkable ref views are only built when permission-filtered related
    # issue dicts were supplied; refs the user cannot view are dropped via
    # the irv.visible filter below.
    if open_related is not None and closed_related is not None:
      self.merged_into = IssueRefView(
          current_project_name, issue.merged_into,
          open_related, closed_related)

      self.blocked_on = [
          IssueRefView(current_project_name, iid, open_related, closed_related)
          for iid in blocked_on_iids]
      self.blocked_on.extend(
          [DanglingIssueRefView(ref.project, ref.issue_id)
           for ref in issue.dangling_blocked_on_refs])
      self.blocked_on = [irv for irv in self.blocked_on if irv.visible]
      # TODO(jrobbins): sort by irv project_name and local_id

      self.blocking = [
          IssueRefView(current_project_name, iid, open_related, closed_related)
          for iid in blocking_iids]
      self.blocking.extend(
          [DanglingIssueRefView(ref.project, ref.issue_id)
           for ref in issue.dangling_blocking_refs])
      self.blocking = [irv for irv in self.blocking if irv.visible]
      # TODO(jrobbins): sort by irv project_name and local_id

    self.detail_relative_url = tracker_helpers.FormatRelativeIssueURL(
        issue.project_name, urls.ISSUE_DETAIL, id=issue.local_id)
+
+
class _RestrictionsView(object):
  """An EZT object for the restrictions associated with an issue."""

  # Restrict label fragments that correspond to known permissions.
  _VIEW = permissions.VIEW.lower()
  _EDIT = permissions.EDIT_ISSUE.lower()
  _ADD_COMMENT = permissions.ADD_ISSUE_COMMENT.lower()
  _KNOWN_ACTION_KINDS = {_VIEW, _EDIT, _ADD_COMMENT}

  def __init__(self, issue):
    """Bucket the issue's Restrict-* labels by the action they guard."""
    # Restriction labels whose action kind is not one of the known three.
    self.other = []
    perms_by_action = collections.defaultdict(list)

    # GetRestrictions would lowercase the labels; iterate the raw labels
    # instead so the UI can preserve each restriction's original casing.
    for label in tracker_bizobj.GetLabels(issue):
      if not permissions.IsRestrictLabel(label):
        continue
      _restrict_kw, action, needed_perm = label.split('-', 2)
      action = action.lower()
      if action in self._KNOWN_ACTION_KINDS:
        perms_by_action[action].append(needed_perm)
      else:
        self.other.append(label)

    self.view = ' and '.join(perms_by_action[self._VIEW])
    self.add_comment = ' and '.join(perms_by_action[self._ADD_COMMENT])
    self.edit = ' and '.join(perms_by_action[self._EDIT])

    self.has_restrictions = ezt.boolean(
        self.view or self.add_comment or self.edit or self.other)
+
+
class IssueRefView(object):
  """A simple object to easily display links to issues in EZT."""

  def __init__(self, current_project_name, issue_id, open_dict, closed_dict):
    """Make a simple object to display a link to a referenced issue.

    Args:
      current_project_name: string name of the project hosting the page.
      issue_id: int issue ID of the referenced issue.
      open_dict: {issue_id: issue} of pre-fetched open issues that the
          user is allowed to view.
      closed_dict: {issue_id: issue} of pre-fetched closed issues that the
          user is allowed to view.

    If the referenced issue is in neither dict (unknown, or not viewable by
    this user), the view is marked not visible and renders nothing.
    """
    if issue_id and issue_id in open_dict:
      related_issue = open_dict[issue_id]
      currently_open = True
    elif issue_id and issue_id in closed_dict:
      related_issue = closed_dict[issue_id]
      currently_open = False
    else:
      # Issue not found or not visible to this user, so don't link to it.
      self.visible = ezt.boolean(False)
      return

    self.visible = ezt.boolean(True)
    self.is_open = ezt.boolean(currently_open)

    if current_project_name == related_issue.project_name:
      # Same project: a short relative link suffices.
      self.url = 'detail?id=%s' % related_issue.local_id
      self.display_name = 'issue %s' % related_issue.local_id
    else:
      # Cross-project reference: link to the other project and qualify
      # the displayed name with the project.
      self.url = '/p/%s%s?id=%s' % (
          related_issue.project_name, urls.ISSUE_DETAIL,
          related_issue.local_id)
      self.display_name = 'issue %s:%s' % (
          related_issue.project_name, related_issue.local_id)

    self.summary = related_issue.summary

  def DebugString(self):
    if not self.visible:
      return 'IssueRefView(not visible)'
    return 'IssueRefView(%s)' % self.display_name
+
+
class DanglingIssueRefView(object):
  """Display a link to a referenced issue still hosted on Codesite."""

  def __init__(self, project_name, issue_id):
    """Make a simple object to display a link to an issue still in Codesite.

    Satisfies the same API and internal data members as IssueRefView,
    except for the arguments to __init__.

    Args:
      project_name: The name of the project on Codesite.
      issue_id: The local id of the issue in that project.
    """
    # NOTE(review): plain booleans here, whereas IssueRefView uses
    # ezt.boolean() -- confirm templates treat both the same.
    self.visible = True
    self.is_open = True  # TODO(agable) Make a call to Codesite to set this?
    ref = '%s:%d' % (project_name, issue_id)
    self.url = 'https://code.google.com/p/%s/issues/detail?id=%d' % (
        project_name, issue_id)
    self.display_name = 'issue ' + ref
    self.short_name = 'issue ' + ref
    self.summary = 'Issue %d in %s.' % (issue_id, project_name)

  def DebugString(self):
    return 'DanglingIssueRefView(%s)' % self.display_name
+
+
class IssueCommentView(template_helpers.PBProxy):
  """Wrapper class that makes it easier to display an IssueComment via EZT."""

  def __init__(
      self, project_name, comment_pb, users_by_id, autolink,
      all_referenced_artifacts, mr, issue, effective_ids=None):
    """Get IssueComment PB and make its fields available as attrs.

    Args:
      project_name: Name of the project this issue belongs to.
      comment_pb: Comment protocol buffer.
      users_by_id: dict mapping user_ids to UserViews, including
        the user that entered the comment, and any changed participants.
      autolink: utility object for automatically linking to other
        issues, svn revisions, etc.
      all_referenced_artifacts: opaque object with details of referenced
        artifacts that is needed by autolink.
      mr: common information parsed from the HTTP request.
      issue: Issue PB for the issue that this comment is part of.
      effective_ids: optional set of int user IDs for the comment author.
    """
    super(IssueCommentView, self).__init__(comment_pb)

    self.id = comment_pb.id
    # Note: unlike elsewhere in this file, this is an indexed lookup, so the
    # comment author must be present in users_by_id.
    self.creator = users_by_id[comment_pb.user_id]

    # TODO(jrobbins): this should be based on the issue project, not the
    # request project for non-project views and cross-project.
    if mr.project:
      self.creator_role = framework_helpers.GetRoleName(
          effective_ids or {self.creator.user_id}, mr.project)
    else:
      self.creator_role = None

    # Several date renderings of the same timestamp: short absolute,
    # relative ("3 days ago"), and a full tooltip string.
    time_tuple = time.localtime(comment_pb.timestamp)
    self.date_string = timestr.FormatAbsoluteDate(
        comment_pb.timestamp, old_format=timestr.MONTH_DAY_FMT)
    self.date_relative = timestr.FormatRelativeDate(comment_pb.timestamp)
    self.date_tooltip = time.asctime(time_tuple)
    # Comment text is split into runs so autolink can mark up references.
    self.text_runs = _ParseTextRuns(comment_pb.content)
    if autolink:
      self.text_runs = autolink.MarkupAutolinks(
          mr, self.text_runs, all_referenced_artifacts)

    self.attachments = [AttachmentView(attachment, project_name)
                        for attachment in comment_pb.attachments]
    self.amendments = [
        AmendmentView(amendment, users_by_id, mr.project_name)
        for amendment in comment_pb.amendments]
    # Treat comments from banned users as being deleted.
    self.is_deleted = (comment_pb.deleted_by or
                       (self.creator and self.creator.banned))
    # can_delete also controls whether a deleted comment remains visible to
    # this user (see the `visible` assignment at the end).
    self.can_delete = False
    if mr.auth.user_id and mr.project:
      # TODO(jrobbins): pass through config, then I can do:
      # granted_perms = tracker_bizobj.GetGrantedPerms(
      #     issue, mr.auth.effective_ids, config)
      self.can_delete = permissions.CanDelete(
          mr.auth.user_id, mr.auth.effective_ids, mr.perms,
          comment_pb.deleted_by, comment_pb.user_id,
          mr.project, permissions.GetRestrictions(issue))

      # Prevent spammers from undeleting their own comments, but
      # allow people with permission to undelete their own comments.
      if comment_pb.is_spam and comment_pb.user_id == mr.auth.user_id:
        self.can_delete = mr.perms.HasPerm(permissions.MODERATE_SPAM,
                                           mr.auth.user_id, mr.project)

    self.visible = self.can_delete or not self.is_deleted
+
+
+_TEMPLATE_TEXT_RE = re.compile('^(<b>[^<]+</b>)', re.MULTILINE)
+
+
def _ParseTextRuns(content):
  """Convert the user's comment to a list of TextRun objects."""
  # The regex split keeps the <b>...</b> chunks as their own list elements.
  return [_ChunkToRun(chunk)
          for chunk in _TEMPLATE_TEXT_RE.split(content)]
+
+
def _ChunkToRun(chunk):
  """Convert a substring of the user's comment to a TextRun object."""
  is_bold = chunk.startswith('<b>') and chunk.endswith('</b>')
  if is_bold:
    # Strip the markup and record the tag so it can be re-rendered safely.
    return template_helpers.TextRun(chunk[3:-4], tag='b')
  return template_helpers.TextRun(chunk)
+
+
+VIEWABLE_IMAGE_TYPES = ['image/jpeg', 'image/gif', 'image/png', 'image/x-png']
+MAX_PREVIEW_FILESIZE = 4 * 1024 * 1024 # 4MB
+
+
class LogoView(template_helpers.PBProxy):
  """Wrapper class to make it easier to display project logos via EZT."""

  # NOTE(review): unlike other PBProxy subclasses in this file, __init__
  # never calls super().__init__, so project_pb fields are not proxied --
  # confirm that is intended.

  def __init__(self, project_pb):
    """Expose signed GCS URLs for the project logo, if one is configured."""
    has_logo = bool(
        project_pb and project_pb.logo_gcs_id and project_pb.logo_file_name)
    if not has_logo:
      # No logo configured: render empty URLs.
      self.thumbnail_url = ''
      self.viewurl = ''
      return

    bucket = app_identity.get_default_gcs_bucket_name()
    object_path = '/' + bucket + project_pb.logo_gcs_id
    self.filename = project_pb.logo_file_name
    self.mimetype = filecontent.GuessContentTypeFromFilename(self.filename)

    self.thumbnail_url = gcs_helpers.SignUrl(object_path + '-thumbnail')
    content_disposition = urllib.urlencode(
        {'response-content-displacement':
         ('attachment; filename=%s' % self.filename)})
    self.viewurl = (
        gcs_helpers.SignUrl(object_path) + '&' + content_disposition)
+
+
class AttachmentView(template_helpers.PBProxy):
  """Wrapper class to make it easier to display issue attachments via EZT."""

  def __init__(self, attach_pb, project_name):
    """Get IssueAttachmentContent PB and make its fields available as attrs.

    Args:
      attach_pb: Attachment part of IssueComment protocol buffer.
      project_name: string Name of the current project.
    """
    super(AttachmentView, self).__init__(attach_pb)
    self.filesizestr = template_helpers.BytesKbOrMb(attach_pb.filesize)
    self.downloadurl = 'attachment?aid=%s' % attach_pb.attachment_id

    # Default to download-only; only safely previewable attachments get
    # view/thumbnail URLs.
    self.url = None
    self.thumbnail_url = None
    mimetype = attach_pb.mimetype
    filesize = attach_pb.filesize
    if IsViewableImage(mimetype, filesize):
      inline_url = self.downloadurl + '&inline=1'
      self.url = inline_url
      self.thumbnail_url = inline_url + '&thumb=1'
    elif IsViewableText(mimetype, filesize):
      self.url = tracker_helpers.FormatRelativeIssueURL(
          project_name, urls.ISSUE_ATTACHMENT_TEXT,
          aid=attach_pb.attachment_id)

    self.iconurl = '/images/paperclip.png'
+
+
def IsViewableImage(mimetype_charset, filesize):
  """Return true if we can safely display such an image in the browser.

  Args:
    mimetype_charset: string mimetype reported by the 'file' command.  It
        may be just 'foo/bar' or the longer 'foo/bar; charset=baz'.
    filesize: int length of the file in bytes.

  Returns:
    True iff we should allow the user to view a thumbnail or safe version
    of the image in the browser.  False if this might not be safe to view,
    in which case we only offer a download link.
  """
  # Drop any '; charset=...' suffix before checking the whitelist.
  mimetype, _sep, _charset = mimetype_charset.partition(';')
  if mimetype not in VIEWABLE_IMAGE_TYPES:
    return False
  return filesize < MAX_PREVIEW_FILESIZE
+
+
def IsViewableText(mimetype, filesize):
  """Return true if we can safely display such a file as escaped text."""
  if not mimetype.startswith('text/'):
    return False
  return filesize < MAX_PREVIEW_FILESIZE
+
+
class AmendmentView(object):
  """Wrapper class that makes it easier to display an Amendment via EZT."""

  def __init__(self, amendment, users_by_id, project_name):
    """Get the info from the PB and put it into easily accessible attrs.

    Args:
      amendment: Amendment part of an IssueComment protocol buffer.
      users_by_id: dict mapping user_ids to UserViews.
      project_name: Name of the project the issue/comment/amendment is in.
    """
    # TODO(jrobbins): take field-level restrictions into account.
    # Including the case where user is not allowed to see any amendments.
    field = tracker_bizobj.GetAmendmentFieldName(amendment)
    new_value_str = tracker_bizobj.AmendmentString(amendment, users_by_id)
    value_links = tracker_bizobj.AmendmentLinks(
        amendment, users_by_id, project_name)
    self.field_name = field
    self.newvalue = new_value_str
    self.values = value_links
+
+
class ComponentDefView(template_helpers.PBProxy):
  """Wrapper class to make it easier to display component definitions."""

  def __init__(self, component_def, users_by_id):
    super(ComponentDefView, self).__init__(component_def)

    # Split 'Grand>Parent>Leaf' paths on the last '>'; top-level
    # components have an empty parent_path.
    parent, sep, leaf = component_def.path.rpartition('>')
    if sep:
      self.parent_path = parent
      self.leaf_name = leaf
    else:
      self.parent_path = ''
      self.leaf_name = component_def.path

    self.docstring_short = template_helpers.FitUnsafeText(
        component_def.docstring, 200)

    self.admins = [users_by_id.get(admin_id)
                   for admin_id in component_def.admin_ids]
    self.cc = [users_by_id.get(cc_id) for cc_id in component_def.cc_ids]

    # Space-separated CSS class tokens (with a trailing space) used to
    # filter component rows in the UI.
    css_classes = ['all']
    if not self.parent_path:
      css_classes.append('toplevel')
    css_classes.append('deprecated' if component_def.deprecated else 'active')
    self.classes = ' '.join(css_classes) + ' '
+
+
class ComponentValueView(object):
  """Wrapper class that makes it easier to display a component value."""

  def __init__(self, component_id, config, derived):
    """Make the component name and docstring available as attrs.

    Args:
      component_id: int component_id to look up in the config.
      config: ProjectIssueConfig PB for the issue's project.
      derived: True if this component was derived.
    """
    component_def = tracker_bizobj.FindComponentDefByID(component_id, config)
    self.path = component_def.path
    self.docstring = component_def.docstring
    self.docstring_short = template_helpers.FitUnsafeText(
        component_def.docstring, 60)
    self.derived = ezt.boolean(derived)
+
+
class FieldValueView(object):
  """Wrapper class that makes it easier to display a custom field value."""

  def __init__(
      self, fd, config, values, derived_values, issue_types, applicable=None):
    """Make several values related to this field available as attrs.

    Args:
      fd: field definition to be displayed (or not, if no value).
      config: ProjectIssueConfig PB for the issue's project.
      values: list of explicit field values.
      derived_values: list of derived field values.
      issue_types: set of lowered string values from issues' "Type-*" labels.
          May be None when `applicable` is explicitly given (see
          MakeBounceFieldValueViews).
      applicable: optional boolean that overrides the rule that determines
        when a field is applicable.
    """
    self.field_def = FieldDefView(fd, config)
    self.field_id = fd.field_id
    self.field_name = fd.field_name
    self.field_docstring = fd.docstring
    self.field_docstring_short = template_helpers.FitUnsafeText(
        fd.docstring, 60)

    # Lists of EZTItems, as built by FindFieldValues or
    # _ConvertLabelsToFieldValues.
    self.values = values
    self.derived_values = derived_values

    self.applicable_type = fd.applicable_type
    if applicable is not None:
      self.applicable = ezt.boolean(applicable)
    else:
      # A field is applicable to a given issue if it (a) applies to all issues,
      # or (b) already has a value on this issue, or (c) says that it applies to
      # issues with this type (or a prefix of it).
      self.applicable = ezt.boolean(
          not self.applicable_type or values or
          any(type_label.startswith(self.applicable_type.lower())
              for type_label in issue_types))
      # TODO(jrobbins): also evaluate applicable_predicate

    # Show the field if it has any value or is applicable to this issue.
    self.display = ezt.boolean(  # or fd.show_empty
        self.values or self.derived_values or self.applicable)
+
+
def MakeFieldValueView(
    fd, config, labels, derived_labels, field_values, users_by_id):
  """Return a view on the issue's field value."""
  field_name_lower = fd.field_name.lower()

  if fd.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
    # Enum field values are stored as 'FieldName-Value' labels rather than
    # as FieldValue PBs.
    label_docs = {wkl.label: wkl.label_docstring
                  for wkl in config.well_known_labels}
    values = _ConvertLabelsToFieldValues(
        labels, field_name_lower, label_docs)
    derived_values = _ConvertLabelsToFieldValues(
        derived_labels, field_name_lower, label_docs)
  else:
    explicit_fvs = [fv for fv in field_values if not fv.derived]
    derived_fvs = [fv for fv in field_values if fv.derived]
    values = FindFieldValues(
        explicit_fvs, fd.field_id, fd.field_type, users_by_id)
    derived_values = FindFieldValues(
        derived_fvs, fd.field_id, fd.field_type, users_by_id)

  # Collect the lowercased issue types from 'Type-*' labels; they decide
  # whether this field is applicable to the issue.
  issue_types = set()
  for lab in list(derived_labels) + list(labels):
    if lab.lower().startswith('type-'):
      issue_types.add(lab.split('-', 1)[1].lower())

  return FieldValueView(fd, config, values, derived_values, issue_types)
+
+
def FindFieldValues(field_values, field_id, field_type, users_by_id):
  """Accumulate appropriate int, string, or user values in the given fields."""
  relevant_fvs = [fv for fv in field_values if fv.field_id == field_id]

  result = []
  for idx, fv in enumerate(relevant_fvs):
    if field_type == tracker_pb2.FieldTypes.INT_TYPE:
      val = fv.int_value
    elif field_type == tracker_pb2.FieldTypes.STR_TYPE:
      val = fv.str_value
    elif field_type == tracker_pb2.FieldTypes.USER_TYPE:
      if fv.user_id in users_by_id:
        val = users_by_id[fv.user_id].email
      else:
        val = 'USER_%d' % fv.user_id  # Should never be visible
    else:
      logging.error('unexpected field type %r', field_type)
      val = ''

    # NOTE(review): unlike _ConvertLabelsToFieldValues, long values are not
    # shortened here; docstring is just the raw value.
    result.append(template_helpers.EZTItem(
        val=val, docstring=val, idx=idx))

  return result
+
+
def MakeBounceFieldValueViews(field_vals, config):
  """Return a list of field values to display on a validation bounce page."""
  field_value_views = []
  for fd in config.field_defs:
    if fd.field_id not in field_vals:
      continue
    # TODO(jrobbins): also bounce derived values.
    val_items = [
        template_helpers.EZTItem(val=v, docstring='', idx=idx)
        for idx, v in enumerate(field_vals[fd.field_id])]
    # applicable=True: the user just entered these values, so always show.
    field_value_views.append(FieldValueView(
        fd, config, val_items, [], None, applicable=True))

  return field_value_views
+
+
def _ConvertLabelsToFieldValues(labels, field_name_lower, label_docs):
  """Iterate through the given labels and pull out values for the field.

  Args:
    labels: a list of label strings.
    field_name_lower: lowercase string name of the custom field.
    label_docs: {label: docstring} for well-known labels in the project.

  Returns:
    A list of EZT items with val and docstring fields.  One item is included
    for each label that matches the given field name.
  """
  prefix = field_name_lower + '-'
  prefix_len = len(prefix)
  items = []
  for idx, lab in enumerate(labels):
    if not lab.lower().startswith(prefix):
      continue
    val = lab[prefix_len:]
    # Shorten the display value with an ellipsis if it is too long.
    items.append(template_helpers.EZTItem(
        val=val,
        val_short=template_helpers.FitUnsafeText(str(val), 20),
        docstring=label_docs.get(lab, ''),
        idx=idx))

  return items
+
+
class FieldDefView(template_helpers.PBProxy):
  """Wrapper class to make it easier to display field definitions via EZT."""

  def __init__(self, field_def, config, user_views=None):
    """Expose a FieldDef PB plus derived display attrs.

    Args:
      field_def: FieldDef PB to display.
      config: ProjectIssueConfig PB for the project that owns the field.
      user_views: optional {user_id: UserView} used to show field admins.
    """
    super(FieldDefView, self).__init__(field_def)

    # String form of the FieldTypes enum value.
    self.type_name = str(field_def.field_type)

    self.choices = []
    if field_def.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
      # Enum choices come from well-known labels prefixed by the field name.
      self.choices = tracker_helpers.LabelsMaskedByFields(
          config, [field_def.field_name], trim_prefix=True)

    self.docstring_short = template_helpers.FitUnsafeText(
        field_def.docstring, 200)
    # Human-readable description of the validation rule.  Note that each
    # check below overwrites any earlier message, so the last applicable
    # rule wins (e.g. needs_perm trumps regex, which trumps min/max).
    self.validate_help = None

    if field_def.min_value is not None:
      self.min_value = field_def.min_value
      self.validate_help = 'Value must be >= %d' % field_def.min_value
    else:
      self.min_value = None  # Otherwise it would default to 0

    if field_def.max_value is not None:
      self.max_value = field_def.max_value
      self.validate_help = 'Value must be <= %d' % field_def.max_value
    else:
      self.max_value = None  # Otherwise it would default to 0

    # Combined message replaces the one-sided messages set above.
    if field_def.min_value is not None and field_def.max_value is not None:
      self.validate_help = 'Value must be between %d and %d' % (
          field_def.min_value, field_def.max_value)

    if field_def.regex:
      self.validate_help = 'Value must match regex: %s' % field_def.regex

    if field_def.needs_member:
      self.validate_help = 'Value must be a project member'

    if field_def.needs_perm:
      self.validate_help = (
          'Value must be a project member with permission %s' %
          field_def.needs_perm)

    self.admins = []
    if user_views:
      self.admins = [user_views.get(admin_id)
                     for admin_id in field_def.admin_ids]
+
+
class IssueTemplateView(template_helpers.PBProxy):
  """Wrapper class to make it easier to display an issue template via EZT."""

  def __init__(self, mr, template, user_service, config):
    """Expose a TemplateDef PB plus derived display attrs.

    Args:
      mr: common information parsed from the HTTP request.
      template: TemplateDef PB to display.
      user_service: persistence layer for user accounts.
      config: ProjectIssueConfig PB for the template's project.
    """
    super(IssueTemplateView, self).__init__(template)

    self.ownername = ''
    try:
      self.owner_view = framework_views.MakeUserView(
          mr.cnxn, user_service, template.owner_id)
    except user_svc.NoSuchUserException:
      # A missing default owner is tolerated; show no owner.
      self.owner_view = None
    if self.owner_view:
      self.ownername = self.owner_view.email

    self.admin_views = framework_views.MakeAllUserViews(
        mr.cnxn, user_service, template.admin_ids).values()
    self.admin_names = ', '.join(sorted([
        admin_view.email for admin_view in self.admin_views]))

    self.summary_must_be_edited = ezt.boolean(template.summary_must_be_edited)
    self.members_only = ezt.boolean(template.members_only)
    self.owner_defaults_to_member = ezt.boolean(
        template.owner_defaults_to_member)
    self.component_required = ezt.boolean(template.component_required)

    component_paths = []
    for component_id in template.component_ids:
      component_paths.append(
          tracker_bizobj.FindComponentDefByID(component_id, config).path)
    self.components = ', '.join(component_paths)

    self.can_view = ezt.boolean(permissions.CanViewTemplate(
        mr.auth.effective_ids, mr.perms, mr.project, template))
    self.can_edit = ezt.boolean(permissions.CanEditTemplate(
        mr.auth.effective_ids, mr.perms, mr.project, template))

    # Labels that encode enum-type custom field values are masked out of
    # the plain label list; they are shown as fields instead.
    field_name_set = {fd.field_name.lower() for fd in config.field_defs
                      if not fd.is_deleted}  # TODO(jrobbins): restrictions
    non_masked_labels = [
        lab for lab in template.labels
        if not tracker_bizobj.LabelIsMaskedByField(lab, field_name_set)]

    # Expose label0..label{MAX_LABELS-1} attrs, padding unused slots with
    # empty strings.
    for i, label in enumerate(non_masked_labels):
      setattr(self, 'label%d' % i, label)
    for i in range(len(non_masked_labels), framework_constants.MAX_LABELS):
      setattr(self, 'label%d' % i, '')

    field_user_views = MakeFieldUserViews(mr.cnxn, template, user_service)
    self.field_values = []
    for fv in template.field_values:
      self.field_values.append(template_helpers.EZTItem(
          field_id=fv.field_id,
          val=tracker_bizobj.GetFieldValue(fv, field_user_views),
          idx=len(self.field_values)))

    self.complete_field_values = [
        MakeFieldValueView(
            fd, config, template.labels, [], template.field_values,
            field_user_views)
        # TODO(jrobbins): field-level view restrictions, display options
        for fd in config.field_defs
        if not fd.is_deleted]

    # Templates only display and edit the first value of multi-valued fields, so
    # expose a single value, if any.
    # TODO(jrobbins): Fully support multi-valued fields in templates.
    for idx, field_value_view in enumerate(self.complete_field_values):
      field_value_view.idx = idx
      if field_value_view.values:
        field_value_view.val = field_value_view.values[0].val
      else:
        field_value_view.val = None
+
+
def MakeFieldUserViews(cnxn, template, user_service):
  """Return {user_id: user_view} for users in template field values."""
  field_user_ids = [fv.user_id for fv in template.field_values if fv.user_id]
  return framework_views.MakeAllUserViews(cnxn, user_service, field_user_ids)
+
+
+class ConfigView(template_helpers.PBProxy):
+ """Make it easy to display most fieds of a ProjectIssueConfig in EZT."""
+
+ def __init__(self, mr, services, config):
+ """Gather data for the issue section of a project admin page.
+
+ Args:
+ mr: MonorailRequest, including a database connection, the current
+ project, and authenticated user IDs.
+ services: Persist services with ProjectService, ConfigService, and
+ UserService included.
+ config: ProjectIssueConfig for the current project..
+
+ Returns:
+ Project info in a dict suitable for EZT.
+ """
+ super(ConfigView, self).__init__(config)
+ self.open_statuses = []
+ self.closed_statuses = []
+ for wks in config.well_known_statuses:
+ item = template_helpers.EZTItem(
+ name=wks.status,
+ name_padded=wks.status.ljust(20),
+ commented='#' if wks.deprecated else '',
+ docstring=wks.status_docstring)
+ if tracker_helpers.MeansOpenInProject(wks.status, config):
+ self.open_statuses.append(item)
+ else:
+ self.closed_statuses.append(item)
+
+ self.templates = [
+ IssueTemplateView(mr, tmpl, services.user, config)
+ for tmpl in config.templates]
+ for index, template in enumerate(self.templates):
+ template.index = index
+
+ self.field_names = [ # TODO(jrobbins): field-level controls
+ fd.field_name for fd in config.field_defs if not fd.is_deleted]
+ self.issue_labels = tracker_helpers.LabelsNotMaskedByFields(
+ config, self.field_names)
+ self.excl_prefixes = [
+ prefix.lower() for prefix in config.exclusive_label_prefixes]
+ self.restrict_to_known = ezt.boolean(config.restrict_to_known)
+
+ self.default_col_spec = (
+ config.default_col_spec or tracker_constants.DEFAULT_COL_SPEC)