Fixed tests, hint at ShouldResemble if helpful, vendored diffmatchpatch.
diff --git a/equality.go b/equality.go
index 92095ef..37a49f4 100644
--- a/equality.go
+++ b/equality.go
@@ -26,8 +26,7 @@
 func shouldEqual(actual, expected interface{}) (message string) {
 	defer func() {
 		if r := recover(); r != nil {
-			diff := fmt.Sprintf(shouldHaveBeenEqual, expected, actual) + diff(expected, actual)
-			message = serializer.serialize(expected, actual, diff)
+			message = serializer.serialize(expected, actual, composeEqualityMismatchMessage(expected, actual))
 		}
 	}()
 
@@ -40,10 +39,17 @@
 	return serializer.serialize(expected, actual, composeEqualityMismatchMessage(expected, actual))
 }
 func composeEqualityMismatchMessage(expected, actual interface{}) string {
-	if fmt.Sprintf("%v", expected) == fmt.Sprintf("%v", actual) && reflect.TypeOf(expected) != reflect.TypeOf(actual) {
+	var (
+		renderedExpected = fmt.Sprintf("%v", expected)
+		renderedActual   = fmt.Sprintf("%v", actual)
+	)
+
+	if renderedExpected != renderedActual {
+		return fmt.Sprintf(shouldHaveBeenEqual+composePrettyDiff(renderedExpected, renderedActual), expected, actual)
+	} else if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
 		return fmt.Sprintf(shouldHaveBeenEqualTypeMismatch, expected, expected, actual, actual)
 	} else {
-		return fmt.Sprintf(shouldHaveBeenEqual+diff(expected, actual), expected, actual)
+		return fmt.Sprintf(shouldHaveBeenEqualNoResemblance, renderedExpected)
 	}
 }
 
@@ -180,10 +186,10 @@
 	}
 
 	if matchError := oglematchers.DeepEquals(expected[0]).Matches(actual); matchError != nil {
-		message := fmt.Sprintf(shouldHaveResembled, render.Render(expected[0]), render.Render(actual))
-		message += diff(expected, actual)
-		return serializer.serializeDetailed(expected[0], actual,
-			message)
+		renderedExpected, renderedActual := render.Render(expected[0]), render.Render(actual)
+		message := fmt.Sprintf(shouldHaveResembled, renderedExpected, renderedActual) +
+			composePrettyDiff(renderedExpected, renderedActual)
+		return serializer.serializeDetailed(expected[0], actual, message)
 	}
 
 	return success
diff --git a/equality_diff.go b/equality_diff.go
index 16a7f43..bd698ff 100644
--- a/equality_diff.go
+++ b/equality_diff.go
@@ -3,7 +3,7 @@
 import (
 	"fmt"
 
-	"github.com/sergi/go-diff/diffmatchpatch"
+	"github.com/smartystreets/assertions/internal/go-diff/diffmatchpatch"
 )
 
 func composePrettyDiff(expected, actual string) string {
diff --git a/equality_test.go b/equality_test.go
index 893327c..873dee4 100644
--- a/equality_test.go
+++ b/equality_test.go
@@ -26,10 +26,10 @@
 	this.pass(so(42, ShouldEqual, uint(42)))
 
 	this.fail(so(Thing1{"hi"}, ShouldEqual, Thing1{}), "{}|{hi}|Expected: '{}' Actual: '{hi}' (Should be equal)")
-	this.fail(so(Thing1{"hi"}, ShouldEqual, Thing1{"hi"}), "{hi}|{hi}|Expected: '{hi}' Actual: '{hi}' (Should be equal)")
-	this.fail(so(&Thing1{"hi"}, ShouldEqual, &Thing1{"hi"}), "&{hi}|&{hi}|Expected: '&{hi}' Actual: '&{hi}' (Should be equal)")
+	this.fail(so(Thing1{"hi"}, ShouldEqual, Thing1{"hi"}), "{hi}|{hi}|Both the actual and expected values render equally ('{hi}') and their types are the same. Try using ShouldResemble instead.")
+	this.fail(so(&Thing1{"hi"}, ShouldEqual, &Thing1{"hi"}), "&{hi}|&{hi}|Both the actual and expected values render equally ('&{hi}') and their types are the same. Try using ShouldResemble instead.")
 
-	this.fail(so(Thing1{}, ShouldEqual, Thing2{}), "{}|{}|Expected: '{}' Actual: '{}' (Should be equal)")
+	this.fail(so(Thing1{}, ShouldEqual, Thing2{}), "{}|{}|Expected: '{}' (assertions.Thing2) Actual: '{}' (assertions.Thing1) (Should be equal, type mismatch)")
 
 	this.pass(so(ThingWithEqualMethod{"hi"}, ShouldEqual, ThingWithEqualMethod{"hi"}))
 	this.fail(so(ThingWithEqualMethod{"hi"}, ShouldEqual, ThingWithEqualMethod{"bye"}),
@@ -137,7 +137,7 @@
 	this.fail(so(Thing1{"hi"}, ShouldResemble, Thing1{"hi"}, Thing1{"hi"}), "This assertion requires exactly 1 comparison values (you provided 2).")
 
 	this.pass(so(Thing1{"hi"}, ShouldResemble, Thing1{"hi"}))
-	this.fail(so(Thing1{"hi"}, ShouldResemble, Thing1{"bye"}), `{bye}|{hi}|Expected: 'assertions.Thing1{a:"bye"}' Actual: 'assertions.Thing1{a:"hi"}' (Should resemble)!`)
+	this.fail(so(Thing1{"hi"}, ShouldResemble, Thing1{"bye"}), `{bye}|{hi}|Expected: 'assertions.Thing1{a:"bye"}' Actual: 'assertions.Thing1{a:"hi"}' (Should resemble)! Diff: 'assertions.Thing1{a:"byehi"}'`)
 
 	var (
 		a []int
@@ -148,7 +148,7 @@
 	this.fail(so(2, ShouldResemble, 1), `1|2|Expected: '1' Actual: '2' (Should resemble)!`)
 
 	this.fail(so(StringStringMapAlias{"hi": "bye"}, ShouldResemble, map[string]string{"hi": "bye"}),
-		`map[hi:bye]|map[hi:bye]|Expected: 'map[string]string{"hi":"bye"}' Actual: 'assertions.StringStringMapAlias{"hi":"bye"}' (Should resemble)!`)
+		`map[hi:bye]|map[hi:bye]|Expected: 'map[string]string{"hi":"bye"}' Actual: 'assertions.StringStringMapAlias{"hi":"bye"}' (Should resemble)! Diff: 'map[ssertions.String]sStringMapAlias{"hi":"bye"}'`)
 	this.fail(so(StringSliceAlias{"hi", "bye"}, ShouldResemble, []string{"hi", "bye"}),
 		`[hi bye]|[hi bye]|Expected: '[]string{"hi", "bye"}' Actual: 'assertions.StringSliceAlias{"hi", "bye"}' (Should resemble)!`)
 
@@ -178,12 +178,12 @@
 	// basic identity of keys/values
 	this.pass(so(`{"my":"val"}`, ShouldEqualJSON, `{"my":"val"}`))
 	this.fail(so(`{"my":"val"}`, ShouldEqualJSON, `{"your":"val"}`),
-		`{"your":"val"}|{"my":"val"}|Expected: '{"your":"val"}' Actual: '{"my":"val"}' (Should be equal)`)
+		`{"your":"val"}|{"my":"val"}|Expected: '{"your":"val"}' Actual: '{"my":"val"}' (Should be equal) Diff: '{"myour":"val"}'`)
 
 	// out of order values causes comparison failure:
 	this.pass(so(`{"key0":"val0","key1":"val1"}`, ShouldEqualJSON, `{"key1":"val1","key0":"val0"}`))
 	this.fail(so(`{"key0":"val0","key1":"val1"}`, ShouldEqualJSON, `{"key1":"val0","key0":"val0"}`),
-		`{"key0":"val0","key1":"val0"}|{"key0":"val0","key1":"val1"}|Expected: '{"key0":"val0","key1":"val0"}' Actual: '{"key0":"val0","key1":"val1"}' (Should be equal)`)
+		`{"key0":"val0","key1":"val0"}|{"key0":"val0","key1":"val1"}|Expected: '{"key0":"val0","key1":"val0"}' Actual: '{"key0":"val0","key1":"val1"}' (Should be equal)  Diff: '{"key0":"val0","key1":"val01"}'`)
 
 	// missing values causes comparison failure:
 	this.fail(so(
diff --git a/internal/Makefile b/internal/Makefile
index be7dec5..a03fc81 100644
--- a/internal/Makefile
+++ b/internal/Makefile
@@ -1,4 +1,4 @@
-# This Makefile pulls the latest oglematchers (with dependencies),
+# This Makefile pulls the latest dependencies,
 # rewrites the imports to match this location,
 # and ensures that all the tests pass.
 # BTW, things used from oglematchers: Contains, Equals, DeepEquals, GreaterThan, LessThan, GreaterOrEqual, LessOrEqual
@@ -6,19 +6,16 @@
 test:
 	go test github.com/smartystreets/assertions/...
 
-update: clear clone rewrite trim
+update: clear clone trim
 
 clear:
 	rm -rf ogle*
-	rm -rf reqtrace
 	rm -rf go-render
 
 clone:
 	git clone https://github.com/jacobsa/oglematchers.git && rm -rf oglematchers/.git
 	git clone https://github.com/luci/go-render.git && rm -rf go-render/.git
-
-rewrite:
-	grep -rl --exclude Makefile 'github.com/jacobsa' . | xargs sed -i '' 's#github.com/jacobsa#github.com/smartystreets/assertions/internal#g'
+	git clone https://github.com/sergi/go-diff.git && rm -rf go-diff/.git
 
 trim:
 	git checkout oglematchers/contains.go # This file diverged at 6acd0337
@@ -35,3 +32,4 @@
 	rm oglematchers/panics.go
 	rm oglematchers/pointee.go
 	rm go-render/render/*_test.go
+	rm go-diff/diffmatchpatch/*_test.go
diff --git a/internal/go-diff/.gitignore b/internal/go-diff/.gitignore
new file mode 100644
index 0000000..0026861
--- /dev/null
+++ b/internal/go-diff/.gitignore
@@ -0,0 +1,22 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
diff --git a/internal/go-diff/.travis.yml b/internal/go-diff/.travis.yml
new file mode 100644
index 0000000..85868de
--- /dev/null
+++ b/internal/go-diff/.travis.yml
@@ -0,0 +1,27 @@
+language: go
+
+os:
+  - linux
+  - osx
+
+go:
+  - 1.8.x
+  - 1.9.x
+
+sudo: false
+
+env:
+  global:
+    # Coveralls.io
+    - secure: OGYOsFNXNarEZ5yA4/M6ZdVguD0jL8vXgXrbLzjcpkKcq8ObHSCtNINoUlnNf6l6Z92kPnuV+LSm7jKTojBlov4IwgiY1ACbvg921SdjxYkg1AiwHTRTLR1g/esX8RdaBpJ0TOcXOFFsYMRVvl5sxxtb0tXSuUrT+Ch4SUCY7X8=
+
+install:
+  - make install-dependencies
+  - make install-tools
+  - make install
+
+script:
+  - make lint
+  - make test-with-coverage
+  - gover
+  - if [ "$TRAVIS_SECURE_ENV_VARS" = "true" ]; then goveralls -coverprofile=gover.coverprofile -service=travis-ci -repotoken $COVERALLS_TOKEN; fi
diff --git a/internal/go-diff/APACHE-LICENSE-2.0 b/internal/go-diff/APACHE-LICENSE-2.0
new file mode 100644
index 0000000..f433b1a
--- /dev/null
+++ b/internal/go-diff/APACHE-LICENSE-2.0
@@ -0,0 +1,177 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
diff --git a/internal/go-diff/AUTHORS b/internal/go-diff/AUTHORS
new file mode 100644
index 0000000..2d7bb2b
--- /dev/null
+++ b/internal/go-diff/AUTHORS
@@ -0,0 +1,25 @@
+# This is the official list of go-diff authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+
+# Names should be added to this file as
+#	Name or Organization <email address>
+# The email address is not required for organizations.
+
+# Please keep the list sorted.
+
+Danny Yoo <dannyyoo@google.com>
+James Kolb <jkolb@google.com>
+Jonathan Amsterdam <jba@google.com>
+Markus Zimmermann <markus.zimmermann@nethead.at> <markus.zimmermann@symflower.com> <zimmski@gmail.com>
+Matt Kovars <akaskik@gmail.com>
+Örjan Persson <orjan@spotify.com>
+Osman Masood <oamasood@gmail.com>
+Robert Carlsen <rwcarlsen@gmail.com>
+Rory Flynn <roryflynn@users.noreply.github.com>
+Sergi Mansilla <sergi.mansilla@gmail.com>
+Shatrugna Sadhu <ssadhu@apcera.com>
+Shawn Smith <shawnpsmith@gmail.com>
+Stas Maksimov <maksimov@gmail.com>
+Tor Arvid Lund <torarvid@gmail.com>
+Zac Bergquist <zbergquist99@gmail.com>
diff --git a/internal/go-diff/CONTRIBUTORS b/internal/go-diff/CONTRIBUTORS
new file mode 100644
index 0000000..369e3d5
--- /dev/null
+++ b/internal/go-diff/CONTRIBUTORS
@@ -0,0 +1,32 @@
+# This is the official list of people who can contribute
+# (and typically have contributed) code to the go-diff
+# repository.
+#
+# The AUTHORS file lists the copyright holders; this file
+# lists people.  For example, ACME Inc. employees would be listed here
+# but not in AUTHORS, because ACME Inc. would hold the copyright.
+#
+# When adding J Random Contributor's name to this file,
+# either J's name or J's organization's name should be
+# added to the AUTHORS file.
+#
+# Names should be added to this file like so:
+#     Name <email address>
+#
+# Please keep the list sorted.
+
+Danny Yoo <dannyyoo@google.com>
+James Kolb <jkolb@google.com>
+Jonathan Amsterdam <jba@google.com>
+Markus Zimmermann <markus.zimmermann@nethead.at> <markus.zimmermann@symflower.com> <zimmski@gmail.com>
+Matt Kovars <akaskik@gmail.com>
+Örjan Persson <orjan@spotify.com>
+Osman Masood <oamasood@gmail.com>
+Robert Carlsen <rwcarlsen@gmail.com>
+Rory Flynn <roryflynn@users.noreply.github.com>
+Sergi Mansilla <sergi.mansilla@gmail.com>
+Shatrugna Sadhu <ssadhu@apcera.com>
+Shawn Smith <shawnpsmith@gmail.com>
+Stas Maksimov <maksimov@gmail.com>
+Tor Arvid Lund <torarvid@gmail.com>
+Zac Bergquist <zbergquist99@gmail.com>
diff --git a/internal/go-diff/LICENSE b/internal/go-diff/LICENSE
new file mode 100644
index 0000000..937942c
--- /dev/null
+++ b/internal/go-diff/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2012-2016 The go-diff Authors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
diff --git a/internal/go-diff/Makefile b/internal/go-diff/Makefile
new file mode 100644
index 0000000..e013f0b
--- /dev/null
+++ b/internal/go-diff/Makefile
@@ -0,0 +1,44 @@
+.PHONY: all clean clean-coverage install install-dependencies install-tools lint test test-verbose test-with-coverage
+
+export ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
+export PKG := github.com/sergi/go-diff
+export ROOT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
+
+$(eval $(ARGS):;@:) # turn arguments into do-nothing targets
+export ARGS
+
+ifdef ARGS
+	PKG_TEST := $(ARGS)
+else
+	PKG_TEST := $(PKG)/...
+endif
+
+all: install-tools install-dependencies install lint test
+
+clean:
+	go clean -i $(PKG)/...
+	go clean -i -race $(PKG)/...
+clean-coverage:
+	find $(ROOT_DIR) | grep .coverprofile | xargs rm
+install:
+	go install -v $(PKG)/...
+install-dependencies:
+	go get -t -v $(PKG)/...
+	go build -v $(PKG)/...
+install-tools:
+	# Install linting tools
+	go get -u -v github.com/golang/lint/...
+	go get -u -v github.com/kisielk/errcheck/...
+
+	# Install code coverage tools
+	go get -u -v github.com/onsi/ginkgo/ginkgo/...
+	go get -u -v github.com/modocache/gover/...
+	go get -u -v github.com/mattn/goveralls/...
+lint:
+	$(ROOT_DIR)/scripts/lint.sh
+test:
+	go test -race -test.timeout 120s $(PKG_TEST)
+test-verbose:
+	go test -race -test.timeout 120s -v $(PKG_TEST)
+test-with-coverage:
+	ginkgo -r -cover -race -skipPackage="testdata"
diff --git a/internal/go-diff/README.md b/internal/go-diff/README.md
new file mode 100644
index 0000000..597437b
--- /dev/null
+++ b/internal/go-diff/README.md
@@ -0,0 +1,84 @@
+# go-diff [![GoDoc](https://godoc.org/github.com/sergi/go-diff?status.png)](https://godoc.org/github.com/sergi/go-diff/diffmatchpatch) [![Build Status](https://travis-ci.org/sergi/go-diff.svg?branch=master)](https://travis-ci.org/sergi/go-diff) [![Coverage Status](https://coveralls.io/repos/sergi/go-diff/badge.png?branch=master)](https://coveralls.io/r/sergi/go-diff?branch=master)
+
+go-diff offers algorithms to perform operations required for synchronizing plain text:
+
+- Compare two texts and return their differences.
+- Perform fuzzy matching of text.
+- Apply patches onto text.
+
+## Installation
+
+```bash
+go get -u github.com/sergi/go-diff/...
+```
+
+## Usage
+
+The following example compares two texts and writes out the differences to standard output.
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/sergi/go-diff/diffmatchpatch"
+)
+
+const (
+	text1 = "Lorem ipsum dolor."
+	text2 = "Lorem dolor sit amet."
+)
+
+func main() {
+	dmp := diffmatchpatch.New()
+
+	diffs := dmp.DiffMain(text1, text2, false)
+
+	fmt.Println(dmp.DiffPrettyText(diffs))
+}
+```
+
+## Found a bug or are you missing a feature in go-diff?
+
+Please make sure to have the latest version of go-diff. If the problem still persists go through the [open issues](https://github.com/sergi/go-diff/issues) in the tracker first. If you cannot find your request just open up a [new issue](https://github.com/sergi/go-diff/issues/new).
+
+## How to contribute?
+
+You want to contribute to go-diff? GREAT! If you are here because of a bug you want to fix or a feature you want to add, you can just read on. Otherwise we have a list of [open issues in the tracker](https://github.com/sergi/go-diff/issues). Just choose something you think you can work on and discuss your plans in the issue by commenting on it.
+
+Please make sure that every behavioral change is accompanied by test cases. Additionally, every contribution must pass the `lint` and `test` Makefile targets which can be run using the following commands in the repository root directory.
+
+```bash
+make lint
+make test
+```
+
+After your contribution passes these commands, [create a PR](https://help.github.com/articles/creating-a-pull-request/) and we will review your contribution.
+
+## Origins
+
+go-diff is a Go language port of Neil Fraser's google-diff-match-patch code. His original code is available at [http://code.google.com/p/google-diff-match-patch/](http://code.google.com/p/google-diff-match-patch/).
+
+## Copyright and License
+
+The original Google Diff, Match and Patch Library is licensed under the [Apache License 2.0](http://www.apache.org/licenses/LICENSE-2.0). The full terms of that license are included here in the [APACHE-LICENSE-2.0](/APACHE-LICENSE-2.0) file.
+
+Diff, Match and Patch Library
+
+> Written by Neil Fraser
+> Copyright (c) 2006 Google Inc.
+> <http://code.google.com/p/google-diff-match-patch/>
+
+This Go version of Diff, Match and Patch Library is licensed under the [MIT License](http://www.opensource.org/licenses/MIT) (a.k.a. the Expat License) which is included here in the [LICENSE](/LICENSE) file.
+
+Go version of Diff, Match and Patch Library
+
+> Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+> <https://github.com/sergi/go-diff>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/internal/go-diff/diffmatchpatch/diff.go b/internal/go-diff/diffmatchpatch/diff.go
new file mode 100644
index 0000000..cb25b43
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/diff.go
@@ -0,0 +1,1345 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+	"bytes"
+	"errors"
+	"fmt"
+	"html"
+	"math"
+	"net/url"
+	"regexp"
+	"strconv"
+	"strings"
+	"time"
+	"unicode/utf8"
+)
+
+// Operation defines the operation of a diff item.
+type Operation int8
+
+//go:generate stringer -type=Operation -trimprefix=Diff
+
+const (
+	// DiffDelete item represents a delete diff.
+	DiffDelete Operation = -1
+	// DiffInsert item represents an insert diff.
+	DiffInsert Operation = 1
+	// DiffEqual item represents an equal diff.
+	DiffEqual Operation = 0
+)
+
+// Diff represents one diff operation
+type Diff struct {
+	Type Operation
+	Text string
+}
+
+// splice removes amount elements from slice at index index, replacing them with elements.
+func splice(slice []Diff, index int, amount int, elements ...Diff) []Diff {
+	if len(elements) == amount {
+		// Easy case: overwrite the relevant items.
+		copy(slice[index:], elements)
+		return slice
+	}
+	if len(elements) < amount {
+		// Fewer new items than old.
+		// Copy in the new items.
+		copy(slice[index:], elements)
+		// Shift the remaining items left.
+		copy(slice[index+len(elements):], slice[index+amount:])
+		// Calculate the new end of the slice.
+		end := len(slice) - amount + len(elements)
+		// Zero stranded elements at end so that they can be garbage collected.
+		tail := slice[end:]
+		for i := range tail {
+			tail[i] = Diff{}
+		}
+		return slice[:end]
+	}
+	// More new items than old.
+	// Make room in slice for new elements.
+	// There's probably an even more efficient way to do this,
+	// but this is simple and clear.
+	need := len(slice) - amount + len(elements)
+	for len(slice) < need {
+		slice = append(slice, Diff{})
+	}
+	// Shift slice elements right to make room for new elements.
+	copy(slice[index+len(elements):], slice[index+amount:])
+	// Copy in new elements.
+	copy(slice[index:], elements)
+	return slice
+}
+
+// DiffMain finds the differences between two texts.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMain(text1, text2 string, checklines bool) []Diff {
+	return dmp.DiffMainRunes([]rune(text1), []rune(text2), checklines)
+}
+
+// DiffMainRunes finds the differences between two rune sequences.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+func (dmp *DiffMatchPatch) DiffMainRunes(text1, text2 []rune, checklines bool) []Diff {
+	var deadline time.Time
+	if dmp.DiffTimeout > 0 {
+		deadline = time.Now().Add(dmp.DiffTimeout)
+	}
+	return dmp.diffMainRunes(text1, text2, checklines, deadline)
+}
+
+func (dmp *DiffMatchPatch) diffMainRunes(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+	if runesEqual(text1, text2) {
+		var diffs []Diff
+		if len(text1) > 0 {
+			diffs = append(diffs, Diff{DiffEqual, string(text1)})
+		}
+		return diffs
+	}
+	// Trim off common prefix (speedup).
+	commonlength := commonPrefixLength(text1, text2)
+	commonprefix := text1[:commonlength]
+	text1 = text1[commonlength:]
+	text2 = text2[commonlength:]
+
+	// Trim off common suffix (speedup).
+	commonlength = commonSuffixLength(text1, text2)
+	commonsuffix := text1[len(text1)-commonlength:]
+	text1 = text1[:len(text1)-commonlength]
+	text2 = text2[:len(text2)-commonlength]
+
+	// Compute the diff on the middle block.
+	diffs := dmp.diffCompute(text1, text2, checklines, deadline)
+
+	// Restore the prefix and suffix.
+	if len(commonprefix) != 0 {
+		diffs = append([]Diff{Diff{DiffEqual, string(commonprefix)}}, diffs...)
+	}
+	if len(commonsuffix) != 0 {
+		diffs = append(diffs, Diff{DiffEqual, string(commonsuffix)})
+	}
+
+	return dmp.DiffCleanupMerge(diffs)
+}
+
+// diffCompute finds the differences between two rune slices.  Assumes that the texts do not have any common prefix or suffix.
+func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
+	diffs := []Diff{}
+	if len(text1) == 0 {
+		// Just add some text (speedup).
+		return append(diffs, Diff{DiffInsert, string(text2)})
+	} else if len(text2) == 0 {
+		// Just delete some text (speedup).
+		return append(diffs, Diff{DiffDelete, string(text1)})
+	}
+
+	var longtext, shorttext []rune
+	if len(text1) > len(text2) {
+		longtext = text1
+		shorttext = text2
+	} else {
+		longtext = text2
+		shorttext = text1
+	}
+
+	if i := runesIndex(longtext, shorttext); i != -1 {
+		op := DiffInsert
+		// Swap insertions for deletions if diff is reversed.
+		if len(text1) > len(text2) {
+			op = DiffDelete
+		}
+		// Shorter text is inside the longer text (speedup).
+		return []Diff{
+			Diff{op, string(longtext[:i])},
+			Diff{DiffEqual, string(shorttext)},
+			Diff{op, string(longtext[i+len(shorttext):])},
+		}
+	} else if len(shorttext) == 1 {
+		// Single character string.
+		// After the previous speedup, the character can't be an equality.
+		return []Diff{
+			Diff{DiffDelete, string(text1)},
+			Diff{DiffInsert, string(text2)},
+		}
+		// Check to see if the problem can be split in two.
+	} else if hm := dmp.diffHalfMatch(text1, text2); hm != nil {
+		// A half-match was found, sort out the return data.
+		text1A := hm[0]
+		text1B := hm[1]
+		text2A := hm[2]
+		text2B := hm[3]
+		midCommon := hm[4]
+		// Send both pairs off for separate processing.
+		diffsA := dmp.diffMainRunes(text1A, text2A, checklines, deadline)
+		diffsB := dmp.diffMainRunes(text1B, text2B, checklines, deadline)
+		// Merge the results.
+		diffs := diffsA
+		diffs = append(diffs, Diff{DiffEqual, string(midCommon)})
+		diffs = append(diffs, diffsB...)
+		return diffs
+	} else if checklines && len(text1) > 100 && len(text2) > 100 {
+		return dmp.diffLineMode(text1, text2, deadline)
+	}
+	return dmp.diffBisect(text1, text2, deadline)
+}
+
+// diffLineMode does a quick line-level diff on both []runes, then rediff the parts for greater accuracy. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) diffLineMode(text1, text2 []rune, deadline time.Time) []Diff {
+	// Scan the text on a line-by-line basis first.
+	text1, text2, linearray := dmp.diffLinesToRunes(text1, text2)
+
+	diffs := dmp.diffMainRunes(text1, text2, false, deadline)
+
+	// Convert the diff back to original text.
+	diffs = dmp.DiffCharsToLines(diffs, linearray)
+	// Eliminate freak matches (e.g. blank lines)
+	diffs = dmp.DiffCleanupSemantic(diffs)
+
+	// Rediff any replacement blocks, this time character-by-character.
+	// Add a dummy entry at the end.
+	diffs = append(diffs, Diff{DiffEqual, ""})
+
+	pointer := 0
+	countDelete := 0
+	countInsert := 0
+
+	// NOTE: Rune slices are slower than using strings in this case.
+	textDelete := ""
+	textInsert := ""
+
+	for pointer < len(diffs) {
+		switch diffs[pointer].Type {
+		case DiffInsert:
+			countInsert++
+			textInsert += diffs[pointer].Text
+		case DiffDelete:
+			countDelete++
+			textDelete += diffs[pointer].Text
+		case DiffEqual:
+			// Upon reaching an equality, check for prior redundancies.
+			if countDelete >= 1 && countInsert >= 1 {
+				// Delete the offending records and add the merged ones.
+				diffs = splice(diffs, pointer-countDelete-countInsert,
+					countDelete+countInsert)
+
+				pointer = pointer - countDelete - countInsert
+				a := dmp.diffMainRunes([]rune(textDelete), []rune(textInsert), false, deadline)
+				for j := len(a) - 1; j >= 0; j-- {
+					diffs = splice(diffs, pointer, 0, a[j])
+				}
+				pointer = pointer + len(a)
+			}
+
+			countInsert = 0
+			countDelete = 0
+			textDelete = ""
+			textInsert = ""
+		}
+		pointer++
+	}
+
+	return diffs[:len(diffs)-1] // Remove the dummy entry at the end.
+}
+
+// DiffBisect finds the 'middle snake' of a diff, split the problem in two and return the recursively constructed diff.
+// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
+// See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) DiffBisect(text1, text2 string, deadline time.Time) []Diff {
+	// Unused in this code, but retained for interface compatibility.
+	return dmp.diffBisect([]rune(text1), []rune(text2), deadline)
+}
+
+// diffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
+// See Myers's 1986 paper: An O(ND) Difference Algorithm and Its Variations.
+func (dmp *DiffMatchPatch) diffBisect(runes1, runes2 []rune, deadline time.Time) []Diff {
+	// Cache the text lengths to prevent multiple calls.
+	runes1Len, runes2Len := len(runes1), len(runes2)
+
+	maxD := (runes1Len + runes2Len + 1) / 2
+	vOffset := maxD
+	vLength := 2 * maxD
+
+	v1 := make([]int, vLength)
+	v2 := make([]int, vLength)
+	for i := range v1 {
+		v1[i] = -1
+		v2[i] = -1
+	}
+	v1[vOffset+1] = 0
+	v2[vOffset+1] = 0
+
+	delta := runes1Len - runes2Len
+	// If the total number of characters is odd, then the front path will collide with the reverse path.
+	front := (delta%2 != 0)
+	// Offsets for start and end of k loop. Prevents mapping of space beyond the grid.
+	k1start := 0
+	k1end := 0
+	k2start := 0
+	k2end := 0
+	for d := 0; d < maxD; d++ {
+		// Bail out if deadline is reached.
+		if !deadline.IsZero() && d%16 == 0 && time.Now().After(deadline) {
+			break
+		}
+
+		// Walk the front path one step.
+		for k1 := -d + k1start; k1 <= d-k1end; k1 += 2 {
+			k1Offset := vOffset + k1
+			var x1 int
+
+			if k1 == -d || (k1 != d && v1[k1Offset-1] < v1[k1Offset+1]) {
+				x1 = v1[k1Offset+1]
+			} else {
+				x1 = v1[k1Offset-1] + 1
+			}
+
+			y1 := x1 - k1
+			for x1 < runes1Len && y1 < runes2Len {
+				if runes1[x1] != runes2[y1] {
+					break
+				}
+				x1++
+				y1++
+			}
+			v1[k1Offset] = x1
+			if x1 > runes1Len {
+				// Ran off the right of the graph.
+				k1end += 2
+			} else if y1 > runes2Len {
+				// Ran off the bottom of the graph.
+				k1start += 2
+			} else if front {
+				k2Offset := vOffset + delta - k1
+				if k2Offset >= 0 && k2Offset < vLength && v2[k2Offset] != -1 {
+					// Mirror x2 onto top-left coordinate system.
+					x2 := runes1Len - v2[k2Offset]
+					if x1 >= x2 {
+						// Overlap detected.
+						return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+					}
+				}
+			}
+		}
+		// Walk the reverse path one step.
+		for k2 := -d + k2start; k2 <= d-k2end; k2 += 2 {
+			k2Offset := vOffset + k2
+			var x2 int
+			if k2 == -d || (k2 != d && v2[k2Offset-1] < v2[k2Offset+1]) {
+				x2 = v2[k2Offset+1]
+			} else {
+				x2 = v2[k2Offset-1] + 1
+			}
+			var y2 = x2 - k2
+			for x2 < runes1Len && y2 < runes2Len {
+				if runes1[runes1Len-x2-1] != runes2[runes2Len-y2-1] {
+					break
+				}
+				x2++
+				y2++
+			}
+			v2[k2Offset] = x2
+			if x2 > runes1Len {
+				// Ran off the left of the graph.
+				k2end += 2
+			} else if y2 > runes2Len {
+				// Ran off the top of the graph.
+				k2start += 2
+			} else if !front {
+				k1Offset := vOffset + delta - k2
+				if k1Offset >= 0 && k1Offset < vLength && v1[k1Offset] != -1 {
+					x1 := v1[k1Offset]
+					y1 := vOffset + x1 - k1Offset
+					// Mirror x2 onto top-left coordinate system.
+					x2 = runes1Len - x2
+					if x1 >= x2 {
+						// Overlap detected.
+						return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
+					}
+				}
+			}
+		}
+	}
+	// Diff took too long and hit the deadline or number of diffs equals number of characters, no commonality at all.
+	return []Diff{
+		Diff{DiffDelete, string(runes1)},
+		Diff{DiffInsert, string(runes2)},
+	}
+}
+
+func (dmp *DiffMatchPatch) diffBisectSplit(runes1, runes2 []rune, x, y int,
+	deadline time.Time) []Diff {
+	runes1a := runes1[:x]
+	runes2a := runes2[:y]
+	runes1b := runes1[x:]
+	runes2b := runes2[y:]
+
+	// Compute both diffs serially.
+	diffs := dmp.diffMainRunes(runes1a, runes2a, false, deadline)
+	diffsb := dmp.diffMainRunes(runes1b, runes2b, false, deadline)
+
+	return append(diffs, diffsb...)
+}
+
+// DiffLinesToChars splits two texts into a list of strings, and reduces the texts to a string of hashes where each Unicode character represents one line.
+// It's slightly faster to call DiffLinesToRunes first, followed by DiffMainRunes.
+func (dmp *DiffMatchPatch) DiffLinesToChars(text1, text2 string) (string, string, []string) {
+	chars1, chars2, lineArray := dmp.DiffLinesToRunes(text1, text2)
+	return string(chars1), string(chars2), lineArray
+}
+
+// DiffLinesToRunes splits two texts into a list of runes. Each rune represents one line.
+func (dmp *DiffMatchPatch) DiffLinesToRunes(text1, text2 string) ([]rune, []rune, []string) {
+	// '\x00' is a valid character, but various debuggers don't like it. So we'll insert a junk entry to avoid generating a null character.
+	lineArray := []string{""}    // e.g. lineArray[4] == 'Hello\n'
+	lineHash := map[string]int{} // e.g. lineHash['Hello\n'] == 4
+
+	chars1 := dmp.diffLinesToRunesMunge(text1, &lineArray, lineHash)
+	chars2 := dmp.diffLinesToRunesMunge(text2, &lineArray, lineHash)
+
+	return chars1, chars2, lineArray
+}
+
+func (dmp *DiffMatchPatch) diffLinesToRunes(text1, text2 []rune) ([]rune, []rune, []string) {
+	return dmp.DiffLinesToRunes(string(text1), string(text2))
+}
+
+// diffLinesToRunesMunge splits a text into an array of strings, and reduces the texts to a []rune where each Unicode character represents one line.
+// We use strings instead of []runes as input mainly because you can't use []rune as a map key.
+func (dmp *DiffMatchPatch) diffLinesToRunesMunge(text string, lineArray *[]string, lineHash map[string]int) []rune {
+	// Walk the text, pulling out a substring for each line. text.split('\n') would temporarily double our memory footprint. Modifying text would create many large strings to garbage collect.
+	lineStart := 0
+	lineEnd := -1
+	runes := []rune{}
+
+	for lineEnd < len(text)-1 {
+		lineEnd = indexOf(text, "\n", lineStart)
+
+		if lineEnd == -1 {
+			lineEnd = len(text) - 1
+		}
+
+		line := text[lineStart : lineEnd+1]
+		lineStart = lineEnd + 1
+		lineValue, ok := lineHash[line]
+
+		if ok {
+			runes = append(runes, rune(lineValue))
+		} else {
+			*lineArray = append(*lineArray, line)
+			lineHash[line] = len(*lineArray) - 1
+			runes = append(runes, rune(len(*lineArray)-1))
+		}
+	}
+
+	return runes
+}
+
+// DiffCharsToLines rehydrates the text in a diff from a string of line hashes to real lines of text.
+func (dmp *DiffMatchPatch) DiffCharsToLines(diffs []Diff, lineArray []string) []Diff {
+	hydrated := make([]Diff, 0, len(diffs))
+	for _, aDiff := range diffs {
+		chars := aDiff.Text
+		text := make([]string, len(chars))
+
+		for i, r := range chars {
+			text[i] = lineArray[r]
+		}
+
+		aDiff.Text = strings.Join(text, "")
+		hydrated = append(hydrated, aDiff)
+	}
+	return hydrated
+}
+
+// DiffCommonPrefix determines the common prefix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonPrefix(text1, text2 string) int {
+	// Unused in this code, but retained for interface compatibility.
+	return commonPrefixLength([]rune(text1), []rune(text2))
+}
+
+// DiffCommonSuffix determines the common suffix length of two strings.
+func (dmp *DiffMatchPatch) DiffCommonSuffix(text1, text2 string) int {
+	// Unused in this code, but retained for interface compatibility.
+	return commonSuffixLength([]rune(text1), []rune(text2))
+}
+
+// commonPrefixLength returns the length of the common prefix of two rune slices.
+func commonPrefixLength(text1, text2 []rune) int {
+	// Linear search. See comment in commonSuffixLength.
+	n := 0
+	for ; n < len(text1) && n < len(text2); n++ {
+		if text1[n] != text2[n] {
+			return n
+		}
+	}
+	return n
+}
+
+// commonSuffixLength returns the length of the common suffix of two rune slices.
+func commonSuffixLength(text1, text2 []rune) int {
+	// Use linear search rather than the binary search discussed at https://neil.fraser.name/news/2007/10/09/.
+	// See discussion at https://github.com/sergi/go-diff/issues/54.
+	i1 := len(text1)
+	i2 := len(text2)
+	for n := 0; ; n++ {
+		i1--
+		i2--
+		if i1 < 0 || i2 < 0 || text1[i1] != text2[i2] {
+			return n
+		}
+	}
+}
+
+// DiffCommonOverlap determines if the suffix of one string is the prefix of another.
+func (dmp *DiffMatchPatch) DiffCommonOverlap(text1 string, text2 string) int {
+	// Cache the text lengths to prevent multiple calls.
+	text1Length := len(text1)
+	text2Length := len(text2)
+	// Eliminate the null case.
+	if text1Length == 0 || text2Length == 0 {
+		return 0
+	}
+	// Truncate the longer string.
+	if text1Length > text2Length {
+		text1 = text1[text1Length-text2Length:]
+	} else if text1Length < text2Length {
+		text2 = text2[0:text1Length]
+	}
+	textLength := int(math.Min(float64(text1Length), float64(text2Length)))
+	// Quick check for the worst case.
+	if text1 == text2 {
+		return textLength
+	}
+
+	// Start by looking for a single character match and increase length until no match is found. Performance analysis: http://neil.fraser.name/news/2010/11/04/
+	best := 0
+	length := 1
+	for {
+		pattern := text1[textLength-length:]
+		found := strings.Index(text2, pattern)
+		if found == -1 {
+			break
+		}
+		length += found
+		if found == 0 || text1[textLength-length:] == text2[0:length] {
+			best = length
+			length++
+		}
+	}
+
+	return best
+}
+
+// DiffHalfMatch checks whether the two texts share a substring which is at least half the length of the longer text. This speedup can produce non-minimal diffs.
+func (dmp *DiffMatchPatch) DiffHalfMatch(text1, text2 string) []string {
+	// Unused in this code, but retained for interface compatibility.
+	runeSlices := dmp.diffHalfMatch([]rune(text1), []rune(text2))
+	if runeSlices == nil {
+		return nil
+	}
+
+	result := make([]string, len(runeSlices))
+	for i, r := range runeSlices {
+		result[i] = string(r)
+	}
+	return result
+}
+
+func (dmp *DiffMatchPatch) diffHalfMatch(text1, text2 []rune) [][]rune {
+	if dmp.DiffTimeout <= 0 {
+		// Don't risk returning a non-optimal diff if we have unlimited time.
+		return nil
+	}
+
+	var longtext, shorttext []rune
+	if len(text1) > len(text2) {
+		longtext = text1
+		shorttext = text2
+	} else {
+		longtext = text2
+		shorttext = text1
+	}
+
+	if len(longtext) < 4 || len(shorttext)*2 < len(longtext) {
+		return nil // Pointless.
+	}
+
+	// First check if the second quarter is the seed for a half-match.
+	hm1 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+3)/4))
+
+	// Check again based on the third quarter.
+	hm2 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+1)/2))
+
+	hm := [][]rune{}
+	if hm1 == nil && hm2 == nil {
+		return nil
+	} else if hm2 == nil {
+		hm = hm1
+	} else if hm1 == nil {
+		hm = hm2
+	} else {
+		// Both matched.  Select the longest.
+		if len(hm1[4]) > len(hm2[4]) {
+			hm = hm1
+		} else {
+			hm = hm2
+		}
+	}
+
+	// A half-match was found, sort out the return data.
+	if len(text1) > len(text2) {
+		return hm
+	}
+
+	return [][]rune{hm[2], hm[3], hm[0], hm[1], hm[4]}
+}
+
+// diffHalfMatchI checks if a substring of shorttext exists within longtext such that the substring is at least half the length of longtext.
+// Returns a slice containing the prefix of longtext, the suffix of longtext, the prefix of shorttext, the suffix of shorttext and the common middle, or null if there was no match.
+func (dmp *DiffMatchPatch) diffHalfMatchI(l, s []rune, i int) [][]rune {
+	var bestCommonA []rune
+	var bestCommonB []rune
+	var bestCommonLen int
+	var bestLongtextA []rune
+	var bestLongtextB []rune
+	var bestShorttextA []rune
+	var bestShorttextB []rune
+
+	// Start with a 1/4 length substring at position i as a seed.
+	seed := l[i : i+len(l)/4]
+
+	for j := runesIndexOf(s, seed, 0); j != -1; j = runesIndexOf(s, seed, j+1) {
+		prefixLength := commonPrefixLength(l[i:], s[j:])
+		suffixLength := commonSuffixLength(l[:i], s[:j])
+
+		if bestCommonLen < suffixLength+prefixLength {
+			bestCommonA = s[j-suffixLength : j]
+			bestCommonB = s[j : j+prefixLength]
+			bestCommonLen = len(bestCommonA) + len(bestCommonB)
+			bestLongtextA = l[:i-suffixLength]
+			bestLongtextB = l[i+prefixLength:]
+			bestShorttextA = s[:j-suffixLength]
+			bestShorttextB = s[j+prefixLength:]
+		}
+	}
+
+	if bestCommonLen*2 < len(l) {
+		return nil
+	}
+
+	return [][]rune{
+		bestLongtextA,
+		bestLongtextB,
+		bestShorttextA,
+		bestShorttextB,
+		append(bestCommonA, bestCommonB...),
+	}
+}
+
+// DiffCleanupSemantic reduces the number of edits by eliminating semantically trivial equalities.
+func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
+	changes := false
+	// Stack of indices where equalities are found.
+	equalities := make([]int, 0, len(diffs))
+
+	var lastequality string
+	// Always equal to diffs[equalities[equalitiesLength - 1]][1]
+	var pointer int // Index of current position.
+	// Number of characters that changed prior to the equality.
+	var lengthInsertions1, lengthDeletions1 int
+	// Number of characters that changed after the equality.
+	var lengthInsertions2, lengthDeletions2 int
+
+	for pointer < len(diffs) {
+		if diffs[pointer].Type == DiffEqual {
+			// Equality found.
+			equalities = append(equalities, pointer)
+			lengthInsertions1 = lengthInsertions2
+			lengthDeletions1 = lengthDeletions2
+			lengthInsertions2 = 0
+			lengthDeletions2 = 0
+			lastequality = diffs[pointer].Text
+		} else {
+			// An insertion or deletion.
+
+			if diffs[pointer].Type == DiffInsert {
+				lengthInsertions2 += len(diffs[pointer].Text)
+			} else {
+				lengthDeletions2 += len(diffs[pointer].Text)
+			}
+			// Eliminate an equality that is smaller or equal to the edits on both sides of it.
+			difference1 := int(math.Max(float64(lengthInsertions1), float64(lengthDeletions1)))
+			difference2 := int(math.Max(float64(lengthInsertions2), float64(lengthDeletions2)))
+			if len(lastequality) > 0 &&
+				(len(lastequality) <= difference1) &&
+				(len(lastequality) <= difference2) {
+				// Duplicate record.
+				insPoint := equalities[len(equalities)-1]
+				diffs = splice(diffs, insPoint, 0, Diff{DiffDelete, lastequality})
+
+				// Change second copy to insert.
+				diffs[insPoint+1].Type = DiffInsert
+				// Throw away the equality we just deleted.
+				equalities = equalities[:len(equalities)-1]
+
+				if len(equalities) > 0 {
+					equalities = equalities[:len(equalities)-1]
+				}
+				pointer = -1
+				if len(equalities) > 0 {
+					pointer = equalities[len(equalities)-1]
+				}
+
+				lengthInsertions1 = 0 // Reset the counters.
+				lengthDeletions1 = 0
+				lengthInsertions2 = 0
+				lengthDeletions2 = 0
+				lastequality = ""
+				changes = true
+			}
+		}
+		pointer++
+	}
+
+	// Normalize the diff.
+	if changes {
+		diffs = dmp.DiffCleanupMerge(diffs)
+	}
+	diffs = dmp.DiffCleanupSemanticLossless(diffs)
+	// Find any overlaps between deletions and insertions.
+	// e.g: <del>abcxxx</del><ins>xxxdef</ins>
+	//   -> <del>abc</del>xxx<ins>def</ins>
+	// e.g: <del>xxxabc</del><ins>defxxx</ins>
+	//   -> <ins>def</ins>xxx<del>abc</del>
+	// Only extract an overlap if it is as big as the edit ahead or behind it.
+	pointer = 1
+	for pointer < len(diffs) {
+		if diffs[pointer-1].Type == DiffDelete &&
+			diffs[pointer].Type == DiffInsert {
+			deletion := diffs[pointer-1].Text
+			insertion := diffs[pointer].Text
+			overlapLength1 := dmp.DiffCommonOverlap(deletion, insertion)
+			overlapLength2 := dmp.DiffCommonOverlap(insertion, deletion)
+			if overlapLength1 >= overlapLength2 {
+				if float64(overlapLength1) >= float64(len(deletion))/2 ||
+					float64(overlapLength1) >= float64(len(insertion))/2 {
+
+					// Overlap found. Insert an equality and trim the surrounding edits.
+					diffs = splice(diffs, pointer, 0, Diff{DiffEqual, insertion[:overlapLength1]})
+					diffs[pointer-1].Text =
+						deletion[0 : len(deletion)-overlapLength1]
+					diffs[pointer+1].Text = insertion[overlapLength1:]
+					pointer++
+				}
+			} else {
+				if float64(overlapLength2) >= float64(len(deletion))/2 ||
+					float64(overlapLength2) >= float64(len(insertion))/2 {
+					// Reverse overlap found. Insert an equality and swap and trim the surrounding edits.
+					overlap := Diff{DiffEqual, deletion[:overlapLength2]}
+					diffs = splice(diffs, pointer, 0, overlap)
+					diffs[pointer-1].Type = DiffInsert
+					diffs[pointer-1].Text = insertion[0 : len(insertion)-overlapLength2]
+					diffs[pointer+1].Type = DiffDelete
+					diffs[pointer+1].Text = deletion[overlapLength2:]
+					pointer++
+				}
+			}
+			pointer++
+		}
+		pointer++
+	}
+
+	return diffs
+}
+
+// Define some regex patterns for matching boundaries.
+var (
+	nonAlphaNumericRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
+	whitespaceRegex      = regexp.MustCompile(`\s`)
+	linebreakRegex       = regexp.MustCompile(`[\r\n]`)
+	blanklineEndRegex    = regexp.MustCompile(`\n\r?\n$`)
+	blanklineStartRegex  = regexp.MustCompile(`^\r?\n\r?\n`)
+)
+
+// diffCleanupSemanticScore computes a score representing whether the internal boundary falls on logical boundaries.
+// Scores range from 6 (best) to 0 (worst). Closure, but does not reference any external variables.
+func diffCleanupSemanticScore(one, two string) int {
+	if len(one) == 0 || len(two) == 0 {
+		// Edges are the best.
+		return 6
+	}
+
+	// Each port of this function behaves slightly differently due to subtle differences in each language's definition of things like 'whitespace'.  Since this function's purpose is largely cosmetic, the choice has been made to use each language's native features rather than force total conformity.
+	rune1, _ := utf8.DecodeLastRuneInString(one)
+	rune2, _ := utf8.DecodeRuneInString(two)
+	char1 := string(rune1)
+	char2 := string(rune2)
+
+	nonAlphaNumeric1 := nonAlphaNumericRegex.MatchString(char1)
+	nonAlphaNumeric2 := nonAlphaNumericRegex.MatchString(char2)
+	whitespace1 := nonAlphaNumeric1 && whitespaceRegex.MatchString(char1)
+	whitespace2 := nonAlphaNumeric2 && whitespaceRegex.MatchString(char2)
+	lineBreak1 := whitespace1 && linebreakRegex.MatchString(char1)
+	lineBreak2 := whitespace2 && linebreakRegex.MatchString(char2)
+	blankLine1 := lineBreak1 && blanklineEndRegex.MatchString(one)
+	blankLine2 := lineBreak2 && blanklineEndRegex.MatchString(two)
+
+	if blankLine1 || blankLine2 {
+		// Five points for blank lines.
+		return 5
+	} else if lineBreak1 || lineBreak2 {
+		// Four points for line breaks.
+		return 4
+	} else if nonAlphaNumeric1 && !whitespace1 && whitespace2 {
+		// Three points for end of sentences.
+		return 3
+	} else if whitespace1 || whitespace2 {
+		// Two points for whitespace.
+		return 2
+	} else if nonAlphaNumeric1 || nonAlphaNumeric2 {
+		// One point for non-alphanumeric.
+		return 1
+	}
+	return 0
+}
+
+// DiffCleanupSemanticLossless looks for single edits surrounded on both sides by equalities which can be shifted sideways to align the edit to a word boundary.
+// E.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
+func (dmp *DiffMatchPatch) DiffCleanupSemanticLossless(diffs []Diff) []Diff {
+	pointer := 1
+
+	// Intentionally ignore the first and last element (don't need checking).
+	for pointer < len(diffs)-1 {
+		if diffs[pointer-1].Type == DiffEqual &&
+			diffs[pointer+1].Type == DiffEqual {
+
+			// This is a single edit surrounded by equalities.
+			equality1 := diffs[pointer-1].Text
+			edit := diffs[pointer].Text
+			equality2 := diffs[pointer+1].Text
+
+			// First, shift the edit as far left as possible.
+			commonOffset := dmp.DiffCommonSuffix(equality1, edit)
+			if commonOffset > 0 {
+				commonString := edit[len(edit)-commonOffset:]
+				equality1 = equality1[0 : len(equality1)-commonOffset]
+				edit = commonString + edit[:len(edit)-commonOffset]
+				equality2 = commonString + equality2
+			}
+
+			// Second, step character by character right, looking for the best fit.
+			bestEquality1 := equality1
+			bestEdit := edit
+			bestEquality2 := equality2
+			bestScore := diffCleanupSemanticScore(equality1, edit) +
+				diffCleanupSemanticScore(edit, equality2)
+
+			for len(edit) != 0 && len(equality2) != 0 {
+				_, sz := utf8.DecodeRuneInString(edit)
+				if len(equality2) < sz || edit[:sz] != equality2[:sz] {
+					break
+				}
+				equality1 += edit[:sz]
+				edit = edit[sz:] + equality2[:sz]
+				equality2 = equality2[sz:]
+				score := diffCleanupSemanticScore(equality1, edit) +
+					diffCleanupSemanticScore(edit, equality2)
+				// The >= encourages trailing rather than leading whitespace on edits.
+				if score >= bestScore {
+					bestScore = score
+					bestEquality1 = equality1
+					bestEdit = edit
+					bestEquality2 = equality2
+				}
+			}
+
+			if diffs[pointer-1].Text != bestEquality1 {
+				// We have an improvement, save it back to the diff.
+				if len(bestEquality1) != 0 {
+					diffs[pointer-1].Text = bestEquality1
+				} else {
+					diffs = splice(diffs, pointer-1, 1)
+					pointer--
+				}
+
+				diffs[pointer].Text = bestEdit
+				if len(bestEquality2) != 0 {
+					diffs[pointer+1].Text = bestEquality2
+				} else {
+					diffs = append(diffs[:pointer+1], diffs[pointer+2:]...)
+					pointer--
+				}
+			}
+		}
+		pointer++
+	}
+
+	return diffs
+}
+
+// DiffCleanupEfficiency reduces the number of edits by eliminating operationally trivial equalities.
+func (dmp *DiffMatchPatch) DiffCleanupEfficiency(diffs []Diff) []Diff {
+	changes := false
+	// Stack of indices where equalities are found.
+	type equality struct {
+		data int
+		next *equality
+	}
+	var equalities *equality
+	// Always equal to equalities[equalitiesLength-1][1]
+	lastequality := ""
+	pointer := 0 // Index of current position.
+	// Is there an insertion operation before the last equality.
+	preIns := false
+	// Is there a deletion operation before the last equality.
+	preDel := false
+	// Is there an insertion operation after the last equality.
+	postIns := false
+	// Is there a deletion operation after the last equality.
+	postDel := false
+	for pointer < len(diffs) {
+		if diffs[pointer].Type == DiffEqual { // Equality found.
+			if len(diffs[pointer].Text) < dmp.DiffEditCost &&
+				(postIns || postDel) {
+				// Candidate found.
+				equalities = &equality{
+					data: pointer,
+					next: equalities,
+				}
+				preIns = postIns
+				preDel = postDel
+				lastequality = diffs[pointer].Text
+			} else {
+				// Not a candidate, and can never become one.
+				equalities = nil
+				lastequality = ""
+			}
+			postIns = false
+			postDel = false
+		} else { // An insertion or deletion.
+			if diffs[pointer].Type == DiffDelete {
+				postDel = true
+			} else {
+				postIns = true
+			}
+
+			// Five types to be split:
+			// <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
+			// <ins>A</ins>X<ins>C</ins><del>D</del>
+			// <ins>A</ins><del>B</del>X<ins>C</ins>
+			// <ins>A</del>X<ins>C</ins><del>D</del>
+			// <ins>A</ins><del>B</del>X<del>C</del>
+			var sumPres int
+			if preIns {
+				sumPres++
+			}
+			if preDel {
+				sumPres++
+			}
+			if postIns {
+				sumPres++
+			}
+			if postDel {
+				sumPres++
+			}
+			if len(lastequality) > 0 &&
+				((preIns && preDel && postIns && postDel) ||
+					((len(lastequality) < dmp.DiffEditCost/2) && sumPres == 3)) {
+
+				insPoint := equalities.data
+
+				// Duplicate record.
+				diffs = splice(diffs, insPoint, 0, Diff{DiffDelete, lastequality})
+
+				// Change second copy to insert.
+				diffs[insPoint+1].Type = DiffInsert
+				// Throw away the equality we just deleted.
+				equalities = equalities.next
+				lastequality = ""
+
+				if preIns && preDel {
+					// No changes made which could affect previous entry, keep going.
+					postIns = true
+					postDel = true
+					equalities = nil
+				} else {
+					if equalities != nil {
+						equalities = equalities.next
+					}
+					if equalities != nil {
+						pointer = equalities.data
+					} else {
+						pointer = -1
+					}
+					postIns = false
+					postDel = false
+				}
+				changes = true
+			}
+		}
+		pointer++
+	}
+
+	if changes {
+		diffs = dmp.DiffCleanupMerge(diffs)
+	}
+
+	return diffs
+}
+
+// DiffCleanupMerge reorders and merges like edit sections. Merge equalities.
+// Any edit section can move as long as it doesn't cross an equality.
+func (dmp *DiffMatchPatch) DiffCleanupMerge(diffs []Diff) []Diff {
+	// Add a dummy entry at the end.
+	diffs = append(diffs, Diff{DiffEqual, ""})
+	pointer := 0
+	countDelete := 0
+	countInsert := 0
+	commonlength := 0
+	textDelete := []rune(nil)
+	textInsert := []rune(nil)
+
+	for pointer < len(diffs) {
+		switch diffs[pointer].Type {
+		case DiffInsert:
+			countInsert++
+			textInsert = append(textInsert, []rune(diffs[pointer].Text)...)
+			pointer++
+			break
+		case DiffDelete:
+			countDelete++
+			textDelete = append(textDelete, []rune(diffs[pointer].Text)...)
+			pointer++
+			break
+		case DiffEqual:
+			// Upon reaching an equality, check for prior redundancies.
+			if countDelete+countInsert > 1 {
+				if countDelete != 0 && countInsert != 0 {
+					// Factor out any common prefixes.
+					commonlength = commonPrefixLength(textInsert, textDelete)
+					if commonlength != 0 {
+						x := pointer - countDelete - countInsert
+						if x > 0 && diffs[x-1].Type == DiffEqual {
+							diffs[x-1].Text += string(textInsert[:commonlength])
+						} else {
+							diffs = append([]Diff{Diff{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
+							pointer++
+						}
+						textInsert = textInsert[commonlength:]
+						textDelete = textDelete[commonlength:]
+					}
+					// Factor out any common suffixes.
+					commonlength = commonSuffixLength(textInsert, textDelete)
+					if commonlength != 0 {
+						insertIndex := len(textInsert) - commonlength
+						deleteIndex := len(textDelete) - commonlength
+						diffs[pointer].Text = string(textInsert[insertIndex:]) + diffs[pointer].Text
+						textInsert = textInsert[:insertIndex]
+						textDelete = textDelete[:deleteIndex]
+					}
+				}
+				// Delete the offending records and add the merged ones.
+				if countDelete == 0 {
+					diffs = splice(diffs, pointer-countInsert,
+						countDelete+countInsert,
+						Diff{DiffInsert, string(textInsert)})
+				} else if countInsert == 0 {
+					diffs = splice(diffs, pointer-countDelete,
+						countDelete+countInsert,
+						Diff{DiffDelete, string(textDelete)})
+				} else {
+					diffs = splice(diffs, pointer-countDelete-countInsert,
+						countDelete+countInsert,
+						Diff{DiffDelete, string(textDelete)},
+						Diff{DiffInsert, string(textInsert)})
+				}
+
+				pointer = pointer - countDelete - countInsert + 1
+				if countDelete != 0 {
+					pointer++
+				}
+				if countInsert != 0 {
+					pointer++
+				}
+			} else if pointer != 0 && diffs[pointer-1].Type == DiffEqual {
+				// Merge this equality with the previous one.
+				diffs[pointer-1].Text += diffs[pointer].Text
+				diffs = append(diffs[:pointer], diffs[pointer+1:]...)
+			} else {
+				pointer++
+			}
+			countInsert = 0
+			countDelete = 0
+			textDelete = nil
+			textInsert = nil
+			break
+		}
+	}
+
+	if len(diffs[len(diffs)-1].Text) == 0 {
+		diffs = diffs[0 : len(diffs)-1] // Remove the dummy entry at the end.
+	}
+
+	// Second pass: look for single edits surrounded on both sides by equalities which can be shifted sideways to eliminate an equality. E.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
+	changes := false
+	pointer = 1
+	// Intentionally ignore the first and last element (don't need checking).
+	for pointer < (len(diffs) - 1) {
+		if diffs[pointer-1].Type == DiffEqual &&
+			diffs[pointer+1].Type == DiffEqual {
+			// This is a single edit surrounded by equalities.
+			if strings.HasSuffix(diffs[pointer].Text, diffs[pointer-1].Text) {
+				// Shift the edit over the previous equality.
+				diffs[pointer].Text = diffs[pointer-1].Text +
+					diffs[pointer].Text[:len(diffs[pointer].Text)-len(diffs[pointer-1].Text)]
+				diffs[pointer+1].Text = diffs[pointer-1].Text + diffs[pointer+1].Text
+				diffs = splice(diffs, pointer-1, 1)
+				changes = true
+			} else if strings.HasPrefix(diffs[pointer].Text, diffs[pointer+1].Text) {
+				// Shift the edit over the next equality.
+				diffs[pointer-1].Text += diffs[pointer+1].Text
+				diffs[pointer].Text =
+					diffs[pointer].Text[len(diffs[pointer+1].Text):] + diffs[pointer+1].Text
+				diffs = splice(diffs, pointer+1, 1)
+				changes = true
+			}
+		}
+		pointer++
+	}
+
+	// If shifts were made, the diff needs reordering and another shift sweep.
+	if changes {
+		diffs = dmp.DiffCleanupMerge(diffs)
+	}
+
+	return diffs
+}
+
+// DiffXIndex returns the equivalent location in s2.
+func (dmp *DiffMatchPatch) DiffXIndex(diffs []Diff, loc int) int {
+	chars1 := 0
+	chars2 := 0
+	lastChars1 := 0
+	lastChars2 := 0
+	lastDiff := Diff{}
+	for i := 0; i < len(diffs); i++ {
+		aDiff := diffs[i]
+		if aDiff.Type != DiffInsert {
+			// Equality or deletion.
+			chars1 += len(aDiff.Text)
+		}
+		if aDiff.Type != DiffDelete {
+			// Equality or insertion.
+			chars2 += len(aDiff.Text)
+		}
+		if chars1 > loc {
+			// Overshot the location.
+			lastDiff = aDiff
+			break
+		}
+		lastChars1 = chars1
+		lastChars2 = chars2
+	}
+	if lastDiff.Type == DiffDelete {
+		// The location was deleted.
+		return lastChars2
+	}
+	// Add the remaining character length.
+	return lastChars2 + (loc - lastChars1)
+}
+
+// DiffPrettyHtml converts a []Diff into a pretty HTML report.
+// It is intended as an example from which to write one's own display functions.
+func (dmp *DiffMatchPatch) DiffPrettyHtml(diffs []Diff) string {
+	var buff bytes.Buffer
+	for _, diff := range diffs {
+		text := strings.Replace(html.EscapeString(diff.Text), "\n", "&para;<br>", -1)
+		switch diff.Type {
+		case DiffInsert:
+			_, _ = buff.WriteString("<ins style=\"background:#e6ffe6;\">")
+			_, _ = buff.WriteString(text)
+			_, _ = buff.WriteString("</ins>")
+		case DiffDelete:
+			_, _ = buff.WriteString("<del style=\"background:#ffe6e6;\">")
+			_, _ = buff.WriteString(text)
+			_, _ = buff.WriteString("</del>")
+		case DiffEqual:
+			_, _ = buff.WriteString("<span>")
+			_, _ = buff.WriteString(text)
+			_, _ = buff.WriteString("</span>")
+		}
+	}
+	return buff.String()
+}
+
+// DiffPrettyText converts a []Diff into a colored text report.
+func (dmp *DiffMatchPatch) DiffPrettyText(diffs []Diff) string {
+	var buff bytes.Buffer
+	for _, diff := range diffs {
+		text := diff.Text
+
+		switch diff.Type {
+		case DiffInsert:
+			_, _ = buff.WriteString("\x1b[32m")
+			_, _ = buff.WriteString(text)
+			_, _ = buff.WriteString("\x1b[0m")
+		case DiffDelete:
+			_, _ = buff.WriteString("\x1b[31m")
+			_, _ = buff.WriteString(text)
+			_, _ = buff.WriteString("\x1b[0m")
+		case DiffEqual:
+			_, _ = buff.WriteString(text)
+		}
+	}
+
+	return buff.String()
+}
+
+// DiffText1 computes and returns the source text (all equalities and deletions).
+func (dmp *DiffMatchPatch) DiffText1(diffs []Diff) string {
+	// Accumulate the text of every equality and deletion, in order.
+	var text bytes.Buffer
+
+	for _, aDiff := range diffs {
+		if aDiff.Type != DiffInsert {
+			_, _ = text.WriteString(aDiff.Text)
+		}
+	}
+	return text.String()
+}
+
+// DiffText2 computes and returns the destination text (all equalities and insertions).
+func (dmp *DiffMatchPatch) DiffText2(diffs []Diff) string {
+	var text bytes.Buffer
+
+	for _, aDiff := range diffs {
+		if aDiff.Type != DiffDelete {
+			_, _ = text.WriteString(aDiff.Text)
+		}
+	}
+	return text.String()
+}
+
+// DiffLevenshtein computes the Levenshtein distance that is the number of inserted, deleted or substituted characters.
+func (dmp *DiffMatchPatch) DiffLevenshtein(diffs []Diff) int {
+	levenshtein := 0
+	insertions := 0
+	deletions := 0
+
+	for _, aDiff := range diffs {
+		switch aDiff.Type {
+		case DiffInsert:
+			insertions += utf8.RuneCountInString(aDiff.Text)
+		case DiffDelete:
+			deletions += utf8.RuneCountInString(aDiff.Text)
+		case DiffEqual:
+			// A deletion and an insertion is one substitution.
+			levenshtein += max(insertions, deletions)
+			insertions = 0
+			deletions = 0
+		}
+	}
+
+	levenshtein += max(insertions, deletions)
+	return levenshtein
+}
+
+// DiffToDelta crushes the diff into an encoded string which describes the operations required to transform text1 into text2.
+// E.g. =3\t-2\t+ing  -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated.  Inserted text is escaped using %xx notation.
+func (dmp *DiffMatchPatch) DiffToDelta(diffs []Diff) string {
+	var text bytes.Buffer
+	for _, aDiff := range diffs {
+		switch aDiff.Type {
+		case DiffInsert:
+			_, _ = text.WriteString("+")
+			_, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+			_, _ = text.WriteString("\t")
+			break
+		case DiffDelete:
+			_, _ = text.WriteString("-")
+			_, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+			_, _ = text.WriteString("\t")
+			break
+		case DiffEqual:
+			_, _ = text.WriteString("=")
+			_, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
+			_, _ = text.WriteString("\t")
+			break
+		}
+	}
+	delta := text.String()
+	if len(delta) != 0 {
+		// Strip off trailing tab character.
+		delta = delta[0 : utf8.RuneCountInString(delta)-1]
+		delta = unescaper.Replace(delta)
+	}
+	return delta
+}
+
+// DiffFromDelta given the original text1, and an encoded string which describes the operations required to transform text1 into text2, compute the full diff.
+func (dmp *DiffMatchPatch) DiffFromDelta(text1 string, delta string) (diffs []Diff, err error) {
+	i := 0
+	runes := []rune(text1)
+
+	for _, token := range strings.Split(delta, "\t") {
+		if len(token) == 0 {
+			// Blank tokens are ok (from a trailing \t).
+			continue
+		}
+
+		// Each token begins with a one character parameter which specifies the operation of this token (delete, insert, equality).
+		param := token[1:]
+
+		switch op := token[0]; op {
+		case '+':
+			// Decode would change all "+" to " "
+			param = strings.Replace(param, "+", "%2b", -1)
+			param, err = url.QueryUnescape(param)
+			if err != nil {
+				return nil, err
+			}
+			if !utf8.ValidString(param) {
+				return nil, fmt.Errorf("invalid UTF-8 token: %q", param)
+			}
+
+			diffs = append(diffs, Diff{DiffInsert, param})
+		case '=', '-':
+			n, err := strconv.ParseInt(param, 10, 0)
+			if err != nil {
+				return nil, err
+			} else if n < 0 {
+				return nil, errors.New("Negative number in DiffFromDelta: " + param)
+			}
+
+			i += int(n)
+			// Break out if we are out of bounds, go1.6 can't handle this very well
+			if i > len(runes) {
+				break
+			}
+			// Remember that string slicing is by byte - we want by rune here.
+			text := string(runes[i-int(n) : i])
+
+			if op == '=' {
+				diffs = append(diffs, Diff{DiffEqual, text})
+			} else {
+				diffs = append(diffs, Diff{DiffDelete, text})
+			}
+		default:
+			// Anything else is an error.
+			return nil, errors.New("Invalid diff operation in DiffFromDelta: " + string(token[0]))
+		}
+	}
+
+	if i != len(runes) {
+		return nil, fmt.Errorf("Delta length (%v) is different from source text length (%v)", i, len(text1))
+	}
+
+	return diffs, nil
+}
diff --git a/internal/go-diff/diffmatchpatch/diffmatchpatch.go b/internal/go-diff/diffmatchpatch/diffmatchpatch.go
new file mode 100644
index 0000000..d3acc32
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/diffmatchpatch.go
@@ -0,0 +1,46 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+// Package diffmatchpatch offers robust algorithms to perform the operations required for synchronizing plain text.
+package diffmatchpatch
+
+import (
+	"time"
+)
+
+// DiffMatchPatch holds the configuration for diff-match-patch operations.
+type DiffMatchPatch struct {
+	// Time allowed to compute a diff before giving up (0 for infinity).
+	DiffTimeout time.Duration
+	// Cost of an empty edit operation in terms of edit characters.
+	DiffEditCost int
+	// How far to search for a match (0 = exact location, 1000+ = broad match). A match this many characters away from the expected location will add 1.0 to the score (0.0 is a perfect match).
+	MatchDistance int
+	// When deleting a large block of text (over ~64 characters), how close do the contents have to be to match the expected contents. (0.0 = perfection, 1.0 = very loose).  Note that MatchThreshold controls how closely the end points of a delete need to match.
+	PatchDeleteThreshold float64
+	// Chunk size for context length.
+	PatchMargin int
+	// The number of bits in an int.
+	MatchMaxBits int
+	// At what point is no match declared (0.0 = perfection, 1.0 = very loose).
+	MatchThreshold float64
+}
+
+// New creates a new DiffMatchPatch object with default parameters.
+func New() *DiffMatchPatch {
+	// Defaults.
+	return &DiffMatchPatch{
+		DiffTimeout:          time.Second,
+		DiffEditCost:         4,
+		MatchThreshold:       0.5,
+		MatchDistance:        1000,
+		PatchDeleteThreshold: 0.5,
+		PatchMargin:          4,
+		MatchMaxBits:         32,
+	}
+}
diff --git a/internal/go-diff/diffmatchpatch/match.go b/internal/go-diff/diffmatchpatch/match.go
new file mode 100644
index 0000000..17374e1
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/match.go
@@ -0,0 +1,160 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+	"math"
+)
+
+// MatchMain locates the best instance of 'pattern' in 'text' near 'loc'.
+// Returns -1 if no match found.
+func (dmp *DiffMatchPatch) MatchMain(text, pattern string, loc int) int {
+	// No nil check needed: Go strings cannot be nil.
+
+	loc = int(math.Max(0, math.Min(float64(loc), float64(len(text)))))
+	if text == pattern {
+		// Shortcut (potentially not guaranteed by the algorithm)
+		return 0
+	} else if len(text) == 0 {
+		// Nothing to match.
+		return -1
+	} else if loc+len(pattern) <= len(text) && text[loc:loc+len(pattern)] == pattern {
+		// Perfect match at the perfect spot!  (Includes case of null pattern)
+		return loc
+	}
+	// Do a fuzzy compare.
+	return dmp.MatchBitap(text, pattern, loc)
+}
+
+// MatchBitap locates the best instance of 'pattern' in 'text' near 'loc' using the Bitap algorithm.
+// Returns -1 if no match was found.
+func (dmp *DiffMatchPatch) MatchBitap(text, pattern string, loc int) int {
+	// Initialise the alphabet.
+	s := dmp.MatchAlphabet(pattern)
+
+	// Highest score beyond which we give up.
+	scoreThreshold := dmp.MatchThreshold
+	// Is there a nearby exact match? (speedup)
+	bestLoc := indexOf(text, pattern, loc)
+	if bestLoc != -1 {
+		scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
+			pattern), scoreThreshold)
+		// What about in the other direction? (speedup)
+		bestLoc = lastIndexOf(text, pattern, loc+len(pattern))
+		if bestLoc != -1 {
+			scoreThreshold = math.Min(dmp.matchBitapScore(0, bestLoc, loc,
+				pattern), scoreThreshold)
+		}
+	}
+
+	// Initialise the bit arrays.
+	matchmask := 1 << uint((len(pattern) - 1))
+	bestLoc = -1
+
+	var binMin, binMid int
+	binMax := len(pattern) + len(text)
+	lastRd := []int{}
+	for d := 0; d < len(pattern); d++ {
+		// Scan for the best match; each iteration allows for one more error. Run a binary search to determine how far from 'loc' we can stray at this error level.
+		binMin = 0
+		binMid = binMax
+		for binMin < binMid {
+			if dmp.matchBitapScore(d, loc+binMid, loc, pattern) <= scoreThreshold {
+				binMin = binMid
+			} else {
+				binMax = binMid
+			}
+			binMid = (binMax-binMin)/2 + binMin
+		}
+		// Use the result from this iteration as the maximum for the next.
+		binMax = binMid
+		start := int(math.Max(1, float64(loc-binMid+1)))
+		finish := int(math.Min(float64(loc+binMid), float64(len(text))) + float64(len(pattern)))
+
+		rd := make([]int, finish+2)
+		rd[finish+1] = (1 << uint(d)) - 1
+
+		for j := finish; j >= start; j-- {
+			var charMatch int
+			if len(text) <= j-1 {
+				// Out of range.
+				charMatch = 0
+			} else if _, ok := s[text[j-1]]; !ok {
+				charMatch = 0
+			} else {
+				charMatch = s[text[j-1]]
+			}
+
+			if d == 0 {
+				// First pass: exact match.
+				rd[j] = ((rd[j+1] << 1) | 1) & charMatch
+			} else {
+				// Subsequent passes: fuzzy match.
+				rd[j] = ((rd[j+1]<<1)|1)&charMatch | (((lastRd[j+1] | lastRd[j]) << 1) | 1) | lastRd[j+1]
+			}
+			if (rd[j] & matchmask) != 0 {
+				score := dmp.matchBitapScore(d, j-1, loc, pattern)
+				// This match will almost certainly be better than any existing match.  But check anyway.
+				if score <= scoreThreshold {
+					// Told you so.
+					scoreThreshold = score
+					bestLoc = j - 1
+					if bestLoc > loc {
+						// When passing loc, don't exceed our current distance from loc.
+						start = int(math.Max(1, float64(2*loc-bestLoc)))
+					} else {
+						// Already passed loc, downhill from here on in.
+						break
+					}
+				}
+			}
+		}
+		if dmp.matchBitapScore(d+1, loc, loc, pattern) > scoreThreshold {
+			// No hope for a (better) match at greater error levels.
+			break
+		}
+		lastRd = rd
+	}
+	return bestLoc
+}
+
+// matchBitapScore computes and returns the score for a match with e errors and x location.
+func (dmp *DiffMatchPatch) matchBitapScore(e, x, loc int, pattern string) float64 {
+	accuracy := float64(e) / float64(len(pattern))
+	proximity := math.Abs(float64(loc - x))
+	if dmp.MatchDistance == 0 {
+		// Dodge divide by zero error.
+		if proximity == 0 {
+			return accuracy
+		}
+
+		return 1.0
+	}
+	return accuracy + (proximity / float64(dmp.MatchDistance))
+}
+
+// MatchAlphabet initialises the alphabet for the Bitap algorithm.
+func (dmp *DiffMatchPatch) MatchAlphabet(pattern string) map[byte]int {
+	s := map[byte]int{}
+	charPattern := []byte(pattern)
+	for _, c := range charPattern {
+		_, ok := s[c]
+		if !ok {
+			s[c] = 0
+		}
+	}
+	i := 0
+
+	for _, c := range charPattern {
+		value := s[c] | int(uint(1)<<uint((len(pattern)-i-1)))
+		s[c] = value
+		i++
+	}
+	return s
+}
diff --git a/internal/go-diff/diffmatchpatch/mathutil.go b/internal/go-diff/diffmatchpatch/mathutil.go
new file mode 100644
index 0000000..aed242b
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/mathutil.go
@@ -0,0 +1,23 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+func min(x, y int) int {
+	if x < y {
+		return x
+	}
+	return y
+}
+
+func max(x, y int) int {
+	if x > y {
+		return x
+	}
+	return y
+}
diff --git a/internal/go-diff/diffmatchpatch/operation_string.go b/internal/go-diff/diffmatchpatch/operation_string.go
new file mode 100644
index 0000000..533ec0d
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/operation_string.go
@@ -0,0 +1,17 @@
+// Code generated by "stringer -type=Operation -trimprefix=Diff"; DO NOT EDIT.
+
+package diffmatchpatch
+
+import "fmt"
+
+const _Operation_name = "DeleteEqualInsert"
+
+var _Operation_index = [...]uint8{0, 6, 11, 17}
+
+func (i Operation) String() string {
+	i -= -1
+	if i < 0 || i >= Operation(len(_Operation_index)-1) {
+		return fmt.Sprintf("Operation(%d)", i+-1)
+	}
+	return _Operation_name[_Operation_index[i]:_Operation_index[i+1]]
+}
diff --git a/internal/go-diff/diffmatchpatch/patch.go b/internal/go-diff/diffmatchpatch/patch.go
new file mode 100644
index 0000000..223c43c
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/patch.go
@@ -0,0 +1,556 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+	"bytes"
+	"errors"
+	"math"
+	"net/url"
+	"regexp"
+	"strconv"
+	"strings"
+)
+
+// Patch represents one patch operation.
+type Patch struct {
+	diffs   []Diff
+	Start1  int
+	Start2  int
+	Length1 int
+	Length2 int
+}
+
+// String emulates GNU diff's format.
+// Header: @@ -382,8 +481,9 @@
+// Indices are printed as 1-based, not 0-based.
+func (p *Patch) String() string {
+	var coords1, coords2 string
+
+	if p.Length1 == 0 {
+		coords1 = strconv.Itoa(p.Start1) + ",0"
+	} else if p.Length1 == 1 {
+		coords1 = strconv.Itoa(p.Start1 + 1)
+	} else {
+		coords1 = strconv.Itoa(p.Start1+1) + "," + strconv.Itoa(p.Length1)
+	}
+
+	if p.Length2 == 0 {
+		coords2 = strconv.Itoa(p.Start2) + ",0"
+	} else if p.Length2 == 1 {
+		coords2 = strconv.Itoa(p.Start2 + 1)
+	} else {
+		coords2 = strconv.Itoa(p.Start2+1) + "," + strconv.Itoa(p.Length2)
+	}
+
+	var text bytes.Buffer
+	_, _ = text.WriteString("@@ -" + coords1 + " +" + coords2 + " @@\n")
+
+	// Escape the body of the patch with %xx notation.
+	for _, aDiff := range p.diffs {
+		switch aDiff.Type {
+		case DiffInsert:
+			_, _ = text.WriteString("+")
+		case DiffDelete:
+			_, _ = text.WriteString("-")
+		case DiffEqual:
+			_, _ = text.WriteString(" ")
+		}
+
+		_, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
+		_, _ = text.WriteString("\n")
+	}
+
+	return unescaper.Replace(text.String())
+}
+
+// PatchAddContext increases the context until it is unique, but doesn't let the pattern expand beyond MatchMaxBits.
+func (dmp *DiffMatchPatch) PatchAddContext(patch Patch, text string) Patch {
+	if len(text) == 0 {
+		return patch
+	}
+
+	pattern := text[patch.Start2 : patch.Start2+patch.Length1]
+	padding := 0
+
+	// Look for the first and last matches of pattern in text.  If two different matches are found, increase the pattern length.
+	for strings.Index(text, pattern) != strings.LastIndex(text, pattern) &&
+		len(pattern) < dmp.MatchMaxBits-2*dmp.PatchMargin {
+		padding += dmp.PatchMargin
+		maxStart := max(0, patch.Start2-padding)
+		minEnd := min(len(text), patch.Start2+patch.Length1+padding)
+		pattern = text[maxStart:minEnd]
+	}
+	// Add one chunk for good luck.
+	padding += dmp.PatchMargin
+
+	// Add the prefix.
+	prefix := text[max(0, patch.Start2-padding):patch.Start2]
+	if len(prefix) != 0 {
+		patch.diffs = append([]Diff{Diff{DiffEqual, prefix}}, patch.diffs...)
+	}
+	// Add the suffix.
+	suffix := text[patch.Start2+patch.Length1 : min(len(text), patch.Start2+patch.Length1+padding)]
+	if len(suffix) != 0 {
+		patch.diffs = append(patch.diffs, Diff{DiffEqual, suffix})
+	}
+
+	// Roll back the start points.
+	patch.Start1 -= len(prefix)
+	patch.Start2 -= len(prefix)
+	// Extend the lengths.
+	patch.Length1 += len(prefix) + len(suffix)
+	patch.Length2 += len(prefix) + len(suffix)
+
+	return patch
+}
+
+// PatchMake computes a list of patches.
+func (dmp *DiffMatchPatch) PatchMake(opt ...interface{}) []Patch {
+	if len(opt) == 1 {
+		diffs, _ := opt[0].([]Diff)
+		text1 := dmp.DiffText1(diffs)
+		return dmp.PatchMake(text1, diffs)
+	} else if len(opt) == 2 {
+		text1 := opt[0].(string)
+		switch t := opt[1].(type) {
+		case string:
+			diffs := dmp.DiffMain(text1, t, true)
+			if len(diffs) > 2 {
+				diffs = dmp.DiffCleanupSemantic(diffs)
+				diffs = dmp.DiffCleanupEfficiency(diffs)
+			}
+			return dmp.PatchMake(text1, diffs)
+		case []Diff:
+			return dmp.patchMake2(text1, t)
+		}
+	} else if len(opt) == 3 {
+		return dmp.PatchMake(opt[0], opt[2])
+	}
+	return []Patch{}
+}
+
+// patchMake2 computes a list of patches to turn text1 into text2.
+// text2 is not provided, diffs are the delta between text1 and text2.
+func (dmp *DiffMatchPatch) patchMake2(text1 string, diffs []Diff) []Patch {
+	// No nil check needed: Go strings cannot be nil; an empty diffs slice is handled below.
+	patches := []Patch{}
+	if len(diffs) == 0 {
+		return patches // Get rid of the null case.
+	}
+
+	patch := Patch{}
+	charCount1 := 0 // Number of characters into the text1 string.
+	charCount2 := 0 // Number of characters into the text2 string.
+	// Start with text1 (prepatchText) and apply the diffs until we arrive at text2 (postpatchText). We recreate the patches one by one to determine context info.
+	prepatchText := text1
+	postpatchText := text1
+
+	for i, aDiff := range diffs {
+		if len(patch.diffs) == 0 && aDiff.Type != DiffEqual {
+			// A new patch starts here.
+			patch.Start1 = charCount1
+			patch.Start2 = charCount2
+		}
+
+		switch aDiff.Type {
+		case DiffInsert:
+			patch.diffs = append(patch.diffs, aDiff)
+			patch.Length2 += len(aDiff.Text)
+			postpatchText = postpatchText[:charCount2] +
+				aDiff.Text + postpatchText[charCount2:]
+		case DiffDelete:
+			patch.Length1 += len(aDiff.Text)
+			patch.diffs = append(patch.diffs, aDiff)
+			postpatchText = postpatchText[:charCount2] + postpatchText[charCount2+len(aDiff.Text):]
+		case DiffEqual:
+			if len(aDiff.Text) <= 2*dmp.PatchMargin &&
+				len(patch.diffs) != 0 && i != len(diffs)-1 {
+				// Small equality inside a patch.
+				patch.diffs = append(patch.diffs, aDiff)
+				patch.Length1 += len(aDiff.Text)
+				patch.Length2 += len(aDiff.Text)
+			}
+			if len(aDiff.Text) >= 2*dmp.PatchMargin {
+				// Time for a new patch.
+				if len(patch.diffs) != 0 {
+					patch = dmp.PatchAddContext(patch, prepatchText)
+					patches = append(patches, patch)
+					patch = Patch{}
+					// Unlike Unidiff, our patch lists have a rolling context. http://code.google.com/p/google-diff-match-patch/wiki/Unidiff Update prepatch text & pos to reflect the application of the just completed patch.
+					prepatchText = postpatchText
+					charCount1 = charCount2
+				}
+			}
+		}
+
+		// Update the current character count.
+		if aDiff.Type != DiffInsert {
+			charCount1 += len(aDiff.Text)
+		}
+		if aDiff.Type != DiffDelete {
+			charCount2 += len(aDiff.Text)
+		}
+	}
+
+	// Pick up the leftover patch if not empty.
+	if len(patch.diffs) != 0 {
+		patch = dmp.PatchAddContext(patch, prepatchText)
+		patches = append(patches, patch)
+	}
+
+	return patches
+}
+
+// PatchDeepCopy returns an array that is identical to a given an array of patches.
+func (dmp *DiffMatchPatch) PatchDeepCopy(patches []Patch) []Patch {
+	patchesCopy := []Patch{}
+	for _, aPatch := range patches {
+		patchCopy := Patch{}
+		for _, aDiff := range aPatch.diffs {
+			patchCopy.diffs = append(patchCopy.diffs, Diff{
+				aDiff.Type,
+				aDiff.Text,
+			})
+		}
+		patchCopy.Start1 = aPatch.Start1
+		patchCopy.Start2 = aPatch.Start2
+		patchCopy.Length1 = aPatch.Length1
+		patchCopy.Length2 = aPatch.Length2
+		patchesCopy = append(patchesCopy, patchCopy)
+	}
+	return patchesCopy
+}
+
+// PatchApply merges a set of patches onto the text.  Returns a patched text, as well as an array of true/false values indicating which patches were applied.
+func (dmp *DiffMatchPatch) PatchApply(patches []Patch, text string) (string, []bool) {
+	if len(patches) == 0 {
+		return text, []bool{}
+	}
+
+	// Deep copy the patches so that no changes are made to originals.
+	patches = dmp.PatchDeepCopy(patches)
+
+	nullPadding := dmp.PatchAddPadding(patches)
+	text = nullPadding + text + nullPadding
+	patches = dmp.PatchSplitMax(patches)
+
+	x := 0
+	// delta keeps track of the offset between the expected and actual location of the previous patch.  If there are patches expected at positions 10 and 20, but the first patch was found at 12, delta is 2 and the second patch has an effective expected position of 22.
+	delta := 0
+	results := make([]bool, len(patches))
+	for _, aPatch := range patches {
+		expectedLoc := aPatch.Start2 + delta
+		text1 := dmp.DiffText1(aPatch.diffs)
+		var startLoc int
+		endLoc := -1
+		if len(text1) > dmp.MatchMaxBits {
+			// PatchSplitMax will only provide an oversized pattern in the case of a monster delete.
+			startLoc = dmp.MatchMain(text, text1[:dmp.MatchMaxBits], expectedLoc)
+			if startLoc != -1 {
+				endLoc = dmp.MatchMain(text,
+					text1[len(text1)-dmp.MatchMaxBits:], expectedLoc+len(text1)-dmp.MatchMaxBits)
+				if endLoc == -1 || startLoc >= endLoc {
+					// Can't find valid trailing context.  Drop this patch.
+					startLoc = -1
+				}
+			}
+		} else {
+			startLoc = dmp.MatchMain(text, text1, expectedLoc)
+		}
+		if startLoc == -1 {
+			// No match found.  :(
+			results[x] = false
+			// Subtract the delta for this failed patch from subsequent patches.
+			delta -= aPatch.Length2 - aPatch.Length1
+		} else {
+			// Found a match.  :)
+			results[x] = true
+			delta = startLoc - expectedLoc
+			var text2 string
+			if endLoc == -1 {
+				text2 = text[startLoc:int(math.Min(float64(startLoc+len(text1)), float64(len(text))))]
+			} else {
+				text2 = text[startLoc:int(math.Min(float64(endLoc+dmp.MatchMaxBits), float64(len(text))))]
+			}
+			if text1 == text2 {
+				// Perfect match, just shove the Replacement text in.
+				text = text[:startLoc] + dmp.DiffText2(aPatch.diffs) + text[startLoc+len(text1):]
+			} else {
+				// Imperfect match.  Run a diff to get a framework of equivalent indices.
+				diffs := dmp.DiffMain(text1, text2, false)
+				if len(text1) > dmp.MatchMaxBits && float64(dmp.DiffLevenshtein(diffs))/float64(len(text1)) > dmp.PatchDeleteThreshold {
+					// The end points match, but the content is unacceptably bad.
+					results[x] = false
+				} else {
+					diffs = dmp.DiffCleanupSemanticLossless(diffs)
+					index1 := 0
+					for _, aDiff := range aPatch.diffs {
+						if aDiff.Type != DiffEqual {
+							index2 := dmp.DiffXIndex(diffs, index1)
+							if aDiff.Type == DiffInsert {
+								// Insertion
+								text = text[:startLoc+index2] + aDiff.Text + text[startLoc+index2:]
+							} else if aDiff.Type == DiffDelete {
+								// Deletion
+								startIndex := startLoc + index2
+								text = text[:startIndex] +
+									text[startIndex+dmp.DiffXIndex(diffs, index1+len(aDiff.Text))-index2:]
+							}
+						}
+						if aDiff.Type != DiffDelete {
+							index1 += len(aDiff.Text)
+						}
+					}
+				}
+			}
+		}
+		x++
+	}
+	// Strip the padding off.
+	text = text[len(nullPadding) : len(nullPadding)+(len(text)-2*len(nullPadding))]
+	return text, results
+}
+
+// PatchAddPadding adds some padding on text start and end so that edges can match something.
+// Intended to be called only from within patchApply.
+func (dmp *DiffMatchPatch) PatchAddPadding(patches []Patch) string {
+	paddingLength := dmp.PatchMargin
+	nullPadding := ""
+	for x := 1; x <= paddingLength; x++ {
+		nullPadding += string(x)
+	}
+
+	// Bump all the patches forward.
+	for i := range patches {
+		patches[i].Start1 += paddingLength
+		patches[i].Start2 += paddingLength
+	}
+
+	// Add some padding on start of first diff.
+	if len(patches[0].diffs) == 0 || patches[0].diffs[0].Type != DiffEqual {
+		// Add nullPadding equality.
+		patches[0].diffs = append([]Diff{Diff{DiffEqual, nullPadding}}, patches[0].diffs...)
+		patches[0].Start1 -= paddingLength // Should be 0.
+		patches[0].Start2 -= paddingLength // Should be 0.
+		patches[0].Length1 += paddingLength
+		patches[0].Length2 += paddingLength
+	} else if paddingLength > len(patches[0].diffs[0].Text) {
+		// Grow first equality.
+		extraLength := paddingLength - len(patches[0].diffs[0].Text)
+		patches[0].diffs[0].Text = nullPadding[len(patches[0].diffs[0].Text):] + patches[0].diffs[0].Text
+		patches[0].Start1 -= extraLength
+		patches[0].Start2 -= extraLength
+		patches[0].Length1 += extraLength
+		patches[0].Length2 += extraLength
+	}
+
+	// Add some padding on end of last diff.
+	last := len(patches) - 1
+	if len(patches[last].diffs) == 0 || patches[last].diffs[len(patches[last].diffs)-1].Type != DiffEqual {
+		// Add nullPadding equality.
+		patches[last].diffs = append(patches[last].diffs, Diff{DiffEqual, nullPadding})
+		patches[last].Length1 += paddingLength
+		patches[last].Length2 += paddingLength
+	} else if paddingLength > len(patches[last].diffs[len(patches[last].diffs)-1].Text) {
+		// Grow last equality.
+		lastDiff := patches[last].diffs[len(patches[last].diffs)-1]
+		extraLength := paddingLength - len(lastDiff.Text)
+		patches[last].diffs[len(patches[last].diffs)-1].Text += nullPadding[:extraLength]
+		patches[last].Length1 += extraLength
+		patches[last].Length2 += extraLength
+	}
+
+	return nullPadding
+}
+
+// PatchSplitMax looks through the patches and breaks up any which are longer than the maximum limit of the match algorithm.
+// Intended to be called only from within patchApply.
+func (dmp *DiffMatchPatch) PatchSplitMax(patches []Patch) []Patch {
+	patchSize := dmp.MatchMaxBits
+	for x := 0; x < len(patches); x++ {
+		if patches[x].Length1 <= patchSize {
+			continue
+		}
+		bigpatch := patches[x]
+		// Remove the big old patch.
+		patches = append(patches[:x], patches[x+1:]...)
+		x--
+
+		Start1 := bigpatch.Start1
+		Start2 := bigpatch.Start2
+		precontext := ""
+		for len(bigpatch.diffs) != 0 {
+			// Create one of several smaller patches.
+			patch := Patch{}
+			empty := true
+			patch.Start1 = Start1 - len(precontext)
+			patch.Start2 = Start2 - len(precontext)
+			if len(precontext) != 0 {
+				patch.Length1 = len(precontext)
+				patch.Length2 = len(precontext)
+				patch.diffs = append(patch.diffs, Diff{DiffEqual, precontext})
+			}
+			for len(bigpatch.diffs) != 0 && patch.Length1 < patchSize-dmp.PatchMargin {
+				diffType := bigpatch.diffs[0].Type
+				diffText := bigpatch.diffs[0].Text
+				if diffType == DiffInsert {
+					// Insertions are harmless.
+					patch.Length2 += len(diffText)
+					Start2 += len(diffText)
+					patch.diffs = append(patch.diffs, bigpatch.diffs[0])
+					bigpatch.diffs = bigpatch.diffs[1:]
+					empty = false
+				} else if diffType == DiffDelete && len(patch.diffs) == 1 && patch.diffs[0].Type == DiffEqual && len(diffText) > 2*patchSize {
+					// This is a large deletion.  Let it pass in one chunk.
+					patch.Length1 += len(diffText)
+					Start1 += len(diffText)
+					empty = false
+					patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+					bigpatch.diffs = bigpatch.diffs[1:]
+				} else {
+					// Deletion or equality.  Only take as much as we can stomach.
+					diffText = diffText[:min(len(diffText), patchSize-patch.Length1-dmp.PatchMargin)]
+
+					patch.Length1 += len(diffText)
+					Start1 += len(diffText)
+					if diffType == DiffEqual {
+						patch.Length2 += len(diffText)
+						Start2 += len(diffText)
+					} else {
+						empty = false
+					}
+					patch.diffs = append(patch.diffs, Diff{diffType, diffText})
+					if diffText == bigpatch.diffs[0].Text {
+						bigpatch.diffs = bigpatch.diffs[1:]
+					} else {
+						bigpatch.diffs[0].Text =
+							bigpatch.diffs[0].Text[len(diffText):]
+					}
+				}
+			}
+			// Compute the head context for the next patch.
+			precontext = dmp.DiffText2(patch.diffs)
+			precontext = precontext[max(0, len(precontext)-dmp.PatchMargin):]
+
+			postcontext := ""
+			// Append the end context for this patch.
+			if len(dmp.DiffText1(bigpatch.diffs)) > dmp.PatchMargin {
+				postcontext = dmp.DiffText1(bigpatch.diffs)[:dmp.PatchMargin]
+			} else {
+				postcontext = dmp.DiffText1(bigpatch.diffs)
+			}
+
+			if len(postcontext) != 0 {
+				patch.Length1 += len(postcontext)
+				patch.Length2 += len(postcontext)
+				if len(patch.diffs) != 0 && patch.diffs[len(patch.diffs)-1].Type == DiffEqual {
+					patch.diffs[len(patch.diffs)-1].Text += postcontext
+				} else {
+					patch.diffs = append(patch.diffs, Diff{DiffEqual, postcontext})
+				}
+			}
+			if !empty {
+				x++
+				patches = append(patches[:x], append([]Patch{patch}, patches[x:]...)...)
+			}
+		}
+	}
+	return patches
+}
+
+// PatchToText takes a list of patches and returns a textual representation.
+func (dmp *DiffMatchPatch) PatchToText(patches []Patch) string {
+	var text bytes.Buffer
+	for _, aPatch := range patches {
+		_, _ = text.WriteString(aPatch.String())
+	}
+	return text.String()
+}
+
+// PatchFromText parses a textual representation of patches and returns a List of Patch objects.
+func (dmp *DiffMatchPatch) PatchFromText(textline string) ([]Patch, error) {
+	patches := []Patch{}
+	if len(textline) == 0 {
+		return patches, nil
+	}
+	text := strings.Split(textline, "\n")
+	textPointer := 0
+	patchHeader := regexp.MustCompile("^@@ -(\\d+),?(\\d*) \\+(\\d+),?(\\d*) @@$")
+
+	var patch Patch
+	var sign uint8
+	var line string
+	for textPointer < len(text) {
+
+		if !patchHeader.MatchString(text[textPointer]) {
+			return patches, errors.New("Invalid patch string: " + text[textPointer])
+		}
+
+		patch = Patch{}
+		m := patchHeader.FindStringSubmatch(text[textPointer])
+
+		patch.Start1, _ = strconv.Atoi(m[1])
+		if len(m[2]) == 0 {
+			patch.Start1--
+			patch.Length1 = 1
+		} else if m[2] == "0" {
+			patch.Length1 = 0
+		} else {
+			patch.Start1--
+			patch.Length1, _ = strconv.Atoi(m[2])
+		}
+
+		patch.Start2, _ = strconv.Atoi(m[3])
+
+		if len(m[4]) == 0 {
+			patch.Start2--
+			patch.Length2 = 1
+		} else if m[4] == "0" {
+			patch.Length2 = 0
+		} else {
+			patch.Start2--
+			patch.Length2, _ = strconv.Atoi(m[4])
+		}
+		textPointer++
+
+		for textPointer < len(text) {
+			if len(text[textPointer]) > 0 {
+				sign = text[textPointer][0]
+			} else {
+				textPointer++
+				continue
+			}
+
+			line = text[textPointer][1:]
+			line = strings.Replace(line, "+", "%2b", -1)
+			line, _ = url.QueryUnescape(line)
+			if sign == '-' {
+				// Deletion.
+				patch.diffs = append(patch.diffs, Diff{DiffDelete, line})
+			} else if sign == '+' {
+				// Insertion.
+				patch.diffs = append(patch.diffs, Diff{DiffInsert, line})
+			} else if sign == ' ' {
+				// Minor equality.
+				patch.diffs = append(patch.diffs, Diff{DiffEqual, line})
+			} else if sign == '@' {
+				// Start of next patch.
+				break
+			} else {
+				// WTF?
+				return patches, errors.New("Invalid patch mode '" + string(sign) + "' in: " + string(line))
+			}
+			textPointer++
+		}
+
+		patches = append(patches, patch)
+	}
+	return patches, nil
+}
diff --git a/internal/go-diff/diffmatchpatch/stringutil.go b/internal/go-diff/diffmatchpatch/stringutil.go
new file mode 100644
index 0000000..265f29c
--- /dev/null
+++ b/internal/go-diff/diffmatchpatch/stringutil.go
@@ -0,0 +1,88 @@
+// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
+// https://github.com/sergi/go-diff
+// See the included LICENSE file for license details.
+//
+// go-diff is a Go implementation of Google's Diff, Match, and Patch library
+// Original library is Copyright (c) 2006 Google Inc.
+// http://code.google.com/p/google-diff-match-patch/
+
+package diffmatchpatch
+
+import (
+	"strings"
+	"unicode/utf8"
+)
+
+// unescaper unescapes selected chars for compatibility with JavaScript's encodeURI.
+// In speed critical applications this could be dropped since the receiving application will certainly decode these fine. Note that this function is case-sensitive.  Thus "%3F" would not be unescaped.  But this is ok because it is only called with the output of HttpUtility.UrlEncode which returns lowercase hex. Example: "%3f" -> "?", "%24" -> "$", etc.
+var unescaper = strings.NewReplacer(
+	"%21", "!", "%7E", "~", "%27", "'",
+	"%28", "(", "%29", ")", "%3B", ";",
+	"%2F", "/", "%3F", "?", "%3A", ":",
+	"%40", "@", "%26", "&", "%3D", "=",
+	"%2B", "+", "%24", "$", "%2C", ",", "%23", "#", "%2A", "*")
+
+// indexOf returns the first index of pattern in str, starting at str[i].
+func indexOf(str string, pattern string, i int) int {
+	if i > len(str)-1 {
+		return -1
+	}
+	if i <= 0 {
+		return strings.Index(str, pattern)
+	}
+	ind := strings.Index(str[i:], pattern)
+	if ind == -1 {
+		return -1
+	}
+	return ind + i
+}
+
+// lastIndexOf returns the last index of pattern in str, starting at str[i].
+func lastIndexOf(str string, pattern string, i int) int {
+	if i < 0 {
+		return -1
+	}
+	if i >= len(str) {
+		return strings.LastIndex(str, pattern)
+	}
+	_, size := utf8.DecodeRuneInString(str[i:])
+	return strings.LastIndex(str[:i+size], pattern)
+}
+
+// runesIndexOf returns the index of pattern in target, starting at target[i].
+func runesIndexOf(target, pattern []rune, i int) int {
+	if i > len(target)-1 {
+		return -1
+	}
+	if i <= 0 {
+		return runesIndex(target, pattern)
+	}
+	ind := runesIndex(target[i:], pattern)
+	if ind == -1 {
+		return -1
+	}
+	return ind + i
+}
+
+func runesEqual(r1, r2 []rune) bool {
+	if len(r1) != len(r2) {
+		return false
+	}
+	for i, c := range r1 {
+		if c != r2[i] {
+			return false
+		}
+	}
+	return true
+}
+
+// runesIndex is the equivalent of strings.Index for rune slices.
+func runesIndex(r1, r2 []rune) int {
+	last := len(r1) - len(r2)
+	for i := 0; i <= last; i++ {
+		if runesEqual(r1[i:i+len(r2)], r2) {
+			return i
+		}
+	}
+	return -1
+}
diff --git a/internal/go-diff/scripts/lint.sh b/internal/go-diff/scripts/lint.sh
new file mode 100755
index 0000000..3dad05f
--- /dev/null
+++ b/internal/go-diff/scripts/lint.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+if [ -z ${PKG+x} ]; then echo "PKG is not set"; exit 1; fi
+if [ -z ${ROOT_DIR+x} ]; then echo "ROOT_DIR is not set"; exit 1; fi
+
+echo "gofmt:"
+OUT=$(gofmt -l $ROOT_DIR)
+if [ $(echo "$OUT\c" | wc -l) -ne 0 ]; then echo "$OUT"; PROBLEM=1; fi
+
+echo "errcheck:"
+OUT=$(errcheck $PKG/...)
+if [ $(echo "$OUT\c" | wc -l) -ne 0 ]; then echo "$OUT"; PROBLEM=1; fi
+
+echo "go vet:"
+OUT=$(go tool vet -all=true -v=true $ROOT_DIR 2>&1 | grep --invert-match -E "(Checking file|\%p of wrong type|can't check non-constant format)")
+if [ $(echo "$OUT\c" | wc -l) -ne 0 ]; then echo "$OUT"; PROBLEM=1; fi
+
+echo "golint:"
+OUT=$(golint $PKG/... | grep --invert-match -E "(method DiffPrettyHtml should be DiffPrettyHTML)")
+if [ $(echo "$OUT\c" | wc -l) -ne 0 ]; then echo "$OUT"; PROBLEM=1; fi
+
+if [ -n "$PROBLEM" ]; then exit 1; fi
diff --git a/internal/go-diff/testdata/speedtest1.txt b/internal/go-diff/testdata/speedtest1.txt
new file mode 100644
index 0000000..54b438f
--- /dev/null
+++ b/internal/go-diff/testdata/speedtest1.txt
@@ -0,0 +1,230 @@
+This is a '''list of newspapers published by [[Journal Register Company]]'''.
+
+The company owns daily and weekly newspapers, other print media properties and newspaper-affiliated local Websites in the [[U.S.]] states of [[Connecticut]], [[Michigan]], [[New York]], [[Ohio]] and [[Pennsylvania]], organized in six geographic "clusters":<ref>[http://www.journalregister.com/newspapers.html Journal Register Company: Our Newspapers], accessed February 10, 2008.</ref>
+
+== Capital-Saratoga ==
+Three dailies, associated weeklies and [[pennysaver]]s in greater [[Albany, New York]]; also [http://www.capitalcentral.com capitalcentral.com] and [http://www.jobsinnewyork.com JobsInNewYork.com].
+
+* ''The Oneida Daily Dispatch'' {{WS|oneidadispatch.com}} of [[Oneida, New York]]
+* ''[[The Record (Troy)|The Record]]'' {{WS|troyrecord.com}} of [[Troy, New York]]
+* ''[[The Saratogian]]'' {{WS|saratogian.com}} of [[Saratoga Springs, New York]]
+* Weeklies:
+** ''Community News'' {{WS|cnweekly.com}} weekly of [[Clifton Park, New York]]
+** ''Rome Observer'' of [[Rome, New York]]
+** ''Life & Times of Utica'' of [[Utica, New York]]
+
+== Connecticut ==
+Five dailies, associated weeklies and [[pennysaver]]s in the state of [[Connecticut]]; also [http://www.ctcentral.com CTcentral.com], [http://www.ctcarsandtrucks.com CTCarsAndTrucks.com] and [http://www.jobsinct.com JobsInCT.com].
+
+* ''The Middletown Press'' {{WS|middletownpress.com}} of [[Middletown, Connecticut|Middletown]]
+* ''[[New Haven Register]]'' {{WS|newhavenregister.com}} of [[New Haven, Connecticut|New Haven]]
+* ''The Register Citizen'' {{WS|registercitizen.com}} of [[Torrington, Connecticut|Torrington]]
+
+* [[New Haven Register#Competitors|Elm City Newspapers]] {{WS|ctcentral.com}}
+** ''The Advertiser'' of [[East Haven, Connecticut|East Haven]]
+** ''Hamden Chronicle'' of [[Hamden, Connecticut|Hamden]]
+** ''Milford Weekly'' of [[Milford, Connecticut|Milford]]
+** ''The Orange Bulletin'' of [[Orange, Connecticut|Orange]]
+** ''The Post'' of [[North Haven, Connecticut|North Haven]]
+** ''Shelton Weekly'' of [[Shelton, Connecticut|Shelton]]
+** ''The Stratford Bard'' of [[Stratford, Connecticut|Stratford]]
+** ''Wallingford Voice'' of [[Wallingford, Connecticut|Wallingford]]
+** ''West Haven News'' of [[West Haven, Connecticut|West Haven]]
+* Housatonic Publications 
+** ''The New Milford Times'' {{WS|newmilfordtimes.com}} of [[New Milford, Connecticut|New Milford]]
+** ''The Brookfield Journal'' of [[Brookfield, Connecticut|Brookfield]]
+** ''The Kent Good Times Dispatch'' of [[Kent, Connecticut|Kent]]
+** ''The Bethel Beacon'' of [[Bethel, Connecticut|Bethel]]
+** ''The Litchfield Enquirer'' of [[Litchfield, Connecticut|Litchfield]]
+** ''Litchfield County Times'' of [[Litchfield, Connecticut|Litchfield]]
+* Imprint Newspapers {{WS|imprintnewspapers.com}}
+** ''West Hartford News'' of [[West Hartford, Connecticut|West Hartford]]
+** ''Windsor Journal'' of [[Windsor, Connecticut|Windsor]]
+** ''Windsor Locks Journal'' of [[Windsor Locks, Connecticut|Windsor Locks]]
+** ''Avon Post'' of [[Avon, Connecticut|Avon]]
+** ''Farmington Post'' of [[Farmington, Connecticut|Farmington]]
+** ''Simsbury Post'' of [[Simsbury, Connecticut|Simsbury]]
+** ''Tri-Town Post'' of [[Burlington, Connecticut|Burlington]], [[Canton, Connecticut|Canton]] and [[Harwinton, Connecticut|Harwinton]]
+* Minuteman Publications
+** ''[[Fairfield Minuteman]]'' of [[Fairfield, Connecticut|Fairfield]]
+** ''The Westport Minuteman'' {{WS|westportminuteman.com}} of [[Westport, Connecticut|Westport]]
+* Shoreline Newspapers weeklies:
+** ''Branford Review'' of [[Branford, Connecticut|Branford]]
+** ''Clinton Recorder'' of [[Clinton, Connecticut|Clinton]]
+** ''The Dolphin'' of [[Naval Submarine Base New London]] in [[New London, Connecticut|New London]]
+** ''Main Street News'' {{WS|ctmainstreetnews.com}} of [[Essex, Connecticut|Essex]]
+** ''Pictorial Gazette'' of [[Old Saybrook, Connecticut|Old Saybrook]]
+** ''Regional Express'' of [[Colchester, Connecticut|Colchester]]
+** ''Regional Standard'' of [[Colchester, Connecticut|Colchester]]
+** ''Shoreline Times'' {{WS|shorelinetimes.com}} of [[Guilford, Connecticut|Guilford]]
+** ''Shore View East'' of [[Madison, Connecticut|Madison]]
+** ''Shore View West'' of [[Guilford, Connecticut|Guilford]]
+* Other weeklies:
+** ''Registro'' {{WS|registroct.com}} of [[New Haven, Connecticut|New Haven]]
+** ''Thomaston Express'' {{WS|thomastownexpress.com}} of [[Thomaston, Connecticut|Thomaston]]
+** ''Foothills Traders'' {{WS|foothillstrader.com}} of Torrington, Bristol, Canton
+
+== Michigan ==
+Four dailies, associated weeklies and [[pennysaver]]s in the state of [[Michigan]]; also [http://www.micentralhomes.com MIcentralhomes.com] and [http://www.micentralautos.com MIcentralautos.com]
+* ''[[Oakland Press]]'' {{WS|theoaklandpress.com}} of [[Oakland, Michigan|Oakland]]
+* ''Daily Tribune'' {{WS|dailytribune.com}} of [[Royal Oak, Michigan|Royal Oak]]
+* ''Macomb Daily'' {{WS|macombdaily.com}} of [[Mt. Clemens, Michigan|Mt. Clemens]]
+* ''[[Morning Sun]]'' {{WS|themorningsun.com}} of  [[Mount Pleasant, Michigan|Mount Pleasant]]
+* Heritage Newspapers {{WS|heritage.com}}
+** ''Belleville View''
+** ''Ile Camera''
+** ''Monroe Guardian''
+** ''Ypsilanti Courier''
+** ''News-Herald''
+** ''Press & Guide''
+** ''Chelsea Standard & Dexter Leader''
+** ''Manchester Enterprise''
+** ''Milan News-Leader''
+** ''Saline Reporter''
+* Independent Newspapers {{WS|sourcenewspapers.com}}
+** ''Advisor''
+** ''Source''
+* Morning Star {{WS|morningstarpublishing.com}}
+** ''Alma Reminder''
+** ''Alpena Star''
+** ''Antrim County News''
+** ''Carson City Reminder''
+** ''The Leader & Kalkaskian''
+** ''Ogemaw/Oscoda County Star''
+** ''Petoskey/Charlevoix Star''
+** ''Presque Isle Star''
+** ''Preview Community Weekly''
+** ''Roscommon County Star''
+** ''St. Johns Reminder''
+** ''Straits Area Star''
+** ''The (Edmore) Advertiser'' 
+* Voice Newspapers {{WS|voicenews.com}}
+** ''Armada Times''
+** ''Bay Voice''
+** ''Blue Water Voice''
+** ''Downriver Voice''
+** ''Macomb Township Voice''
+** ''North Macomb Voice''
+** ''Weekend Voice''
+** ''Suburban Lifestyles'' {{WS|suburbanlifestyles.com}}
+
+== Mid-Hudson ==
+One daily, associated magazines in the [[Hudson River Valley]] of [[New York]]; also [http://www.midhudsoncentral.com MidHudsonCentral.com] and [http://www.jobsinnewyork.com JobsInNewYork.com].
+
+* ''[[Daily Freeman]]'' {{WS|dailyfreeman.com}} of [[Kingston, New York]]
+
+== Ohio ==
+Two dailies, associated magazines and three shared Websites, all in the state of [[Ohio]]: [http://www.allaroundcleveland.com AllAroundCleveland.com], [http://www.allaroundclevelandcars.com AllAroundClevelandCars.com] and [http://www.allaroundclevelandjobs.com AllAroundClevelandJobs.com].
+
+* ''[[The News-Herald (Ohio)|The News-Herald]]'' {{WS|news-herald.com}} of [[Willoughby, Ohio|Willoughby]]
+* ''[[The Morning Journal]]'' {{WS|morningjournal.com}} of [[Lorain, Ohio|Lorain]]
+
+== Philadelphia area ==
+Seven dailies and associated weeklies and magazines in [[Pennsylvania]] and [[New Jersey]], and associated Websites: [http://www.allaroundphilly.com AllAroundPhilly.com], [http://www.jobsinnj.com JobsInNJ.com], [http://www.jobsinpa.com JobsInPA.com], and [http://www.phillycarsearch.com PhillyCarSearch.com].
+
+* ''The Daily Local'' {{WS|dailylocal.com}} of [[West Chester, Pennsylvania|West Chester]]
+* ''[[Delaware County Daily and Sunday Times]] {{WS|delcotimes.com}} of Primos
+* ''[[The Mercury (Pennsylvania)|The Mercury]]'' {{WS|pottstownmercury.com}} of [[Pottstown, Pennsylvania|Pottstown]]
+* ''The Phoenix'' {{WS|phoenixvillenews.com}} of [[Phoenixville, Pennsylvania|Phoenixville]]
+* ''[[The Reporter (Lansdale)|The Reporter]]'' {{WS|thereporteronline.com}} of [[Lansdale, Pennsylvania|Lansdale]]
+* ''The Times Herald'' {{WS|timesherald.com}} of [[Norristown, Pennsylvania|Norristown]]
+* ''[[The Trentonian]]'' {{WS|trentonian.com}} of [[Trenton, New Jersey]]
+
+* Weeklies
+** ''El Latino Expreso'' of [[Trenton, New Jersey]]
+** ''La Voz'' of [[Norristown, Pennsylvania]]
+** ''The Village News'' of [[Downingtown, Pennsylvania]]
+** ''The Times Record'' of [[Kennett Square, Pennsylvania]]
+** ''The Tri-County Record'' {{WS|tricountyrecord.com}} of [[Morgantown, Pennsylvania]]
+** ''News of Delaware County'' {{WS|newsofdelawarecounty.com}}of [[Havertown, Pennsylvania]]
+** ''Main Line Times'' {{WS|mainlinetimes.com}}of [[Ardmore, Pennsylvania]]
+** ''Penny Pincher'' of [[Pottstown, Pennsylvania]]
+** ''Town Talk'' {{WS|towntalknews.com}} of [[Ridley, Pennsylvania]]
+* Chesapeake Publishing {{WS|pa8newsgroup.com}} 
+** ''Solanco Sun Ledger'' of [[Quarryville, Pennsylvania]]
+** ''Columbia Ledger'' of [[Columbia, Pennsylvania]]
+** ''Coatesville Ledger'' of [[Downingtown, Pennsylvania]]
+** ''Parkesburg Post Ledger'' of [[Quarryville, Pennsylvania]]
+** ''Downingtown Ledger'' of [[Downingtown, Pennsylvania]]
+** ''The Kennett Paper'' of [[Kennett Square, Pennsylvania]]
+** ''Avon Grove Sun'' of [[West Grove, Pennsylvania]]
+** ''Oxford Tribune'' of [[Oxford, Pennsylvania]]
+** ''Elizabethtown Chronicle'' of [[Elizabethtown, Pennsylvania]]
+** ''Donegal Ledger'' of [[Donegal, Pennsylvania]]
+** ''Chadds Ford Post'' of [[Chadds Ford, Pennsylvania]]
+** ''The Central Record'' of [[Medford, New Jersey]]
+** ''Maple Shade Progress'' of [[Maple Shade, New Jersey]]
+* Intercounty Newspapers {{WS|buckslocalnews.com}} 
+** ''The Review'' of Roxborough, Pennsylvania
+** ''The Recorder'' of [[Conshohocken, Pennsylvania]]
+** ''The Leader'' of [[Mount Airy, Pennsylvania|Mount Airy]] and West Oak Lake, Pennsylvania
+** ''The Pennington Post'' of [[Pennington, New Jersey]]
+** ''The Bristol Pilot'' of [[Bristol, Pennsylvania]]
+** ''Yardley News'' of [[Yardley, Pennsylvania]]
+** ''New Hope Gazette'' of [[New Hope, Pennsylvania]]
+** ''Doylestown Patriot'' of [[Doylestown, Pennsylvania]]
+** ''Newtown Advance'' of [[Newtown, Pennsylvania]]
+** ''The Plain Dealer'' of [[Williamstown, New Jersey]]
+** ''News Report'' of [[Sewell, New Jersey]]
+** ''Record Breeze'' of [[Berlin, New Jersey]]
+** ''Newsweekly'' of [[Moorestown, New Jersey]]
+** ''Haddon Herald'' of [[Haddonfield, New Jersey]]
+** ''New Egypt Press'' of [[New Egypt, New Jersey]]
+** ''Community News'' of [[Pemberton, New Jersey]]
+** ''Plymouth Meeting Journal'' of [[Plymouth Meeting, Pennsylvania]]
+** ''Lafayette Hill Journal'' of [[Lafayette Hill, Pennsylvania]]
+* Montgomery Newspapers {{WS|montgomerynews.com}} 
+** ''Ambler Gazette'' of [[Ambler, Pennsylvania]]
+** ''Central Bucks Life'' of [[Bucks County, Pennsylvania]]
+** ''The Colonial'' of [[Plymouth Meeting, Pennsylvania]]
+** ''Glenside News'' of [[Glenside, Pennsylvania]]
+** ''The Globe'' of [[Lower Moreland Township, Pennsylvania]]
+** ''Main Line Life'' of [[Ardmore, Pennsylvania]]
+** ''Montgomery Life'' of [[Fort Washington, Pennsylvania]]
+** ''North Penn Life'' of [[Lansdale, Pennsylvania]]
+** ''Perkasie News Herald'' of [[Perkasie, Pennsylvania]]
+** ''Public Spirit'' of [[Hatboro, Pennsylvania]]
+** ''Souderton Independent'' of [[Souderton, Pennsylvania]]
+** ''Springfield Sun'' of [[Springfield, Pennsylvania]]
+** ''Spring-Ford Reporter'' of [[Royersford, Pennsylvania]]
+** ''Times Chronicle'' of [[Jenkintown, Pennsylvania]]
+** ''Valley Item'' of [[Perkiomenville, Pennsylvania]]
+** ''Willow Grove Guide'' of [[Willow Grove, Pennsylvania]]
+* News Gleaner Publications (closed December 2008) {{WS|newsgleaner.com}} 
+** ''Life Newspapers'' of [[Philadelphia, Pennsylvania]]
+* Suburban Publications
+** ''The Suburban & Wayne Times'' {{WS|waynesuburban.com}} of [[Wayne, Pennsylvania]]
+** ''The Suburban Advertiser'' of [[Exton, Pennsylvania]]
+** ''The King of Prussia Courier'' of [[King of Prussia, Pennsylvania]]
+* Press Newspapers {{WS|countypressonline.com}} 
+** ''County Press'' of [[Newtown Square, Pennsylvania]]
+** ''Garnet Valley Press'' of [[Glen Mills, Pennsylvania]]
+** ''Haverford Press'' of [[Newtown Square, Pennsylvania]] (closed January 2009)
+** ''Hometown Press'' of [[Glen Mills, Pennsylvania]] (closed January 2009)
+** ''Media Press'' of [[Newtown Square, Pennsylvania]] (closed January 2009)
+** ''Springfield Press'' of [[Springfield, Pennsylvania]]
+* Berks-Mont Newspapers {{WS|berksmontnews.com}} 
+** ''The Boyertown Area Times'' of [[Boyertown, Pennsylvania]]
+** ''The Kutztown Area Patriot'' of [[Kutztown, Pennsylvania]]
+** ''The Hamburg Area Item'' of [[Hamburg, Pennsylvania]]
+** ''The Southern Berks News'' of [[Exeter Township, Berks County, Pennsylvania]]
+** ''The Free Press'' of [[Quakertown, Pennsylvania]]
+** ''The Saucon News'' of [[Quakertown, Pennsylvania]]
+** ''Westside Weekly'' of [[Reading, Pennsylvania]]
+
+* Magazines
+** ''Bucks Co. Town & Country Living''
+** ''Chester Co. Town & Country Living''
+** ''Montomgery Co. Town & Country Living''
+** ''Garden State Town & Country Living''
+** ''Montgomery Homes''
+** ''Philadelphia Golfer''
+** ''Parents Express''
+** ''Art Matters''
+
+{{JRC}}
+
+==References==
+<references />
+
+[[Category:Journal Register publications|*]]
diff --git a/internal/go-diff/testdata/speedtest2.txt b/internal/go-diff/testdata/speedtest2.txt
new file mode 100644
index 0000000..8f25a80
--- /dev/null
+++ b/internal/go-diff/testdata/speedtest2.txt
@@ -0,0 +1,188 @@
+This is a '''list of newspapers published by [[Journal Register Company]]'''.
+
+The company owns daily and weekly newspapers, other print media properties and newspaper-affiliated local Websites in the [[U.S.]] states of [[Connecticut]], [[Michigan]], [[New York]], [[Ohio]], [[Pennsylvania]] and [[New Jersey]], organized in six geographic "clusters":<ref>[http://www.journalregister.com/publications.html Journal Register Company: Our Publications], accessed April 21, 2010.</ref>
+
+== Capital-Saratoga ==
+Three dailies, associated weeklies and [[pennysaver]]s in greater [[Albany, New York]]; also [http://www.capitalcentral.com capitalcentral.com] and [http://www.jobsinnewyork.com JobsInNewYork.com].
+
+* ''The Oneida Daily Dispatch'' {{WS|oneidadispatch.com}} of [[Oneida, New York]]
+* ''[[The Record (Troy)|The Record]]'' {{WS|troyrecord.com}} of [[Troy, New York]]
+* ''[[The Saratogian]]'' {{WS|saratogian.com}} of [[Saratoga Springs, New York]]
+* Weeklies:
+** ''Community News'' {{WS|cnweekly.com}} weekly of [[Clifton Park, New York]]
+** ''Rome Observer'' {{WS|romeobserver.com}} of [[Rome, New York]]
+** ''WG Life '' {{WS|saratogian.com/wglife/}} of [[Wilton, New York]]
+** ''Ballston Spa Life '' {{WS|saratogian.com/bspalife}} of [[Ballston Spa, New York]]
+** ''Greenbush Life'' {{WS|troyrecord.com/greenbush}} of [[Troy, New York]]
+** ''Latham Life'' {{WS|troyrecord.com/latham}} of [[Latham, New York]]
+** ''River Life'' {{WS|troyrecord.com/river}} of [[Troy, New York]]
+
+== Connecticut ==
+Three dailies, associated weeklies and [[pennysaver]]s in the state of [[Connecticut]]; also [http://www.ctcentral.com CTcentral.com], [http://www.ctcarsandtrucks.com CTCarsAndTrucks.com] and [http://www.jobsinct.com JobsInCT.com].
+
+* ''The Middletown Press'' {{WS|middletownpress.com}} of [[Middletown, Connecticut|Middletown]]
+* ''[[New Haven Register]]'' {{WS|newhavenregister.com}} of [[New Haven, Connecticut|New Haven]]
+* ''The Register Citizen'' {{WS|registercitizen.com}} of [[Torrington, Connecticut|Torrington]]
+
+* Housatonic Publications 
+** ''The Housatonic Times'' {{WS|housatonictimes.com}} of [[New Milford, Connecticut|New Milford]]
+** ''Litchfield County Times'' {{WS|countytimes.com}} of [[Litchfield, Connecticut|Litchfield]]
+
+* Minuteman Publications
+** ''[[Fairfield Minuteman]]'' {{WS|fairfieldminuteman.com}}of [[Fairfield, Connecticut|Fairfield]]
+** ''The Westport Minuteman'' {{WS|westportminuteman.com}} of [[Westport, Connecticut|Westport]]
+
+* Shoreline Newspapers 
+** ''The Dolphin'' {{WS|dolphin-news.com}} of [[Naval Submarine Base New London]] in [[New London, Connecticut|New London]]
+** ''Shoreline Times'' {{WS|shorelinetimes.com}} of [[Guilford, Connecticut|Guilford]]
+
+* Foothills Media Group {{WS|foothillsmediagroup.com}}
+** ''Thomaston Express'' {{WS|thomastonexpress.com}} of [[Thomaston, Connecticut|Thomaston]]
+** ''Good News About Torrington'' {{WS|goodnewsabouttorrington.com}} of [[Torrington, Connecticut|Torrington]]
+** ''Granby News'' {{WS|foothillsmediagroup.com/granby}} of [[Granby, Connecticut|Granby]]
+** ''Canton News'' {{WS|foothillsmediagroup.com/canton}} of [[Canton, Connecticut|Canton]]
+** ''Avon News'' {{WS|foothillsmediagroup.com/avon}} of [[Avon, Connecticut|Avon]]
+** ''Simsbury News'' {{WS|foothillsmediagroup.com/simsbury}} of [[Simsbury, Connecticut|Simsbury]]
+** ''Litchfield News'' {{WS|foothillsmediagroup.com/litchfield}} of [[Litchfield, Connecticut|Litchfield]]
+** ''Foothills Trader'' {{WS|foothillstrader.com}} of Torrington, Bristol, Canton
+
+* Other weeklies
+** ''The Milford-Orange Bulletin'' {{WS|ctbulletin.com}} of [[Orange, Connecticut|Orange]]
+** ''The Post-Chronicle'' {{WS|ctpostchronicle.com}} of [[North Haven, Connecticut|North Haven]]
+** ''West Hartford News'' {{WS|westhartfordnews.com}} of [[West Hartford, Connecticut|West Hartford]]
+
+* Magazines
+** ''The Connecticut Bride'' {{WS|connecticutmag.com}}
+** ''Connecticut Magazine'' {{WS|theconnecticutbride.com}}
+** ''Passport Magazine'' {{WS|passport-mag.com}}
+
+== Michigan ==
+Four dailies, associated weeklies and [[pennysaver]]s in the state of [[Michigan]]; also [http://www.micentralhomes.com MIcentralhomes.com] and [http://www.micentralautos.com MIcentralautos.com]
+* ''[[Oakland Press]]'' {{WS|theoaklandpress.com}} of [[Oakland, Michigan|Oakland]]
+* ''Daily Tribune'' {{WS|dailytribune.com}} of [[Royal Oak, Michigan|Royal Oak]]
+* ''Macomb Daily'' {{WS|macombdaily.com}} of [[Mt. Clemens, Michigan|Mt. Clemens]]
+* ''[[Morning Sun]]'' {{WS|themorningsun.com}} of  [[Mount Pleasant, Michigan|Mount Pleasant]]
+
+* Heritage Newspapers {{WS|heritage.com}}
+** ''Belleville View'' {{WS|bellevilleview.com}}
+** ''Ile Camera'' {{WS|thenewsherald.com/ile_camera}}
+** ''Monroe Guardian''  {{WS|monreguardian.com}}
+** ''Ypsilanti Courier'' {{WS|ypsilanticourier.com}}
+** ''News-Herald'' {{WS|thenewsherald.com}}
+** ''Press & Guide'' {{WS|pressandguide.com}}
+** ''Chelsea Standard & Dexter Leader'' {{WS|chelseastandard.com}}
+** ''Manchester Enterprise'' {{WS|manchesterguardian.com}}
+** ''Milan News-Leader'' {{WS|milannews.com}}
+** ''Saline Reporter'' {{WS|salinereporter.com}}
+* Independent Newspapers 
+** ''Advisor'' {{WS|sourcenewspapers.com}}
+** ''Source'' {{WS|sourcenewspapers.com}}
+* Morning Star {{WS|morningstarpublishing.com}}
+** ''The Leader & Kalkaskian'' {{WS|leaderandkalkaskian.com}}
+** ''Grand Traverse Insider'' {{WS|grandtraverseinsider.com}}
+** ''Alma Reminder''
+** ''Alpena Star''
+** ''Ogemaw/Oscoda County Star''
+** ''Presque Isle Star''
+** ''St. Johns Reminder''
+
+* Voice Newspapers {{WS|voicenews.com}}
+** ''Armada Times''
+** ''Bay Voice''
+** ''Blue Water Voice''
+** ''Downriver Voice''
+** ''Macomb Township Voice''
+** ''North Macomb Voice''
+** ''Weekend Voice''
+
+== Mid-Hudson ==
+One daily, associated magazines in the [[Hudson River Valley]] of [[New York]]; also [http://www.midhudsoncentral.com MidHudsonCentral.com] and [http://www.jobsinnewyork.com JobsInNewYork.com].
+
+* ''[[Daily Freeman]]'' {{WS|dailyfreeman.com}} of [[Kingston, New York]]
+* ''Las Noticias'' {{WS|lasnoticiasny.com}} of [[Kingston, New York]]
+
+== Ohio ==
+Two dailies, associated magazines and three shared Websites, all in the state of [[Ohio]]: [http://www.allaroundcleveland.com AllAroundCleveland.com], [http://www.allaroundclevelandcars.com AllAroundClevelandCars.com] and [http://www.allaroundclevelandjobs.com AllAroundClevelandJobs.com].
+
+* ''[[The News-Herald (Ohio)|The News-Herald]]'' {{WS|news-herald.com}} of [[Willoughby, Ohio|Willoughby]]
+* ''[[The Morning Journal]]'' {{WS|morningjournal.com}} of [[Lorain, Ohio|Lorain]]
+* ''El Latino Expreso'' {{WS|lorainlatino.com}} of [[Lorain, Ohio|Lorain]]
+
+== Philadelphia area ==
+Seven dailies and associated weeklies and magazines in [[Pennsylvania]] and [[New Jersey]], and associated Websites: [http://www.allaroundphilly.com AllAroundPhilly.com], [http://www.jobsinnj.com JobsInNJ.com], [http://www.jobsinpa.com JobsInPA.com], and [http://www.phillycarsearch.com PhillyCarSearch.com].
+
+* ''[[The Daily Local News]]'' {{WS|dailylocal.com}} of [[West Chester, Pennsylvania|West Chester]]
+* ''[[Delaware County Daily and Sunday Times]] {{WS|delcotimes.com}} of Primos [[Upper Darby Township, Pennsylvania]]
+* ''[[The Mercury (Pennsylvania)|The Mercury]]'' {{WS|pottstownmercury.com}} of [[Pottstown, Pennsylvania|Pottstown]]
+* ''[[The Reporter (Lansdale)|The Reporter]]'' {{WS|thereporteronline.com}} of [[Lansdale, Pennsylvania|Lansdale]]
+* ''The Times Herald'' {{WS|timesherald.com}} of [[Norristown, Pennsylvania|Norristown]]
+* ''[[The Trentonian]]'' {{WS|trentonian.com}} of [[Trenton, New Jersey]]
+
+* Weeklies
+* ''The Phoenix'' {{WS|phoenixvillenews.com}} of [[Phoenixville, Pennsylvania]]
+** ''El Latino Expreso'' {{WS|njexpreso.com}} of [[Trenton, New Jersey]]
+** ''La Voz'' {{WS|lavozpa.com}} of [[Norristown, Pennsylvania]]
+** ''The Tri County Record'' {{WS|tricountyrecord.com}} of [[Morgantown, Pennsylvania]]
+** ''Penny Pincher'' {{WS|pennypincherpa.com}} of [[Pottstown, Pennsylvania]]
+
+* Chesapeake Publishing  {{WS|southernchestercountyweeklies.com}}
+** ''The Kennett Paper'' {{WS|kennettpaper.com}} of [[Kennett Square, Pennsylvania]]
+** ''Avon Grove Sun'' {{WS|avongrovesun.com}} of [[West Grove, Pennsylvania]]
+** ''The Central Record'' {{WS|medfordcentralrecord.com}} of [[Medford, New Jersey]]
+** ''Maple Shade Progress'' {{WS|mapleshadeprogress.com}} of [[Maple Shade, New Jersey]]
+
+* Intercounty Newspapers {{WS|buckslocalnews.com}} {{WS|southjerseylocalnews.com}} 
+** ''The Pennington Post'' {{WS|penningtonpost.com}} of [[Pennington, New Jersey]]
+** ''The Bristol Pilot'' {{WS|bristolpilot.com}} of [[Bristol, Pennsylvania]]
+** ''Yardley News'' {{WS|yardleynews.com}} of [[Yardley, Pennsylvania]]
+** ''Advance of Bucks County'' {{WS|advanceofbucks.com}} of [[Newtown, Pennsylvania]]
+** ''Record Breeze'' {{WS|recordbreeze.com}} of [[Berlin, New Jersey]]
+** ''Community News'' {{WS|sjcommunitynews.com}} of [[Pemberton, New Jersey]]
+
+* Montgomery Newspapers {{WS|montgomerynews.com}} 
+** ''Ambler Gazette'' {{WS|amblergazette.com}} of [[Ambler, Pennsylvania]]
+** ''The Colonial'' {{WS|colonialnews.com}} of [[Plymouth Meeting, Pennsylvania]]
+** ''Glenside News'' {{WS|glensidenews.com}} of [[Glenside, Pennsylvania]]
+** ''The Globe'' {{WS|globenewspaper.com}} of [[Lower Moreland Township, Pennsylvania]]
+** ''Montgomery Life'' {{WS|montgomerylife.com}} of [[Fort Washington, Pennsylvania]]
+** ''North Penn Life'' {{WS|northpennlife.com}} of [[Lansdale, Pennsylvania]]
+** ''Perkasie News Herald'' {{WS|perkasienewsherald.com}} of [[Perkasie, Pennsylvania]]
+** ''Public Spirit'' {{WS|thepublicspirit.com}} of [[Hatboro, Pennsylvania]]
+** ''Souderton Independent'' {{WS|soudertonindependent.com}} of [[Souderton, Pennsylvania]]
+** ''Springfield Sun'' {{WS|springfieldsun.com}} of [[Springfield, Pennsylvania]]
+** ''Spring-Ford Reporter'' {{WS|springfordreporter.com}} of [[Royersford, Pennsylvania]]
+** ''Times Chronicle'' {{WS|thetimeschronicle.com}} of [[Jenkintown, Pennsylvania]]
+** ''Valley Item'' {{WS|valleyitem.com}} of [[Perkiomenville, Pennsylvania]]
+** ''Willow Grove Guide'' {{WS|willowgroveguide.com}} of [[Willow Grove, Pennsylvania]]
+** ''The Review'' {{WS|roxreview.com}} of [[Roxborough, Philadelphia, Pennsylvania]]
+
+* Main Line Media News {{WS|mainlinemedianews.com}}
+** ''Main Line Times'' {{WS|mainlinetimes.com}} of [[Ardmore, Pennsylvania]]
+** ''Main Line Life'' {{WS|mainlinelife.com}} of [[Ardmore, Pennsylvania]]
+** ''The King of Prussia Courier'' {{WS|kingofprussiacourier.com}} of [[King of Prussia, Pennsylvania]]
+
+* Delaware County News Network {{WS|delconewsnetwork.com}} 
+** ''News of Delaware County'' {{WS|newsofdelawarecounty.com}} of [[Havertown, Pennsylvania]]
+** ''County Press'' {{WS|countypressonline.com}} of [[Newtown Square, Pennsylvania]]
+** ''Garnet Valley Press'' {{WS|countypressonline.com}} of [[Glen Mills, Pennsylvania]]
+** ''Springfield Press'' {{WS|countypressonline.com}} of [[Springfield, Pennsylvania]]
+** ''Town Talk'' {{WS|towntalknews.com}} of [[Ridley, Pennsylvania]]
+
+* Berks-Mont Newspapers {{WS|berksmontnews.com}} 
+** ''The Boyertown Area Times'' {{WS|berksmontnews.com/boyertown_area_times}} of [[Boyertown, Pennsylvania]]
+** ''The Kutztown Area Patriot'' {{WS|berksmontnews.com/kutztown_area_patriot}} of [[Kutztown, Pennsylvania]]
+** ''The Hamburg Area Item'' {{WS|berksmontnews.com/hamburg_area_item}} of [[Hamburg, Pennsylvania]]
+** ''The Southern Berks News'' {{WS|berksmontnews.com/southern_berks_news}} of [[Exeter Township, Berks County, Pennsylvania]]
+** ''Community Connection'' {{WS|berksmontnews.com/community_connection}} of [[Boyertown, Pennsylvania]]
+
+* Magazines
+** ''Bucks Co. Town & Country Living'' {{WS|buckscountymagazine.com}} 
+** ''Parents Express'' {{WS|parents-express.com}} 
+** ''Real Men, Rednecks'' {{WS|realmenredneck.com}} 
+
+{{JRC}}
+
+==References==
+<references />
+
+[[Category:Journal Register publications|*]]
diff --git a/messages.go b/messages.go
index bf9e640..1c9e2a4 100644
--- a/messages.go
+++ b/messages.go
@@ -1,23 +1,24 @@
 package assertions
 
 const ( // equality
-	shouldHaveBeenEqual             = "Expected: '%v'\nActual:   '%v'\n(Should be equal)"
-	shouldNotHaveBeenEqual          = "Expected     '%v'\nto NOT equal '%v'\n(but it did)!"
-	shouldHaveBeenEqualTypeMismatch = "Expected: '%v' (%T)\nActual:   '%v' (%T)\n(Should be equal, type mismatch)"
-	shouldHaveBeenAlmostEqual       = "Expected '%v' to almost equal '%v' (but it didn't)!"
-	shouldHaveNotBeenAlmostEqual    = "Expected '%v' to NOT almost equal '%v' (but it did)!"
-	shouldHaveResembled             = "Expected: '%s'\nActual:   '%s'\n(Should resemble)!"
-	shouldNotHaveResembled          = "Expected        '%#v'\nto NOT resemble '%#v'\n(but it did)!"
-	shouldBePointers                = "Both arguments should be pointers "
-	shouldHaveBeenNonNilPointer     = shouldBePointers + "(the %s was %s)!"
-	shouldHavePointedTo             = "Expected '%+v' (address: '%v') and '%+v' (address: '%v') to be the same address (but their weren't)!"
-	shouldNotHavePointedTo          = "Expected '%+v' and '%+v' to be different references (but they matched: '%v')!"
-	shouldHaveBeenNil               = "Expected: nil\nActual:   '%v'"
-	shouldNotHaveBeenNil            = "Expected '%+v' to NOT be nil (but it was)!"
-	shouldHaveBeenTrue              = "Expected: true\nActual:   %v"
-	shouldHaveBeenFalse             = "Expected: false\nActual:   %v"
-	shouldHaveBeenZeroValue         = "'%+v' should have been the zero value" //"Expected: (zero value)\nActual:   %v"
-	shouldNotHaveBeenZeroValue      = "'%+v' should NOT have been the zero value"
+	shouldHaveBeenEqual              = "Expected: '%v'\nActual:   '%v'\n(Should be equal)"
+	shouldHaveBeenEqualNoResemblance = "Both the actual and expected values render equally ('%s') and their types are the same. Try using ShouldResemble instead."
+	shouldNotHaveBeenEqual           = "Expected     '%v'\nto NOT equal '%v'\n(but it did)!"
+	shouldHaveBeenEqualTypeMismatch  = "Expected: '%v' (%T)\nActual:   '%v' (%T)\n(Should be equal, type mismatch)"
+	shouldHaveBeenAlmostEqual        = "Expected '%v' to almost equal '%v' (but it didn't)!"
+	shouldHaveNotBeenAlmostEqual     = "Expected '%v' to NOT almost equal '%v' (but it did)!"
+	shouldHaveResembled              = "Expected: '%s'\nActual:   '%s'\n(Should resemble)!"
+	shouldNotHaveResembled           = "Expected        '%#v'\nto NOT resemble '%#v'\n(but it did)!"
+	shouldBePointers                 = "Both arguments should be pointers "
+	shouldHaveBeenNonNilPointer      = shouldBePointers + "(the %s was %s)!"
+	shouldHavePointedTo              = "Expected '%+v' (address: '%v') and '%+v' (address: '%v') to be the same address (but their weren't)!"
+	shouldNotHavePointedTo           = "Expected '%+v' and '%+v' to be different references (but they matched: '%v')!"
+	shouldHaveBeenNil                = "Expected: nil\nActual:   '%v'"
+	shouldNotHaveBeenNil             = "Expected '%+v' to NOT be nil (but it was)!"
+	shouldHaveBeenTrue               = "Expected: true\nActual:   %v"
+	shouldHaveBeenFalse              = "Expected: false\nActual:   %v"
+	shouldHaveBeenZeroValue          = "'%+v' should have been the zero value" //"Expected: (zero value)\nActual:   %v"
+	shouldNotHaveBeenZeroValue       = "'%+v' should NOT have been the zero value"
 )
 
 const ( // quantity comparisons
@@ -93,6 +94,6 @@
 	shouldNotHaveHappenedOnOrBetween = "Expected '%v' to NOT happen on or between '%v' and '%v' (but it did)!"
 
 	// format params: incorrect-index, previous-index, previous-time, incorrect-index, incorrect-time
-	shouldHaveBeenChronological = "The 'Time' at index [%d] should have happened after the previous one (but it didn't!):\n  [%d]: %s\n  [%d]: %s (see, it happened before!)"
+	shouldHaveBeenChronological    = "The 'Time' at index [%d] should have happened after the previous one (but it didn't!):\n  [%d]: %s\n  [%d]: %s (see, it happened before!)"
 	shouldNotHaveBeenchronological = "The provided times should NOT be chronological, but they were."
 )
diff --git a/utilities_for_test.go b/utilities_for_test.go
index e68f105..43ea19d 100644
--- a/utilities_for_test.go
+++ b/utilities_for_test.go
@@ -50,6 +50,9 @@
 	for strings.Contains(message, "  ") {
 		message = strings.Replace(message, "  ", " ", -1)
 	}
+	message = strings.Replace(message, "\x1b[32m", "", -1)
+	message = strings.Replace(message, "\x1b[31m", "", -1)
+	message = strings.Replace(message, "\x1b[0m", "", -1)
 	return message
 }