Raise LexerError on unterminated block comment

Previously `read_comment` would return `None` in this case, which in turn
caused `tokenize` to fail when it called `startswith` on the result.
diff --git a/javalang/tokenizer.py b/javalang/tokenizer.py
index 25e5b3c..d725292 100644
--- a/javalang/tokenizer.py
+++ b/javalang/tokenizer.py
@@ -258,8 +258,10 @@
         elif accept_eof:
             i = self.length
         else:
+            self.error('Unterminated block comment')
+            partial_comment = self.data[self.i:]
             self.i = self.length
-            return
+            return partial_comment
 
         comment = self.data[self.i:i]
         start_of_line = self.data.rfind('\n', self.i, i)