{
  "id": "02749e355da81241ec152d89d9ec6a7ee9921ff3",
  "repo": "external/github.com/python/cpython",
  "revision": "96fff35325e519cc76ffacf22e57e4c393d4446f",
  "path": "Parser/tokenizer.h"
}
