{
  "id": "5bf7e84f26163489d923bbcef0a22a1954b2b426",
  "repo": "external/github.com/python/cpython",
  "revision": "c679227e31245b0e8dec74a1f7cc77710541d985",
  "path": "Parser/tokenizer.c"
}
