#! /usr/bin/env python

"""
This is a tool for exploring token lists generated by
``tokenize.generate_tokens()`` from test data in
docutils/test/test_readers/test_python/test_parser or stdin.

Usage::

    showtok <key> <index>

    showtok < <module.py>

Where ``<key>`` is the key to the ``totest`` dictionary, and ``<index>`` is
the index of the list ``totest[key]``.  If no arguments are given, stdin is
used for input.
"""

import io
import sys
import tokenize
import pprint
from token import tok_name

import test_parser


def name_tokens(tokens):
    """Replace each token in ``tokens``, in place, with a ``(name, token)``
    pair, where ``name`` is the token's symbolic name from ``token.tok_name``."""
    for i in range(len(tokens)):
        tup = tokens[i]
        tokens[i] = (tok_name[tup[0]], tup)


if len(sys.argv) > 1:
    key, caseno = sys.argv[1:]
    print('totest["%s"][%s][0]:\n' % (key, caseno))
    input_text = test_parser.totest[key][int(caseno)][0]
else:
    input_text = sys.stdin.read()
print(input_text)
# generate_tokens() expects a readline-style callable that returns one
# line of input (with its line ending) per call.
tokens = list(tokenize.generate_tokens(io.StringIO(input_text).readline))
name_tokens(tokens)
pprint.pprint(tokens)
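
# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the core technique the tool uses:
# tokenize a string and pair each token with its symbolic name.  This is
# illustrative only and is never called by the tool itself; ``_demo`` and
# ``sample`` are hypothetical names, and the sample input is made up for
# the example.


def _demo():
    # A one-line hypothetical input; any Python source string would do.
    sample = "x = 1\n"
    toks = list(tokenize.generate_tokens(io.StringIO(sample).readline))
    # Each entry becomes a (name, token) pair, e.g. ('NAME', <token for 'x'>),
    # ('OP', <token for '='>), ('NUMBER', <token for '1'>), ...
    pprint.pprint([(tok_name[t[0]], t) for t in toks])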