mirror of https://github.com/python/cpython.git, synced 2025-10-31 05:31:20 +00:00
74 lines · 1.9 KiB · Python

|   | """Generate Lib/keyword.py from the Grammar and Tokens files using pgen""" | ||
|  | 
 | ||
|  | import argparse | ||
|  | 
 | ||
|  | from .build import build_parser, generate_token_definitions | ||
|  | from .c_generator import CParserGenerator | ||
|  | 
 | ||
TEMPLATE = r'''
"""Keywords (from "Grammar/python.gram")

This file is automatically generated; please don't muck it up!

To update the symbols in this file, 'cd' to the top directory of
the python source tree and run:

    PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
        Grammar/python.gram \
        Grammar/Tokens \
        Lib/keyword.py

Alternatively, you can run 'make regen-keyword'.
"""

__all__ = ["iskeyword", "kwlist"]

kwlist = [
    {keywords}
]

iskeyword = frozenset(kwlist).__contains__
'''.lstrip()
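
# Illustrative note, not part of the original file: TEMPLATE is filled in by
# main() below.  For example, with
#     keywords = "'False',\n    'None',\n    'True'"
# TEMPLATE.format(keywords=keywords) yields a module whose body contains:
#     kwlist = [
#         'False',
#         'None',
#         'True'
#     ]
#     iskeyword = frozenset(kwlist).__contains__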

EXTRA_KEYWORDS = ["async", "await"]
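# Editor's assumption, not in the original file: "async" and "await" are added
# by hand, presumably because the grammar of this era matched them through the
# dedicated ASYNC/AWAIT tokens rather than as quoted keyword strings, so they
# never reach the keyword cache that main() reads below.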


def main():
    parser = argparse.ArgumentParser(
        description="Generate the Lib/keyword.py file from the grammar."
    )
    parser.add_argument(
        "grammar", type=str, help="The file with the grammar definition in PEG format"
    )
    parser.add_argument(
        "tokens_file",
        type=argparse.FileType("r"),
        help="The file with the token definitions"
    )
    parser.add_argument(
        "keyword_file",
        type=argparse.FileType("w"),
        help="The path to write the keyword definitions",
    )
    args = parser.parse_args()

    grammar, _, _ = build_parser(args.grammar)
    with args.tokens_file as tok_file:
        all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
    gen: ParserGenerator = CParserGenerator(
        grammar, all_tokens, exact_tok, non_exact_tok, file=None
    )
    gen.collect_todo()
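    # Editor's assumption: collect_todo() walks the grammar rules, and as a side
    # effect the call-maker visitor records every keyword string it encounters in
    # keyword_cache, which is read out just below.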

    with args.keyword_file as thefile:
        all_keywords = sorted(
            list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS
        )

        keywords = ",\n    ".join(map(repr, all_keywords))
        thefile.write(TEMPLATE.format(keywords=keywords))


if __name__ == "__main__":
    main()
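
A quick, illustrative way to sanity-check the regenerated module (this sketch assumes the command from the template docstring has been run, so that the freshly written Lib/keyword.py is the "keyword" module being imported):

    import keyword

    # Every generated entry should be reported as a keyword ...
    assert all(keyword.iskeyword(name) for name in keyword.kwlist)
    # ... and an ordinary identifier should not.
    assert not keyword.iskeyword("spam")
    print(f"{len(keyword.kwlist)} keywords")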