from test.test_support import verbose, findfile, is_resource_enabled, TestFailed
import os, glob, random
from tokenize import (tokenize, generate_tokens, untokenize,
                      NUMBER, NAME, OP, STRING)

if verbose:
    print 'starting...'
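
# First, a sanity check on the sample file: tokenize() pulls source lines
# via f.readline and, with its default tokeneater, prints each token, which
# the regression harness compares against the expected-output file.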
f = file(findfile('tokenize_tests' + os.extsep + 'txt'))
tokenize(f.readline)
f.close()


###### Test roundtrip for untokenize ##########################
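
# untokenize() guarantees only that its output tokenizes back to the same
# (type, string) pairs, not that the text matches character for character,
# so the roundtrip check compares token lists rather than source text.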
def test_roundtrip(f):
    ## print 'Testing:', f
    f = file(f)
    try:
        fulltok = list(generate_tokens(f.readline))
    finally:
        f.close()

    t1 = [tok[:2] for tok in fulltok]             # keep (type, string) only
    newtext = untokenize(t1)
    readline = iter(newtext.splitlines(1)).next   # keepends=1; .next acts as readline
    t2 = [tok[:2] for tok in generate_tokens(readline)]
    assert t1 == t2


f = findfile('tokenize_tests' + os.extsep + 'txt')
test_roundtrip(f)

testdir = os.path.dirname(f) or os.curdir
testfiles = glob.glob(testdir + os.sep + 'test*.py')
if not is_resource_enabled('compiler'):
    # Tokenizing every test file is slow, so without the 'compiler'
    # resource settle for a random sample of ten.
    testfiles = random.sample(testfiles, 10)

for f in testfiles:
    test_roundtrip(f)


###### Test detection of IndentationError #####################

from cStringIO import StringIO
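
# In the sample below, 'baz' is dedented to a column that matches no
# enclosing indentation level, so the tokenizer must raise IndentationError.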
sampleBadText = """
def foo():
    bar
  baz
"""

try:
    for tok in generate_tokens(StringIO(sampleBadText).readline):
        pass
except IndentationError:
    pass
else:
    raise TestFailed("Did not detect IndentationError")


###### Test example in the docs ###############################

from decimal import Decimal
from cStringIO import StringIO
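
# decistmt() is the worked example from the tokenize documentation: it
# rewrites every float literal as a Decimal constructor call so that the
# statement computes exactly; the doctest both documents and verifies it.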
def decistmt(s):
    """Substitute Decimals for floats in a string of statements.

    >>> from decimal import Decimal
    >>> s = 'print +21.3e-5*-.1234/81.7'
    >>> decistmt(s)
    "print +Decimal ('21.3e-5')*-Decimal ('.1234')/Decimal ('81.7')"

    The exponent format of the first result below comes from the platform
    C library, so non-Windows platforms may print e-07 instead of e-007.

    >>> exec(s)
    -3.21716034272e-007
    >>> exec(decistmt(s))
    -3.217160342717258261933904529E-7

    """
    result = []
    g = generate_tokens(StringIO(s).readline)   # tokenize the string
    for toknum, tokval, _, _, _ in g:
        if toknum == NUMBER and '.' in tokval:  # replace NUMBER tokens
            result.extend([
                (NAME, 'Decimal'),
                (OP, '('),
                (STRING, repr(tokval)),
                (OP, ')')
            ])
        else:
            result.append((toknum, tokval))
    return untokenize(result)
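
# testmod() runs the doctest embedded in decistmt's docstring; it stays
# silent unless one of the checks fails.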
import doctest
doctest.testmod()


if verbose:
    print 'finished'