Merged revision 79605 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k

........
  r79605 | antoine.pitrou | 2010-04-02 19:12:12 +0200 (Fri, 02 Apr 2010) | 3 lines

  Furniture is not very reliable these days (buildbot failures).
........
This commit is contained in:

    parent aeac26b9ea
    commit 05609eff8a

1 changed file with 8 additions and 6 deletions
@@ -1,6 +1,7 @@
 import io
 import unittest
 import urllib.robotparser
+from urllib.error import URLError
 from test import support
 
 class RobotTestCase(unittest.TestCase):
@@ -208,18 +209,19 @@ def RobotTest(index, robots_txt, good_urls, bad_urls,
 class NetworkTestCase(unittest.TestCase):
 
     def testPasswordProtectedSite(self):
-        if not support.is_resource_enabled('network'):
-            return
-        # whole site is password-protected.
+        support.requires('network')
+        # XXX it depends on an external resource which could be unavailable
         url = 'http://mueblesmoraleda.com'
         parser = urllib.robotparser.RobotFileParser()
         parser.set_url(url)
-        parser.read()
+        try:
+            parser.read()
+        except URLError:
+            self.skipTest('%s is unavailable' % url)
         self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
 
     def testPythonOrg(self):
-        if not support.is_resource_enabled('network'):
-            return
+        support.requires('network')
         parser = urllib.robotparser.RobotFileParser(
             "http://www.python.org/robots.txt")
         parser.read()
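The change makes the robotparser network test skip, rather than fail on the buildbots, when the external site (a furniture store, hence the commit message) is unreachable: the resource check is moved to support.requires('network'), and a URLError from parser.read() is turned into a skip. Below is a minimal standalone sketch of that pattern, reusing the test class, URL, and helper calls from the hunk above; it is an illustration of the technique, not the exact contents of the changed test file.

# Standalone sketch of the skip-on-URLError pattern from the diff above.
# Assumes a CPython installation that ships the 'test' package.
import unittest
import urllib.robotparser
from urllib.error import URLError

from test import support


class NetworkTestCase(unittest.TestCase):

    def testPasswordProtectedSite(self):
        # Bail out early unless the 'network' test resource is enabled.
        support.requires('network')
        url = 'http://mueblesmoraleda.com'  # external, possibly unreachable site
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        try:
            parser.read()
        except URLError:
            # The site being down is not a bug in robotparser, so report
            # a skip instead of a failure.
            self.skipTest('%s is unavailable' % url)
        self.assertEqual(parser.can_fetch("*", url + "/robots.txt"), False)


if __name__ == '__main__':
    unittest.main()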
Florent Xicluna