Mirror of https://github.com/python/cpython.git, synced 2025-12-08 06:10:17 +00:00

Merge c3ecb3b244 into 7099af8f5e
This commit is contained in: commit 000f00e833

3 changed files with 19 additions and 10 deletions
@@ -7,6 +7,7 @@
 import email.message
 import io
 import unittest
+import unittest.mock
 from test import support
 from test.support import os_helper
 from test.support import socket_helper
@@ -86,9 +87,15 @@ def fakehttp(self, fakedata, mock_close=False):
         fake_http_class = fakehttp(fakedata, mock_close=mock_close)
         self._connection_class = http.client.HTTPConnection
         http.client.HTTPConnection = fake_http_class
+        # Disable proxies during the test
+        self.getproxies = unittest.mock.patch.object(urllib.request, 'getproxies', return_value={})
+        self.getproxies.start()
+        # Clear cached opener
+        urllib.request.install_opener(None)

     def unfakehttp(self):
         http.client.HTTPConnection = self._connection_class
+        self.getproxies.stop()


 class urlopen_FileTests(unittest.TestCase):
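Note: the hunk above patches urllib.request.getproxies for the duration of each test and resets the cached global opener. A minimal, self-contained sketch of that setUp/tearDown pattern follows (assumptions: ProxyFreeTestCase and its test method are illustrative names, not code from this commit):

import unittest
import unittest.mock
import urllib.request

class ProxyFreeTestCase(unittest.TestCase):
    def setUp(self):
        # Replace urllib.request.getproxies so it reports no proxies at all.
        self.getproxies = unittest.mock.patch.object(
            urllib.request, 'getproxies', return_value={})
        self.getproxies.start()
        # Drop any previously built global opener so the next urlopen() call
        # rebuilds its handlers while the patch is active.
        urllib.request.install_opener(None)

    def tearDown(self):
        self.getproxies.stop()

    def test_no_proxies_reported(self):
        self.assertEqual(urllib.request.getproxies(), {})

if __name__ == '__main__':
    unittest.main()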
@@ -1380,8 +1380,11 @@ def request(conn, method, url, *pos, **kw):
             # Change response for subsequent connection
             conn.__class__.fakedata = b"HTTP/1.1 200 OK\r\n\r\nHello!"
         http.client.HTTPConnection.request = request
-        fp = urllib.request.urlopen("http://python.org/path")
-        self.assertEqual(fp.geturl(), "http://python.org/path?query")
+        with mock.patch('urllib.request.getproxies', return_value={}):
+            # Disable proxy for predictable URL handling
+            urllib.request.install_opener(None)
+            fp = urllib.request.urlopen("http://python.org/path")
+            self.assertEqual(fp.geturl(), "http://python.org/path?query")

     def test_redirect_encoding(self):
         # Some characters in the redirect target may need special handling,
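The hunk above uses the context-manager form of mock.patch rather than start()/stop(). A short sketch of why install_opener(None) is also needed (assumption: proxy_free_urlopen is a hypothetical helper and the commented-out URL is a placeholder): urlopen() caches a module-level opener after its first call, so resetting it forces the opener, including its proxy handling, to be rebuilt while getproxies is patched.

import urllib.request
from unittest import mock

def proxy_free_urlopen(url):
    # Hypothetical helper: perform one urlopen() call with proxies disabled.
    with mock.patch('urllib.request.getproxies', return_value={}):
        # urlopen() reuses a cached global opener once one exists; resetting
        # it makes the next call rebuild the opener under the active patch.
        urllib.request.install_opener(None)
        return urllib.request.urlopen(url)

# Example (requires network access):
# resp = proxy_free_urlopen("http://www.example.com/")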
@@ -1399,7 +1402,8 @@ def http_open(self, req):
                 )
                 return result
         handler = Handler()
-        opener = urllib.request.build_opener(handler)
+        with mock.patch('urllib.request.getproxies', return_value={}):
+            opener = urllib.request.build_opener(handler)
         tests = (
             (b'/p\xC3\xA5-dansk/', b'/p%C3%A5-dansk/'),
             (b'/spaced%20path/', b'/spaced%20path/'),
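For context on why patching getproxies around build_opener() is enough here: build_opener() includes a ProxyHandler among its default handlers, and ProxyHandler() with no argument consults urllib.request.getproxies(). A hedged, illustrative sketch (not part of this commit):

import unittest.mock
import urllib.request

with unittest.mock.patch('urllib.request.getproxies', return_value={}):
    opener = urllib.request.build_opener()
    proxy_handlers = [h for h in opener.handlers
                      if isinstance(h, urllib.request.ProxyHandler)]
    # With getproxies() patched to return {}, the default ProxyHandler is
    # configured with no proxies at all.
    print(proxy_handlers[0].proxies)  # {}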
@@ -6,6 +6,7 @@
 import http.server
 import threading
 import unittest
+import unittest.mock
 import hashlib

 from test import support
@@ -331,13 +332,10 @@ class ProxyAuthTests(unittest.TestCase):

     def setUp(self):
         super(ProxyAuthTests, self).setUp()
-        # Ignore proxy bypass settings in the environment.
-        def restore_environ(old_environ):
-            os.environ.clear()
-            os.environ.update(old_environ)
-        self.addCleanup(restore_environ, os.environ.copy())
-        os.environ['NO_PROXY'] = ''
-        os.environ['no_proxy'] = ''
+        # Patch proxy_bypass temporarily to ignore proxy bypass settings.
+        proxy_bypass = unittest.mock.patch('urllib.request.proxy_bypass', return_value=False)
+        proxy_bypass.start()
+        self.addCleanup(proxy_bypass.stop)

         self.digest_auth_handler = DigestAuthHandler()
         self.digest_auth_handler.set_users({self.USER: self.PASSWD})
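The replacement above swaps environment-variable juggling for a mock of urllib.request.proxy_bypass, undone via addCleanup(). ProxyHandler.proxy_open() checks proxy_bypass() before routing a request through a proxy, so forcing it to return False keeps requests going through the configured proxy regardless of host settings. A minimal sketch of the start()/addCleanup(stop) idiom (assumption: ProxyBypassPatchedTest is an illustrative test case, not code from this commit):

import unittest
import unittest.mock
import urllib.request

class ProxyBypassPatchedTest(unittest.TestCase):
    def setUp(self):
        patcher = unittest.mock.patch('urllib.request.proxy_bypass',
                                      return_value=False)
        patcher.start()
        # addCleanup() runs even if setUp() or the test fails partway through.
        self.addCleanup(patcher.stop)

    def test_proxy_is_never_bypassed(self):
        self.assertFalse(urllib.request.proxy_bypass('localhost'))

if __name__ == '__main__':
    unittest.main()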