Mirror of https://github.com/python/cpython.git, synced 2026-01-06 15:32:22 +00:00
Revert my commit 3555cf6f9c98: "Issue #8796: codecs.open() calls the builtin
open() function instead of using StreamReaderWriter. Deprecate StreamReader, StreamWriter, StreamReaderWriter, StreamRecoder and EncodedFile() of the codec module. Use the builtin open() function or io.TextIOWrapper instead." "It has not been approved !" wrote Marc-Andre Lemburg.
parent 4f2dab5c33
commit 0501070669
4 changed files with 59 additions and 148 deletions
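For context, a minimal sketch (not part of this commit, file name invented) of the difference the revert preserves: codecs.open() keeps returning a StreamReaderWriter built on the codec's stream classes, while the builtin open() / io.open() returns an io.TextIOWrapper.

import codecs
import io

with open("demo.txt", "w", encoding="utf-8") as f:   # throwaway example file
    f.write("spam")

with codecs.open("demo.txt", encoding="utf-8") as f:
    print(type(f))    # <class 'codecs.StreamReaderWriter'>
    print(f.read())   # spam

with io.open("demo.txt", encoding="utf-8") as f:
    print(type(f))    # <class '_io.TextIOWrapper'>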
@@ -85,9 +85,6 @@ It defines the following functions:
    In case a search function cannot find a given encoding, it should return
    ``None``.
 
-   .. deprecated:: 3.3
-      *streamreader* and *streamwriter* attributes are now deprecated.
-
 
 .. function:: lookup(encoding)
 
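As an aside, a small sketch (not from the patch) of the lookup() machinery this section documents; the streamreader and streamwriter attributes are the ones whose deprecation notice the revert removes.

import codecs

info = codecs.lookup("utf-8")
print(info.name)           # utf-8
print(info.streamreader)   # <class 'encodings.utf_8.StreamReader'>
print(info.streamwriter)   # <class 'encodings.utf_8.StreamWriter'>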
@@ -142,8 +139,6 @@ functions which use :func:`lookup` for the codec lookup:
    Raises a :exc:`LookupError` in case the encoding cannot be found.
 
-   .. deprecated:: 3.3
-
 
 .. function:: getwriter(encoding)
 
@@ -152,8 +147,6 @@ functions which use :func:`lookup` for the codec lookup:
    Raises a :exc:`LookupError` in case the encoding cannot be found.
 
-   .. deprecated:: 3.3
-
 
 .. function:: register_error(name, error_handler)
 
@@ -222,11 +215,6 @@ utility functions:
    providing transparent encoding/decoding. The default file mode is ``'r'``
    meaning to open the file in read mode.
 
-   .. note::
-
-      This function is kept for backward compatibility with Python 2, the
-      builtin :func:`open` function should be used instead.
-
    .. note::
 
       The wrapped version's methods will accept and return strings only. Bytes
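A short usage sketch (illustrative only, file name invented) of the behaviour described in the note that survives the revert: the object returned by codecs.open() accepts and returns str, while the underlying file holds the encoded bytes.

import codecs

with codecs.open("demo.txt", "w", encoding="utf-16") as f:
    f.write("häm")                    # text in, UTF-16 bytes on disk

with codecs.open("demo.txt", "r", encoding="utf-16") as f:
    print(f.read())                   # häm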
@@ -263,8 +251,6 @@ utility functions:
    ``'strict'``, which causes :exc:`ValueError` to be raised in case an encoding
    error occurs.
 
-   .. deprecated:: 3.3
-
 
 .. function:: iterencode(iterator, encoding, errors='strict', **kwargs)
 
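For the surrounding context (errors defaulting to 'strict', and the iterencode() entry that follows), a hedged sketch of how the incremental helpers are typically driven:

import codecs

chunks = ["sp", "am", " häm"]
encoded = list(codecs.iterencode(chunks, "utf-8"))
print(b"".join(encoded).decode("utf-8"))    # spam häm
# With errors='strict', undecodable input raises, e.g.:
# list(codecs.iterdecode([b"\xff"], "utf-8"))  -> UnicodeDecodeError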
@@ -577,9 +563,6 @@ The :class:`StreamWriter` class is a subclass of :class:`Codec` and defines the
 following methods which every stream writer must define in order to be
 compatible with the Python codec registry.
 
-.. deprecated:: 3.3
-   Use the builtin the :class:`io.TextIOWrapper` class.
-
 
 .. class:: StreamWriter(stream[, errors])
 
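A minimal sketch (not part of the diff) of what a StreamWriter obtained via codecs.getwriter() does, which is the class this hunk un-deprecates:

import codecs
import io

raw = io.BytesIO()
writer = codecs.getwriter("utf-8")(raw)
writer.write("spam häm")
print(raw.getvalue())    # b'spam h\xc3\xa4m'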
@@ -645,9 +628,6 @@ The :class:`StreamReader` class is a subclass of :class:`Codec` and defines the
 following methods which every stream reader must define in order to be
 compatible with the Python codec registry.
 
-.. deprecated:: 3.3
-   Use the builtin the :class:`io.TextIOWrapper` class.
-
 
 .. class:: StreamReader(stream[, errors])
 
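The matching read side, again as an illustrative sketch rather than part of the patch:

import codecs
import io

raw = io.BytesIO("spam häm".encode("utf-8"))
reader = codecs.getreader("utf-8")(raw)
print(reader.read())     # spam häm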
@@ -748,9 +728,6 @@ and write modes.
 The design is such that one can use the factory functions returned by the
 :func:`lookup` function to construct the instance.
 
-.. deprecated:: 3.3
-   Use the :class:`io.TextIOWrapper` class.
-
 
 .. class:: StreamReaderWriter(stream, Reader, Writer, errors)
 
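A sketch of the construction path this paragraph describes, building a StreamReaderWriter from the factories returned by lookup() (illustrative, not from the commit):

import codecs
import io

info = codecs.lookup("utf-8")
srw = codecs.StreamReaderWriter(io.BytesIO(), info.streamreader,
                                info.streamwriter, "strict")
srw.write("spam")
srw.seek(0)
print(srw.read())        # spam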
@@ -775,8 +752,6 @@ which is sometimes useful when dealing with different encoding environments.
 The design is such that one can use the factory functions returned by the
 :func:`lookup` function to construct the instance.
 
-.. deprecated:: 3.3
-
 
 .. class:: StreamRecoder(stream, encode, decode, Reader, Writer, errors)
 
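StreamRecoder is usually reached through the EncodedFile() shortcut; a small sketch (mirroring a test further down, not part of the diff) of re-encoding between a data encoding and a file encoding:

import codecs
import io

raw = io.BytesIO("ü".encode("utf-8"))          # file side stores UTF-8
ef = codecs.EncodedFile(raw, data_encoding="latin-1", file_encoding="utf-8")
print(ef.read())                               # b'\xfc'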
@@ -345,8 +345,6 @@ def __init__(self, stream, errors='strict'):
             The set of allowed parameter values can be extended via
             register_error.
         """
-        import warnings
-        warnings.warn('use io.TextIOWrapper', DeprecationWarning, stacklevel=2)
         self.stream = stream
         self.errors = errors
 
@@ -418,8 +416,6 @@ def __init__(self, stream, errors='strict'):
             The set of allowed parameter values can be extended via
             register_error.
         """
-        import warnings
-        warnings.warn('use io.TextIOWrapper', DeprecationWarning, stacklevel=2)
         self.stream = stream
         self.errors = errors
         self.bytebuffer = b""
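The two hunks above drop the DeprecationWarning that the reverted change emitted from these constructors; while it existed, callers had to silence it explicitly, as in this sketch (the same pattern the test-suite hunks below remove):

import codecs
import io
import warnings

with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    reader = codecs.getreader("utf-8")(io.BytesIO(b"spam"))
print(reader.read())     # spam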
@@ -850,7 +846,7 @@ def __exit__(self, type, value, tb):
 
 ### Shortcuts
 
-def open(filename, mode='r', encoding=None, errors=None, buffering=1):
+def open(filename, mode='rb', encoding=None, errors='strict', buffering=1):
 
     """ Open an encoded file using the given mode and return
         a wrapped version providing transparent encoding/decoding.
@@ -881,13 +877,18 @@ def open(filename, mode='r', encoding=None, errors=None, buffering=1):
         parameter.
 
     """
-    if encoding is not None:
-        return builtins.open(filename, mode, buffering,
-                             encoding, errors, newline='')
-    else:
-        if 'b' not in mode:
-            mode = mode + 'b'
-        return builtins.open(filename, mode, buffering, encoding, errors)
+    if encoding is not None and \
+       'b' not in mode:
+        # Force opening of the file in binary mode
+        mode = mode + 'b'
+    file = builtins.open(filename, mode, buffering)
+    if encoding is None:
+        return file
+    info = lookup(encoding)
+    srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
+    # Add attributes to simplify introspection
+    srw.encoding = encoding
+    return srw
 
 def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
 
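With the original implementation restored, codecs.open() again forces binary mode, wraps the file in a StreamReaderWriter and exposes the introspection attribute set above; a hedged usage sketch (file name invented):

import codecs

with codecs.open("demo.txt", "w", encoding="ascii", errors="replace") as f:
    print(f.encoding)    # ascii
    f.write("häm")       # characters that cannot be encoded are replaced

with codecs.open("demo.txt", encoding="ascii") as f:
    print(f.read())      # h?m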
@@ -1,10 +1,7 @@
 from test import support
-import _testcapi
-import codecs
-import io
-import sys
 import unittest
-import warnings
+import codecs
+import sys, _testcapi, io
 
 class Queue(object):
     """
@@ -66,9 +63,7 @@ def check_partial(self, input, partialresults):
         # the StreamReader and check that the results equal the appropriate
         # entries from partialresults.
         q = Queue(b"")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            r = codecs.getreader(self.encoding)(q)
+        r = codecs.getreader(self.encoding)(q)
         result = ""
         for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
             q.write(bytes([c]))
@@ -111,9 +106,7 @@ def getreader(input):
             return codecs.getreader(self.encoding)(stream)
 
         def readalllines(input, keepends=True, size=None):
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", DeprecationWarning)
-                reader = getreader(input)
+            reader = getreader(input)
             lines = []
             while True:
                 line = reader.readline(size=size, keepends=keepends)
@@ -222,18 +215,14 @@ def test_bug1175396(self):
             ' \r\n',
         ]
         stream = io.BytesIO("".join(s).encode(self.encoding))
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            reader = codecs.getreader(self.encoding)(stream)
+        reader = codecs.getreader(self.encoding)(stream)
         for (i, line) in enumerate(reader):
             self.assertEqual(line, s[i])
 
     def test_readlinequeue(self):
         q = Queue(b"")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            writer = codecs.getwriter(self.encoding)(q)
-            reader = codecs.getreader(self.encoding)(q)
+        writer = codecs.getwriter(self.encoding)(q)
+        reader = codecs.getreader(self.encoding)(q)
 
         # No lineends
         writer.write("foo\r")
@@ -264,9 +253,7 @@ def test_bug1098990_a(self):
 
         s = (s1+s2+s3).encode(self.encoding)
         stream = io.BytesIO(s)
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            reader = codecs.getreader(self.encoding)(stream)
+        reader = codecs.getreader(self.encoding)(stream)
         self.assertEqual(reader.readline(), s1)
         self.assertEqual(reader.readline(), s2)
         self.assertEqual(reader.readline(), s3)
@@ -281,9 +268,7 @@ def test_bug1098990_b(self):
 
         s = (s1+s2+s3+s4+s5).encode(self.encoding)
         stream = io.BytesIO(s)
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            reader = codecs.getreader(self.encoding)(stream)
+        reader = codecs.getreader(self.encoding)(stream)
         self.assertEqual(reader.readline(), s1)
         self.assertEqual(reader.readline(), s2)
         self.assertEqual(reader.readline(), s3)
@@ -305,9 +290,7 @@ def test_only_one_bom(self):
         _,_,reader,writer = codecs.lookup(self.encoding)
         # encode some stream
         s = io.BytesIO()
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = writer(s)
+        f = writer(s)
         f.write("spam")
         f.write("spam")
         d = s.getvalue()
@@ -315,22 +298,16 @@ def test_only_one_bom(self):
         self.assertTrue(d == self.spamle or d == self.spambe)
         # try to read it back
         s = io.BytesIO(d)
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = reader(s)
+        f = reader(s)
         self.assertEqual(f.read(), "spamspam")
 
     def test_badbom(self):
         s = io.BytesIO(4*b"\xff")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = codecs.getreader(self.encoding)(s)
+        f = codecs.getreader(self.encoding)(s)
         self.assertRaises(UnicodeError, f.read)
 
         s = io.BytesIO(8*b"\xff")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = codecs.getreader(self.encoding)(s)
+        f = codecs.getreader(self.encoding)(s)
         self.assertRaises(UnicodeError, f.read)
 
     def test_partial(self):
@@ -477,9 +454,7 @@ def test_only_one_bom(self):
         _,_,reader,writer = codecs.lookup(self.encoding)
         # encode some stream
         s = io.BytesIO()
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = writer(s)
+        f = writer(s)
         f.write("spam")
         f.write("spam")
         d = s.getvalue()
@@ -487,22 +462,16 @@ def test_only_one_bom(self):
         self.assertTrue(d == self.spamle or d == self.spambe)
         # try to read it back
         s = io.BytesIO(d)
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = reader(s)
+        f = reader(s)
         self.assertEqual(f.read(), "spamspam")
 
     def test_badbom(self):
         s = io.BytesIO(b"\xff\xff")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = codecs.getreader(self.encoding)(s)
+        f = codecs.getreader(self.encoding)(s)
         self.assertRaises(UnicodeError, f.read)
 
         s = io.BytesIO(b"\xff\xff\xff\xff")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = codecs.getreader(self.encoding)(s)
+        f = codecs.getreader(self.encoding)(s)
         self.assertRaises(UnicodeError, f.read)
 
     def test_partial(self):
@@ -548,8 +517,7 @@ def test_bug691291(self):
         self.addCleanup(support.unlink, support.TESTFN)
         with open(support.TESTFN, 'wb') as fp:
             fp.write(s)
-        with codecs.open(support.TESTFN, 'U',
-                         encoding=self.encoding) as reader:
+        with codecs.open(support.TESTFN, 'U', encoding=self.encoding) as reader:
             self.assertEqual(reader.read(), s1)
 
 class UTF16LETest(ReadTest):
@@ -737,9 +705,7 @@ def test_stream_bom(self):
         reader = codecs.getreader("utf-8-sig")
         for sizehint in [None] + list(range(1, 11)) + \
                         [64, 128, 256, 512, 1024]:
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", DeprecationWarning)
-                istream = reader(io.BytesIO(bytestring))
+            istream = reader(io.BytesIO(bytestring))
             ostream = io.StringIO()
             while 1:
                 if sizehint is not None:
@@ -761,9 +727,7 @@ def test_stream_bare(self):
         reader = codecs.getreader("utf-8-sig")
         for sizehint in [None] + list(range(1, 11)) + \
                         [64, 128, 256, 512, 1024]:
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", DeprecationWarning)
-                istream = reader(io.BytesIO(bytestring))
+            istream = reader(io.BytesIO(bytestring))
             ostream = io.StringIO()
             while 1:
                 if sizehint is not None:
@@ -785,9 +749,7 @@ def test_empty(self):
 class RecodingTest(unittest.TestCase):
     def test_recoding(self):
         f = io.BytesIO()
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f2 = codecs.EncodedFile(f, "unicode_internal", "utf-8")
+        f2 = codecs.EncodedFile(f, "unicode_internal", "utf-8")
         f2.write("a")
         f2.close()
         # Python used to crash on this at exit because of a refcount
@@ -1164,9 +1126,7 @@ def test_builtin_encode(self):
         self.assertEqual("pyth\xf6n.org.".encode("idna"), b"xn--pythn-mua.org.")
 
     def test_stream(self):
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            r = codecs.getreader("idna")(io.BytesIO(b"abc"))
+        r = codecs.getreader("idna")(io.BytesIO(b"abc"))
         r.read(3)
         self.assertEqual(r.read(), "")
 
@@ -1273,24 +1233,18 @@ def test_getwriter(self):
 class StreamReaderTest(unittest.TestCase):
 
     def setUp(self):
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            self.reader = codecs.getreader('utf-8')
+        self.reader = codecs.getreader('utf-8')
         self.stream = io.BytesIO(b'\xed\x95\x9c\n\xea\xb8\x80')
 
     def test_readlines(self):
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            f = self.reader(self.stream)
+        f = self.reader(self.stream)
         self.assertEqual(f.readlines(), ['\ud55c\n', '\uae00'])
 
 class EncodedFileTest(unittest.TestCase):
 
     def test_basic(self):
         f = io.BytesIO(b'\xed\x95\x9c\n\xea\xb8\x80')
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            ef = codecs.EncodedFile(f, 'utf-16-le', 'utf-8')
+        ef = codecs.EncodedFile(f, 'utf-16-le', 'utf-8')
         self.assertEqual(ef.read(), b'\\\xd5\n\x00\x00\xae')
 
         f = io.BytesIO()
@@ -1434,9 +1388,7 @@ def test_basics(self):
             if encoding not in broken_unicode_with_streams:
                 # check stream reader/writer
                 q = Queue(b"")
-                with warnings.catch_warnings():
-                    warnings.simplefilter("ignore", DeprecationWarning)
-                    writer = codecs.getwriter(encoding)(q)
+                writer = codecs.getwriter(encoding)(q)
                 encodedresult = b""
                 for c in s:
                     writer.write(c)
@@ -1444,9 +1396,7 @@ def test_basics(self):
                     self.assertTrue(type(chunk) is bytes, type(chunk))
                     encodedresult += chunk
                 q = Queue(b"")
-                with warnings.catch_warnings():
-                    warnings.simplefilter("ignore", DeprecationWarning)
-                    reader = codecs.getreader(encoding)(q)
+                reader = codecs.getreader(encoding)(q)
                 decodedresult = ""
                 for c in encodedresult:
                     q.write(bytes([c]))
@@ -1520,9 +1470,7 @@ def test_seek(self):
                 continue
             if encoding in broken_unicode_with_streams:
                 continue
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", DeprecationWarning)
-                reader = codecs.getreader(encoding)(io.BytesIO(s.encode(encoding)))
+            reader = codecs.getreader(encoding)(io.BytesIO(s.encode(encoding)))
             for t in range(5):
                 # Test that calling seek resets the internal codec state and buffers
                 reader.seek(0, 0)
@@ -1591,19 +1539,15 @@ def test_decode_with_string_map(self):
 class WithStmtTest(unittest.TestCase):
     def test_encodedfile(self):
         f = io.BytesIO(b"\xc3\xbc")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            with codecs.EncodedFile(f, "latin-1", "utf-8") as ef:
-                self.assertEqual(ef.read(), b"\xfc")
+        with codecs.EncodedFile(f, "latin-1", "utf-8") as ef:
+            self.assertEqual(ef.read(), b"\xfc")
 
     def test_streamreaderwriter(self):
         f = io.BytesIO(b"\xc3\xbc")
         info = codecs.lookup("utf-8")
-        with warnings.catch_warnings():
-            warnings.simplefilter("ignore", DeprecationWarning)
-            with codecs.StreamReaderWriter(f, info.streamreader,
-                                           info.streamwriter, 'strict') as srw:
-                self.assertEqual(srw.read(), "\xfc")
+        with codecs.StreamReaderWriter(f, info.streamreader,
+                                       info.streamwriter, 'strict') as srw:
+            self.assertEqual(srw.read(), "\xfc")
 
 class TypesTest(unittest.TestCase):
     def test_decode_unicode(self):
@@ -1700,15 +1644,15 @@ def test_seek0(self):
 
             # (StreamWriter) Check that the BOM is written after a seek(0)
             with codecs.open(support.TESTFN, 'w+', encoding=encoding) as f:
-                f.write(data[0])
-                self.assertNotEqual(f.tell(), 0)
-                f.seek(0)
-                f.write(data)
+                f.writer.write(data[0])
+                self.assertNotEqual(f.writer.tell(), 0)
+                f.writer.seek(0)
+                f.writer.write(data)
                 f.seek(0)
                 self.assertEqual(f.read(), data)
 
-            # Check that the BOM is not written after a seek() at a
-            # position different than the start
+            # Check that the BOM is not written after a seek() at a position
+            # different than the start
             with codecs.open(support.TESTFN, 'w+', encoding=encoding) as f:
                 f.write(data)
                 f.seek(f.tell())
@@ -1716,12 +1660,12 @@ def test_seek0(self):
                 f.seek(0)
                 self.assertEqual(f.read(), data * 2)
 
-            # (StreamWriter) Check that the BOM is not written after a
-            # seek() at a position different than the start
+            # (StreamWriter) Check that the BOM is not written after a seek()
+            # at a position different than the start
             with codecs.open(support.TESTFN, 'w+', encoding=encoding) as f:
-                f.write(data)
-                f.seek(f.tell())
-                f.write(data)
+                f.writer.write(data)
+                f.writer.seek(f.writer.tell())
+                f.writer.write(data)
                 f.seek(0)
                 self.assertEqual(f.read(), data * 2)
 
@@ -1760,9 +1704,7 @@ def test_basics(self):
     def test_read(self):
        for encoding in bytes_transform_encodings:
            sin = codecs.encode(b"\x80", encoding)
-           with warnings.catch_warnings():
-               warnings.simplefilter("ignore", DeprecationWarning)
-               reader = codecs.getreader(encoding)(io.BytesIO(sin))
+           reader = codecs.getreader(encoding)(io.BytesIO(sin))
            sout = reader.read()
            self.assertEqual(sout, b"\x80")
 
@@ -1771,9 +1713,7 @@ def test_readline(self):
             if encoding in ['uu_codec', 'zlib_codec']:
                 continue
             sin = codecs.encode(b"\x80", encoding)
-            with warnings.catch_warnings():
-                warnings.simplefilter("ignore", DeprecationWarning)
-                reader = codecs.getreader(encoding)(io.BytesIO(sin))
+            reader = codecs.getreader(encoding)(io.BytesIO(sin))
             sout = reader.readline()
             self.assertEqual(sout, b"\x80")
@@ -167,11 +167,6 @@ Library
 - Issue #1625: BZ2File and bz2.decompress() now support multi-stream files.
   Initial patch by Nir Aides.
 
-- Issue #8796: codecs.open() calls the builtin open() function instead of using
-  StreamReaderWriter. Deprecate StreamReader, StreamWriter, StreamReaderWriter,
-  StreamRecoder and EncodedFile() of the codec module. Use the builtin open()
-  function or io.TextIOWrapper instead.
-
 - Issue #12175: BufferedReader.read(-1) now calls raw.readall() if available.
 
 - Issue #12175: FileIO.readall() now only reads the file position and size