Use with statements to eliminate ResourceWarnings

Thom Smith 2021-09-22 16:32:47 -04:00 committed by Ingy döt Net
parent 779bdb129e
commit 575d2f46db
16 changed files with 145 additions and 91 deletions
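The change applies one pattern throughout the test suite: every bare open(...).read() call, or open(...) handed directly to a loader, is wrapped in a with statement so the file handle is closed deterministically instead of being left for the garbage collector, which is what triggers CPython's ResourceWarning when warnings are enabled (for example with python -W default). A minimal, self-contained sketch of the before and after (example.yaml is an illustrative filename, not one from the repository):

import warnings

def read_old(path):
    # Old style: the file object is never closed explicitly, so CPython
    # emits "ResourceWarning: unclosed file ..." when it is garbage-collected.
    return open(path, 'rb').read()

def read_new(path):
    # New style: the with statement closes the handle as soon as the block
    # exits, so no ResourceWarning is raised.
    with open(path, 'rb') as file:
        return file.read()

if __name__ == '__main__':
    warnings.simplefilter('always', ResourceWarning)
    path = 'example.yaml'           # illustrative filename
    with open(path, 'wb') as file:  # create a small file to read back
        file.write(b'key: value\n')
    assert read_old(path) == read_new(path)  # read_old leaks the handle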

View file

@@ -103,7 +103,8 @@ def display(results, verbose):
         for filename in filenames:
             sys.stdout.write('-'*75+'\n')
             sys.stdout.write('%s:\n' % filename)
-            data = open(filename, 'r', errors='replace').read()
+            with open(filename, 'r', errors='replace') as file:
+                data = file.read()
             sys.stdout.write(data)
             if data and data[-1] != '\n':
                 sys.stdout.write('\n')

View file

@@ -2,7 +2,8 @@
 import yaml, canonical

 def test_canonical_scanner(canonical_filename, verbose=False):
-    data = open(canonical_filename, 'rb').read()
+    with open(canonical_filename, 'rb') as file:
+        data = file.read()
     tokens = list(yaml.canonical_scan(data))
     assert tokens, tokens
     if verbose:
@@ -12,7 +13,8 @@ def test_canonical_scanner(canonical_filename, verbose=False):
 test_canonical_scanner.unittest = ['.canonical']

 def test_canonical_parser(canonical_filename, verbose=False):
-    data = open(canonical_filename, 'rb').read()
+    with open(canonical_filename, 'rb') as file:
+        data = file.read()
     events = list(yaml.canonical_parse(data))
     assert events, events
     if verbose:
@@ -22,7 +24,8 @@ def test_canonical_parser(canonical_filename, verbose=False):
 test_canonical_parser.unittest = ['.canonical']

 def test_canonical_error(data_filename, canonical_filename, verbose=False):
-    data = open(data_filename, 'rb').read()
+    with open(data_filename, 'rb') as file:
+        data = file.read()
     try:
         output = list(yaml.canonical_load_all(data))
     except yaml.YAMLError as exc:

View file

@@ -257,10 +257,12 @@ def test_constructor_types(data_filename, code_filename, verbose=False):
     native1 = None
     native2 = None
     try:
-        native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader))
+        with open(data_filename, 'rb') as file:
+            native1 = list(yaml.load_all(file, Loader=MyLoader))
         if len(native1) == 1:
             native1 = native1[0]
-        native2 = _load_code(open(code_filename, 'rb').read())
+        with open(code_filename, 'rb') as file:
+            native2 = _load_code(file.read())
         try:
             if native1 == native2:
                 return
@@ -284,7 +286,8 @@ test_constructor_types.unittest = ['.data', '.code']
 def test_subclass_blacklist_types(data_filename, verbose=False):
     _make_objects()
     try:
-        yaml.load(open(data_filename, 'rb').read(), MyFullLoader)
+        with open(data_filename, 'rb') as file:
+            yaml.load(file.read(), MyFullLoader)
     except yaml.YAMLError as exc:
         if verbose:
             print("%s:" % exc.__class__.__name__, exc)

View file

@@ -15,7 +15,8 @@ def _compare_events(events1, events2):
             assert event1.value == event2.value, (event1, event2)

 def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
-    events = list(yaml.parse(open(data_filename, 'rb')))
+    with open(data_filename, 'rb') as file:
+        events = list(yaml.parse(file))
     output = yaml.emit(events)
     if verbose:
         print("OUTPUT:")
@@ -26,7 +27,8 @@ def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
 test_emitter_on_data.unittest = ['.data', '.canonical']

 def test_emitter_on_canonical(canonical_filename, verbose=False):
-    events = list(yaml.parse(open(canonical_filename, 'rb')))
+    with open(canonical_filename, 'rb') as file:
+        events = list(yaml.parse(file))
     for canonical in [False, True]:
         output = yaml.emit(events, canonical=canonical)
         if verbose:
@@ -39,7 +41,8 @@ test_emitter_on_canonical.unittest = ['.canonical']

 def test_emitter_styles(data_filename, canonical_filename, verbose=False):
     for filename in [data_filename, canonical_filename]:
-        events = list(yaml.parse(open(filename, 'rb')))
+        with open(filename, 'rb') as file:
+            events = list(yaml.parse(file))
         for flow_style in [False, True]:
             for style in ['|', '>', '"', '\'', '']:
                 styled_events = []
@@ -86,7 +89,8 @@ class EventsLoader(yaml.Loader):
 EventsLoader.add_constructor(None, EventsLoader.construct_event)

 def test_emitter_events(events_filename, verbose=False):
-    events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader))
+    with open(events_filename, 'rb') as file:
+        events = list(yaml.load(file, Loader=EventsLoader))
     output = yaml.emit(events)
     if verbose:
         print("OUTPUT:")

View file

@@ -3,7 +3,8 @@ import yaml, test_emitter

 def test_loader_error(error_filename, verbose=False):
     try:
-        list(yaml.load_all(open(error_filename, 'rb'), yaml.FullLoader))
+        with open(error_filename, 'rb') as file:
+            list(yaml.load_all(file, yaml.FullLoader))
     except yaml.YAMLError as exc:
         if verbose:
             print("%s:" % exc.__class__.__name__, exc)
@@ -14,7 +15,8 @@ test_loader_error.unittest = ['.loader-error']

 def test_loader_error_string(error_filename, verbose=False):
     try:
-        list(yaml.load_all(open(error_filename, 'rb').read(), yaml.FullLoader))
+        with open(error_filename, 'rb') as file:
+            list(yaml.load_all(file.read(), yaml.FullLoader))
     except yaml.YAMLError as exc:
         if verbose:
             print("%s:" % exc.__class__.__name__, exc)
@@ -25,7 +27,8 @@ test_loader_error_string.unittest = ['.loader-error']

 def test_loader_error_single(error_filename, verbose=False):
     try:
-        yaml.load(open(error_filename, 'rb').read(), yaml.FullLoader)
+        with open(error_filename, 'rb') as file:
+            yaml.load(file.read(), yaml.FullLoader)
     except yaml.YAMLError as exc:
         if verbose:
             print("%s:" % exc.__class__.__name__, exc)
@@ -35,8 +38,8 @@ def test_loader_error_single(error_filename, verbose=False):
 test_loader_error_single.unittest = ['.single-loader-error']

 def test_emitter_error(error_filename, verbose=False):
-    events = list(yaml.load(open(error_filename, 'rb'),
-        Loader=test_emitter.EventsLoader))
+    with open(error_filename, 'rb') as file:
+        events = list(yaml.load(file, Loader=test_emitter.EventsLoader))
     try:
         yaml.emit(events)
     except yaml.YAMLError as exc:
@@ -48,7 +51,8 @@ def test_emitter_error(error_filename, verbose=False):
 test_emitter_error.unittest = ['.emitter-error']

 def test_dumper_error(error_filename, verbose=False):
-    code = open(error_filename, 'rb').read()
+    with open(error_filename, 'rb') as file:
+        code = file.read()
     try:
         import yaml
         from io import StringIO

View file

@@ -3,7 +3,8 @@ import yaml
 import codecs, io, tempfile, os, os.path

 def test_unicode_input(unicode_filename, verbose=False):
-    data = open(unicode_filename, 'rb').read().decode('utf-8')
+    with open(unicode_filename, 'rb') as file:
+        data = file.read().decode('utf-8')
     value = ' '.join(data.split())
     output = yaml.full_load(data)
     assert output == value, (output, value)
@@ -23,7 +24,8 @@ def test_unicode_input(unicode_filename, verbose=False):
 test_unicode_input.unittest = ['.unicode']

 def test_unicode_input_errors(unicode_filename, verbose=False):
-    data = open(unicode_filename, 'rb').read().decode('utf-8')
+    with open(unicode_filename, 'rb') as file:
+        data = file.read().decode('utf-8')
     for input in [data.encode('utf-16-be'),
                   data.encode('utf-16-le'),
                   codecs.BOM_UTF8+data.encode('utf-16-be'),
@@ -47,7 +49,8 @@ def test_unicode_input_errors(unicode_filename, verbose=False):
 test_unicode_input_errors.unittest = ['.unicode']

 def test_unicode_output(unicode_filename, verbose=False):
-    data = open(unicode_filename, 'rb').read().decode('utf-8')
+    with open(unicode_filename, 'rb') as file:
+        data = file.read().decode('utf-8')
     value = ' '.join(data.split())
     for allow_unicode in [False, True]:
         data1 = yaml.dump(value, allow_unicode=allow_unicode)
@@ -82,7 +85,8 @@ def test_unicode_output(unicode_filename, verbose=False):
 test_unicode_output.unittest = ['.unicode']

 def test_file_output(unicode_filename, verbose=False):
-    data = open(unicode_filename, 'rb').read().decode('utf-8')
+    with open(unicode_filename, 'rb') as file:
+        data = file.read().decode('utf-8')
     handle, filename = tempfile.mkstemp()
     os.close(handle)
     try:
@@ -92,14 +96,14 @@ def test_file_output(unicode_filename, verbose=False):
         stream = io.BytesIO()
         yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
         data2 = stream.getvalue().decode('utf-16-le')[1:]
-        stream = open(filename, 'w', encoding='utf-16-le')
-        yaml.dump(data, stream, allow_unicode=True)
-        stream.close()
-        data3 = open(filename, 'r', encoding='utf-16-le').read()
-        stream = open(filename, 'wb')
-        yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
-        stream.close()
-        data4 = open(filename, 'r', encoding='utf-8').read()
+        with open(filename, 'w', encoding='utf-16-le') as stream:
+            yaml.dump(data, stream, allow_unicode=True)
+        with open(filename, 'r', encoding='utf-16-le') as file:
+            data3 = file.read()
+        with open(filename, 'wb') as stream:
+            yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
+        with open(filename, 'r', encoding='utf-8') as file:
+            data4 = file.read()
         assert data1 == data2, (data1, data2)
         assert data1 == data3, (data1, data3)
         assert data1 == data4, (data1, data4)
@@ -110,7 +114,8 @@ def test_file_output(unicode_filename, verbose=False):
 test_file_output.unittest = ['.unicode']

 def test_unicode_transfer(unicode_filename, verbose=False):
-    data = open(unicode_filename, 'rb').read().decode('utf-8')
+    with open(unicode_filename, 'rb') as file:
+        data = file.read().decode('utf-8')
     for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
         input = data
         if encoding is not None:

View file

@@ -2,7 +2,8 @@
 import yaml

 def test_marks(marks_filename, verbose=False):
-    inputs = open(marks_filename, 'r').read().split('---\n')[1:]
+    with open(marks_filename, 'r') as file:
+        inputs = file.read().split('---\n')[1:]
     for input in inputs:
         index = 0
         line = 0

View file

@@ -29,8 +29,10 @@ class Multi2(yaml.FullLoader):
     pass

 def test_multi_constructor(input_filename, code_filename, verbose=False):
-    input = open(input_filename, 'rb').read().decode('utf-8')
-    native = _load_code(open(code_filename, 'rb').read())
+    with open(input_filename, 'rb') as file:
+        input = file.read().decode('utf-8')
+    with open(code_filename, 'rb') as file:
+        native = _load_code(file.read())

     # default multi constructor for ! and !! tags
     Multi1.add_multi_constructor('!', myconstructor1)

View file

@@ -13,18 +13,22 @@ def _run_reader(data, verbose):
         raise AssertionError("expected an exception")

 def test_stream_error(error_filename, verbose=False):
-    _run_reader(open(error_filename, 'rb'), verbose)
-    _run_reader(open(error_filename, 'rb').read(), verbose)
+    with open(error_filename, 'rb') as file:
+        _run_reader(file, verbose)
+    with open(error_filename, 'rb') as file:
+        _run_reader(file.read(), verbose)
     for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']:
         try:
-            data = open(error_filename, 'rb').read().decode(encoding)
+            with open(error_filename, 'rb') as file:
+                data = file.read().decode(encoding)
             break
         except UnicodeDecodeError:
             pass
     else:
         return
     _run_reader(data, verbose)
-    _run_reader(open(error_filename, encoding=encoding), verbose)
+    with open(error_filename, encoding=encoding) as file:
+        _run_reader(file, verbose)

 test_stream_error.unittest = ['.stream-error']

View file

@@ -24,7 +24,8 @@ class AnInstanceWithState(AnInstance):

 def test_recursive(recursive_filename, verbose=False):
     context = globals().copy()
-    exec(open(recursive_filename, 'rb').read(), context)
+    with open(recursive_filename, 'rb') as file:
+        exec(file.read(), context)
     value1 = context['value']
     output1 = None
     value2 = None

View file

@@ -7,7 +7,8 @@ def test_representer_types(code_filename, verbose=False):
     test_constructor._make_objects()
     for allow_unicode in [False, True]:
         for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
-            native1 = test_constructor._load_code(open(code_filename, 'rb').read())
+            with open(code_filename, 'rb') as file:
+                native1 = test_constructor._load_code(file.read())
             native2 = None
             try:
                 output = yaml.dump(native1, Dumper=test_constructor.MyDumper,

View file

@@ -6,8 +6,10 @@ def test_implicit_resolver(data_filename, detect_filename, verbose=False):
     correct_tag = None
     node = None
     try:
-        correct_tag = open(detect_filename, 'r').read().strip()
-        node = yaml.compose(open(data_filename, 'rb'))
+        with open(detect_filename, 'r') as file:
+            correct_tag = file.read().strip()
+        with open(data_filename, 'rb') as file:
+            node = yaml.compose(file)
         assert isinstance(node, yaml.SequenceNode), node
         for scalar in node.value:
             assert isinstance(scalar, yaml.ScalarNode), scalar
@@ -58,8 +60,10 @@ def _convert_node(node):

 def test_path_resolver_loader(data_filename, path_filename, verbose=False):
     _make_path_loader_and_dumper()
-    nodes1 = list(yaml.compose_all(open(data_filename, 'rb').read(), Loader=MyLoader))
-    nodes2 = list(yaml.compose_all(open(path_filename, 'rb').read()))
+    with open(data_filename, 'rb') as file:
+        nodes1 = list(yaml.compose_all(file.read(), Loader=MyLoader))
+    with open(path_filename, 'rb') as file:
+        nodes2 = list(yaml.compose_all(file.read()))
     try:
         for node1, node2 in zip(nodes1, nodes2):
             data1 = _convert_node(node1)
@@ -74,15 +78,17 @@ test_path_resolver_loader.unittest = ['.data', '.path']

 def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
     _make_path_loader_and_dumper()
     for filename in [data_filename, path_filename]:
-        output = yaml.serialize_all(yaml.compose_all(open(filename, 'rb')), Dumper=MyDumper)
+        with open(filename, 'rb') as file:
+            output = yaml.serialize_all(yaml.compose_all(file), Dumper=MyDumper)
         if verbose:
             print(output)
         nodes1 = yaml.compose_all(output)
-        nodes2 = yaml.compose_all(open(data_filename, 'rb'))
-        for node1, node2 in zip(nodes1, nodes2):
-            data1 = _convert_node(node1)
-            data2 = _convert_node(node2)
-            assert data1 == data2, (data1, data2)
+        with open(data_filename, 'rb') as file:
+            nodes2 = yaml.compose_all(file)
+            for node1, node2 in zip(nodes1, nodes2):
+                data1 = _convert_node(node1)
+                data2 = _convert_node(node2)
+                assert data1 == data2, (data1, data2)

 test_path_resolver_dumper.unittest = ['.data', '.path']
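One detail worth noting in the last hunk above (test_path_resolver_dumper), and likewise in the test_structure, test_tokens, and test_scanner hunks further down: yaml.compose_all and yaml.scan return lazy generators, so when the open call moves into a with block, the loop that consumes the result has to move inside that block as well, otherwise the stream is closed before anything is parsed. A minimal sketch of the difference, with nodes.yaml as an illustrative filename:

import yaml

def load_nodes_broken(path):
    with open(path, 'rb') as file:
        nodes = yaml.compose_all(file)   # generator created, nothing read yet
    return list(nodes)                   # fails: the file is already closed

def load_nodes_ok(path):
    with open(path, 'rb') as file:
        return list(yaml.compose_all(file))  # consumed while the file is open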

View file

@@ -3,8 +3,10 @@ import pprint
 import sys

 def test_sort_keys(input_filename, sorted_filename, verbose=False):
-    input = open(input_filename, 'rb').read().decode('utf-8')
-    sorted = open(sorted_filename, 'rb').read().decode('utf-8')
+    with open(input_filename, 'rb') as file:
+        input = file.read().decode('utf-8')
+    with open(sorted_filename, 'rb') as file:
+        sorted = file.read().decode('utf-8')
     data = yaml.load(input, Loader=yaml.FullLoader)
     dump_sorted = yaml.dump(data, default_flow_style=False, sort_keys=True)
     dump_unsorted = yaml.dump(data, default_flow_style=False, sort_keys=False)

View file

@@ -34,15 +34,19 @@ def _convert_structure(loader):

 def test_structure(data_filename, structure_filename, verbose=False):
     nodes1 = []
-    nodes2 = eval(open(structure_filename, 'r').read())
+    with open(structure_filename, 'r') as file:
+        nodes2 = eval(file.read())
     try:
-        loader = yaml.Loader(open(data_filename, 'rb'))
-        while loader.check_event():
-            if loader.check_event(yaml.StreamStartEvent, yaml.StreamEndEvent,
-                    yaml.DocumentStartEvent, yaml.DocumentEndEvent):
-                loader.get_event()
-                continue
-            nodes1.append(_convert_structure(loader))
+        with open(data_filename, 'rb') as file:
+            loader = yaml.Loader(file)
+            while loader.check_event():
+                if loader.check_event(
+                    yaml.StreamStartEvent, yaml.StreamEndEvent,
+                    yaml.DocumentStartEvent, yaml.DocumentEndEvent
+                ):
+                    loader.get_event()
+                    continue
+                nodes1.append(_convert_structure(loader))
         if len(nodes1) == 1:
             nodes1 = nodes1[0]
         assert nodes1 == nodes2, (nodes1, nodes2)
@@ -71,8 +75,10 @@ def test_parser(data_filename, canonical_filename, verbose=False):
     events1 = None
     events2 = None
     try:
-        events1 = list(yaml.parse(open(data_filename, 'rb')))
-        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
+        with open(data_filename, 'rb') as file:
+            events1 = list(yaml.parse(file))
+        with open(canonical_filename, 'rb') as file:
+            events2 = list(yaml.canonical_parse(file))
         _compare_events(events1, events2)
     finally:
         if verbose:
@@ -87,8 +93,10 @@ def test_parser_on_canonical(canonical_filename, verbose=False):
     events1 = None
     events2 = None
     try:
-        events1 = list(yaml.parse(open(canonical_filename, 'rb')))
-        events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb')))
+        with open(canonical_filename, 'rb') as file:
+            events1 = list(yaml.parse(file))
+        with open(canonical_filename, 'rb') as file:
+            events2 = list(yaml.canonical_parse(file))
         _compare_events(events1, events2, full=True)
     finally:
         if verbose:
@@ -117,8 +125,10 @@ def test_composer(data_filename, canonical_filename, verbose=False):
     nodes1 = None
     nodes2 = None
     try:
-        nodes1 = list(yaml.compose_all(open(data_filename, 'rb')))
-        nodes2 = list(yaml.canonical_compose_all(open(canonical_filename, 'rb')))
+        with open(data_filename, 'rb') as file:
+            nodes1 = list(yaml.compose_all(file))
+        with open(canonical_filename, 'rb') as file:
+            nodes2 = list(yaml.canonical_compose_all(file))
         assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2))
         for node1, node2 in zip(nodes1, nodes2):
             _compare_nodes(node1, node2)
@@ -169,8 +179,10 @@ def test_constructor(data_filename, canonical_filename, verbose=False):
     native1 = None
     native2 = None
     try:
-        native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader))
-        native2 = list(yaml.load_all(open(canonical_filename, 'rb'), Loader=MyCanonicalLoader))
+        with open(data_filename, 'rb') as file:
+            native1 = list(yaml.load_all(file, Loader=MyLoader))
+        with open(canonical_filename, 'rb') as file:
+            native2 = list(yaml.load_all(file, Loader=MyCanonicalLoader))
         assert native1 == native2, (native1, native2)
     finally:
         if verbose:

View file

@@ -44,11 +44,13 @@ _replaces = {

 def test_tokens(data_filename, tokens_filename, verbose=False):
     tokens1 = []
-    tokens2 = open(tokens_filename, 'r').read().split()
+    with open(tokens_filename, 'r') as file:
+        tokens2 = file.read().split()
     try:
-        for token in yaml.scan(open(data_filename, 'rb')):
-            if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
-                tokens1.append(_replaces[token.__class__])
+        with open(data_filename, 'rb') as file:
+            for token in yaml.scan(file):
+                if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
+                    tokens1.append(_replaces[token.__class__])
     finally:
         if verbose:
             print("TOKENS1:", ' '.join(tokens1))
@@ -63,8 +65,9 @@ def test_scanner(data_filename, canonical_filename, verbose=False):
     for filename in [data_filename, canonical_filename]:
         tokens = []
         try:
-            for token in yaml.scan(open(filename, 'rb')):
-                tokens.append(token.__class__.__name__)
+            with open(filename, 'rb') as file:
+                for token in yaml.scan(file):
+                    tokens.append(token.__class__.__name__)
         finally:
             if verbose:
                 pprint.pprint(tokens)

View file

@@ -154,14 +154,14 @@ def _compare_scanners(py_data, c_data, verbose):
         pprint.pprint(c_tokens)

 def test_c_scanner(data_filename, canonical_filename, verbose=False):
-    _compare_scanners(open(data_filename, 'rb'),
-            open(data_filename, 'rb'), verbose)
-    _compare_scanners(open(data_filename, 'rb').read(),
-            open(data_filename, 'rb').read(), verbose)
-    _compare_scanners(open(canonical_filename, 'rb'),
-            open(canonical_filename, 'rb'), verbose)
-    _compare_scanners(open(canonical_filename, 'rb').read(),
-            open(canonical_filename, 'rb').read(), verbose)
+    with open(data_filename, 'rb') as file1, open(data_filename, 'rb') as file2:
+        _compare_scanners(file1, file2, verbose)
+    with open(data_filename, 'rb') as file1, open(data_filename, 'rb') as file2:
+        _compare_scanners(file1.read(), file2.read(), verbose)
+    with open(canonical_filename, 'rb') as file1, open(canonical_filename, 'rb') as file2:
+        _compare_scanners(file1, file2, verbose)
+    with open(canonical_filename, 'rb') as file1, open(canonical_filename, 'rb') as file2:
+        _compare_scanners(file1.read(), file2.read(), verbose)

 test_c_scanner.unittest = ['.data', '.canonical']
 test_c_scanner.skip = ['.skip-ext']
@@ -187,14 +187,14 @@ def _compare_parsers(py_data, c_data, verbose):
         pprint.pprint(c_events)

 def test_c_parser(data_filename, canonical_filename, verbose=False):
-    _compare_parsers(open(data_filename, 'rb'),
-            open(data_filename, 'rb'), verbose)
-    _compare_parsers(open(data_filename, 'rb').read(),
-            open(data_filename, 'rb').read(), verbose)
-    _compare_parsers(open(canonical_filename, 'rb'),
-            open(canonical_filename, 'rb'), verbose)
-    _compare_parsers(open(canonical_filename, 'rb').read(),
-            open(canonical_filename, 'rb').read(), verbose)
+    with open(data_filename, 'rb') as file1, open(data_filename, 'rb') as file2:
+        _compare_parsers(file1, file2, verbose)
+    with open(data_filename, 'rb') as file1, open(data_filename, 'rb') as file2:
+        _compare_parsers(file1.read(), file2.read(), verbose)
+    with open(canonical_filename, 'rb') as file1, open(canonical_filename, 'rb') as file2:
+        _compare_parsers(file1, file2, verbose)
+    with open(canonical_filename, 'rb') as file1, open(canonical_filename, 'rb') as file2:
+        _compare_parsers(file1.read(), file2.read(), verbose)

 test_c_parser.unittest = ['.data', '.canonical']
 test_c_parser.skip = ['.skip-ext']
@@ -232,8 +232,10 @@ def _compare_emitters(data, verbose):
         pprint.pprint(c_events)

 def test_c_emitter(data_filename, canonical_filename, verbose=False):
-    _compare_emitters(open(data_filename, 'rb').read(), verbose)
-    _compare_emitters(open(canonical_filename, 'rb').read(), verbose)
+    with open(data_filename, 'rb') as file:
+        _compare_emitters(file.read(), verbose)
+    with open(canonical_filename, 'rb') as file:
+        _compare_emitters(file.read(), verbose)

 test_c_emitter.unittest = ['.data', '.canonical']
 test_c_emitter.skip = ['.skip-ext']