Clear cyclic references in the parser and the emitter to avoid extra GC calls.

Kirill Simonov 2011-05-30 02:51:30 +00:00
parent 23c952fe08
commit 7e1b5fae0b
11 changed files with 130 additions and 44 deletions
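The pure-Python Parser and Emitter keep their next step as a bound method in self.state (and a stack of bound methods in self.states), so every loader and dumper ends up referencing itself and can only be reclaimed by the cyclic garbage collector. The new dispose() methods clear those attributes so plain reference counting is enough. A standalone sketch of the cycle being broken (TinyParser is an invented stand-in, not part of the commit):

import gc

class TinyParser:
    # Invented illustration: mimics how Parser/Emitter keep their next state.
    def __init__(self):
        self.state = self.parse_stream_start   # bound method holds a reference back to self

    def parse_stream_start(self):
        pass

    def dispose(self):
        self.state = None                      # break the self-reference

gc.collect()                                   # start with no pending garbage
leaked = TinyParser()
del leaked                                     # the cycle keeps it alive until a GC pass
print(gc.collect())                            # expected > 0: the collector had to free it

cleaned = TinyParser()
cleaned.dispose()
del cleaned                                    # freed immediately by reference counting
print(gc.collect())                            # expected 0 when run as a script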

View file

@@ -311,6 +311,9 @@ cdef class CParser:
         yaml_parser_delete(&self.parser)
         yaml_event_delete(&self.parsed_event)

+    def dispose(self):
+        pass
+
     cdef object _parser_error(self):
         if self.parser.error == YAML_MEMORY_ERROR:
             return MemoryError
@@ -993,6 +996,9 @@ cdef class CEmitter:
     def __dealloc__(self):
         yaml_emitter_delete(&self.emitter)

+    def dispose(self):
+        pass
+
     cdef object _emitter_error(self):
         if self.emitter.error == YAML_MEMORY_ERROR:
             return MemoryError
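In the LibYAML binding the parser and emitter state lives in C structs rather than in bound-method attributes, so dispose() is a deliberate no-op here; it exists so callers can dispose a loader or dumper without checking which implementation they got. A hedged usage sketch (the input document is made up):

import yaml

try:
    Loader = yaml.CLoader          # LibYAML binding: dispose() does nothing
except AttributeError:
    Loader = yaml.Loader           # pure Python: dispose() clears state/states

loader = Loader("key: value\n")
try:
    data = loader.get_single_data()
finally:
    loader.dispose()               # safe with either implementation
print(data)                        # {'key': 'value'}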

View file

@@ -21,16 +21,22 @@ def scan(stream, Loader=Loader):
     Scan a YAML stream and produce scanning tokens.
     """
     loader = Loader(stream)
-    while loader.check_token():
-        yield loader.get_token()
+    try:
+        while loader.check_token():
+            yield loader.get_token()
+    finally:
+        loader.dispose()

 def parse(stream, Loader=Loader):
     """
     Parse a YAML stream and produce parsing events.
     """
     loader = Loader(stream)
-    while loader.check_event():
-        yield loader.get_event()
+    try:
+        while loader.check_event():
+            yield loader.get_event()
+    finally:
+        loader.dispose()

 def compose(stream, Loader=Loader):
     """
@@ -38,7 +44,10 @@ def compose(stream, Loader=Loader):
     and produce the corresponding representation tree.
     """
     loader = Loader(stream)
-    return loader.get_single_node()
+    try:
+        return loader.get_single_node()
+    finally:
+        loader.dispose()

 def compose_all(stream, Loader=Loader):
     """
@@ -46,8 +55,11 @@ def compose_all(stream, Loader=Loader):
     and produce corresponding representation trees.
     """
     loader = Loader(stream)
-    while loader.check_node():
-        yield loader.get_node()
+    try:
+        while loader.check_node():
+            yield loader.get_node()
+    finally:
+        loader.dispose()

 def load(stream, Loader=Loader):
     """
@@ -55,7 +67,10 @@ def load(stream, Loader=Loader):
     and produce the corresponding Python object.
     """
     loader = Loader(stream)
-    return loader.get_single_data()
+    try:
+        return loader.get_single_data()
+    finally:
+        loader.dispose()

 def load_all(stream, Loader=Loader):
     """
@@ -63,8 +78,11 @@ def load_all(stream, Loader=Loader):
     and produce corresponding Python objects.
     """
     loader = Loader(stream)
-    while loader.check_data():
-        yield loader.get_data()
+    try:
+        while loader.check_data():
+            yield loader.get_data()
+    finally:
+        loader.dispose()

 def safe_load(stream):
     """
@@ -96,8 +114,11 @@ def emit(events, stream=None, Dumper=Dumper,
         getvalue = stream.getvalue
     dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
             allow_unicode=allow_unicode, line_break=line_break)
-    for event in events:
-        dumper.emit(event)
+    try:
+        for event in events:
+            dumper.emit(event)
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()
@@ -122,10 +143,13 @@ def serialize_all(nodes, stream=None, Dumper=Dumper,
             allow_unicode=allow_unicode, line_break=line_break,
             encoding=encoding, version=version, tags=tags,
             explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for node in nodes:
-        dumper.serialize(node)
-    dumper.close()
+    try:
+        dumper.open()
+        for node in nodes:
+            dumper.serialize(node)
+        dumper.close()
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()
@@ -160,10 +184,13 @@ def dump_all(documents, stream=None, Dumper=Dumper,
             allow_unicode=allow_unicode, line_break=line_break,
             encoding=encoding, version=version, tags=tags,
             explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for data in documents:
-        dumper.represent(data)
-    dumper.close()
+    try:
+        dumper.open()
+        for data in documents:
+            dumper.represent(data)
+        dumper.close()
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()
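Each helper above now releases its loader or dumper in a finally clause. Code that drives a Dumper directly can follow the same pattern; a minimal sketch, assuming an in-memory text stream:

import io
import yaml

stream = io.StringIO()
dumper = yaml.Dumper(stream)
try:
    dumper.open()                      # STREAM-START
    dumper.represent({"key": "value"})
    dumper.close()                     # STREAM-END
finally:
    dumper.dispose()                   # drop the emitter's state/states references
print(stream.getvalue())               # key: value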

View file

@@ -58,11 +58,11 @@ class BaseConstructor(object):
         return data

     def construct_object(self, node, deep=False):
+        if node in self.constructed_objects:
+            return self.constructed_objects[node]
         if deep:
             old_deep = self.deep_construct
             self.deep_construct = True
-        if node in self.constructed_objects:
-            return self.constructed_objects[node]
         if node in self.recursive_objects:
             raise ConstructorError(None, None,
                     "found unconstructable recursive node", node.start_mark)

View file

@@ -103,6 +103,11 @@ class Emitter(object):
         self.analysis = None
         self.style = None

+    def dispose(self):
+        # Reset the state attributes (to clear self-references)
+        self.states = []
+        self.state = None
+
     def emit(self, event):
         self.events.append(event)
         while not self.need_more_events():

View file

@@ -86,6 +86,11 @@ class Parser(object):
         self.marks = []
         self.state = self.parse_stream_start

+    def dispose(self):
+        # Reset the state attributes (to clear self-references)
+        self.states = []
+        self.state = None
+
     def check_event(self, *choices):
         # Check the type of the next event.
         if self.current_event is None:

View file

@@ -22,16 +22,22 @@ def scan(stream, Loader=Loader):
     Scan a YAML stream and produce scanning tokens.
     """
     loader = Loader(stream)
-    while loader.check_token():
-        yield loader.get_token()
+    try:
+        while loader.check_token():
+            yield loader.get_token()
+    finally:
+        loader.dispose()

 def parse(stream, Loader=Loader):
     """
     Parse a YAML stream and produce parsing events.
     """
     loader = Loader(stream)
-    while loader.check_event():
-        yield loader.get_event()
+    try:
+        while loader.check_event():
+            yield loader.get_event()
+    finally:
+        loader.dispose()

 def compose(stream, Loader=Loader):
     """
@@ -39,7 +45,10 @@ def compose(stream, Loader=Loader):
     and produce the corresponding representation tree.
     """
     loader = Loader(stream)
-    return loader.get_single_node()
+    try:
+        return loader.get_single_node()
+    finally:
+        loader.dispose()

 def compose_all(stream, Loader=Loader):
     """
@@ -47,8 +56,11 @@ def compose_all(stream, Loader=Loader):
     and produce corresponding representation trees.
     """
     loader = Loader(stream)
-    while loader.check_node():
-        yield loader.get_node()
+    try:
+        while loader.check_node():
+            yield loader.get_node()
+    finally:
+        loader.dispose()

 def load(stream, Loader=Loader):
     """
@@ -56,7 +68,10 @@ def load(stream, Loader=Loader):
     and produce the corresponding Python object.
     """
     loader = Loader(stream)
-    return loader.get_single_data()
+    try:
+        return loader.get_single_data()
+    finally:
+        loader.dispose()

 def load_all(stream, Loader=Loader):
     """
@@ -64,8 +79,11 @@ def load_all(stream, Loader=Loader):
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
-    while loader.check_data():
-        yield loader.get_data()
+    try:
+        while loader.check_data():
+            yield loader.get_data()
+    finally:
+        loader.dispose()

 def safe_load(stream):
     """
@@ -96,8 +114,11 @@ def emit(events, stream=None, Dumper=Dumper,
         getvalue = stream.getvalue
     dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
             allow_unicode=allow_unicode, line_break=line_break)
-    for event in events:
-        dumper.emit(event)
+    try:
+        for event in events:
+            dumper.emit(event)
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()
@@ -121,10 +142,13 @@ def serialize_all(nodes, stream=None, Dumper=Dumper,
             allow_unicode=allow_unicode, line_break=line_break,
             encoding=encoding, version=version, tags=tags,
             explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for node in nodes:
-        dumper.serialize(node)
-    dumper.close()
+    try:
+        dumper.open()
+        for node in nodes:
+            dumper.serialize(node)
+        dumper.close()
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()
@@ -158,10 +182,13 @@ def dump_all(documents, stream=None, Dumper=Dumper,
             allow_unicode=allow_unicode, line_break=line_break,
             encoding=encoding, version=version, tags=tags,
             explicit_start=explicit_start, explicit_end=explicit_end)
-    dumper.open()
-    for data in documents:
-        dumper.represent(data)
-    dumper.close()
+    try:
+        dumper.open()
+        for data in documents:
+            dumper.represent(data)
+        dumper.close()
+    finally:
+        dumper.dispose()
     if getvalue:
         return getvalue()

View file

@@ -51,11 +51,11 @@ class BaseConstructor:
         return data

     def construct_object(self, node, deep=False):
+        if node in self.constructed_objects:
+            return self.constructed_objects[node]
         if deep:
             old_deep = self.deep_construct
             self.deep_construct = True
-        if node in self.constructed_objects:
-            return self.constructed_objects[node]
         if node in self.recursive_objects:
             raise ConstructorError(None, None,
                     "found unconstructable recursive node", node.start_mark)

View file

@@ -103,6 +103,11 @@ class Emitter:
         self.analysis = None
         self.style = None

+    def dispose(self):
+        # Reset the state attributes (to clear self-references)
+        self.states = []
+        self.state = None
+
     def emit(self, event):
         self.events.append(event)
         while not self.need_more_events():

View file

@@ -86,6 +86,11 @@ class Parser:
         self.marks = []
         self.state = self.parse_stream_start

+    def dispose(self):
+        # Reset the state attributes (to clear self-references)
+        self.states = []
+        self.state = None
+
     def check_event(self, *choices):
         # Check the type of the next event.
         if self.current_event is None:

View file

@@ -211,6 +211,9 @@ class CanonicalParser:
         self.events = []
         self.parsed = False

+    def dispose(self):
+        pass
+
     # stream: STREAM-START document* STREAM-END
     def parse_stream(self):
         self.get_token(yaml.StreamStartToken)

View file

@@ -212,6 +212,9 @@ class CanonicalParser:
         self.events = []
         self.parsed = False

+    def dispose(self):
+        pass
+
     # stream: STREAM-START document* STREAM-END
     def parse_stream(self):
         self.get_token(yaml.StreamStartToken)