yaml/_yaml.pyx: 46 changes (30 additions, 16 deletions)
@@ -83,6 +83,20 @@ cdef class Mark:
% (self.name, self.line+1, self.column+1)
return where

# Helper function to create a Mark object with a single call.
# Unlike a regular Mark(...) call, this version does not have to convert the
# size_t values to Python objects, which makes it quite a bit faster when
# called from Cython.
cdef Mark _create_mark(object name, size_t index, size_t line, size_t column,
object buffer, object pointer):
cdef Mark mark = Mark.__new__(Mark)
mark.name = name
mark.index = index
mark.line = line
mark.column = column
mark.buffer = buffer
mark.pointer = pointer
return mark

#class YAMLError(Exception):
# pass
#
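
For context, here is a minimal sketch of the pattern the new helper relies on (illustrative only: the Point type and _create_point name are hypothetical, not PyYAML code). In Cython, ExtType.__new__(ExtType) allocates an instance without running __init__, so C-typed values such as size_t can be stored straight into the object's fields instead of being boxed into Python ints for a constructor call and unboxed again on assignment.

# Hypothetical sketch of the __new__ fast path; not part of this PR.
cdef class Point:
    cdef readonly size_t x
    cdef readonly size_t y

    def __init__(self, x, y):
        # Normal construction: the caller's C values must first be converted
        # to Python ints so they can travel through this Python-level call.
        self.x = x
        self.y = y

cdef Point _create_point(size_t x, size_t y):
    # __new__ skips __init__, so the size_t arguments go directly into the
    # typed fields with no intermediate Python objects.
    cdef Point p = Point.__new__(Point)
    p.x = x
    p.y = y
    return p

_create_mark above applies the same pattern to Mark, which is why the call sites below can pass the libyaml mark fields (index, line, column) without creating temporary Python integers.
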
@@ -310,12 +324,12 @@ cdef class CParser:
context_mark = None
problem_mark = None
if self.parser.context != NULL:
context_mark = Mark(self.stream_name,
context_mark = _create_mark(self.stream_name,
self.parser.context_mark.index,
self.parser.context_mark.line,
self.parser.context_mark.column, None, None)
if self.parser.problem != NULL:
problem_mark = Mark(self.stream_name,
problem_mark = _create_mark(self.stream_name,
self.parser.problem_mark.index,
self.parser.problem_mark.line,
self.parser.problem_mark.column, None, None)
@@ -356,12 +370,12 @@ cdef class CParser:
return token_object

cdef object _token_to_object(self, yaml_token_t *token):
start_mark = Mark(self.stream_name,
start_mark = _create_mark(self.stream_name,
token.start_mark.index,
token.start_mark.line,
token.start_mark.column,
None, None)
end_mark = Mark(self.stream_name,
end_mark = _create_mark(self.stream_name,
token.end_mark.index,
token.end_mark.line,
token.end_mark.column,
@@ -503,12 +517,12 @@ cdef class CParser:

cdef object _event_to_object(self, yaml_event_t *event):
cdef yaml_tag_directive_t *tag_directive
start_mark = Mark(self.stream_name,
start_mark = _create_mark(self.stream_name,
event.start_mark.index,
event.start_mark.line,
event.start_mark.column,
None, None)
end_mark = Mark(self.stream_name,
end_mark = _create_mark(self.stream_name,
event.end_mark.index,
event.end_mark.line,
event.end_mark.column,
@@ -673,7 +687,7 @@ cdef class CParser:
document = self._compose_document()
self._parse_next_event()
if self.parsed_event.type != YAML_STREAM_END_EVENT:
mark = Mark(self.stream_name,
mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
@@ ... @@
if self.parsed_event.type == YAML_ALIAS_EVENT:
anchor = PyUnicode_FromYamlString(self.parsed_event.data.alias.anchor)
if anchor not in self.anchors:
mark = Mark(self.stream_name,
mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
@@ ... @@
anchor = PyUnicode_FromYamlString(self.parsed_event.data.mapping_start.anchor)
if anchor is not None:
if anchor in self.anchors:
mark = Mark(self.stream_name,
mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
@@ ... @@
return node

cdef _compose_scalar_node(self, object anchor):
start_mark = Mark(self.stream_name,
start_mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
None, None)
end_mark = Mark(self.stream_name,
end_mark = _create_mark(self.stream_name,
self.parsed_event.end_mark.index,
self.parsed_event.end_mark.line,
self.parsed_event.end_mark.column,
@@ -776,7 +790,7 @@ cdef class CParser:

cdef _compose_sequence_node(self, object anchor):
cdef int index
start_mark = Mark(self.stream_name,
start_mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
@@ -806,7 +820,7 @@ cdef class CParser:
value.append(self._compose_node(node, index))
index = index+1
self._parse_next_event()
node.end_mark = Mark(self.stream_name,
node.end_mark = _create_mark(self.stream_name,
self.parsed_event.end_mark.index,
self.parsed_event.end_mark.line,
self.parsed_event.end_mark.column,
@@ ... @@
return node

cdef _compose_mapping_node(self, object anchor):
start_mark = Mark(self.stream_name,
start_mark = _create_mark(self.stream_name,
self.parsed_event.start_mark.index,
self.parsed_event.start_mark.line,
self.parsed_event.start_mark.column,
Expand Down Expand Up @@ -845,7 +859,7 @@ cdef class CParser:
item_value = self._compose_node(node, item_key)
value.append((item_key, item_value))
self._parse_next_event()
node.end_mark = Mark(self.stream_name,
node.end_mark = _create_mark(self.stream_name,
self.parsed_event.end_mark.index,
self.parsed_event.end_mark.line,
self.parsed_event.end_mark.column,
@@ -912,7 +926,7 @@ cdef class CEmitter:
if hasattr(stream, u'encoding'):
self.dump_unicode = 1
self.use_encoding = encoding
yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
yaml_emitter_set_output(&self.emitter, output_handler, <void *>self)
if canonical:
yaml_emitter_set_canonical(&self.emitter, 1)
if indent is not None:
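
As a quick end-to-end illustration (a hypothetical usage sketch, not part of this diff), the Mark objects that _create_mark now builds reach user code unchanged, for example through the problem_mark of a parse error raised by the C loader. The snippet assumes PyYAML was built with libyaml so that CLoader is available.

import yaml

try:
    from yaml import CLoader as Loader    # the extension module patched above
except ImportError:
    from yaml import SafeLoader as Loader  # pure-Python fallback

try:
    yaml.load("a: [1, 2\n", Loader=Loader)  # unterminated flow sequence
except yaml.YAMLError as exc:
    mark = getattr(exc, "problem_mark", None)
    if mark is not None:
        # Mark still exposes the same attributes as before: name, line, column.
        print("%s: line %d, column %d" % (mark.name, mark.line + 1, mark.column + 1))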