1 # The MIT License (MIT)
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Standard library imports.
import collections
import copy
import os
import os.path
import typing
from typing import Optional, List, Dict, Any, TextIO, MutableMapping, Union, Set, Iterable, Callable, Tuple

# Third-party imports.
import jsonschema  # type: ignore
import pkg_resources
import yaml

# Local imports.
from barectf.typing import VersionNumber, _OptStr
# The context of a configuration parsing error.
#
# Such a context object has a name and, optionally, a message.
class _ConfigurationParseErrorContext:
    def __init__(self, name: str, message: _OptStr = None):
        self._name = name
        self._msg = message

    # Name of this context.
    @property
    def name(self) -> str:
        return self._name

    # Message of this context, or `None` if none.
    @property
    def message(self) -> _OptStr:
        return self._msg
# A configuration parsing error.
#
# Such an error object contains a list of contexts (`context` property).
#
# The first context of this list is the most specific context, while the
# last is the more general.
#
# Use _append_ctx() to append a context to an existing configuration
# parsing error when you catch it before raising it again. You can use
# _append_error_ctx() to do exactly this in a single call.
class _ConfigurationParseError(Exception):
    def __init__(self, init_ctx_obj_name, init_ctx_msg=None):
        super().__init__()

        # contexts, most specific first
        self._ctx: List[_ConfigurationParseErrorContext] = []
        self._append_ctx(init_ctx_obj_name, init_ctx_msg)

    # Contexts of this error (most specific first).
    @property
    def context(self) -> List[_ConfigurationParseErrorContext]:
        return self._ctx

    # Appends a context having the name `name` and the optional message
    # `msg` to this error.
    def _append_ctx(self, name: str, msg: _OptStr = None):
        self._ctx.append(_ConfigurationParseErrorContext(name, msg))

    # Renders this error as a human-readable string: one line per
    # context, most general context first.
    def __str__(self):
        lines = []

        for ctx in reversed(self._ctx):
            line = f'{ctx.name}:'

            if ctx.message is not None:
                line += f' {ctx.message}'

            lines.append(line)

        return '\n'.join(lines)
# Appends the context having the object name `obj_name` and the
# (optional) message `message` to the `_ConfigurationParseError`
# exception `exc` and then raises `exc` again.
def _append_error_ctx(exc: _ConfigurationParseError, obj_name: str, message: _OptStr = None):
    exc._append_ctx(obj_name, message)
    raise exc
# barectf 3 prefixes: one for C identifiers, one for file names.
_V3Prefixes = collections.namedtuple('_V3Prefixes', ['identifier', 'file_name'])
# Converts a v2 prefix to v3 prefixes.
#
# The identifier prefix is the v2 prefix as is; the file name prefix is
# the v2 prefix without any trailing underscore.
def _v3_prefixes_from_v2_prefix(v2_prefix: str) -> _V3Prefixes:
    return _V3Prefixes(v2_prefix, v2_prefix.rstrip('_'))
# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri: str):
        # all barectf schemas are local: this is a bug
        raise RuntimeError(f'Missing local schema with URI `{uri}`')
# Not all static type checkers support type recursion, so let's just use
# `Any` as a map node's value's type.
_MapNode = MutableMapping[str, Any]
# Schema validator which considers all the schemas found in the
# subdirectories `subdirs` (at build time) of the barectf package's
# `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self, subdirs: Iterable[str]):
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')

        # map of schema ID (`$id` property) to loaded schema node
        self._store: Dict[str, Any] = {}

        for subdir in subdirs:
            schemas_subdir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(schemas_subdir):
                if not file_name.endswith('.yaml'):
                    # not a schema file: skip
                    continue

                with open(os.path.join(schemas_subdir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                # every schema must have a unique `$id` property
                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    # Converts the `collections.OrderedDict` object `obj`, as well as
    # its `collections.OrderedDict` and `list` values, recursively, to
    # plain `dict`/`list` objects; returns any other object as is.
    @staticmethod
    def _dict_from_ordered_dict(obj):
        if type(obj) is not collections.OrderedDict:
            return obj

        dct = {}

        for k, v in obj.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)
            elif type(v) is list:
                new_v = [_SchemaValidator._dict_from_ordered_dict(elem) for elem in v]

            dct[k] = new_v

        return dct

    # Validates `instance` against the schema having the short ID
    # `schema_short_id`, raising a `jsonschema` exception on failure.
    def _validate(self, instance: _MapNode, schema_short_id: str):
        # retrieve full schema ID from short ID
        schema_id = f'https://barectf.org/schemas/{schema_short_id}.json'
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # the schema's URI (`$id` property).
    #
    # Raises a `_ConfigurationParseError` object, hiding any
    # `jsonschema` exception, on validation failure.
    def validate(self, instance: _MapNode, schema_short_id: str):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigurationParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = f'Element #{elem + 1}'
                else:
                    ctx = f'`{elem}` property'

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(), the method
                # returns a:
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = f': {msgs}'

            new_exc = _ConfigurationParseError(contexts.pop(),
                                               f'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')

            for ctx in reversed(contexts):
                new_exc._append_ctx(ctx)

            raise new_exc
# barectf 3 YAML configuration node.
#
# Thin wrapper which marks a loaded map node (`config_node` property)
# as a barectf 3 configuration root.
class _ConfigNodeV3:
    def __init__(self, config_node: _MapNode):
        self._config_node = config_node

    # Wrapped configuration map node.
    @property
    def config_node(self) -> _MapNode:
        return self._config_node
# YAML tag identifying a barectf 3 configuration document.
_CONFIG_V3_YAML_TAG = 'tag:barectf.org,2020/3/config'
# Loads the content of the YAML file-like object `file` as a Python
# object and returns it.
#
# If the file's object has the barectf 3 configuration tag, then this
# function returns a `_ConfigNodeV3` object. Otherwise, it returns a
# `collections.OrderedDict` object.
#
# All YAML maps are loaded as `collections.OrderedDict` objects.
#
# Raises a `_ConfigurationParseError` object if the file cannot be
# loaded (invalid YAML or I/O error).
def _yaml_load(file: TextIO) -> Union[_ConfigNodeV3, _MapNode]:
    # Local loader class so that the constructors registered below
    # don't leak into other users of `yaml.Loader`.
    class Loader(yaml.Loader):
        pass

    # constructs a `_ConfigNodeV3` object from a tagged root map node
    def config_ctor(loader, node) -> _ConfigNodeV3:
        if not isinstance(node, yaml.MappingNode):
            problem = f'Expecting a map for the tag `{node.tag}`'
            raise yaml.constructor.ConstructorError(problem=problem)

        loader.flatten_mapping(node)
        return _ConfigNodeV3(collections.OrderedDict(loader.construct_pairs(node)))

    # constructs a `collections.OrderedDict` object from any map node
    def mapping_ctor(loader, node) -> _MapNode:
        loader.flatten_mapping(node)
        return collections.OrderedDict(loader.construct_pairs(node))

    Loader.add_constructor(_CONFIG_V3_YAML_TAG, config_ctor)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, mapping_ctor)

    try:
        # YAML -> Python
        return yaml.load(file, Loader=Loader)
    except (yaml.YAMLError, OSError, IOError) as exc:
        raise _ConfigurationParseError('YAML loader', f'Cannot load file: {exc}')
# Loads the content of the YAML file having the path `path` as a Python
# object and returns it (see _yaml_load()).
def _yaml_load_path(path: str) -> Union[_ConfigNodeV3, _MapNode]:
    with open(path) as f:
        return _yaml_load(f)
# Dumps the content of the Python object `node`
# (`collections.OrderedDict` or `_ConfigNodeV3`) as a YAML string and
# returns it.
#
# `kwds` is forwarded to yaml.dump().
def _yaml_dump(node: _MapNode, **kwds) -> str:
    # Local dumper class so that the representers registered below
    # don't leak into other users of `yaml.Dumper`.
    class Dumper(yaml.Dumper):
        pass

    # represents a `_ConfigNodeV3` object as a tagged map
    def config_repr(dumper, node):
        return dumper.represent_mapping(_CONFIG_V3_YAML_TAG, node.config_node.items())

    # represents a `collections.OrderedDict` object as an ordinary map
    def mapping_repr(dumper, node):
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        node.items())

    Dumper.add_representer(_ConfigNodeV3, config_repr)
    Dumper.add_representer(collections.OrderedDict, mapping_repr)

    # Python -> YAML
    return yaml.dump(node, Dumper=Dumper, version=(1, 2), **kwds)
316 # A common barectf YAML configuration parser.
318 # This is the base class of any barectf YAML configuration parser. It
319 # mostly contains helpers.
321 # Builds a base barectf YAML configuration parser to process the
322 # configuration node `node` (already loaded from the file-like
325 # For its _process_node_include() method, the parser considers the
326 # package inclusion directory as well as `include_dirs`, and ignores
327 # nonexistent inclusion files if `ignore_include_not_found` is
# Builds a base barectf YAML configuration parser to process the
# configuration node `node` (already loaded from the file-like object
# `root_file`).
#
# For its _process_node_include() method, the parser considers the
# package inclusion directory as well as `include_dirs`, and ignores
# nonexistent inclusion files if `ignore_include_not_found` is `True`.
def __init__(self, root_file: TextIO, node: Union[_MapNode, _ConfigNodeV3],
             with_pkg_include_dir: bool, include_dirs: Optional[List[str]],
             ignore_include_not_found: bool, major_version: VersionNumber):
    self._root_file = root_file
    self._root_node = node

    # Names of the field type node properties which contain a nested
    # field type node.
    #
    # NOTE(review): only `element-field-type` survives in the damaged
    # source; the other entries below are assumed from the barectf 2/3
    # configuration formats — confirm against upstream.
    self._ft_prop_names = [
        # barectf 2.1+
        '$inherit',
        # barectf 2
        'inherit',
        'value-type',
        'element-type',
        # barectf 3
        'element-field-type',
    ]

    if include_dirs is None:
        include_dirs = []

    # copy so that the caller's list is never mutated below
    self._include_dirs = copy.copy(include_dirs)

    if with_pkg_include_dir:
        # also consider the package's own inclusion directory
        self._include_dirs.append(pkg_resources.resource_filename(__name__,
                                                                  f'include/{major_version}'))

    self._ignore_include_not_found = ignore_include_not_found

    # stack of normalized paths of the files currently being included
    # (used to detect inclusion cycles)
    self._include_stack: List[str] = []

    # names of the field type aliases which are already resolved
    self._resolved_ft_aliases: Set[str] = set()

    self._schema_validator = _SchemaValidator({'common/config', f'{major_version}/config'})
    self._major_version = major_version
# Name of the property of a structure field type node which contains
# its member nodes (`fields` in barectf 2, `members` in barectf 3).
#
# A property (it's read later without call parentheses).
@property
def _struct_ft_node_members_prop_name(self) -> str:
    if self._major_version == 2:
        return 'fields'
    else:
        return 'members'
# Returns the last included file name from the parser's inclusion
# file name stack, or `N/A` if the root file does not have an
# associated path under the `name` property.
def _get_last_include_file(self) -> str:
    if self._include_stack:
        return self._include_stack[-1]

    if hasattr(self._root_file, 'name'):
        return typing.cast(str, self._root_file.name)

    # root file has no `name` property (e.g. an in-memory stream)
    return 'N/A'
# Loads the inclusion file having the path `yaml_path` and returns
# its content as a `collections.OrderedDict` object.
#
# Returns `None` when the file is not found in any inclusion
# directory and such errors are ignored; raises a
# `_ConfigurationParseError` object otherwise, as well as on an
# inclusion cycle.
def _load_include(self, yaml_path) -> Optional[_MapNode]:
    for inc_dir in self._include_dirs:
        # Current inclusion dir + file name path.
        #
        # Note: os.path.join() only takes the last argument if it's
        # absolute.
        inc_path = os.path.join(inc_dir, yaml_path)

        # real path (symbolic links resolved)
        real_path = os.path.realpath(inc_path)

        # normalized path (weird stuff removed!)
        norm_path = os.path.normpath(real_path)

        if not os.path.isfile(norm_path):
            # file doesn't exist: skip
            continue

        if norm_path in self._include_stack:
            # inclusion cycle
            base_path = self._get_last_include_file()
            raise _ConfigurationParseError(f'File `{base_path}`',
                                           f'Cannot recursively include file `{norm_path}`')

        # remember this path to detect cycles later
        self._include_stack.append(norm_path)

        # load raw content
        return typing.cast(_MapNode, _yaml_load_path(norm_path))

    if not self._ignore_include_not_found:
        base_path = self._get_last_include_file()
        raise _ConfigurationParseError(f'File `{base_path}`',
                                       f'Cannot include file `{yaml_path}`: file not found in inclusion directories')

    # not found, but we're ignoring such errors
    return None
# Returns a list of all the inclusion file paths as found in the
# inclusion node `include_node`.
#
# `include_node` may be `None` (no paths), a single string (one
# path), or a list of strings.
def _get_include_paths(self, include_node: _MapNode) -> List[str]:
    if include_node is None:
        # none
        return []

    if type(include_node) is str:
        # wrap the single path in a list
        return [typing.cast(str, include_node)]

    # already a list of paths
    assert type(include_node) is list
    return typing.cast(List[str], include_node)
# Updates the node `base_node` with an overlay node `overlay_node`.
#
# Both the inclusion and field type node inheritance features use
# this update mechanism.
def _update_node(self, base_node: _MapNode, overlay_node: _MapNode):
    # Updates the structure field type member array `base_value` with
    # the overlay member array `olay_value` (barectf 3 only).
    #
    # Although the `members` property is syntactically an array, it's
    # semantically an ordered map, where an entry's key is the array
    # item's map's first key (like YAML's `!!omap`). Therefore a
    # naive array extension is wrong: an overlay member whose single
    # property's name matches the name of the single property of an
    # object item of `base_value` must update that base item instead
    # of being appended.
    #
    # For a given item of `olay_value`, if:
    #
    # * It's not an object, or
    # * It contains more than one property, or
    # * Its single property's name does not match the name of the
    #   single property of any object item of `base_value`,
    #
    # then append the item to `base_value` as usual.
    def update_members_node(base_value: List[Any], olay_value: List[Any]):
        for olay_item in olay_value:
            # assume we append `olay_item` to `base_value` initially
            append_olay_item = True

            if type(olay_item) is collections.OrderedDict:
                # overlay item is an object
                if len(olay_item) == 1:
                    # overlay object item contains a single property
                    olay_name = list(olay_item)[0]

                    # find corresponding base item
                    for base_item in base_value:
                        if type(base_item) is collections.OrderedDict:
                            # NOTE(review): the source checks
                            # `olay_item` here (redundantly, as it's
                            # already known to have one property);
                            # `base_item` may be intended — confirm
                            # against upstream. Kept as found.
                            if len(olay_item) == 1:
                                base_name = list(base_item)[0]

                                if olay_name == base_name:
                                    # Names match: update with usual
                                    # update mechanism.
                                    self._update_node(base_item, olay_item)

                                    # Do _not_ append `olay_item` to
                                    # `base_value`: we just updated
                                    # the matching base item.
                                    append_olay_item = False

            if append_olay_item:
                base_value.append(copy.deepcopy(olay_item))

    for olay_key, olay_value in overlay_node.items():
        if olay_key in base_node:
            base_value = base_node[olay_key]

            if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                # update base map with overlay map, recursively
                self._update_node(base_value, olay_value)
            elif type(olay_value) is list and type(base_value) is list:
                # As of this version of barectf, the _only_ property
                # with a list value which acts as an ordered map is
                # named `members` (barectf 3), which is why checking
                # `olay_key` alone, whatever the context, is enough.
                if olay_key == 'members' and self._major_version == 3:
                    # semantically an ordered map: see
                    # update_members_node() above
                    update_members_node(base_value, olay_value)
                else:
                    # append extension array items to base items
                    base_value += copy.deepcopy(olay_value)
            else:
                # fall back to replacing base property
                base_node[olay_key] = copy.deepcopy(olay_value)
        else:
            # set base property from overlay property
            base_node[olay_key] = copy.deepcopy(olay_value)
# Processes inclusions using `last_overlay_node` as the last overlay
# node to use to "patch" the node.
#
# If `last_overlay_node` contains an `$include` property, then this
# method patches the current base node (initially empty) in order
# using the content of the inclusion files (recursively).
#
# At the end, this method removes the `$include` property of
# `last_overlay_node` and then patches the current base node with
# its other properties before returning the result (always a deep
# copy of included data, never shared with the caller's node).
def _process_node_include(self, last_overlay_node: _MapNode,
                          process_base_include_cb: Callable[[_MapNode], _MapNode],
                          process_children_include_cb: Optional[Callable[[_MapNode], None]] = None) -> _MapNode:
    # process children inclusions first
    if process_children_include_cb is not None:
        process_children_include_cb(last_overlay_node)

    incl_prop_name = '$include'

    if incl_prop_name in last_overlay_node:
        include_node = last_overlay_node[incl_prop_name]
    else:
        # no inclusions!
        return last_overlay_node

    include_paths = self._get_include_paths(include_node)
    cur_base_path = self._get_last_include_file()
    base_node = None

    # keep the inclusion paths and remove the `$include` property
    include_paths = copy.deepcopy(include_paths)
    del last_overlay_node[incl_prop_name]

    for include_path in include_paths:
        # load raw YAML from included file
        overlay_node = self._load_include(include_path)

        if overlay_node is None:
            # Cannot find inclusion file, but we're ignoring those
            # errors, otherwise _load_include() itself raises a
            # configuration parsing error.
            continue

        # recursively process inclusions
        try:
            overlay_node = process_base_include_cb(overlay_node)
        except _ConfigurationParseError as exc:
            _append_error_ctx(exc, f'File `{cur_base_path}`')

        # pop inclusion stack now that we're done including
        del self._include_stack[-1]

        # At this point, `base_node` is fully resolved (does not
        # contain any `$include` property).
        if base_node is None:
            base_node = overlay_node
        else:
            self._update_node(base_node, overlay_node)

    # Finally, update the latest base node with our last overlay
    # node.
    if base_node is None:
        # Nothing was included, which is possible when we're
        # ignoring inclusion errors.
        return last_overlay_node

    self._update_node(base_node, last_overlay_node)
    return base_node
# Generates pairs of member node and field type node property name
# (in the member node) for the structure field type node's members
# node `node`.
def _struct_ft_member_fts_iter(self,
                               node: Union[List[_MapNode], _MapNode]) -> Iterable[Tuple[_MapNode, str]]:
    if type(node) is list:
        # barectf 3: array of single-property maps
        assert self._major_version == 3
        node = typing.cast(List[_MapNode], node)

        for member_node in node:
            assert type(member_node) is collections.OrderedDict
            member_node = typing.cast(_MapNode, member_node)
            name, val = list(member_node.items())[0]

            if type(val) is collections.OrderedDict:
                # an actual field type node (not an alias name string)
                yield member_node, name
    else:
        # barectf 2: a map of member name to field type node
        assert self._major_version == 2
        assert type(node) is collections.OrderedDict
        node = typing.cast(_MapNode, node)

        for name in node:
            yield node, name
# Resolves the field type alias `key` in the node `parent_node`, as
# well as any nested field type aliases, using the aliases of the
# `ft_aliases_node` node.
#
# If `key` is not in `parent_node`, this method returns.
#
# This method can modify `ft_aliases_node` and `parent_node[key]`.
#
# `ctx_obj_name` is the context's object name when this method
# raises a `_ConfigurationParseError` exception.
def _resolve_ft_alias(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str,
                      ctx_obj_name: str, alias_set: Optional[Set[str]] = None):
    if key not in parent_node:
        return

    node = parent_node[key]

    if node is None:
        # some nodes can be null to use their default value
        return

    # This set holds all the field type aliases to be expanded,
    # recursively. This is used to detect cycles.
    if alias_set is None:
        alias_set = set()

    if type(node) is str:
        # the node is an alias name
        alias = typing.cast(str, node)

        # Make sure this alias names an existing field type node, at
        # least.
        if alias not in ft_aliases_node:
            raise _ConfigurationParseError(ctx_obj_name,
                                           f'Field type alias `{alias}` does not exist')

        if alias not in self._resolved_ft_aliases:
            # Only check for a field type alias cycle when we didn't
            # resolve the alias yet, as a given node can refer to
            # the same field type alias more than once.
            if alias in alias_set:
                msg = f'Cycle detected during the `{alias}` field type alias resolution'
                raise _ConfigurationParseError(ctx_obj_name, msg)

            # Add `alias` to the set of encountered field type
            # aliases before calling self._resolve_ft_alias() to
            # detect cycles.
            alias_set.add(alias)
            self._resolve_ft_alias(ft_aliases_node, ft_aliases_node, alias, ctx_obj_name,
                                   alias_set)
            self._resolved_ft_aliases.add(alias)

        # replace alias with field type node copy
        parent_node[key] = copy.deepcopy(ft_aliases_node[alias])
        return

    # resolve nested field type aliases
    for pkey in self._ft_prop_names:
        self._resolve_ft_alias(ft_aliases_node, node, pkey, ctx_obj_name, alias_set)

    # Resolve field type aliases of structure field type node member
    # nodes.
    pkey = self._struct_ft_node_members_prop_name

    if pkey in node:
        for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
            self._resolve_ft_alias(ft_aliases_node, member_node, ft_prop_name,
                                   ctx_obj_name, alias_set)
# Like _resolve_ft_alias(), but builds a context object name for any
# `ctx_obj_name` exception.
def _resolve_ft_alias_from(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str):
    self._resolve_ft_alias(ft_aliases_node, parent_node, key, f'`{key}` property')
# Applies field type node inheritance to the property `key` of
# `parent_node`.
#
# `parent_node[key]`, if it exists, must not contain any field type
# alias (all field type objects are complete).
#
# This method can modify `parent_node[key]`.
#
# When this method returns, no field type node has an `$inherit` or
# `inherit` property.
def _apply_ft_inheritance(self, parent_node: _MapNode, key: str):
    if key not in parent_node:
        return

    node = parent_node[key]

    if node is None:
        # nothing to process
        return

    # process children first
    for pkey in self._ft_prop_names:
        self._apply_ft_inheritance(node, pkey)

    # Process the field types of structure field type node member
    # nodes.
    pkey = self._struct_ft_node_members_prop_name

    if pkey in node:
        for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
            self._apply_ft_inheritance(member_node, ft_prop_name)

    # apply inheritance for this node
    if 'inherit' in node:
        # barectf 2.1: `inherit` property was renamed to `$inherit`
        assert '$inherit' not in node
        node['$inherit'] = node['inherit']
        del node['inherit']

    inherit_key = '$inherit'

    if inherit_key in node:
        assert type(node[inherit_key]) is collections.OrderedDict

        # apply inheritance below
        self._apply_ft_inheritance(node, inherit_key)

        # `node` is an overlay on the `$inherit` node
        base_node = node[inherit_key]
        del node[inherit_key]
        self._update_node(base_node, node)

        # set updated base node as this node
        parent_node[key] = base_node