Use barectf.cli._run() as the CLI's starting function
[deliverable/barectf.git] / barectf / config_parse_common.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
24 import pkg_resources
25 import collections
26 import jsonschema
27 import os.path
28 import yaml
29 import copy
30 import os
31
32
# A single context of a configuration parsing error.
#
# A context has a name (some object's name) and an optional
# explanatory message.
class _ConfigurationParseErrorContext:
    def __init__(self, name, message=None):
        self._name = name
        self._message = message

    @property
    def message(self):
        return self._message

    @property
    def name(self):
        return self._name
48
49
# Appends a new context (object name `obj_name` and optional message
# `message`) to the `_ConfigurationParseError` exception `exc`, then
# re-raises `exc`.
#
# This is a convenience helper to add location information to a caught
# configuration parsing error before propagating it.
def _append_error_ctx(exc, obj_name, message=None):
    exc._append_ctx(obj_name, message)
    raise exc
56
57
# A configuration parsing error.
#
# Such an error object contains a list of contexts (`context`
# property), ordered from the most specific (first) to the most
# general (last).
#
# When you catch such an exception and want to raise it again with
# more location information, call _append_ctx() to add a context, or
# use the module-level _append_error_ctx() helper which does both in a
# single call.
class _ConfigurationParseError(Exception):
    def __init__(self, init_ctx_obj_name, init_ctx_msg=None):
        super().__init__()
        self._ctx = []
        self._append_ctx(init_ctx_obj_name, init_ctx_msg)

    @property
    def context(self):
        return self._ctx

    def _append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigurationParseErrorContext(name, msg))

    def __str__(self):
        # one line per context, most general context first
        def ctx_line(ctx):
            if ctx.message is None:
                return f'{ctx.name}:'

            return f'{ctx.name}: {ctx.message}'

        return '\n'.join([ctx_line(ctx) for ctx in reversed(self._ctx)])
93
94
# Pair of barectf 3 prefixes: C identifier prefix and file name prefix.
_V3Prefixes = collections.namedtuple('_V3Prefixes', ['identifier', 'file_name'])


# Converts a v2 prefix to the corresponding v3 prefixes.
#
# The identifier prefix is the v2 prefix as is; the file name prefix
# is the v2 prefix without any trailing `_`.
def _v3_prefixes_from_v2_prefix(v2_prefix):
    return _V3Prefixes(identifier=v2_prefix, file_name=v2_prefix.rstrip('_'))
101
102
# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    # Called by `jsonschema` when a `$ref` points outside the local
    # schema store: always a bug here, so fail loudly.
    def resolve_remote(self, uri):
        raise RuntimeError(f'Missing local schema with URI `{uri}`')
111
112
# Schema validator which considers all the schemas found in the
# subdirectories `subdirs` (at build time) of the barectf package's
# `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self, subdirs):
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        # load every `.yaml` schema file of each requested subdirectory
        for subdir in subdirs:
            subdir_path = os.path.join(schemas_dir, subdir)

            for entry in os.listdir(subdir_path):
                if not entry.endswith('.yaml'):
                    # not a schema file: skip
                    continue

                with open(os.path.join(subdir_path, entry)) as schema_file:
                    schema = yaml.load(schema_file, Loader=yaml.SafeLoader)

                # each schema must have a unique `$id` property
                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    # Converts, recursively, the `collections.OrderedDict` objects of
    # `obj` to plain `dict` objects, returning the result (any other
    # object is returned as is).
    @staticmethod
    def _dict_from_ordered_dict(obj):
        if type(obj) is not collections.OrderedDict:
            return obj

        result = {}

        for key, value in obj.items():
            if type(value) is collections.OrderedDict:
                result[key] = _SchemaValidator._dict_from_ordered_dict(value)
            elif type(value) is list:
                result[key] = [_SchemaValidator._dict_from_ordered_dict(item)
                               for item in value]
            else:
                result[key] = value

        return result

    # Validates `instance` against the schema having the short ID
    # `schema_short_id`, letting any `jsonschema` exception propagate.
    def _validate(self, instance, schema_short_id):
        # retrieve the full schema from its short ID
        schema_id = f'https://barectf.org/schemas/{schema_short_id}.json'
        assert schema_id in self._store
        schema = self._store[schema_id]

        # Reference resolver for this schema, limited to this
        # validator's local schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, first converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (validator.validate()
        # uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigurationParseError` object, hiding any
    # `jsonschema` exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to a barectf `_ConfigurationParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    contexts.append(f'Element #{elem + 1}')
                else:
                    contexts.append(f'`{elem}` property')

            # According to the documentation of
            # jsonschema.ValidationError.context(), the method returns
            # a
            #
            # > list of errors from the subschemas
            #
            # This contains additional information about the validation
            # failure which can help the user figure out what's wrong
            # exactly.
            #
            # Join each message with `; ` and append this to our
            # configuration parsing error's message.
            schema_ctx = ''

            if len(exc.context) > 0:
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = f': {msgs}'

            # The last (most specific) context becomes the new
            # exception's initial context; the others are appended in
            # reverse so that the `context` property stays ordered from
            # most specific to most general.
            new_exc = _ConfigurationParseError(contexts.pop(),
                                               f'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')

            for ctx in reversed(contexts):
                new_exc._append_ctx(ctx)

            raise new_exc
233
234
# barectf 3 YAML configuration node.
#
# This thin wrapper around the root mapping node marks it as a
# barectf 3 configuration (see _yaml_load()).
class _ConfigNodeV3:
    def __init__(self, config_node):
        self._node = config_node

    @property
    def config_node(self):
        return self._node
243
244
# YAML tag identifying a barectf 3 configuration document.
_CONFIG_V3_YAML_TAG = 'tag:barectf.org,2020/3/config'


# Loads the content of the YAML file-like object `file` as a Python
# object and returns it.
#
# If the file's object has the barectf 3 configuration tag, then this
# function returns a `_ConfigNodeV3` object. Otherwise, it returns a
# `collections.OrderedDict` object.
#
# All YAML maps are loaded as `collections.OrderedDict` objects.
#
# Raises a `_ConfigurationParseError` object if the file cannot be
# loaded.
def _yaml_load(file):
    class Loader(yaml.Loader):
        pass

    # constructs an ordered map from a YAML mapping node
    def ordered_map_from_node(loader, node):
        loader.flatten_mapping(node)
        return collections.OrderedDict(loader.construct_pairs(node))

    # constructs a `_ConfigNodeV3` object from a barectf 3 tagged node
    def config_ctor(loader, node):
        if not isinstance(node, yaml.MappingNode):
            problem = f'Expecting a map for the tag `{node.tag}`'
            raise yaml.constructor.ConstructorError(problem=problem)

        return _ConfigNodeV3(ordered_map_from_node(loader, node))

    Loader.add_constructor(_CONFIG_V3_YAML_TAG, config_ctor)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                           ordered_map_from_node)

    # YAML -> Python
    try:
        return yaml.load(file, Loader=Loader)
    except (yaml.YAMLError, OSError, IOError) as exc:
        raise _ConfigurationParseError('YAML loader', f'Cannot load file: {exc}')
280
281
# Loads the YAML file having the path `path` and returns the resulting
# Python object (see _yaml_load()).
def _yaml_load_path(path):
    with open(path) as yaml_file:
        return _yaml_load(yaml_file)
285
286
# Dumps the content of the Python object `node`
# (`collections.OrderedDict` or `_ConfigNodeV3`) as a YAML string and
# returns it.
#
# `kwds` is forwarded to yaml.dump().
def _yaml_dump(node, **kwds):
    class Dumper(yaml.Dumper):
        pass

    # represents an ordered map as a plain YAML map
    def ordered_map_repr(dumper, node):
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        node.items())

    # represents a `_ConfigNodeV3` object as a tagged YAML map
    def config_repr(dumper, node):
        return dumper.represent_mapping(_CONFIG_V3_YAML_TAG, node.config_node.items())

    Dumper.add_representer(collections.OrderedDict, ordered_map_repr)
    Dumper.add_representer(_ConfigNodeV3, config_repr)

    # Python -> YAML
    return yaml.dump(node, Dumper=Dumper, version=(1, 2), **kwds)
306
307
# A common barectf YAML configuration parser.
#
# This is the base class of any barectf YAML configuration parser. It
# mostly contains helpers.
class _Parser:
    # Builds a base barectf YAML configuration parser to process the
    # configuration node `node` (already loaded from the file having the
    # path `path`).
    #
    # For its _process_node_include() method, the parser considers the
    # package inclusion directory as well as `include_dirs`, and ignores
    # nonexistent inclusion files if `ignore_include_not_found` is
    # `True`.
    def __init__(self, path, node, with_pkg_include_dir, include_dirs, ignore_include_not_found,
                 major_version):
        self._root_path = path
        self._root_node = node

        # Names of the field type node properties which may contain a
        # nested field type node or a field type alias name.
        self._ft_prop_names = [
            # barectf 2.1+
            '$inherit',

            # barectf 2
            'inherit',
            'value-type',
            'element-type',

            # barectf 3
            'element-field-type',
        ]

        self._include_dirs = copy.copy(include_dirs)

        if with_pkg_include_dir:
            self._include_dirs.append(pkg_resources.resource_filename(__name__, f'include/{major_version}'))

        self._ignore_include_not_found = ignore_include_not_found

        # stack of normalized absolute paths of the files currently
        # being included (used to detect inclusion cycles)
        self._include_stack = []

        # names of the field type aliases which are already resolved
        self._resolved_ft_aliases = set()

        self._schema_validator = _SchemaValidator({'common/config', f'{major_version}/config'})
        self._major_version = major_version

    # Name of the structure field type node property containing the
    # member nodes: `fields` in barectf 2, `members` in barectf 3.
    @property
    def _struct_ft_node_members_prop_name(self):
        if self._major_version == 2:
            return 'fields'
        else:
            return 'members'

    # Returns the last included file name from the parser's inclusion
    # file name stack, or the root file's path when not within an
    # inclusion.
    def _get_last_include_file(self):
        if self._include_stack:
            return self._include_stack[-1]

        return self._root_path

    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object.
    #
    # Searches the inclusion directories in order; returns `None`
    # (implicitly) when the file is not found and
    # `ignore_include_not_found` was `True` at construction time.
    #
    # Raises a `_ConfigurationParseError` object on a recursive
    # inclusion, or when the file is not found and such errors are not
    # ignored.
    def _load_include(self, yaml_path):
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() only takes the last argument if it's
            # absolute.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise _ConfigurationParseError(f'File `{base_path}`',
                                               f'Cannot recursively include file `{norm_path}`')

            self._include_stack.append(norm_path)

            # load raw content
            return _yaml_load_path(norm_path)

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise _ConfigurationParseError(f'File `{base_path}`',
                                           f'Cannot include file `{yaml_path}`: file not found in inclusion directories')

    # Returns a list of all the inclusion file paths as found in the
    # inclusion node `include_node` (`None`, a single string, or a list
    # of strings).
    def _get_include_paths(self, include_node):
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [include_node]

        # already an array
        assert type(include_node) is list
        return include_node

    # Updates the node `base_node` with an overlay node `overlay_node`.
    #
    # Both the inclusion and field type node inheritance features use
    # this update mechanism.
    def _update_node(self, base_node, overlay_node):
        # see the comment about the `members` property below
        def update_members_node(base_value, olay_value):
            assert type(olay_value) is list
            assert type(base_value) is list

            for olay_item in olay_value:
                # assume we append `olay_item` to `base_value` initially
                append_olay_item = True

                if type(olay_item) is collections.OrderedDict:
                    # overlay item is an object
                    if len(olay_item) == 1:
                        # overlay object item contains a single property
                        olay_name = list(olay_item)[0]

                        # find corresponding base item
                        for base_item in base_value:
                            if type(base_item) is collections.OrderedDict:
                                # Only a single-property base object
                                # item can match, because each member
                                # node is semantically a
                                # single-property map (see the comment
                                # below).
                                if len(base_item) == 1:
                                    base_name = list(base_item)[0]

                                    if olay_name == base_name:
                                        # Names match: update with usual
                                        # strategy.
                                        self._update_node(base_item, olay_item)

                                        # Do _not_ append `olay_item` to
                                        # `base_value`: we just updated
                                        # `base_item`.
                                        append_olay_item = False
                                        break

                if append_olay_item:
                    base_value.append(copy.deepcopy(olay_item))

        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge both objects
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    if olay_key == 'members' and self._major_version == 3:
                        # This is a "temporary" hack.
                        #
                        # In barectf 2, a structure field type node
                        # looks like this:
                        #
                        #     class: struct
                        #     fields:
                        #       hello: uint8
                        #       world: string
                        #
                        # Having an overlay such as
                        #
                        #     fields:
                        #       hello: float
                        #
                        # will result in
                        #
                        #     class: struct
                        #     fields:
                        #       hello: float
                        #       world: string
                        #
                        # because the `fields` property is a map.
                        #
                        # In barectf 3, this is fixed (a YAML map is not
                        # ordered), so that the same initial structure
                        # field type node looks like this:
                        #
                        #     class: struct
                        #     members:
                        #       - hello: uint8
                        #       - world:
                        #           field-type:
                        #             class: str
                        #
                        # Although the `members` property is
                        # syntaxically an array, it's semantically an
                        # ordered map, where an entry's key is the array
                        # item's map's first key (like YAML's `!!omap`).
                        #
                        # Having an overlay such as
                        #
                        #     members:
                        #       - hello: float
                        #
                        # would result in
                        #
                        #     class: struct
                        #     members:
                        #       - hello: uint8
                        #       - world:
                        #           field-type:
                        #             class: str
                        #       - hello: float
                        #
                        # with the naive strategy, while what we really
                        # want is:
                        #
                        #     class: struct
                        #     members:
                        #       - hello: float
                        #       - world:
                        #           field-type:
                        #             class: str
                        #
                        # As of this version of barectf, the _only_
                        # property with a list value which acts as an
                        # ordered map is named `members`. This is why we
                        # can only check the value of `olay_key`,
                        # whatever our context.
                        #
                        # update_members_node() attempts to perform
                        # this below. For a given item of `olay_value`,
                        # if
                        #
                        # * It's not an object.
                        #
                        # * It contains more than one property.
                        #
                        # * Its single property's name does not match
                        #   the name of the single property of any
                        #   object item of `base_value`.
                        #
                        # then we append the item to `base_value` as
                        # usual.
                        update_members_node(base_value, olay_value)
                    else:
                        # append extension array items to base items
                        base_value += copy.deepcopy(olay_value)
                else:
                    # fall back to replacing base property
                    base_node[olay_key] = copy.deepcopy(olay_value)
            else:
                # set base property from overlay property
                base_node[olay_key] = copy.deepcopy(olay_value)

    # Processes inclusions using `last_overlay_node` as the last overlay
    # node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigurationParseError as exc:
                _append_error_ctx(exc, f'File `{cur_base_path}`')

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `base_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

    # Generates pairs of member node and field type node property name
    # (in the member node) for the structure field type node's members
    # node `node`.
    def _struct_ft_member_fts_iter(self, node):
        if type(node) is list:
            # barectf 3
            assert self._major_version == 3

            for member_node in node:
                assert type(member_node) is collections.OrderedDict
                name, val = list(member_node.items())[0]

                if type(val) is collections.OrderedDict:
                    # long form: the field type node is the `field-type`
                    # property of the member's value
                    member_node = val
                    name = 'field-type'

                yield member_node, name
        else:
            # barectf 2
            assert self._major_version == 2
            assert type(node) is collections.OrderedDict

            for name in node:
                yield node, name

    # Resolves the field type alias `key` in the node `parent_node`, as
    # well as any nested field type aliases, using the aliases of the
    # `ft_aliases_node` node.
    #
    # If `key` is not in `parent_node`, this method returns.
    #
    # This method can modify `ft_aliases_node` and `parent_node[key]`.
    #
    # `ctx_obj_name` is the context's object name when this method
    # raises a `_ConfigurationParseError` exception.
    def _resolve_ft_alias(self, ft_aliases_node, parent_node, key, ctx_obj_name, alias_set=None):
        if key not in parent_node:
            return

        node = parent_node[key]

        if node is None:
            # some nodes can be null to use their default value
            return

        # This set holds all the field type aliases to be expanded,
        # recursively. This is used to detect cycles.
        if alias_set is None:
            alias_set = set()

        if type(node) is str:
            alias = node

            # Make sure this alias names an existing field type node, at
            # least.
            if alias not in ft_aliases_node:
                raise _ConfigurationParseError(ctx_obj_name,
                                               f'Field type alias `{alias}` does not exist')

            if alias not in self._resolved_ft_aliases:
                # Only check for a field type alias cycle when we didn't
                # resolve the alias yet, as a given node can refer to
                # the same field type alias more than once.
                if alias in alias_set:
                    msg = f'Cycle detected during the `{alias}` field type alias resolution'
                    raise _ConfigurationParseError(ctx_obj_name, msg)

                # Resolve it.
                #
                # Add `alias` to the set of encountered field type
                # aliases before calling self._resolve_ft_alias() to
                # detect cycles.
                alias_set.add(alias)
                self._resolve_ft_alias(ft_aliases_node, ft_aliases_node, alias, ctx_obj_name,
                                       alias_set)
                self._resolved_ft_aliases.add(alias)

            # replace alias with field type node copy
            parent_node[key] = copy.deepcopy(ft_aliases_node[alias])
            return

        # resolve nested field type aliases
        for pkey in self._ft_prop_names:
            self._resolve_ft_alias(ft_aliases_node, node, pkey, ctx_obj_name, alias_set)

        # Resolve field type aliases of structure field type node member
        # nodes.
        pkey = self._struct_ft_node_members_prop_name

        if pkey in node:
            for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
                self._resolve_ft_alias(ft_aliases_node, member_node, ft_prop_name,
                                       ctx_obj_name, alias_set)

    # Like _resolve_ft_alias(), but builds a context object name for any
    # `ctx_obj_name` exception.
    def _resolve_ft_alias_from(self, ft_aliases_node, parent_node, key):
        self._resolve_ft_alias(ft_aliases_node, parent_node, key, f'`{key}` property')

    # Applies field type node inheritance to the property `key` of
    # `parent_node`.
    #
    # `parent_node[key]`, if it exists, must not contain any field type
    # alias (all field type objects are complete).
    #
    # This method can modify `parent[key]`.
    #
    # When this method returns, no field type node has an `$inherit` or
    # `inherit` property.
    def _apply_ft_inheritance(self, parent_node, key):
        if key not in parent_node:
            return

        node = parent_node[key]

        if node is None:
            return

        # process children first
        for pkey in self._ft_prop_names:
            self._apply_ft_inheritance(node, pkey)

        # Process the field types of structure field type node member
        # nodes.
        pkey = self._struct_ft_node_members_prop_name

        if pkey in node:
            for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
                self._apply_ft_inheritance(member_node, ft_prop_name)

        # apply inheritance for this node
        if 'inherit' in node:
            # barectf 2.1: `inherit` property was renamed to `$inherit`
            assert '$inherit' not in node
            node['$inherit'] = node['inherit']
            del node['inherit']

        inherit_key = '$inherit'

        if inherit_key in node:
            assert type(node[inherit_key]) is collections.OrderedDict

            # apply inheritance below
            self._apply_ft_inheritance(node, inherit_key)

            # `node` is an overlay on the `$inherit` node
            base_node = node[inherit_key]
            del node[inherit_key]
            self._update_node(base_node, node)

            # set updated base node as this node
            parent_node[key] = base_node
This page took 0.052444 seconds and 4 git commands to generate.