Rename "time" -> "timestamp" (terminology)
barectf/config_parse_v3.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
24 import barectf.config_parse_common as barectf_config_parse_common
25 from barectf.config_parse_common import _ConfigurationParseError
26 from barectf.config_parse_common import _append_error_ctx
27 from barectf.config_parse_common import _MapNode
28 import barectf.config as barectf_config
29 from barectf.config import _OptStructFt
30 import collections
31 import uuid
32 from barectf.typing import Count, Alignment, VersionNumber
33 from typing import Optional, List, Dict, Any, TextIO, Set, Iterable, Callable, Tuple, Type
34 import typing
35
36
37 # A barectf 3 YAML configuration parser.
38 #
39 # When you build such a parser, it parses the configuration node `node`
40 # (already loaded from the file-like object `root_file`) and creates a
41 # corresponding `barectf.Configuration` object which you can get with
42 # the `config` property.
43 #
44 # See the comments of _parse() for more implementation details about the
45 # parsing stages and general strategy.
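#
# A minimal usage sketch (illustrative only; barectf itself normally
# builds this parser from an already-loaded root YAML node):
#
#     parser = _Parser(root_file, node, True, None, False)
#     config = parser.config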
46 class _Parser(barectf_config_parse_common._Parser):
47 # Builds a barectf 3 YAML configuration parser and parses the root
48 # configuration node `node` (already loaded from the file-like
49 # object `root_file`).
50 def __init__(self, root_file: TextIO, node: barectf_config_parse_common._ConfigNodeV3,
51 with_pkg_include_dir: bool, inclusion_dirs: Optional[List[str]],
52 ignore_include_not_found: bool):
53 super().__init__(root_file, node, with_pkg_include_dir, inclusion_dirs,
54 ignore_include_not_found, VersionNumber(3))
55 self._ft_cls_name_to_create_method: Dict[str, Callable[[_MapNode],
56 List[barectf_config._FieldType]]] = {
57 'unsigned-integer': self._create_int_ft,
58 'signed-integer': self._create_int_ft,
59 'unsigned-enumeration': self._create_enum_ft,
60 'signed-enumeration': self._create_enum_ft,
61 'real': self._create_real_ft,
62 'string': self._create_string_ft,
63 'static-array': self._create_static_array_ft,
64 'dynamic-array': self._create_dynamic_array_ft,
65 'structure': self._create_struct_ft,
66 }
67 self._parse()
68
69 # Validates the alignment `alignment`, raising a
70 # `_ConfigurationParseError` exception using `ctx_obj_name` if it's
71 # invalid.
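# For example, 8 is a valid alignment whereas 24 is not.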
72 @staticmethod
73 def _validate_alignment(alignment: Alignment, ctx_obj_name: str):
74 assert alignment >= 1
75
76 # check for power of two
77 if (alignment & (alignment - 1)) != 0:
78 raise _ConfigurationParseError(ctx_obj_name,
79 f'Invalid alignment (not a power of two): {alignment}')
80
81 # Validates the TSDL identifier `iden`, raising a
82 # `_ConfigurationParseError` exception using `ctx_obj_name` and
83 # `prop` to format the message if it's invalid.
84 @staticmethod
85 def _validate_iden(iden: str, ctx_obj_name: str, prop: str):
86 assert type(iden) is str
87 ctf_keywords = {
88 'align',
89 'callsite',
90 'clock',
91 'enum',
92 'env',
93 'event',
94 'floating_point',
95 'integer',
96 'stream',
97 'string',
98 'struct',
99 'trace',
100 'typealias',
101 'typedef',
102 'variant',
103 }
104
105 if iden in ctf_keywords:
106 msg = f'Invalid {prop} (not a valid identifier): `{iden}`'
107 raise _ConfigurationParseError(ctx_obj_name, msg)
108
109 @staticmethod
110 def _alignment_prop(ft_node: _MapNode, prop_name: str) -> Alignment:
111 alignment = ft_node.get(prop_name)
112
113 if alignment is not None:
114 _Parser._validate_alignment(alignment, f'`{prop_name}` property')
115
116 return Alignment(alignment)
117
118 @property
119 def _trace_type_node(self) -> _MapNode:
120 return self.config_node['trace']['type']
121
122 @staticmethod
123 def _byte_order_from_node(node: str) -> barectf_config.ByteOrder:
124 return {
125 'big-endian': barectf_config.ByteOrder.BIG_ENDIAN,
126 'little-endian': barectf_config.ByteOrder.LITTLE_ENDIAN,
127 }[node]
128
129 # Creates a bit array field type having the type `ft_type` from the
130 # bit array field type node `ft_node`, passing the additional
131 # `*args` to ft_type.__init__().
132 def _create_common_bit_array_ft(self, ft_node: _MapNode,
133 ft_type: Type[barectf_config._BitArrayFieldType],
134 default_alignment: Optional[Alignment],
135 *args) -> barectf_config._BitArrayFieldType:
136 alignment = self._alignment_prop(ft_node, 'alignment')
137
138 if alignment is None:
139 alignment = default_alignment
140
141 return ft_type(ft_node['size'], alignment, *args)
142
143 # Creates an integer field type having the type `ft_type` from the
144 # integer field type node `ft_node`, passing the additional `*args`
145 # to ft_type.__init__().
146 def _create_common_int_ft(self, ft_node: _MapNode,
147 ft_type: Type[barectf_config._IntegerFieldType], *args) -> barectf_config._IntegerFieldType:
148 preferred_display_base = {
149 'binary': barectf_config.DisplayBase.BINARY,
150 'octal': barectf_config.DisplayBase.OCTAL,
151 'decimal': barectf_config.DisplayBase.DECIMAL,
152 'hexadecimal': barectf_config.DisplayBase.HEXADECIMAL,
153 }[ft_node.get('preferred-display-base', 'decimal')]
154 return typing.cast(barectf_config._IntegerFieldType,
155 self._create_common_bit_array_ft(ft_node, ft_type, None,
156 preferred_display_base, *args))
157
158 # Creates an integer field type from the unsigned/signed integer
159 # field type node `ft_node`.
160 def _create_int_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
161 ft_type = {
162 'unsigned-integer': barectf_config.UnsignedIntegerFieldType,
163 'signed-integer': barectf_config.SignedIntegerFieldType,
164 }[ft_node['class']]
165 return [self._create_common_int_ft(ft_node, ft_type)]
166
167 # Creates an enumeration field type from the unsigned/signed
168 # enumeration field type node `ft_node`.
169 def _create_enum_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
170 ft_type = {
171 'unsigned-enumeration': barectf_config.UnsignedEnumerationFieldType,
172 'signed-enumeration': barectf_config.SignedEnumerationFieldType,
173 }[ft_node['class']]
174 mappings = collections.OrderedDict()
175
176 for label, mapping_node in ft_node.get('mappings', {}).items():
177 ranges = set()
178
179 for range_node in mapping_node:
180 if type(range_node) is list:
181 ranges.add(barectf_config.EnumerationFieldTypeMappingRange(range_node[0],
182 range_node[1]))
183 else:
184 assert type(range_node) is int
185 ranges.add(barectf_config.EnumerationFieldTypeMappingRange(range_node,
186 range_node))
187
188 mappings[label] = barectf_config.EnumerationFieldTypeMapping(ranges)
189
190 return [typing.cast(barectf_config._EnumerationFieldType,
191 self._create_common_int_ft(ft_node, ft_type,
192 barectf_config.EnumerationFieldTypeMappings(mappings)))]
193
194 # Creates a real field type from the real field type node `ft_node`.
195 def _create_real_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
196 return [typing.cast(barectf_config.RealFieldType,
197 self._create_common_bit_array_ft(ft_node, barectf_config.RealFieldType,
198 Alignment(8)))]
199
200 # Creates a string field type from the string field type node
201 # `ft_node`.
202 def _create_string_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
203 return [barectf_config.StringFieldType()]
204
205 def _create_array_ft(self, ft_type, ft_node: _MapNode, **kwargs) -> barectf_config._ArrayFieldType:
206 prop_name = 'element-field-type'
207
208 try:
209 element_fts = self._create_fts(ft_node[prop_name])
210 except _ConfigurationParseError as exc:
211 _append_error_ctx(exc, f'`{prop_name}` property')
212
213 if len(element_fts) != 1 or isinstance(element_fts[0], (barectf_config.StructureFieldType,
214 barectf_config.DynamicArrayFieldType)):
215 raise _ConfigurationParseError(f'`{prop_name}` property',
216 'Nested structure and dynamic array field types are not supported')
217
218 return ft_type(element_field_type=element_fts[0], **kwargs)
219
220 # Creates a static array field type from the static array field type
221 # node `ft_node`.
222 def _create_static_array_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
223 return [typing.cast(barectf_config.StaticArrayFieldType,
224 self._create_array_ft(barectf_config.StaticArrayFieldType, ft_node,
225 length=ft_node['length']))]
226
227 # Creates a dynamic array field type from the dynamic array field
228 # type node `ft_node`.
229 def _create_dynamic_array_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
230 # create length unsigned integer field type
231 len_ft = barectf_config.UnsignedIntegerFieldType(32, alignment=Alignment(8))
232 return [
233 len_ft,
234 typing.cast(barectf_config.DynamicArrayFieldType,
235 self._create_array_ft(barectf_config.DynamicArrayFieldType, ft_node,
236 length_field_type=len_ft))
237 ]
238
239 # Creates structure field type members from the structure field type
240 # members node `members_node`.
241 #
242 # `prop_name` is the name of the property of which `members_node` is
243 # the value.
244 def _create_struct_ft_members(self, members_node: List[_MapNode], prop_name: str):
245 members = collections.OrderedDict()
246 member_names: Set[str] = set()
247
248 for member_node in members_node:
249 member_name, member_node = list(member_node.items())[0]
250
251 if member_name in member_names:
252 raise _ConfigurationParseError(f'`{prop_name}` property',
253 f'Duplicate member `{member_name}`')
254
255 self._validate_iden(member_name, f'`{prop_name}` property',
256 'structure field type member name')
257 member_names.add(member_name)
258 ft_prop_name = 'field-type'
259 ft_node = member_node[ft_prop_name]
260
261 try:
262 if ft_node['class'] in ['structure']:
263 raise _ConfigurationParseError(f'`{ft_prop_name}` property',
264 'Nested structure field types are not supported')
265
266 try:
267 member_fts = self._create_fts(ft_node)
268 except _ConfigurationParseError as exc:
269 _append_error_ctx(exc, f'`{ft_prop_name}` property')
270 except _ConfigurationParseError as exc:
271 _append_error_ctx(exc, f'Structure field type member `{member_name}`')
272
273 if len(member_fts) == 2:
274 # The only case where this happens is a dynamic array
275 # field type node which generates an unsigned integer
276 # field type for the length and the dynamic array field
277 # type itself.
278 assert type(member_fts[1]) is barectf_config.DynamicArrayFieldType
279 members[f'__{member_name}_len'] = barectf_config.StructureFieldTypeMember(member_fts[0])
280 else:
281 assert len(member_fts) == 1
282
283 members[member_name] = barectf_config.StructureFieldTypeMember(member_fts[-1])
284
285 return barectf_config.StructureFieldTypeMembers(members)
286
287 # Creates a structure field type from the structure field type node
288 # `ft_node`.
289 def _create_struct_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
290 minimum_alignment = self._alignment_prop(ft_node, 'minimum-alignment')
291
292 if minimum_alignment is None:
293 minimum_alignment = 1
294
295 members = None
296 prop_name = 'members'
297 members_node = ft_node.get(prop_name)
298
299 if members_node is not None:
300 members = self._create_struct_ft_members(members_node, prop_name)
301
302 return [barectf_config.StructureFieldType(minimum_alignment, members)]
303
304 # Creates field types from the field type node `ft_node`.
305 def _create_fts(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
306 return self._ft_cls_name_to_create_method[ft_node['class']](ft_node)
307
308 # Creates field types from the field type node `parent_node[key]`
309 # if it exists.
310 def _try_create_fts(self, parent_node: _MapNode, key: str) -> Optional[List[barectf_config._FieldType]]:
311 if key not in parent_node:
312 return None
313
314 try:
315 return self._create_fts(parent_node[key])
316 except _ConfigurationParseError as exc:
317 _append_error_ctx(exc, f'`{key}` property')
318
319 # satisfy static type checker (never reached)
320 raise
321
322 # Like _try_create_fts(), but casts the result's type (first and
323 # only element) to `barectf_config.StructureFieldType` to satisfy
324 # static type checkers.
325 def _try_create_struct_ft(self, parent_node: _MapNode, key: str) -> _OptStructFt:
326 fts = self._try_create_fts(parent_node, key)
327
328 if fts is None:
329 return None
330
331 return typing.cast(barectf_config.StructureFieldType, fts[0])
332
333 # Returns the total number of members in the structure field type
334 # node `ft_node` if it exists, otherwise 0.
335 @staticmethod
336 def _total_struct_ft_node_members(ft_node: Optional[_MapNode]) -> Count:
337 if ft_node is None:
338 return Count(0)
339
340 members_node = ft_node.get('members')
341
342 if members_node is None:
343 return Count(0)
344
345 return Count(len(members_node))
346
347 # Creates an event record type from the event record type node
348 # `ert_node` named `name`.
349 #
350 # `ert_member_count` is the total number of structure field type
351 # members within the event record type so far (from the common part
352 # in its data stream type). For example, if the data stream type has
353 # an event record header field type with `id` and `timestamp`
354 # members, then `ert_member_count` is 2.
355 def _create_ert(self, name: str, ert_node: _MapNode,
356 ert_member_count: Count) -> barectf_config.EventRecordType:
357 try:
358 self._validate_iden(name, '`name` property', 'event record type name')
359
360 # make sure the event record type is not empty
361 spec_ctx_ft_prop_name = 'specific-context-field-type'
362 payload_ft_prop_name = 'payload-field-type'
363 ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(spec_ctx_ft_prop_name)))
364 ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(payload_ft_prop_name)))
365
366 if ert_member_count == 0:
367 raise _ConfigurationParseError('Event record type',
368 'Event record type is empty (no members).')
369
370 # create event record type
371 return barectf_config.EventRecordType(name, ert_node.get('log-level'),
372 self._try_create_struct_ft(ert_node,
373 spec_ctx_ft_prop_name),
374 self._try_create_struct_ft(ert_node,
375 payload_ft_prop_name))
376 except _ConfigurationParseError as exc:
377 _append_error_ctx(exc, f'Event record type `{name}`')
378
379 # satisfy static type checker (never reached)
380 raise
381
382 # Returns the effective feature field type for the field type
383 # node `parent_node[key]`, if any.
384 #
385 # Returns:
386 #
387 # If `parent_node[key]` is `False`:
388 # `None`.
389 #
390 # If `parent_node[key]` is `True`:
391 # `barectf_config.DEFAULT_FIELD_TYPE`.
392 #
393 # If `parent_node[key]` doesn't exist:
394 # `none` (parameter).
395 #
396 # Otherwise:
397 # A created field type.
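#
# For example (illustrative), given a `$features` node such as:
#
#     $features:
#       packet:
#         total-size-field-type: true     # default field type
#         end-timestamp-field-type: false # disabled feature
#
# this method returns `barectf_config.DEFAULT_FIELD_TYPE` for the
# former property and `None` for the latter.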
398 def _feature_ft(self, parent_node: _MapNode, key: str, none: Any = None) -> Any:
399 if key not in parent_node:
400 # missing: default feature field type
401 return none
402
403 ft_node = parent_node[key]
404 assert ft_node is not None
405
406 if ft_node is True:
407 # default feature field type
408 return barectf_config.DEFAULT_FIELD_TYPE
409
410 if ft_node is False:
411 # disabled feature
412 return None
413
414 assert type(ft_node) is collections.OrderedDict
415 return self._create_fts(ft_node)[0]
416
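# Creates a data stream type from the data stream type node `dst_node`
# named `name`.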
417 def _create_dst(self, name: str, dst_node: _MapNode) -> barectf_config.DataStreamType:
418 try:
419 # validate data stream type's name
420 self._validate_iden(name, '`name` property', 'data stream type name')
421
422 # get default clock type, if any
423 def_clk_type = None
424 prop_name = '$default-clock-type-name'
425 def_clk_type_name = dst_node.get(prop_name)
426
427 if def_clk_type_name is not None:
428 try:
429 def_clk_type = self._clk_type(def_clk_type_name, prop_name)
430 except _ConfigurationParseError as exc:
431 _append_error_ctx(exc, f'`{prop_name}` property')
432
433 # create feature field types
434 pkt_total_size_ft = barectf_config.DEFAULT_FIELD_TYPE
435 pkt_content_size_ft = barectf_config.DEFAULT_FIELD_TYPE
436 pkt_beginning_ts_ft = None
437 pkt_end_ts_ft = None
438 pkt_discarded_er_counter_snap_ft = None
439 ert_id_ft = barectf_config.DEFAULT_FIELD_TYPE
440 ert_ts_ft = None
441
442 if def_clk_type is not None:
443 # The data stream type has a default clock type.
444 # Initialize the packet beginning timestamp, packet end
445 # timestamp, and event record timestamp field types to
446 # default field types.
447 #
448 # This means your data stream type node only needs a
449 # default clock type name to enable those features
450 # automatically. Those features do not add any parameter
451 # to the event tracing functions.
452 pkt_beginning_ts_ft = barectf_config.DEFAULT_FIELD_TYPE
453 pkt_end_ts_ft = barectf_config.DEFAULT_FIELD_TYPE
454 ert_ts_ft = barectf_config.DEFAULT_FIELD_TYPE
455
456 features_node = dst_node.get('$features')
457
458 if features_node is not None:
459 # create packet feature field types
460 pkt_node = features_node.get('packet')
461
462 if pkt_node is not None:
463 pkt_total_size_ft = self._feature_ft(pkt_node, 'total-size-field-type',
464 pkt_total_size_ft)
465 pkt_content_size_ft = self._feature_ft(pkt_node, 'content-size-field-type',
466 pkt_content_size_ft)
467 pkt_beginning_ts_ft = self._feature_ft(pkt_node,
468 'beginning-timestamp-field-type',
469 pkt_beginning_ts_ft)
470 pkt_end_ts_ft = self._feature_ft(pkt_node, 'end-timestamp-field-type',
471 pkt_end_ts_ft)
472 pkt_discarded_er_counter_snap_ft = self._feature_ft(pkt_node,
473 'discarded-event-records-counter-snapshot-field-type',
474 pkt_discarded_er_counter_snap_ft)
475
476 # create event record feature field types
477 er_node = features_node.get('event-record')
478 type_id_ft_prop_name = 'type-id-field-type'
479
480 if er_node is not None:
481 ert_id_ft = self._feature_ft(er_node, type_id_ft_prop_name, ert_id_ft)
482 ert_ts_ft = self._feature_ft(er_node, 'timestamp-field-type', ert_ts_ft)
483
484 erts_prop_name = 'event-record-types'
485 ert_count = len(dst_node[erts_prop_name])
486
487 try:
488 if ert_id_ft is None and ert_count > 1:
489 raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
490 'Event record type ID field type feature is required because data stream type has more than one event record type')
491
492 if isinstance(ert_id_ft, barectf_config._IntegerFieldType):
493 ert_id_int_ft = typing.cast(barectf_config._IntegerFieldType, ert_id_ft)
494
495 if ert_count > (1 << ert_id_int_ft.size):
496 raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
497 f'Field type\'s size ({ert_id_int_ft.size} bits) is too small to accommodate {ert_count} event record types')
498 except _ConfigurationParseError as exc:
499 exc._append_ctx('`event-record` property')
500 _append_error_ctx(exc, '`$features` property')
501
502 pkt_features = barectf_config.DataStreamTypePacketFeatures(pkt_total_size_ft,
503 pkt_content_size_ft,
504 pkt_beginning_ts_ft,
505 pkt_end_ts_ft,
506 pkt_discarded_er_counter_snap_ft)
507 er_features = barectf_config.DataStreamTypeEventRecordFeatures(ert_id_ft, ert_ts_ft)
508 features = barectf_config.DataStreamTypeFeatures(pkt_features, er_features)
509
510 # create packet context (structure) field type extra members
511 pkt_ctx_ft_extra_members = None
512 prop_name = 'packet-context-field-type-extra-members'
513 pkt_ctx_ft_extra_members_node = dst_node.get(prop_name)
514
515 if pkt_ctx_ft_extra_members_node is not None:
516 pkt_ctx_ft_extra_members = self._create_struct_ft_members(pkt_ctx_ft_extra_members_node,
517 prop_name)
518
519 # check for illegal packet context field type member names
520 reserved_member_names = {
521 'packet_size',
522 'content_size',
523 'timestamp_begin',
524 'timestamp_end',
525 'events_discarded',
526 'packet_seq_num',
527 }
528
529 for member_name in pkt_ctx_ft_extra_members:
530 if member_name in reserved_member_names:
531 raise _ConfigurationParseError(f'`{prop_name}` property',
532 f'Packet context field type member name `{member_name}` is reserved.')
533
534 # create event record types
535 er_header_common_ctx_member_count = Count(0)
536
537 if er_features.type_id_field_type is not None:
538 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
539
540 if er_features.timestamp_field_type is not None:
541 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
542
543 er_common_ctx_ft_prop_name = 'event-record-common-context-field-type'
544 er_common_ctx_ft_node = dst_node.get(er_common_ctx_ft_prop_name)
545 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + self._total_struct_ft_node_members(er_common_ctx_ft_node))
546 erts = set()
547
548 for ert_name, ert_node in dst_node[erts_prop_name].items():
549 erts.add(self._create_ert(ert_name, ert_node, er_header_common_ctx_member_count))
550
551 # create data stream type
552 return barectf_config.DataStreamType(name, erts, def_clk_type, features,
553 pkt_ctx_ft_extra_members,
554 self._try_create_struct_ft(dst_node,
555 er_common_ctx_ft_prop_name))
556 except _ConfigurationParseError as exc:
557 _append_error_ctx(exc, f'Data stream type `{name}`')
558
559 # satisfy static type checker (never reached)
560 raise
561
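# Returns the clock type named `name`, raising a
# `_ConfigurationParseError` exception using `prop_name` if no such
# clock type exists.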
562 def _clk_type(self, name: str, prop_name: str) -> barectf_config.ClockType:
563 clk_type = self._clk_types.get(name)
564
565 if clk_type is None:
566 raise _ConfigurationParseError(f'`{prop_name}` property',
567 f'Clock type `{name}` does not exist')
568
569 return clk_type
570
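# Creates a clock type from the clock type node `clk_type_node` named
# `name`.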
571 def _create_clk_type(self, name: str, clk_type_node: _MapNode) -> barectf_config.ClockType:
572 self._validate_iden(name, '`name` property', 'clock type name')
573 clk_type_uuid = None
574 uuid_node = clk_type_node.get('uuid')
575
576 if uuid_node is not None:
577 clk_type_uuid = uuid.UUID(uuid_node)
578
579 offset_seconds = 0
580 offset_cycles = Count(0)
581 offset_node = clk_type_node.get('offset')
582
583 if offset_node is not None:
584 offset_seconds = offset_node.get('seconds', 0)
585 offset_cycles = offset_node.get('cycles', Count(0))
586
587 return barectf_config.ClockType(name, clk_type_node.get('frequency', int(1e9)),
588 clk_type_uuid, clk_type_node.get('description'),
589 clk_type_node.get('precision', 0),
590 barectf_config.ClockTypeOffset(offset_seconds, offset_cycles),
591 clk_type_node.get('origin-is-unix-epoch', False))
592
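# Creates all the clock types from the `clock-types` property of the
# trace type node, filling `self._clk_types`.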
593 def _create_clk_types(self):
594 self._clk_types = {}
595
596 for clk_type_name, clk_type_node in self._trace_type_node.get('clock-types', {}).items():
597 self._clk_types[clk_type_name] = self._create_clk_type(clk_type_name, clk_type_node)
598
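# Creates a trace type from the trace type node.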
599 def _create_trace_type(self):
600 try:
601 # create clock types (_create_dst() needs them)
602 self._create_clk_types()
603
604 # get UUID
605 trace_type_uuid = None
606 uuid_node = self._trace_type_node.get('uuid')
607
608 if uuid_node is not None:
609 if uuid_node == 'auto':
610 trace_type_uuid = uuid.uuid1()
611 else:
612 trace_type_uuid = uuid.UUID(uuid_node)
613
614 # create feature field types
615 magic_ft = barectf_config.DEFAULT_FIELD_TYPE
616 uuid_ft = None
617 dst_id_ft = barectf_config.DEFAULT_FIELD_TYPE
618
619 if trace_type_uuid is not None:
620 # Trace type has a UUID: initialize UUID field type to
621 # a default field type.
622 uuid_ft = barectf_config.DEFAULT_FIELD_TYPE
623
624 features_node = self._trace_type_node.get('$features')
625 dst_id_ft_prop_name = 'data-stream-type-id-field-type'
626
627 if features_node is not None:
628 magic_ft = self._feature_ft(features_node, 'magic-field-type',
629 magic_ft)
630 uuid_ft = self._feature_ft(features_node, 'uuid-field-type', uuid_ft)
631 dst_id_ft = self._feature_ft(features_node, dst_id_ft_prop_name, dst_id_ft)
632
633 dsts_prop_name = 'data-stream-types'
634 dst_count = len(self._trace_type_node[dsts_prop_name])
635
636 try:
637 if dst_id_ft is None and dst_count > 1:
638 raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
639 'Data stream type ID field type feature is required because trace type has more than one data stream type')
640
641 if isinstance(dst_id_ft, barectf_config._IntegerFieldType) and dst_count > (1 << dst_id_ft.size):
642 raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
643 f'Field type\'s size ({dst_id_ft.size} bits) is too small to accommodate {dst_count} data stream types')
644 except _ConfigurationParseError as exc:
645 _append_error_ctx(exc, '`$features` property')
646
647 features = barectf_config.TraceTypeFeatures(magic_ft, uuid_ft, dst_id_ft)
648
649 # create data stream types
650 dsts = set()
651
652 for dst_name, dst_node in self._trace_type_node[dsts_prop_name].items():
653 dsts.add(self._create_dst(dst_name, dst_node))
654
655 # create trace type
656 return barectf_config.TraceType(self._native_byte_order, dsts, trace_type_uuid,
657 features)
658 except _ConfigurationParseError as exc:
659 _append_error_ctx(exc, 'Trace type')
660
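# Creates a trace from the configuration node's trace node.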
661 def _create_trace(self):
662 try:
663 trace_type = self._create_trace_type()
664 trace_node = self.config_node['trace']
665 env = None
666 env_node = trace_node.get('environment')
667
668 if env_node is not None:
669 # validate each environment variable name
670 for name in env_node:
671 self._validate_iden(name, '`environment` property',
672 'environment variable name')
673
674 # the node already has the expected structure
675 env = barectf_config.TraceEnvironment(env_node)
676
677 return barectf_config.Trace(trace_type, env)
678
679 except _ConfigurationParseError as exc:
680 _append_error_ctx(exc, 'Trace')
681
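# Creates the final barectf configuration object, filling
# `self._config`.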
682 def _create_config(self):
683 # create trace first
684 trace = self._create_trace()
685
686 # find default data stream type, if any
687 def_dst = None
688
689 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
690 prop_name = '$is-default'
691 is_default = dst_node.get(prop_name)
692
693 if is_default is True:
694 if def_dst is not None:
695 exc = _ConfigurationParseError(f'`{prop_name}` property',
696 f'Duplicate default data stream type (`{def_dst.name}`)')
697 exc._append_ctx(f'Data stream type `{dst_name}`')
698 _append_error_ctx(exc, 'Trace type')
699
700 def_dst = trace.type.data_stream_type(dst_name)
701
702 # create clock type C type mapping
703 clk_types_node = self._trace_type_node.get('clock-types')
704 clk_type_c_types = None
705
706 if clk_types_node is not None:
707 clk_type_c_types = collections.OrderedDict()
708
709 for dst in trace.type.data_stream_types:
710 if dst.default_clock_type is None:
711 continue
712
713 clk_type_node = clk_types_node[dst.default_clock_type.name]
714 c_type = clk_type_node.get('$c-type')
715
716 if c_type is not None:
717 clk_type_c_types[dst.default_clock_type] = c_type
718
719 # create options
720 iden_prefix_def = False
721 def_dst_name_def = False
722 opts_node = self.config_node.get('options')
723 iden_prefix = 'barectf_'
724 file_name_prefix = 'barectf'
725
726 if opts_node is not None:
727 code_gen_opts_node = opts_node.get('code-generation')
728
729 if code_gen_opts_node is not None:
730 prefix_node = code_gen_opts_node.get('prefix', 'barectf')
731
732 if type(prefix_node) is str:
733 # automatic prefixes
734 iden_prefix = f'{prefix_node}_'
735 file_name_prefix = prefix_node
736 else:
737 iden_prefix = prefix_node['identifier']
738 file_name_prefix = prefix_node['file-name']
739
740 header_opts = code_gen_opts_node.get('header')
741
742 if header_opts is not None:
743 iden_prefix_def = header_opts.get('identifier-prefix-definition', False)
744 def_dst_name_def = header_opts.get('default-data-stream-type-name-definition',
745 False)
746
747 header_opts = barectf_config.ConfigurationCodeGenerationHeaderOptions(iden_prefix_def,
748 def_dst_name_def)
749 cg_opts = barectf_config.ConfigurationCodeGenerationOptions(iden_prefix, file_name_prefix,
750 def_dst, header_opts,
751 clk_type_c_types)
752 opts = barectf_config.ConfigurationOptions(cg_opts)
753
754 # create configuration
755 self._config = barectf_config.Configuration(trace, opts)
756
757 # Expands the field type aliases found in the trace type node.
758 #
759 # This method modifies the trace type node.
760 #
761 # When this method returns:
762 #
763 # * Any field type alias is replaced with its full field type
764 # node equivalent.
765 #
766 # * The `$field-type-aliases` property of the trace type node is
767 # removed.
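#
# For example (illustrative), given:
#
#     $field-type-aliases:
#       byte:
#         class: unsigned-integer
#         size: 8
#
# any field type node referring to the `byte` alias is replaced with
# the full `unsigned-integer` field type node above.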
768 def _expand_ft_aliases(self):
769 def resolve_ft_alias_from(parent_node: _MapNode, key: str):
770 if key not in parent_node:
771 return
772
773 if type(parent_node[key]) not in [collections.OrderedDict, str]:
774 return
775
776 self._resolve_ft_alias_from(ft_aliases_node, parent_node, key)
777
778 ft_aliases_node = self._trace_type_node['$field-type-aliases']
779
780 # Expand field type aliases within trace, data stream, and event
781 # record type nodes.
782 features_prop_name = '$features'
783
784 try:
785 features_node = self._trace_type_node.get(features_prop_name)
786
787 if features_node is not None:
788 try:
789 resolve_ft_alias_from(features_node, 'magic-field-type')
790 resolve_ft_alias_from(features_node, 'uuid-field-type')
791 resolve_ft_alias_from(features_node, 'data-stream-type-id-field-type')
792 except _ConfigurationParseError as exc:
793 _append_error_ctx(exc, f'`{features_prop_name}` property')
794 except _ConfigurationParseError as exc:
795 _append_error_ctx(exc, 'Trace type')
796
797 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
798 try:
799 features_node = dst_node.get(features_prop_name)
800
801 if features_node is not None:
802 try:
803 pkt_prop_name = 'packet'
804 pkt_node = features_node.get(pkt_prop_name)
805
806 if pkt_node is not None:
807 try:
808 resolve_ft_alias_from(pkt_node, 'total-size-field-type')
809 resolve_ft_alias_from(pkt_node, 'content-size-field-type')
810 resolve_ft_alias_from(pkt_node, 'beginning-timestamp-field-type')
811 resolve_ft_alias_from(pkt_node, 'end-timestamp-field-type')
812 resolve_ft_alias_from(pkt_node,
813 'discarded-event-records-counter-snapshot-field-type')
814 except _ConfigurationParseError as exc:
815 _append_error_ctx(exc, f'`{pkt_prop_name}` property')
816
817 er_prop_name = 'event-record'
818 er_node = features_node.get(er_prop_name)
819
820 if er_node is not None:
821 try:
822 resolve_ft_alias_from(er_node, 'type-id-field-type')
823 resolve_ft_alias_from(er_node, 'timestamp-field-type')
824 except _ConfigurationParseError as exc:
825 _append_error_ctx(exc, f'`{er_prop_name}` property')
826 except _ConfigurationParseError as exc:
827 _append_error_ctx(exc, f'`{features_prop_name}` property')
828
829 pkt_ctx_ft_extra_members_prop_name = 'packet-context-field-type-extra-members'
830 pkt_ctx_ft_extra_members_node = dst_node.get(pkt_ctx_ft_extra_members_prop_name)
831
832 if pkt_ctx_ft_extra_members_node is not None:
833 try:
834 for member_node in pkt_ctx_ft_extra_members_node:
835 member_node = list(member_node.values())[0]
836 resolve_ft_alias_from(member_node, 'field-type')
837 except _ConfigurationParseError as exc:
838 _append_error_ctx(exc, f'`{pkt_ctx_ft_extra_members_prop_name}` property')
839
840 resolve_ft_alias_from(dst_node, 'event-record-common-context-field-type')
841
842 for ert_name, ert_node in dst_node['event-record-types'].items():
843 try:
844 resolve_ft_alias_from(ert_node, 'specific-context-field-type')
845 resolve_ft_alias_from(ert_node, 'payload-field-type')
846 except _ConfigurationParseError as exc:
847 _append_error_ctx(exc, f'Event record type `{ert_name}`')
848 except _ConfigurationParseError as exc:
849 _append_error_ctx(exc, f'Data stream type `{dst_name}`')
850
851 # remove the (now unneeded) `$field-type-aliases` property
852 del self._trace_type_node['$field-type-aliases']
853
854 # Applies field type inheritance to all field type nodes found in
855 # the trace type node.
856 #
857 # This method modifies the trace type node.
858 #
859 # When this method returns, no field type node has an `$inherit`
860 # property.
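#
# For example (illustrative), a field type node such as:
#
#     field-type:
#       $inherit: byte
#       preferred-display-base: hexadecimal
#
# becomes the full field type node of the `byte` alias with its
# `preferred-display-base` property overridden.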
861 def _apply_fts_inheritance(self):
862 def apply_ft_inheritance(parent_node: _MapNode, key: str):
863 if key not in parent_node:
864 return
865
866 if type(parent_node[key]) is not collections.OrderedDict:
867 return
868
869 self._apply_ft_inheritance(parent_node, key)
870
871 features_prop_name = '$features'
872 features_node = self._trace_type_node.get(features_prop_name)
873
874 if features_node is not None:
875 apply_ft_inheritance(features_node, 'magic-field-type')
876 apply_ft_inheritance(features_node, 'uuid-field-type')
877 apply_ft_inheritance(features_node, 'data-stream-type-id-field-type')
878
879 for dst_node in self._trace_type_node['data-stream-types'].values():
880 features_node = dst_node.get(features_prop_name)
881
882 if features_node is not None:
883 pkt_node = features_node.get('packet')
884
885 if pkt_node is not None:
886 apply_ft_inheritance(pkt_node, 'total-size-field-type')
887 apply_ft_inheritance(pkt_node, 'content-size-field-type')
888 apply_ft_inheritance(pkt_node, 'beginning-timestamp-field-type')
889 apply_ft_inheritance(pkt_node, 'end-timestamp-field-type')
890 apply_ft_inheritance(pkt_node,
891 'discarded-event-records-counter-snapshot-field-type')
892
893 er_node = features_node.get('event-record')
894
895 if er_node is not None:
896 apply_ft_inheritance(er_node, 'type-id-field-type')
897 apply_ft_inheritance(er_node, 'timestamp-field-type')
898
899 pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
900
901 if pkt_ctx_ft_extra_members_node is not None:
902 for member_node in pkt_ctx_ft_extra_members_node:
903 member_node = list(member_node.values())[0]
904 apply_ft_inheritance(member_node, 'field-type')
905
906 apply_ft_inheritance(dst_node, 'event-record-common-context-field-type')
907
908 for ert_node in dst_node['event-record-types'].values():
909 apply_ft_inheritance(ert_node, 'specific-context-field-type')
910 apply_ft_inheritance(ert_node, 'payload-field-type')
911
912 # Normalizes structure field type member nodes.
913 #
914 # A structure field type member node can look like this:
915 #
916 # - msg: custom-string
917 #
918 # which is the equivalent of this:
919 #
920 # - msg:
921 # field-type: custom-string
922 #
923 # This method normalizes form 1 to use form 2.
924 def _normalize_struct_ft_member_nodes(self):
925 def normalize_members_node(members_node: List[_MapNode]):
926 ft_prop_name = 'field-type'
927
928 for member_node in members_node:
929 member_name, val_node = list(member_node.items())[0]
930
931 if type(val_node) is str:
932 member_node[member_name] = collections.OrderedDict({
933 ft_prop_name: val_node
934 })
935
936 normalize_struct_ft_member_nodes(member_node[member_name], ft_prop_name)
937
938 def normalize_struct_ft_member_nodes(parent_node: _MapNode, key: str):
939 if type(parent_node) is not collections.OrderedDict:
940 return
941
942 ft_node = parent_node.get(key)
943
944 if type(ft_node) is not collections.OrderedDict:
945 return
946
947 ft_node = typing.cast(collections.OrderedDict, ft_node)
948 members_nodes = ft_node.get('members')
949
950 if members_nodes is not None:
951 normalize_members_node(members_nodes)
952
953 prop_name = '$field-type-aliases'
954 ft_aliases_node = self._trace_type_node.get(prop_name)
955
956 if ft_aliases_node is not None:
957 for alias in ft_aliases_node:
958 normalize_struct_ft_member_nodes(ft_aliases_node, alias)
959
960 features_prop_name = '$features'
961 features_node = self._trace_type_node.get(features_prop_name)
962
963 if features_node is not None:
964 normalize_struct_ft_member_nodes(features_node, 'magic-field-type')
965 normalize_struct_ft_member_nodes(features_node, 'uuid-field-type')
966 normalize_struct_ft_member_nodes(features_node, 'data-stream-type-id-field-type')
967
968 for dst_node in self._trace_type_node['data-stream-types'].values():
969 features_node = dst_node.get(features_prop_name)
970
971 if features_node is not None:
972 pkt_node = features_node.get('packet')
973
974 if pkt_node is not None:
975 normalize_struct_ft_member_nodes(pkt_node, 'total-size-field-type')
976 normalize_struct_ft_member_nodes(pkt_node, 'content-size-field-type')
977 normalize_struct_ft_member_nodes(pkt_node, 'beginning-timestamp-field-type')
978 normalize_struct_ft_member_nodes(pkt_node, 'end-timestamp-field-type')
979 normalize_struct_ft_member_nodes(pkt_node,
980 'discarded-event-records-counter-snapshot-field-type')
981
982 er_node = features_node.get('event-record')
983
984 if er_node is not None:
985 normalize_struct_ft_member_nodes(er_node, 'type-id-field-type')
986 normalize_struct_ft_member_nodes(er_node, 'timestamp-field-type')
987
988 pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
989
990 if pkt_ctx_ft_extra_members_node is not None:
991 normalize_members_node(pkt_ctx_ft_extra_members_node)
992
993 normalize_struct_ft_member_nodes(dst_node, 'event-record-common-context-field-type')
994
995 for ert_node in dst_node['event-record-types'].values():
996 normalize_struct_ft_member_nodes(ert_node, 'specific-context-field-type')
997 normalize_struct_ft_member_nodes(ert_node, 'payload-field-type')
998
999 # Calls _expand_ft_aliases() and _apply_fts_inheritance() if the
1000 # trace type node has a `$field-type-aliases` property.
1001 def _expand_fts(self):
1002 # Make sure that the current configuration node is valid
1003 # considering field types are not expanded yet.
1004 self._schema_validator.validate(self.config_node,
1005 'config/3/config-pre-field-type-expansion')
1006
1007 prop_name = '$field-type-aliases'
1008 ft_aliases_node = self._trace_type_node.get(prop_name)
1009
1010 if ft_aliases_node is None:
1011 # If there's no `$field-type-aliases` node, then there are
1012 # no field type aliases and therefore no possible
1013 # inheritance.
1014 if prop_name in self._trace_type_node:
1015 del self._trace_type_node[prop_name]
1016
1017 return
1018
1019 # normalize structure field type member nodes
1020 self._normalize_struct_ft_member_nodes()
1021
1022 # first, expand field type aliases
1023 self._expand_ft_aliases()
1024
1025 # next, apply inheritance to create effective field type nodes
1026 self._apply_fts_inheritance()
1027
1028 # Substitutes the log level aliases found in the event record type
1029 # nodes with their numeric equivalents.
1030 #
1031 # Removes the `$log-level-aliases` property of the trace type node.
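#
# For example (illustrative), given:
#
#     $log-level-aliases:
#       WARNING: 4
#
# an event record type node having `log-level: WARNING` ends up with
# `log-level: 4`.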
1032 def _sub_log_level_aliases(self):
1033 # Make sure that the current configuration node is valid
1034 # considering log level aliases are not substituted yet.
1035 self._schema_validator.validate(self.config_node,
1036 'config/3/config-pre-log-level-alias-sub')
1037
1038 log_level_aliases_prop_name = '$log-level-aliases'
1039 log_level_aliases_node = self._trace_type_node.get(log_level_aliases_prop_name)
1040
1041 if log_level_aliases_prop_name in self._trace_type_node:
1042 del self._trace_type_node[log_level_aliases_prop_name]
1043
1044 if log_level_aliases_node is None:
1045 # no log level aliases
1046 return
1047
1048 # substitute log level aliases
1049 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
1050 try:
1051 for ert_name, ert_node in dst_node['event-record-types'].items():
1052 try:
1053 prop_name = 'log-level'
1054 ll_node = ert_node.get(prop_name)
1055
1056 if ll_node is None:
1057 continue
1058
1059 if type(ll_node) is str:
1060 if ll_node not in log_level_aliases_node:
1061 raise _ConfigurationParseError(f'`{prop_name}` property',
1062 f'Log level alias `{ll_node}` does not exist')
1063
1064 ert_node[prop_name] = log_level_aliases_node[ll_node]
1065 except _ConfigurationParseError as exc:
1066 _append_error_ctx(exc, f'Event record type `{ert_name}`')
1067 except _ConfigurationParseError as exc:
1068 _append_error_ctx(exc, f'Data stream type `{dst_name}`')
1069
1070 # Generator of parent node and key pairs for all the nodes,
1071 # recursively, of `node`.
1072 #
1073 # It is safe to delete a yielded node during the iteration.
1074 @staticmethod
1075 def _props(node: Any) -> Iterable[Tuple[Any, str]]:
1076 if type(node) is collections.OrderedDict:
1077 for key in list(node):
1078 yield from _Parser._props(node[key])
1079 yield node, key
1080 elif type(node) is list:
1081 for item_node in node:
1082 yield from _Parser._props(item_node)
1083
1084 def _trace_type_props(self) -> Iterable[Tuple[Any, str]]:
1085 yield from _Parser._props(self.config_node['trace']['type'])
1086
1087 # Normalizes the properties of the configuration node.
1088 #
1089 # This method, for each property of the trace type node:
1090 #
1091 # 1. Removes it if it's `None` (means default).
1092 #
1093 # 2. Chooses a specific `class` property value.
1094 #
1095 # 3. Chooses a specific `byte-order`/`native-byte-order` property
1096 # value.
1097 #
1098 # 4. Chooses a specific `preferred-display-base` property value.
1099 #
1100 # This method also applies 1. to the trace node's `environment`
1101 # property.
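#
# For example, a `class: uint` property becomes
# `class: unsigned-integer`, and a `native-byte-order: le` property
# becomes `native-byte-order: little-endian`.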
1102 def _normalize_props(self):
1103 def normalize_byte_order_prop(parent_node: _MapNode, key: str):
1104 node = parent_node[key]
1105
1106 if node in ['be', 'big']:
1107 parent_node[key] = 'big-endian'
1108 elif node in ['le', 'little']:
1109 parent_node[key] = 'little-endian'
1110
1111 trace_node = self.config_node['trace']
1112 normalize_byte_order_prop(self._trace_type_node, 'native-byte-order')
1113
1114 for parent_node, key in self._trace_type_props():
1115 node = parent_node[key]
1116
1117 if node is None:
1118 # a `None` property is equivalent to not having it
1119 del parent_node[key]
1120 continue
1121
1122 if key == 'class' and type(node) is str:
1123 # field type class aliases
1124 if node in ['uint', 'unsigned-int']:
1125 parent_node[key] = 'unsigned-integer'
1126 elif node in ['sint', 'signed-int']:
1127 parent_node[key] = 'signed-integer'
1128 elif node in ['uenum', 'unsigned-enum']:
1129 parent_node[key] = 'unsigned-enumeration'
1130 elif node in ['senum', 'signed-enum']:
1131 parent_node[key] = 'signed-enumeration'
1132 elif node == 'str':
1133 parent_node[key] = 'string'
1134 elif node == 'struct':
1135 parent_node[key] = 'structure'
1136 elif key == 'preferred-display-base' and type(node) is str:
1137 # display base aliases
1138 if node == 'bin':
1139 parent_node[key] = 'binary'
1140 elif node == 'oct':
1141 parent_node[key] = 'octal'
1142 elif node == 'dec':
1143 parent_node[key] = 'decimal'
1144 elif node == 'hex':
1145 parent_node[key] = 'hexadecimal'
1146
1147 prop_name = 'environment'
1148
1149 if prop_name in trace_node:
1150 node = trace_node[prop_name]
1151
1152 if node is None:
1153 del trace_node[prop_name]
1154
1155 # Sets the parser's native byte order.
1156 def _set_native_byte_order(self):
1157 self._native_byte_order_node = self._trace_type_node['native-byte-order']
1158 self._native_byte_order = self._byte_order_from_node(self._native_byte_order_node)
1159
1160 # Processes the inclusions of the event record type node
1161 # `ert_node`, returning the effective node.
1162 def _process_ert_node_include(self, ert_node: _MapNode) -> _MapNode:
1163 # Make sure the event record type node is valid for the
1164 # inclusion processing stage.
1165 self._schema_validator.validate(ert_node, 'config/3/ert-pre-include')
1166
1167 # process inclusions
1168 return self._process_node_include(ert_node, self._process_ert_node_include)
1169
1170 # Processes the inclusions of the data stream type node `dst_node`,
1171 # returning the effective node.
1172 def _process_dst_node_include(self, dst_node: _MapNode) -> _MapNode:
1173 def process_children_include(dst_node: _MapNode):
1174 prop_name = 'event-record-types'
1175
1176 if prop_name in dst_node:
1177 erts_node = dst_node[prop_name]
1178
1179 for key in list(erts_node):
1180 erts_node[key] = self._process_ert_node_include(erts_node[key])
1181
1182 # Make sure the data stream type node is valid for the inclusion
1183 # processing stage.
1184 self._schema_validator.validate(dst_node, 'config/3/dst-pre-include')
1185
1186 # process inclusions
1187 return self._process_node_include(dst_node, self._process_dst_node_include,
1188 process_children_include)
1189
1190 # Processes the inclusions of the clock type node `clk_type_node`,
1191 # returning the effective node.
1192 def _process_clk_type_node_include(self, clk_type_node: _MapNode) -> _MapNode:
1193 # Make sure the clock type node is valid for the inclusion
1194 # processing stage.
1195 self._schema_validator.validate(clk_type_node, 'config/3/clock-type-pre-include')
1196
1197 # process inclusions
1198 return self._process_node_include(clk_type_node, self._process_clk_type_node_include)
1199
1200 # Processes the inclusions of the trace type node `trace_type_node`,
1201 # returning the effective node.
1202 def _process_trace_type_node_include(self, trace_type_node: _MapNode) -> _MapNode:
1203 def process_children_include(trace_type_node: _MapNode):
1204 prop_name = 'clock-types'
1205
1206 if prop_name in trace_type_node:
1207 clk_types_node = trace_type_node[prop_name]
1208
1209 for key in list(clk_types_node):
1210 clk_types_node[key] = self._process_clk_type_node_include(clk_types_node[key])
1211
1212 prop_name = 'data-stream-types'
1213
1214 if prop_name in trace_type_node:
1215 dsts_node = trace_type_node[prop_name]
1216
1217 for key in list(dsts_node):
1218 dsts_node[key] = self._process_dst_node_include(dsts_node[key])
1219
1220 # Make sure the trace type node is valid for the inclusion
1221 # processing stage.
1222 self._schema_validator.validate(trace_type_node, 'config/3/trace-type-pre-include')
1223
1224 # process inclusions
1225 return self._process_node_include(trace_type_node, self._process_trace_type_node_include,
1226 process_children_include)
1227
1228 # Processes the inclusions of the trace node `trace_node`, returning
1229 # the effective node.
1230 def _process_trace_node_include(self, trace_node: _MapNode) -> _MapNode:
1231 def process_children_include(trace_node: _MapNode):
1232 prop_name = 'type'
1233 trace_node[prop_name] = self._process_trace_type_node_include(trace_node[prop_name])
1234
1235 # Make sure the trace node is valid for the inclusion processing
1236 # stage.
1237 self._schema_validator.validate(trace_node, 'config/3/trace-pre-include')
1238
1239 # process inclusions
1240 return self._process_node_include(trace_node, self._process_trace_node_include,
1241 process_children_include)
1242
1243 # Processes the inclusions of the configuration node, modifying it
1244 # during the process.
1245 def _process_config_includes(self):
1246 # Process inclusions in this order:
1247 #
1248 # 1. Clock type node and event record type nodes (the order
1249 # between those is not important).
1250 #
1251 # 2. Data stream type nodes.
1252 #
1253 # 3. Trace type node.
1254 #
1255 # 4. Trace node.
1256 #
1257 # This is because:
1258 #
1259 # * A trace node can include a trace type node, clock type
1260 # nodes, data stream type nodes, and event record type nodes.
1261 #
1262 # * A trace type node can include clock type nodes, data stream
1263 # type nodes, and event record type nodes.
1264 #
1265 # * A data stream type node can include event record type nodes.
1266 #
1267 # First, make sure the configuration node itself is valid for
1268 # the inclusion processing stage.
1269 self._schema_validator.validate(self.config_node, 'config/3/config-pre-include')
1270
1271 # Process trace node inclusions.
1272 #
1273 # self._process_trace_node_include() returns a new (or the same)
1274 # trace node without any `$include` property in it, recursively.
1275 self.config_node['trace'] = self._process_trace_node_include(self.config_node['trace'])
1276
1277 def _parse(self):
1278 # process configuration node inclusions
1279 self._process_config_includes()
1280
1281 # Expand field type nodes.
1282 #
1283 # This process:
1284 #
1285 # 1. Replaces field type aliases with "effective" field type
1286 # nodes, recursively.
1287 #
1288 # After this step, the `$field-type-aliases` property of the
1289 # trace type node is gone.
1290 #
1291 # 2. Applies inheritance, following the `$inherit` properties.
1292 #
1293 # After this step, field type nodes do not contain `$inherit`
1294 # properties.
1295 #
1296 # This is done blindly, in that the process _doesn't_ validate
1297 # field type nodes at this point.
1298 self._expand_fts()
1299
1300 # Substitute log level aliases.
1301 #
1302 # This process:
1303 #
1304 # 1. Replaces log level aliases in event record type nodes with
1305 # their numeric equivalents as found in the
1306 # `$log-level-aliases` property of the trace type node.
1307 #
1308 # 2. Removes the `$log-level-aliases` property from the trace
1309 # type node.
1310 self._sub_log_level_aliases()
1311
1312 # At this point, the configuration node must be valid as an
1313 # effective configuration node.
1314 self._schema_validator.validate(self.config_node, 'config/3/config')
1315
1316 # Normalize properties.
1317 #
1318 # This process removes `None` properties and chooses specific
1319 # enumerators when aliases exist (for example, `big-endian`
1320 # instead of `be`).
1321 #
1322 # The goal of this is that, if the user then gets this parser's
1323 # `config_node` property, it has a normal and very readable
1324 # form.
1325 #
1326 # It also makes _create_config() easier to implement because it
1327 # doesn't need to check for `None` nodes or enumerator aliases.
1328 self._normalize_props()
1329
1330 # Set the native byte order.
1331 self._set_native_byte_order()
1332
1333 # Create a barectf configuration object from the configuration
1334 # node.
1335 self._create_config()
1336
1337 @property
1338 def config(self) -> barectf_config.Configuration:
1339 return self._config
1340
1341 @property
1342 def config_node(self) -> _MapNode:
1343 return typing.cast(barectf_config_parse_common._ConfigNodeV3, self._root_node).config_node