1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
24 import barectf.config_parse_common as barectf_config_parse_common
25 from barectf.config_parse_common import _ConfigurationParseError
26 from barectf.config_parse_common import _append_error_ctx
27 from barectf.config_parse_common import _MapNode
28 import barectf.config as barectf_config
29 from barectf.config import _OptStructFt
30 import collections
31 import uuid
32 from barectf.typing import Count, Alignment, VersionNumber
33 from typing import Optional, List, Dict, Any, TextIO, Set, Iterable, Callable, Tuple, Type
34 import typing
35
36
37 # A barectf 3 YAML configuration parser.
38 #
39 # When you build such a parser, it parses the configuration node `node`
40 # (already loaded from the file-like object `root_file`) and creates a
41 # corresponding `barectf.Configuration` object which you can get with
42 # the `config` property.
43 #
44 # See the comments of _parse() for more implementation details about the
45 # parsing stages and general strategy.
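#
# For orientation, a minimal configuration node accepted by this parser
# could look like the following YAML document. This is a hypothetical
# sketch assembled from the property names read below (`my_stream` and
# `my_event` are arbitrary names); the effective schema may require or
# allow more properties:
#
#     target-byte-order: little-endian
#     trace:
#       type:
#         data-stream-types:
#           my_stream:
#             $is-default: true
#             event-record-types:
#               my_event:
#                 payload-field-type:
#                   class: structure
#                   members:
#                     - msg:
#                         field-type:
#                           class: string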
46 class _Parser(barectf_config_parse_common._Parser):
47 # Builds a barectf 3 YAML configuration parser and parses the root
48 # configuration node `node` (already loaded from the file-like
49 # object `root_file`).
50 def __init__(self, root_file: TextIO, node: barectf_config_parse_common._ConfigNodeV3,
51 with_pkg_include_dir: bool, inclusion_dirs: Optional[List[str]],
52 ignore_include_not_found: bool):
53 super().__init__(root_file, node, with_pkg_include_dir, inclusion_dirs,
54 ignore_include_not_found, VersionNumber(3))
55 self._ft_cls_name_to_create_method: Dict[str, Callable[[_MapNode],
56 List[barectf_config._FieldType]]] = {
57 'unsigned-integer': self._create_int_ft,
58 'signed-integer': self._create_int_ft,
59 'unsigned-enumeration': self._create_enum_ft,
60 'signed-enumeration': self._create_enum_ft,
61 'real': self._create_real_ft,
62 'string': self._create_string_ft,
63 'static-array': self._create_static_array_ft,
64 'dynamic-array': self._create_dynamic_array_ft,
65 'structure': self._create_struct_ft,
66 }
67 self._parse()
68
69 # Validates the alignment `alignment`, raising a
70 # `_ConfigurationParseError` exception using `ctx_obj_name` if it's
71 # invalid.
72 @staticmethod
73 def _validate_alignment(alignment: Alignment, ctx_obj_name: str):
74 assert alignment >= 1
75
76 # check for power of two
77 if (alignment & (alignment - 1)) != 0:
78 raise _ConfigurationParseError(ctx_obj_name,
79 f'Invalid alignment (not a power of two): {alignment}')
80
81 # Validates the TSDL identifier `iden`, raising a
82 # `_ConfigurationParseError` exception using `ctx_obj_name` and
83 # `prop` to format the message if it's invalid.
84 @staticmethod
85 def _validate_iden(iden: str, ctx_obj_name: str, prop: str):
86 assert type(iden) is str
87 ctf_keywords = {
88 'align',
89 'callsite',
90 'clock',
91 'enum',
92 'env',
93 'event',
94 'floating_point',
95 'integer',
96 'stream',
97 'string',
98 'struct',
99 'trace',
100 'typealias',
101 'typedef',
102 'variant',
103 }
104
105 if iden in ctf_keywords:
106 msg = f'Invalid {prop} (not a valid identifier): `{iden}`'
107 raise _ConfigurationParseError(ctx_obj_name, msg)
108
109 @staticmethod
110 def _alignment_prop(ft_node: _MapNode, prop_name: str) -> Optional[Alignment]:
111 alignment = ft_node.get(prop_name)
112
113 if alignment is not None:
114 _Parser._validate_alignment(alignment, f'`{prop_name}` property')
115
116 return Alignment(alignment)
117
118 @property
119 def _trace_type_node(self) -> _MapNode:
120 return self.config_node['trace']['type']
121
122 @staticmethod
123 def _byte_order_from_node(node: str) -> barectf_config.ByteOrder:
124 return {
125 'big-endian': barectf_config.ByteOrder.BIG_ENDIAN,
126 'little-endian': barectf_config.ByteOrder.LITTLE_ENDIAN,
127 }[node]
128
129 # Creates a bit array field type having the type `ft_type` from the
130 # bit array field type node `ft_node`, passing the additional
131 # `*args` to ft_type.__init__().
132 def _create_common_bit_array_ft(self, ft_node: _MapNode,
133 ft_type: Type[barectf_config._BitArrayFieldType],
134 default_alignment: Optional[Alignment],
135 *args) -> barectf_config._BitArrayFieldType:
136 alignment = self._alignment_prop(ft_node, 'alignment')
137
138 if alignment is None:
139 alignment = default_alignment
140
141 return ft_type(ft_node['size'], alignment, *args)
142
143 # Creates an integer field type having the type `ft_type` from the
144 # integer field type node `ft_node`, passing the additional `*args`
145 # to ft_type.__init__().
146 def _create_common_int_ft(self, ft_node: _MapNode,
147 ft_type: Type[barectf_config._IntegerFieldType], *args) -> barectf_config._IntegerFieldType:
148 preferred_display_base = {
149 'binary': barectf_config.DisplayBase.BINARY,
150 'octal': barectf_config.DisplayBase.OCTAL,
151 'decimal': barectf_config.DisplayBase.DECIMAL,
152 'hexadecimal': barectf_config.DisplayBase.HEXADECIMAL,
153 }[ft_node.get('preferred-display-base', 'decimal')]
154 return typing.cast(barectf_config._IntegerFieldType,
155 self._create_common_bit_array_ft(ft_node, ft_type, None,
156 preferred_display_base, *args))
157
158 # Creates an integer field type from the unsigned/signed integer
159 # field type node `ft_node`.
160 def _create_int_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
161 ft_type = {
162 'unsigned-integer': barectf_config.UnsignedIntegerFieldType,
163 'signed-integer': barectf_config.SignedIntegerFieldType,
164 }[ft_node['class']]
165 return [self._create_common_int_ft(ft_node, ft_type)]
166
167 # Creates an enumeration field type from the unsigned/signed
168 # enumeration field type node `ft_node`.
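#
# For example, a hypothetical `mappings` node such as:
#
#     mappings:
#       RUNNING: [0, [2, 4]]
#       STOPPED: [5]
#
# yields the label `RUNNING` with the ranges [0, 0] and [2, 4], and
# the label `STOPPED` with the single range [5, 5].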
169 def _create_enum_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
170 ft_type = {
171 'unsigned-enumeration': barectf_config.UnsignedEnumerationFieldType,
172 'signed-enumeration': barectf_config.SignedEnumerationFieldType,
173 }[ft_node['class']]
174 mappings = collections.OrderedDict()
175
176 for label, mapping_node in ft_node.get('mappings', {}).items():
177 ranges = set()
178
179 for range_node in mapping_node:
180 if type(range_node) is list:
181 ranges.add(barectf_config.EnumerationFieldTypeMappingRange(range_node[0],
182 range_node[1]))
183 else:
184 assert type(range_node) is int
185 ranges.add(barectf_config.EnumerationFieldTypeMappingRange(range_node,
186 range_node))
187
188 mappings[label] = barectf_config.EnumerationFieldTypeMapping(ranges)
189
190 return [typing.cast(barectf_config._EnumerationFieldType,
191 self._create_common_int_ft(ft_node, ft_type,
192 barectf_config.EnumerationFieldTypeMappings(mappings)))]
193
194 # Creates a real field type from the real field type node `ft_node`.
195 def _create_real_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
196 return [typing.cast(barectf_config.RealFieldType,
197 self._create_common_bit_array_ft(ft_node, barectf_config.RealFieldType,
198 Alignment(8)))]
199
200 # Creates a string field type from the string field type node
201 # `ft_node`.
202 def _create_string_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
203 return [barectf_config.StringFieldType()]
204
205 def _create_array_ft(self, ft_type, ft_node: _MapNode, **kwargs) -> barectf_config._ArrayFieldType:
206 prop_name = 'element-field-type'
207
208 try:
209 element_fts = self._create_fts(ft_node[prop_name])
210 except _ConfigurationParseError as exc:
211 _append_error_ctx(exc, f'`{prop_name}` property')
212
213 if len(element_fts) != 1 or isinstance(element_fts[0], (barectf_config.StructureFieldType,
214 barectf_config.DynamicArrayFieldType)):
215 raise _ConfigurationParseError(f'`{prop_name}` property',
216 'Nested structure and dynamic array field types are not supported')
217
218 return ft_type(element_field_type=element_fts[0], **kwargs)
219
220 # Creates a static array field type from the static array field type
221 # node `ft_node`.
222 def _create_static_array_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
223 return [typing.cast(barectf_config.StaticArrayFieldType,
224 self._create_array_ft(barectf_config.StaticArrayFieldType, ft_node,
225 length=ft_node['length']))]
226
227 # Creates a dynamic array field type from the dynamic array field
228 # type node `ft_node`.
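#
# For example, a hypothetical dynamic array field type node:
#
#     class: dynamic-array
#     element-field-type:
#       class: unsigned-integer
#       size: 16
#
# Note that this method returns two field types: a 32-bit unsigned
# integer field type for the length, followed by the dynamic array
# field type itself (see how _create_struct_ft_members() consumes
# such a pair).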
229 def _create_dynamic_array_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
230 # create length unsigned integer field type
231 len_ft = barectf_config.UnsignedIntegerFieldType(32, alignment=Alignment(8))
232 return [
233 len_ft,
234 typing.cast(barectf_config.DynamicArrayFieldType,
235 self._create_array_ft(barectf_config.DynamicArrayFieldType, ft_node,
236 length_field_type=len_ft))
237 ]
238
239 # Creates structure field type members from the structure field type
240 # members node `members_node`.
241 #
242 # `prop_name` is the name of the property of which `members_node` is
243 # the value.
244 def _create_struct_ft_members(self, members_node: List[_MapNode], prop_name: str):
245 members = collections.OrderedDict()
246 member_names: Set[str] = set()
247
248 for member_node in members_node:
249 member_name, member_node = list(member_node.items())[0]
250
251 if member_name in member_names:
252 raise _ConfigurationParseError(f'`{prop_name}` property',
253 f'Duplicate member `{member_name}`')
254
255 self._validate_iden(member_name, f'`{prop_name}` property',
256 'structure field type member name')
257 member_names.add(member_name)
258 ft_prop_name = 'field-type'
259 ft_node = member_node[ft_prop_name]
260
261 try:
262 if ft_node['class'] in ['structure']:
263 raise _ConfigurationParseError(f'`{ft_prop_name}` property',
264 'Nested structure field types are not supported')
265
266 try:
267 member_fts = self._create_fts(ft_node)
268 except _ConfigurationParseError as exc:
269 _append_error_ctx(exc, f'`{ft_prop_name}` property')
270 except _ConfigurationParseError as exc:
271 _append_error_ctx(exc, f'Structure field type member `{member_name}`')
272
273 if len(member_fts) == 2:
274 # The only case where this happens is a dynamic array
275 # field type node which generates an unsigned integer
276 # field type for the length and the dynamic array field
277 # type itself.
278 assert type(member_fts[1]) is barectf_config.DynamicArrayFieldType
279 members[f'__{member_name}_len'] = barectf_config.StructureFieldTypeMember(member_fts[0])
280 else:
281 assert len(member_fts) == 1
282
283 members[member_name] = barectf_config.StructureFieldTypeMember(member_fts[-1])
284
285 return barectf_config.StructureFieldTypeMembers(members)
286
287 # Creates a structure field type from the structure field type node
288 # `ft_node`.
289 def _create_struct_ft(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
290 minimum_alignment = self._alignment_prop(ft_node, 'minimum-alignment')
291
292 if minimum_alignment is None:
293 minimum_alignment = 1
294
295 members = None
296 prop_name = 'members'
297 members_node = ft_node.get(prop_name)
298
299 if members_node is not None:
300 members = self._create_struct_ft_members(members_node, prop_name)
301
302 return [barectf_config.StructureFieldType(minimum_alignment, members)]
303
304 # Creates field types from the field type node `ft_node`.
305 def _create_fts(self, ft_node: _MapNode) -> List[barectf_config._FieldType]:
306 return self._ft_cls_name_to_create_method[ft_node['class']](ft_node)
307
308 # Creates field types from the field type node `parent_node[key]`
309 # if it exists.
310 def _try_create_fts(self, parent_node: _MapNode, key: str) -> Optional[List[barectf_config._FieldType]]:
311 if key not in parent_node:
312 return None
313
314 try:
315 return self._create_fts(parent_node[key])
316 except _ConfigurationParseError as exc:
317 _append_error_ctx(exc, f'`{key}` property')
318
319 # satisfy static type checker (never reached)
320 raise
321
322 # Like _try_create_fts(), but casts the result's type (first and
323 # only element) to `barectf_config.StructureFieldType` to satisfy
324 # static type checkers.
325 def _try_create_struct_ft(self, parent_node: _MapNode, key: str) -> _OptStructFt:
326 fts = self._try_create_fts(parent_node, key)
327
328 if fts is None:
329 return None
330
331 return typing.cast(barectf_config.StructureFieldType, fts[0])
332
333 # Returns the total number of members in the structure field type
334 # node `ft_node` if it exists, otherwise 0.
335 @staticmethod
336 def _total_struct_ft_node_members(ft_node: Optional[_MapNode]) -> Count:
337 if ft_node is None:
338 return Count(0)
339
340 members_node = ft_node.get('members')
341
342 if members_node is None:
343 return Count(0)
344
345 return Count(len(members_node))
346
347 # Creates an event record type from the event record type node
348 # `ert_node` named `name`.
349 #
350 # `ert_member_count` is the total number of structure field type
351 # members within the event record type so far (from the common part
352 # in its data stream type). For example, if the data stream type has
353 # an event record header field type with `id` and `timestamp`
354 # members, then `ert_member_count` is 2.
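#
# For example, with `name` being `my_event` (a hypothetical name), a
# corresponding event record type node could be:
#
#     log-level: 13
#     payload-field-type:
#       class: structure
#       members:
#         - msg:
#             field-type:
#               class: string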
355 def _create_ert(self, name: str, ert_node: _MapNode,
356 ert_member_count: Count) -> barectf_config.EventRecordType:
357 try:
358 self._validate_iden(name, '`name` property', 'event record type name')
359
360 # make sure the event record type is not empty
361 spec_ctx_ft_prop_name = 'specific-context-field-type'
362 payload_ft_prop_name = 'payload-field-type'
363 ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(spec_ctx_ft_prop_name)))
364 ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(payload_ft_prop_name)))
365
366 if ert_member_count == 0:
367 raise _ConfigurationParseError('Event record type',
368 'Event record type is empty (no members).')
369
370 # create event record type
371 return barectf_config.EventRecordType(name, ert_node.get('log-level'),
372 self._try_create_struct_ft(ert_node,
373 spec_ctx_ft_prop_name),
374 self._try_create_struct_ft(ert_node,
375 payload_ft_prop_name))
376 except _ConfigurationParseError as exc:
377 _append_error_ctx(exc, f'Event record type `{name}`')
378
379 # satisfy static type checker (never reached)
380 raise
381
382 # Returns the effective feature field type for the field type
383 # node `parent_node[key]`, if any.
384 #
385 # Returns:
386 #
387 # If `parent_node[key]` is `False`:
388 # `None`.
389 #
390 # If `parent_node[key]` is `True`:
391 # `barectf_config.DEFAULT_FIELD_TYPE`.
392 #
393 # If `parent_node[key]` doesn't exist:
394 # `none` (parameter).
395 #
396 # Otherwise:
397 # A created field type.
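#
# For example, a hypothetical `$features` node showing all three forms:
#
#     $features:
#       packet:
#         total-size-field-type: true         # default field type
#         beginning-time-field-type: false    # disabled feature
#         content-size-field-type:
#           class: unsigned-integer           # explicit field type
#           size: 32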
398 def _feature_ft(self, parent_node: _MapNode, key: str, none: Any = None) -> Any:
399 if key not in parent_node:
400 # missing: default feature field type
401 return none
402
403 ft_node = parent_node[key]
404 assert ft_node is not None
405
406 if ft_node is True:
407 # default feature field type
408 return barectf_config.DEFAULT_FIELD_TYPE
409
410 if ft_node is False:
411 # disabled feature
412 return None
413
414 assert type(ft_node) is collections.OrderedDict
415 return self._create_fts(ft_node)[0]
416
417 def _create_dst(self, name: str, dst_node: _MapNode) -> barectf_config.DataStreamType:
418 try:
419 # validate data stream type's name
420 self._validate_iden(name, '`name` property', 'data stream type name')
421
422 # get default clock type, if any
423 def_clk_type = None
424 prop_name = '$default-clock-type-name'
425 def_clk_type_name = dst_node.get(prop_name)
426
427 if def_clk_type_name is not None:
428 try:
429 def_clk_type = self._clk_type(def_clk_type_name, prop_name)
430 except _ConfigurationParseError as exc:
431 _append_error_ctx(exc, f'`{prop_name}` property')
432
433 # create feature field types
434 pkt_total_size_ft = barectf_config.DEFAULT_FIELD_TYPE
435 pkt_content_size_ft = barectf_config.DEFAULT_FIELD_TYPE
436 pkt_beginning_time_ft = None
437 pkt_end_time_ft = None
438 pkt_discarded_er_counter_snap_ft = None
439 ert_id_ft = barectf_config.DEFAULT_FIELD_TYPE
440 ert_time_ft = None
441
442 if def_clk_type is not None:
443 # The data stream type has a default clock type.
444 # Initialize the packet beginning time, packet end time,
445 # and event record time field types to default field
446 # types.
447 #
448 # This means your data stream type node only needs a
449 # default clock type name to enable those features
450 # automatically. Those features do not add any parameter
451 # to the event tracing functions.
452 pkt_beginning_time_ft = barectf_config.DEFAULT_FIELD_TYPE
453 pkt_end_time_ft = barectf_config.DEFAULT_FIELD_TYPE
454 ert_time_ft = barectf_config.DEFAULT_FIELD_TYPE
455
456 features_node = dst_node.get('$features')
457
458 if features_node is not None:
459 # create packet feature field types
460 pkt_node = features_node.get('packet')
461
462 if pkt_node is not None:
463 pkt_total_size_ft = self._feature_ft(pkt_node, 'total-size-field-type',
464 pkt_total_size_ft)
465 pkt_content_size_ft = self._feature_ft(pkt_node, 'content-size-field-type',
466 pkt_content_size_ft)
467 pkt_beginning_time_ft = self._feature_ft(pkt_node, 'beginning-time-field-type',
468 pkt_beginning_time_ft)
469 pkt_end_time_ft = self._feature_ft(pkt_node, 'end-time-field-type',
470 pkt_end_time_ft)
471 pkt_discarded_er_counter_snap_ft = self._feature_ft(pkt_node,
472 'discarded-event-records-counter-snapshot-field-type',
473 pkt_discarded_er_counter_snap_ft)
474
475 # create event record feature field types
476 er_node = features_node.get('event-record')
477 type_id_ft_prop_name = 'type-id-field-type'
478
479 if er_node is not None:
480 ert_id_ft = self._feature_ft(er_node, type_id_ft_prop_name, ert_id_ft)
481 ert_time_ft = self._feature_ft(er_node, 'time-field-type', ert_time_ft)
482
483 erts_prop_name = 'event-record-types'
484 ert_count = len(dst_node[erts_prop_name])
485
486 try:
487 if ert_id_ft is None and ert_count > 1:
488 raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
489 'Event record type ID field type feature is required because data stream type has more than one event record type')
490
491 if isinstance(ert_id_ft, barectf_config._IntegerFieldType):
492 ert_id_int_ft = typing.cast(barectf_config._IntegerFieldType, ert_id_ft)
493
494 if ert_count > (1 << ert_id_int_ft.size):
495 raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
496 f'Field type\'s size ({ert_id_int_ft.size} bits) is too small to accommodate {ert_count} event record types')
497 except _ConfigurationParseError as exc:
498 exc._append_ctx('`event-record` property')
499 _append_error_ctx(exc, '`$features` property')
500
501 pkt_features = barectf_config.DataStreamTypePacketFeatures(pkt_total_size_ft,
502 pkt_content_size_ft,
503 pkt_beginning_time_ft,
504 pkt_end_time_ft,
505 pkt_discarded_er_counter_snap_ft)
506 er_features = barectf_config.DataStreamTypeEventRecordFeatures(ert_id_ft, ert_time_ft)
507 features = barectf_config.DataStreamTypeFeatures(pkt_features, er_features)
508
509 # create packet context (structure) field type extra members
510 pkt_ctx_ft_extra_members = None
511 prop_name = 'packet-context-field-type-extra-members'
512 pkt_ctx_ft_extra_members_node = dst_node.get(prop_name)
513
514 if pkt_ctx_ft_extra_members_node is not None:
515 pkt_ctx_ft_extra_members = self._create_struct_ft_members(pkt_ctx_ft_extra_members_node,
516 prop_name)
517
518 # check for illegal packet context field type member names
519 reserved_member_names = {
520 'packet_size',
521 'content_size',
522 'timestamp_begin',
523 'timestamp_end',
524 'events_discarded',
525 'packet_seq_num',
526 }
527
528 for member_name in pkt_ctx_ft_extra_members:
529 if member_name in reserved_member_names:
530 raise _ConfigurationParseError(f'`{prop_name}` property',
531 f'Packet context field type member name `{member_name}` is reserved.')
532
533 # create event record types
534 er_header_common_ctx_member_count = Count(0)
535
536 if er_features.type_id_field_type is not None:
537 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
538
539 if er_features.time_field_type is not None:
540 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
541
542 er_common_ctx_ft_prop_name = 'event-record-common-context-field-type'
543 er_common_ctx_ft_node = dst_node.get(er_common_ctx_ft_prop_name)
544 er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + self._total_struct_ft_node_members(er_common_ctx_ft_node))
545 erts = set()
546
547 for ert_name, ert_node in dst_node[erts_prop_name].items():
548 erts.add(self._create_ert(ert_name, ert_node, er_header_common_ctx_member_count))
549
550 # create data stream type
551 return barectf_config.DataStreamType(name, erts, def_clk_type, features,
552 pkt_ctx_ft_extra_members,
553 self._try_create_struct_ft(dst_node,
554 er_common_ctx_ft_prop_name))
555 except _ConfigurationParseError as exc:
556 _append_error_ctx(exc, f'Data stream type `{name}`')
557
558 # satisfy static type checker (never reached)
559 raise
560
561 def _clk_type(self, name: str, prop_name: str) -> barectf_config.ClockType:
562 clk_type = self._clk_types.get(name)
563
564 if clk_type is None:
565 raise _ConfigurationParseError(f'`{prop_name}` property',
566 f'Clock type `{name}` does not exist')
567
568 return clk_type
569
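# Creates a clock type from the clock type node `clk_type_node` named
# `name`.
#
# For example, a hypothetical clock type node:
#
#     frequency: 1000000000
#     offset:
#       seconds: 1600000000
#     origin-is-unix-epoch: true
#     $c-type: uint64_t
#
# Note that the `$c-type` property is not read here: _create_config()
# uses it to map the clock type to a C type.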
570 def _create_clk_type(self, name: str, clk_type_node: _MapNode) -> barectf_config.ClockType:
571 self._validate_iden(name, '`name` property', 'clock type name')
572 clk_type_uuid = None
573 uuid_node = clk_type_node.get('uuid')
574
575 if uuid_node is not None:
576 clk_type_uuid = uuid.UUID(uuid_node)
577
578 offset_seconds = 0
579 offset_cycles = Count(0)
580 offset_node = clk_type_node.get('offset')
581
582 if offset_node is not None:
583 offset_seconds = offset_node.get('seconds', 0)
584 offset_cycles = offset_node.get('cycles', Count(0))
585
586 return barectf_config.ClockType(name, clk_type_node.get('frequency', int(1e9)),
587 clk_type_uuid, clk_type_node.get('description'),
588 clk_type_node.get('precision', 0),
589 barectf_config.ClockTypeOffset(offset_seconds, offset_cycles),
590 clk_type_node.get('origin-is-unix-epoch', False))
591
592 def _create_clk_types(self):
593 self._clk_types = {}
594
595 for clk_type_name, clk_type_node in self._trace_type_node.get('clock-types', {}).items():
596 self._clk_types[clk_type_name] = self._create_clk_type(clk_type_name, clk_type_node)
597
598 def _create_trace_type(self):
599 try:
600 # create clock types (_create_dst() needs them)
601 self._create_clk_types()
602
603 # get UUID
604 trace_type_uuid = None
605 uuid_node = self._trace_type_node.get('uuid')
606
607 if uuid_node is not None:
608 if uuid_node == 'auto':
609 trace_type_uuid = uuid.uuid1()
610 else:
611 trace_type_uuid = uuid.UUID(uuid_node)
612
613 # create feature field types
614 magic_ft = barectf_config.DEFAULT_FIELD_TYPE
615 uuid_ft = None
616 dst_id_ft = barectf_config.DEFAULT_FIELD_TYPE
617
618 if trace_type_uuid is not None:
619 # Trace type has a UUID: initialize UUID field type to
620 # a default field type.
621 uuid_ft = barectf_config.DEFAULT_FIELD_TYPE
622
623 features_node = self._trace_type_node.get('$features')
624 dst_id_ft_prop_name = 'data-stream-type-id-field-type'
625
626 if features_node is not None:
627 magic_ft = self._feature_ft(features_node, 'magic-field-type',
628 magic_ft)
629 uuid_ft = self._feature_ft(features_node, 'uuid-field-type', uuid_ft)
630 dst_id_ft = self._feature_ft(features_node, dst_id_ft_prop_name, dst_id_ft)
631
632 dsts_prop_name = 'data-stream-types'
633 dst_count = len(self._trace_type_node[dsts_prop_name])
634
635 try:
636 if dst_id_ft is None and dst_count > 1:
637 raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
638 'Data stream type ID field type feature is required because trace type has more than one data stream type')
639
640 if isinstance(dst_id_ft, barectf_config._FieldType) and dst_count > (1 << dst_id_ft.size):
641 raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
642 f'Field type\'s size ({dst_id_ft.size} bits) is too small to accommodate {dst_count} data stream types')
643 except _ConfigurationParseError as exc:
644 _append_error_ctx(exc, '`$features` property')
645
646 features = barectf_config.TraceTypeFeatures(magic_ft, uuid_ft, dst_id_ft)
647
648 # create data stream types
649 dsts = set()
650
651 for dst_name, dst_node in self._trace_type_node[dsts_prop_name].items():
652 dsts.add(self._create_dst(dst_name, dst_node))
653
654 # create trace type
655 return barectf_config.TraceType(dsts, trace_type_uuid, features)
656 except _ConfigurationParseError as exc:
657 _append_error_ctx(exc, 'Trace type')
658
659 def _create_trace(self):
660 try:
661 trace_type = self._create_trace_type()
662 trace_node = self.config_node['trace']
663 env = None
664 env_node = trace_node.get('environment')
665
666 if env_node is not None:
667 # validate each environment variable name
668 for name in env_node:
669 self._validate_iden(name, '`environment` property',
670 'environment variable name')
671
672 # the node already has the expected structure
673 env = barectf_config.TraceEnvironment(env_node)
674
675 return barectf_config.Trace(trace_type, env)
676
677 except _ConfigurationParseError as exc:
678 _append_error_ctx(exc, 'Trace')
679
680 def _create_config(self):
681 # create trace first
682 trace = self._create_trace()
683
684 # find default data stream type, if any
685 def_dst = None
686
687 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
688 prop_name = '$is-default'
689 is_default = dst_node.get(prop_name)
690
691 if is_default is True:
692 if def_dst is not None:
693 exc = _ConfigurationParseError(f'`{prop_name}` property',
694 f'Duplicate default data stream type (`{def_dst.name}`)')
695 exc._append_ctx(f'Data stream type `{dst_name}`')
696 _append_error_ctx(exc, 'Trace type')
697
698 def_dst = trace.type.data_stream_type(dst_name)
699
700 # create clock type C type mapping
701 clk_types_node = self._trace_type_node.get('clock-types')
702 clk_type_c_types = None
703
704 if clk_types_node is not None:
705 clk_type_c_types = collections.OrderedDict()
706
707 for dst in trace.type.data_stream_types:
708 if dst.default_clock_type is None:
709 continue
710
711 clk_type_node = clk_types_node[dst.default_clock_type.name]
712 c_type = clk_type_node.get('$c-type')
713
714 if c_type is not None:
715 clk_type_c_types[dst.default_clock_type] = c_type
716
717 # create options
718 iden_prefix_def = False
719 def_dst_name_def = False
720 opts_node = self.config_node.get('options')
721 iden_prefix = 'barectf_'
722 file_name_prefix = 'barectf'
723
724 if opts_node is not None:
725 code_gen_opts_node = opts_node.get('code-generation')
726
727 if code_gen_opts_node is not None:
728 prefix_node = code_gen_opts_node.get('prefix', 'barectf')
729
730 if type(prefix_node) is str:
731 # automatic prefixes
732 iden_prefix = f'{prefix_node}_'
733 file_name_prefix = prefix_node
734 else:
735 iden_prefix = prefix_node['identifier']
736 file_name_prefix = prefix_node['file-name']
737
738 header_opts = code_gen_opts_node.get('header')
739
740 if header_opts is not None:
741 iden_prefix_def = header_opts.get('identifier-prefix-definition', False)
742 def_dst_name_def = header_opts.get('default-data-stream-type-name-definition',
743 False)
744
745 header_opts = barectf_config.ConfigurationCodeGenerationHeaderOptions(iden_prefix_def,
746 def_dst_name_def)
747 cg_opts = barectf_config.ConfigurationCodeGenerationOptions(iden_prefix, file_name_prefix,
748 def_dst, header_opts,
749 clk_type_c_types)
750 opts = barectf_config.ConfigurationOptions(cg_opts)
751
752 # create configuration
753 self._config = barectf_config.Configuration(trace, self._target_byte_order, opts)
754
755 # Expands the field type aliases found in the trace type node.
756 #
757 # This method modifies the trace type node.
758 #
759 # When this method returns:
760 #
761 # * Any field type alias is replaced with its full field type
762 # node equivalent.
763 #
764 # * The `$field-type-aliases` property of the trace type node is
765 # removed.
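#
# For example, given a hypothetical alias:
#
#     $field-type-aliases:
#       byte:
#         class: unsigned-integer
#         size: 8
#
# any field type node which is simply the string `byte` is replaced
# with the full unsigned integer field type node above.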
766 def _expand_ft_aliases(self):
767 def resolve_ft_alias_from(parent_node: _MapNode, key: str):
768 if key not in parent_node:
769 return
770
771 if type(parent_node[key]) not in [collections.OrderedDict, str]:
772 return
773
774 self._resolve_ft_alias_from(ft_aliases_node, parent_node, key)
775
776 ft_aliases_node = self._trace_type_node['$field-type-aliases']
777
778 # Expand field type aliases within trace, data stream, and event
779 # record type nodes.
780 features_prop_name = '$features'
781
782 try:
783 features_node = self._trace_type_node.get(features_prop_name)
784
785 if features_node is not None:
786 try:
787 resolve_ft_alias_from(features_node, 'magic-field-type')
788 resolve_ft_alias_from(features_node, 'uuid-field-type')
789 resolve_ft_alias_from(features_node, 'data-stream-type-id-field-type')
790 except _ConfigurationParseError as exc:
791 _append_error_ctx(exc, f'`{features_prop_name}` property')
792 except _ConfigurationParseError as exc:
793 _append_error_ctx(exc, 'Trace type')
794
795 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
796 try:
797 features_node = dst_node.get(features_prop_name)
798
799 if features_node is not None:
800 try:
801 pkt_prop_name = 'packet'
802 pkt_node = features_node.get(pkt_prop_name)
803
804 if pkt_node is not None:
805 try:
806 resolve_ft_alias_from(pkt_node, 'total-size-field-type')
807 resolve_ft_alias_from(pkt_node, 'content-size-field-type')
808 resolve_ft_alias_from(pkt_node, 'beginning-time-field-type')
809 resolve_ft_alias_from(pkt_node, 'end-time-field-type')
810 resolve_ft_alias_from(pkt_node,
811 'discarded-event-records-counter-snapshot-field-type')
812 except _ConfigurationParseError as exc:
813 _append_error_ctx(exc, f'`{pkt_prop_name}` property')
814
815 er_prop_name = 'event-record'
816 er_node = features_node.get(er_prop_name)
817
818 if er_node is not None:
819 try:
820 resolve_ft_alias_from(er_node, 'type-id-field-type')
821 resolve_ft_alias_from(er_node, 'time-field-type')
822 except _ConfigurationParseError as exc:
823 _append_error_ctx(exc, f'`{er_prop_name}` property')
824 except _ConfigurationParseError as exc:
825 _append_error_ctx(exc, f'`{features_prop_name}` property')
826
827 pkt_ctx_ft_extra_members_prop_name = 'packet-context-field-type-extra-members'
828 pkt_ctx_ft_extra_members_node = dst_node.get(pkt_ctx_ft_extra_members_prop_name)
829
830 if pkt_ctx_ft_extra_members_node is not None:
831 try:
832 for member_node in pkt_ctx_ft_extra_members_node:
833 member_node = list(member_node.values())[0]
834 resolve_ft_alias_from(member_node, 'field-type')
835 except _ConfigurationParseError as exc:
836 _append_error_ctx(exc, f'`{pkt_ctx_ft_extra_members_prop_name}` property')
837
838 resolve_ft_alias_from(dst_node, 'event-record-common-context-field-type')
839
840 for ert_name, ert_node in dst_node['event-record-types'].items():
841 try:
842 resolve_ft_alias_from(ert_node, 'specific-context-field-type')
843 resolve_ft_alias_from(ert_node, 'payload-field-type')
844 except _ConfigurationParseError as exc:
845 _append_error_ctx(exc, f'Event record type `{ert_name}`')
846 except _ConfigurationParseError as exc:
847 _append_error_ctx(exc, f'Data stream type `{dst_name}`')
848
849 # remove the (now unneeded) `$field-type-aliases` property
850 del self._trace_type_node['$field-type-aliases']
851
852 # Applies field type inheritance to all field type nodes found in
853 # the trace type node.
854 #
855 # This method modifies the trace type node.
856 #
857 # When this method returns, no field type node has an `$inherit`
858 # property.
859 def _apply_fts_inheritance(self):
860 def apply_ft_inheritance(parent_node: _MapNode, key: str):
861 if key not in parent_node:
862 return
863
864 if type(parent_node[key]) is not collections.OrderedDict:
865 return
866
867 self._apply_ft_inheritance(parent_node, key)
868
869 features_prop_name = '$features'
870 features_node = self._trace_type_node.get(features_prop_name)
871
872 if features_node is not None:
873 apply_ft_inheritance(features_node, 'magic-field-type')
874 apply_ft_inheritance(features_node, 'uuid-field-type')
875 apply_ft_inheritance(features_node, 'data-stream-type-id-field-type')
876
877 for dst_node in self._trace_type_node['data-stream-types'].values():
878 features_node = dst_node.get(features_prop_name)
879
880 if features_node is not None:
881 pkt_node = features_node.get('packet')
882
883 if pkt_node is not None:
884 apply_ft_inheritance(pkt_node, 'total-size-field-type')
885 apply_ft_inheritance(pkt_node, 'content-size-field-type')
886 apply_ft_inheritance(pkt_node, 'beginning-time-field-type')
887 apply_ft_inheritance(pkt_node, 'end-time-field-type')
888 apply_ft_inheritance(pkt_node, 'discarded-event-records-counter-snapshot-field-type')
889
890 er_node = features_node.get('event-record')
891
892 if er_node is not None:
893 apply_ft_inheritance(er_node, 'type-id-field-type')
894 apply_ft_inheritance(er_node, 'time-field-type')
895
896 pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
897
898 if pkt_ctx_ft_extra_members_node is not None:
899 for member_node in pkt_ctx_ft_extra_members_node:
900 member_node = list(member_node.values())[0]
901 apply_ft_inheritance(member_node, 'field-type')
902
903 apply_ft_inheritance(dst_node, 'event-record-common-context-field-type')
904
905 for ert_node in dst_node['event-record-types'].values():
906 apply_ft_inheritance(ert_node, 'specific-context-field-type')
907 apply_ft_inheritance(ert_node, 'payload-field-type')
908
909 # Normalizes structure field type member nodes.
910 #
911 # A structure field type member node can look like this:
912 #
913 # - msg: custom-string
914 #
915 # which is the equivalent of this:
916 #
917 # - msg:
918 # field-type: custom-string
919 #
920 # This method normalizes form 1 to use form 2.
921 def _normalize_struct_ft_member_nodes(self):
922 def normalize_members_node(members_node: List[_MapNode]):
923 ft_prop_name = 'field-type'
924
925 for member_node in members_node:
926 member_name, val_node = list(member_node.items())[0]
927
928 if type(val_node) is str:
929 member_node[member_name] = collections.OrderedDict({
930 ft_prop_name: val_node
931 })
932
933 normalize_struct_ft_member_nodes(member_node[member_name], ft_prop_name)
934
935 def normalize_struct_ft_member_nodes(parent_node: _MapNode, key: str):
936 if type(parent_node) is not collections.OrderedDict:
937 return
938
939 ft_node = parent_node.get(key)
940
941 if type(ft_node) is not collections.OrderedDict:
942 return
943
944 ft_node = typing.cast(collections.OrderedDict, ft_node)
945 members_nodes = ft_node.get('members')
946
947 if members_nodes is not None:
948 normalize_members_node(members_nodes)
949
950 prop_name = '$field-type-aliases'
951 ft_aliases_node = self._trace_type_node.get(prop_name)
952
953 if ft_aliases_node is not None:
954 for alias in ft_aliases_node:
955 normalize_struct_ft_member_nodes(ft_aliases_node, alias)
956
957 features_prop_name = '$features'
958 features_node = self._trace_type_node.get(features_prop_name)
959
960 if features_node is not None:
961 normalize_struct_ft_member_nodes(features_node, 'magic-field-type')
962 normalize_struct_ft_member_nodes(features_node, 'uuid-field-type')
963 normalize_struct_ft_member_nodes(features_node, 'data-stream-type-id-field-type')
964
965 for dst_node in self._trace_type_node['data-stream-types'].values():
966 features_node = dst_node.get(features_prop_name)
967
968 if features_node is not None:
969 pkt_node = features_node.get('packet')
970
971 if pkt_node is not None:
972 normalize_struct_ft_member_nodes(pkt_node, 'total-size-field-type')
973 normalize_struct_ft_member_nodes(pkt_node, 'content-size-field-type')
974 normalize_struct_ft_member_nodes(pkt_node, 'beginning-time-field-type')
975 normalize_struct_ft_member_nodes(pkt_node, 'end-time-field-type')
976 normalize_struct_ft_member_nodes(pkt_node,
977 'discarded-event-records-counter-snapshot-field-type')
978
979 er_node = features_node.get('event-record')
980
981 if er_node is not None:
982 normalize_struct_ft_member_nodes(er_node, 'type-id-field-type')
983 normalize_struct_ft_member_nodes(er_node, 'time-field-type')
984
985 pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
986
987 if pkt_ctx_ft_extra_members_node is not None:
988 normalize_members_node(pkt_ctx_ft_extra_members_node)
989
990 normalize_struct_ft_member_nodes(dst_node, 'event-record-common-context-field-type')
991
992 for ert_node in dst_node['event-record-types'].values():
993 normalize_struct_ft_member_nodes(ert_node, 'specific-context-field-type')
994 normalize_struct_ft_member_nodes(ert_node, 'payload-field-type')
995
996 # Calls _expand_ft_aliases() and _apply_fts_inheritance() if the
997 # trace type node has a `$field-type-aliases` property.
998 def _expand_fts(self):
999 # Make sure that the current configuration node is valid
1000 # considering field types are not expanded yet.
1001 self._schema_validator.validate(self.config_node,
1002 'config/3/config-pre-field-type-expansion')
1003
1004 prop_name = '$field-type-aliases'
1005 ft_aliases_node = self._trace_type_node.get(prop_name)
1006
1007 if ft_aliases_node is None:
1008 # If there's no `$field-type-aliases` property, then there are
1009 # no field type aliases and therefore no possible
1010 # inheritance.
1011 if prop_name in self._trace_type_node:
1012 del self._trace_type_node[prop_name]
1013
1014 return
1015
1016 # normalize structure field type member nodes
1017 self._normalize_struct_ft_member_nodes()
1018
1019 # first, expand field type aliases
1020 self._expand_ft_aliases()
1021
1022 # next, apply inheritance to create effective field type nodes
1023 self._apply_fts_inheritance()
1024
1025 # Substitutes the log level aliases of event record type nodes
1026 # with their numeric equivalents.
1027 #
1028 # Removes the `$log-level-aliases` property of the trace type node.
1029 def _sub_log_level_aliases(self):
1030 # Make sure that the current configuration node is valid
1031 # considering log level aliases are not substituted yet.
1032 self._schema_validator.validate(self.config_node,
1033 'config/3/config-pre-log-level-alias-sub')
1034
1035 log_level_aliases_prop_name = '$log-level-aliases'
1036 log_level_aliases_node = self._trace_type_node.get(log_level_aliases_prop_name)
1037
1038 if log_level_aliases_prop_name in self._trace_type_node:
1039 del self._trace_type_node[log_level_aliases_prop_name]
1040
1041 if log_level_aliases_node is None:
1042 # no log level aliases
1043 return
1044
1045 # substitute log level aliases
1046 for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
1047 try:
1048 for ert_name, ert_node in dst_node['event-record-types'].items():
1049 try:
1050 prop_name = 'log-level'
1051 ll_node = ert_node.get(prop_name)
1052
1053 if ll_node is None:
1054 continue
1055
1056 if type(ll_node) is str:
1057 if ll_node not in log_level_aliases_node:
1058 raise _ConfigurationParseError(f'`{prop_name}` property',
1059 f'Log level alias `{ll_node}` does not exist')
1060
1061 ert_node[prop_name] = log_level_aliases_node[ll_node]
1062 except _ConfigurationParseError as exc:
1063 _append_error_ctx(exc, f'Event record type `{ert_name}`')
1064 except _ConfigurationParseError as exc:
1065 _append_error_ctx(exc, f'Data stream type `{dst_name}`')
1066
1067 # Generator of (parent node, key) pairs for all the nodes of
1068 # `node`, recursively.
1069 #
1070 # It is safe to delete a yielded node during the iteration.
1071 @staticmethod
1072 def _props(node: Any) -> Iterable[Tuple[Any, str]]:
1073 if type(node) is collections.OrderedDict:
1074 for key in list(node):
1075 yield from _Parser._props(node[key])
1076 yield node, key
1077 elif type(node) is list:
1078 for item_node in node:
1079 yield from _Parser._props(item_node)
1080
1081 def _trace_type_props(self) -> Iterable[Tuple[Any, str]]:
1082 yield from _Parser._props(self.config_node['trace']['type'])
1083
1084 # Normalizes the properties of the configuration node.
1085 #
1086 # This method, for each property of the trace type node:
1087 #
1088 # 1. Removes it if it's `None` (means default).
1089 #
1090 # 2. Chooses a specific `class` property value.
1091 #
1092 # 3. Chooses a specific `byte-order`/`target-byte-order` property
1093 # value.
1094 #
1095 # 4. Chooses a specific `preferred-display-base` property value.
1096 #
1097 # This method also applies 1. to the trace node's `environment`
1098 # property.
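#
# For example, a hypothetical field type node:
#
#     class: uint
#     size: 8
#     preferred-display-base: hex
#
# becomes:
#
#     class: unsigned-integer
#     size: 8
#     preferred-display-base: hexadecimal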
1099 def _normalize_props(self):
1100 def normalize_byte_order_prop(parent_node: _MapNode, key: str):
1101 node = parent_node[key]
1102
1103 if node in ['be', 'big']:
1104 parent_node[key] = 'big-endian'
1105 elif node in ['le', 'little']:
1106 parent_node[key] = 'little-endian'
1107
1108 trace_node = self.config_node['trace']
1109 normalize_byte_order_prop(self.config_node, 'target-byte-order')
1110
1111 for parent_node, key in self._trace_type_props():
1112 node = parent_node[key]
1113
1114 if node is None:
1115 # a `None` property is equivalent to not having it
1116 del parent_node[key]
1117 continue
1118
1119 if key == 'class' and type(node) is str:
1120 # field type class aliases
1121 if node in ['uint', 'unsigned-int']:
1122 parent_node[key] = 'unsigned-integer'
1123 elif node in ['sint', 'signed-int']:
1124 parent_node[key] = 'signed-integer'
1125 elif node in ['uenum', 'unsigned-enum']:
1126 parent_node[key] = 'unsigned-enumeration'
1127 elif node in ['senum', 'signed-enum']:
1128 parent_node[key] = 'signed-enumeration'
1129 elif node == 'str':
1130 parent_node[key] = 'string'
1131 elif node == 'struct':
1132 parent_node[key] = 'structure'
1133 elif key == 'preferred-display-base' and type(node) is str:
1134 # display base aliases
1135 if node == 'bin':
1136 parent_node[key] = 'binary'
1137 elif node == 'oct':
1138 parent_node[key] = 'octal'
1139 elif node == 'dec':
1140 parent_node[key] = 'decimal'
1141 elif node == 'hex':
1142 parent_node[key] = 'hexadecimal'
1143
1144 prop_name = 'environment'
1145
1146 if prop_name in trace_node:
1147 node = trace_node[prop_name]
1148
1149 if node is None:
1150 del trace_node[prop_name]
1151
1152 # Sets the parser's target byte order.
1153 def _set_target_byte_order(self):
1154 self._target_byte_order_node = self.config_node['target-byte-order']
1155 self._target_byte_order = self._byte_order_from_node(self._target_byte_order_node)
1156
1157 # Processes the inclusions of the event record type node
1158 # `ert_node`, returning the effective node.
1159 def _process_ert_node_include(self, ert_node: _MapNode) -> _MapNode:
1160 # Make sure the event record type node is valid for the
1161 # inclusion processing stage.
1162 self._schema_validator.validate(ert_node, 'config/3/ert-pre-include')
1163
1164 # process inclusions
1165 return self._process_node_include(ert_node, self._process_ert_node_include)
1166
1167 # Processes the inclusions of the data stream type node `dst_node`,
1168 # returning the effective node.
1169 def _process_dst_node_include(self, dst_node: _MapNode) -> _MapNode:
1170 def process_children_include(dst_node: _MapNode):
1171 prop_name = 'event-record-types'
1172
1173 if prop_name in dst_node:
1174 erts_node = dst_node[prop_name]
1175
1176 for key in list(erts_node):
1177 erts_node[key] = self._process_ert_node_include(erts_node[key])
1178
1179 # Make sure the data stream type node is valid for the inclusion
1180 # processing stage.
1181 self._schema_validator.validate(dst_node, 'config/3/dst-pre-include')
1182
1183 # process inclusions
1184 return self._process_node_include(dst_node, self._process_dst_node_include,
1185 process_children_include)
1186
1187 # Processes the inclusions of the clock type node `clk_type_node`,
1188 # returning the effective node.
1189 def _process_clk_type_node_include(self, clk_type_node: _MapNode) -> _MapNode:
1190 # Make sure the clock type node is valid for the inclusion
1191 # processing stage.
1192 self._schema_validator.validate(clk_type_node, 'config/3/clock-type-pre-include')
1193
1194 # process inclusions
1195 return self._process_node_include(clk_type_node, self._process_clk_type_node_include)
1196
1197 # Processes the inclusions of the trace type node `trace_type_node`,
1198 # returning the effective node.
1199 def _process_trace_type_node_include(self, trace_type_node: _MapNode) -> _MapNode:
1200 def process_children_include(trace_type_node: _MapNode):
1201 prop_name = 'clock-types'
1202
1203 if prop_name in trace_type_node:
1204 clk_types_node = trace_type_node[prop_name]
1205
1206 for key in list(clk_types_node):
1207 clk_types_node[key] = self._process_clk_type_node_include(clk_types_node[key])
1208
1209 prop_name = 'data-stream-types'
1210
1211 if prop_name in trace_type_node:
1212 dsts_node = trace_type_node[prop_name]
1213
1214 for key in list(dsts_node):
1215 dsts_node[key] = self._process_dst_node_include(dsts_node[key])
1216
1217 # Make sure the trace type node is valid for the inclusion
1218 # processing stage.
1219 self._schema_validator.validate(trace_type_node, 'config/3/trace-type-pre-include')
1220
1221 # process inclusions
1222 return self._process_node_include(trace_type_node, self._process_trace_type_node_include,
1223 process_children_include)
1224
1225 # Processes the inclusions of the trace node `trace_node`, returning
1226 # the effective node.
1227 def _process_trace_node_include(self, trace_node: _MapNode) -> _MapNode:
1228 def process_children_include(trace_node: _MapNode):
1229 prop_name = 'type'
1230 trace_node[prop_name] = self._process_trace_type_node_include(trace_node[prop_name])
1231
1232 # Make sure the trace node is valid for the inclusion processing
1233 # stage.
1234 self._schema_validator.validate(trace_node, 'config/3/trace-pre-include')
1235
1236 # process inclusions
1237 return self._process_node_include(trace_node, self._process_trace_node_include,
1238 process_children_include)
1239
1240 # Processes the inclusions of the configuration node, modifying it
1241 # during the process.
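#
# As an illustration, a data stream type node could use the `$include`
# property like this (a hypothetical sketch; the file names are made
# up):
#
#     $include:
#       - base-stream.yaml
#     event-record-types:
#       my_event:
#         $include:
#           - my-event.yaml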
1242 def _process_config_includes(self):
1243 # Process inclusions in this order:
1244 #
1245 # 1. Clock type node and event record type nodes (the order
1246 # between those is not important).
1247 #
1248 # 2. Data stream type nodes.
1249 #
1250 # 3. Trace type node.
1251 #
1252 # 4. Trace node.
1253 #
1254 # This is because:
1255 #
1256 # * A trace node can include a trace type node, clock type
1257 # nodes, data stream type nodes, and event record type nodes.
1258 #
1259 # * A trace type node can include clock type nodes, data stream
1260 # type nodes, and event record type nodes.
1261 #
1262 # * A data stream type node can include event record type nodes.
1263 #
1264 # First, make sure the configuration node itself is valid for
1265 # the inclusion processing stage.
1266 self._schema_validator.validate(self.config_node, 'config/3/config-pre-include')
1267
1268 # Process trace node inclusions.
1269 #
1270 # self._process_trace_node_include() returns a new (or the same)
1271 # trace node without any `$include` property in it, recursively.
1272 self.config_node['trace'] = self._process_trace_node_include(self.config_node['trace'])
1273
1274 def _parse(self):
1275 # process configuration node inclusions
1276 self._process_config_includes()
1277
1278 # Expand field type nodes.
1279 #
1280 # This process:
1281 #
1282 # 1. Replaces field type aliases with "effective" field type
1283 # nodes, recursively.
1284 #
1285 # After this step, the `$field-type-aliases` property of the
1286 # trace type node is gone.
1287 #
1288 # 2. Applies inheritance, following the `$inherit` properties.
1289 #
1290 # After this step, field type nodes do not contain `$inherit`
1291 # properties.
1292 #
1293 # This is done blindly, in that the process _doesn't_ validate
1294 # field type nodes at this point.
1295 self._expand_fts()
1296
1297 # Substitute log level aliases.
1298 #
1299 # This process:
1300 #
1301 # 1. Replaces log level aliases in event record type nodes with
1302 # their numeric equivalents as found in the
1303 # `$log-level-aliases` property of the trace type node.
1304 #
1305 # 2. Removes the `$log-level-aliases` property from the trace
1306 # type node.
1307 self._sub_log_level_aliases()
1308
1309 # At this point, the configuration node must be valid as an
1310 # effective configuration node.
1311 self._schema_validator.validate(self.config_node, 'config/3/config')
1312
1313 # Normalize properties.
1314 #
1315 # This process removes `None` properties and chooses specific
1316 # enumerators when aliases exist (for example, `big-endian`
1317 # instead of `be`).
1318 #
1319 # The goal of this is that, if the user then gets this parser's
1320 # `config_node` property, it has a normal and very readable
1321 # form.
1322 #
1323 # It also makes _create_config() easier to implement because it
1324 # doesn't need to check for `None` nodes or enumerator aliases.
1325 self._normalize_props()
1326
1327 # Set the target byte order.
1328 self._set_target_byte_order()
1329
1330 # Create a barectf configuration object from the configuration
1331 # node.
1332 self._create_config()
1333
1334 @property
1335 def config(self) -> barectf_config.Configuration:
1336 return self._config
1337
1338 @property
1339 def config_node(self) -> _MapNode:
1340 return typing.cast(barectf_config_parse_common._ConfigNodeV3, self._root_node).config_node