config_parse.py: replace `'...'.format()` with f-strings
[deliverable/barectf.git] / barectf / config_parse.py
# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from barectf import metadata
from barectf import config
import pkg_resources
import collections
import jsonschema
import os.path
import enum
import yaml
import uuid
import copy
import os


# The context of a configuration parsing error.
#
# Such a context object has a name and, optionally, a message.
class _ConfigParseErrorCtx:
    def __init__(self, name, msg=None):
        self._name = name
        self._msg = msg

    @property
    def name(self):
        return self._name

    @property
    def msg(self):
        return self._msg


# Appends the context having the object name `obj_name` and the
# (optional) message `msg` to the `_ConfigParseError` exception `exc`
# and then raises `exc` again.
def _append_error_ctx(exc, obj_name, msg=None):
    exc.append_ctx(obj_name, msg)
    raise


# A configuration parsing error.
#
# Such an error object contains a list of contexts (`ctx` property).
#
# The first context of this list is the most specific context, while
# the last is the most general.
#
# Use append_ctx() to append a context to an existing configuration
# parsing error when you catch it before raising it again. You can use
# _append_error_ctx() to do exactly this in a single call.
class _ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        return self._ctx

    def append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigParseErrorCtx(name, msg))


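# For example (hypothetical usage of this module's internal API), a
# parsing error caught deep in the parser gains context as it
# propagates:
#
#     try:
#         raise _ConfigParseError('`size` property', 'Expecting an integer')
#     except _ConfigParseError as exc:
#         _append_error_ctx(exc, 'Integer field type')
#
# After this, `exc.ctx` holds the `size` property context first (most
# specific) and the integer field type context last (most general).

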
def _opt_to_public(obj):
    if obj is None:
        return

    return obj.to_public()


# Pseudo object base class.
#
# A concrete pseudo object contains the same data as its public version,
# but it's mutable.
#
# The to_public() method converts the pseudo object to an equivalent
# public, immutable object, caching the result so as to always return
# the same Python object.
class _PseudoObj:
    def __init__(self):
        self._public = None

    def to_public(self):
        if self._public is None:
            self._public = self._to_public()

        return self._public

    def _to_public(self):
        raise NotImplementedError


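# For example (hypothetical usage), because to_public() caches its
# result, two calls on the same pseudo object yield the same public
# object:
#
#     pseudo_str = _String()
#     assert pseudo_str.to_public() is pseudo_str.to_public()

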
class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)


class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.size = None
        self.byte_order = None
        self.align = None
        self.signed = False
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)


class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)


class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.value_type = None
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)


class _String(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)


class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.element_type = None
        self.length = None

    @property
    def real_align(self):
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)


class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.min_align = 1
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))


class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))


class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description, self.freq,
                              self.error_cycles, self.offset_seconds,
                              self.offset_cycles, self.absolute,
                              self.return_ctype)


class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))


class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None
        self.events = collections.OrderedDict()

    def is_event_empty(self, event):
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))


class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.trace = None
        self.env = None
        self.clocks = None
        self.streams = None
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)


# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        raise RuntimeError(f'Missing local schema with URI `{uri}`')


# Schema validator which considers all the schemas found in the barectf
# package's `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self):
        subdirs = ['config', os.path.join('2', 'config')]
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    @staticmethod
    def _dict_from_ordered_dict(o_dict):
        dct = {}

        for k, v in o_dict.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)

            dct[k] = new_v

        return dct

    def _validate(self, instance, schema_short_id):
        # retrieve full schema ID from short ID
        schema_id = f'https://barectf.org/schemas/{schema_short_id}.json'
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigParseError` object, hiding any `jsonschema`
    # exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = f'Element {elem}'
                else:
                    ctx = f'`{elem}` property'

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(),
                # the method returns a
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = f': {msgs}'

            new_exc = _ConfigParseError(contexts.pop(),
                                        f'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')

            for ctx in reversed(contexts):
                new_exc.append_ctx(ctx)

            raise new_exc


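# For example (hypothetical usage), validating a configuration node
# before the inclusion processing stage:
#
#     validator = _SchemaValidator()
#     validator.validate(config_node, '2/config/config-pre-include')
#
# The short ID `2/config/config-pre-include` refers to the schema
# having the `https://barectf.org/schemas/2/config/config-pre-include.json`
# ID, as loaded from the package's `schemas` directory.

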
# Converts the byte order string `bo_str` to a `metadata.ByteOrder`
# enumerator.
def _byte_order_str_to_bo(bo_str):
    bo_str = bo_str.lower()

    if bo_str == 'le':
        return metadata.ByteOrder.LE
    elif bo_str == 'be':
        return metadata.ByteOrder.BE


# Converts the encoding string `encoding_str` to a `metadata.Encoding`
# enumerator.
def _encoding_str_to_encoding(encoding_str):
    encoding_str = encoding_str.lower()

    if encoding_str == 'utf-8' or encoding_str == 'utf8':
        return metadata.Encoding.UTF8
    elif encoding_str == 'ascii':
        return metadata.Encoding.ASCII
    elif encoding_str == 'none':
        return metadata.Encoding.NONE


# Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` and `prop` to format the message if
# it's invalid.
def _validate_identifier(iden, ctx_obj_name, prop):
    assert type(iden) is str
    ctf_keywords = {
        'align',
        'callsite',
        'clock',
        'enum',
        'env',
        'event',
        'floating_point',
        'integer',
        'stream',
        'string',
        'struct',
        'trace',
        'typealias',
        'typedef',
        'variant',
    }

    if iden in ctf_keywords:
        msg = f'Invalid {prop} (not a valid identifier): `{iden}`'
        raise _ConfigParseError(ctx_obj_name, msg)


# Validates the alignment `align`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` if it's invalid.
def _validate_alignment(align, ctx_obj_name):
    assert align >= 1

    if (align & (align - 1)) != 0:
        raise _ConfigParseError(ctx_obj_name,
                                f'Invalid alignment (not a power of two): {align}')


# Entities.
#
# Order of values is important here.
@enum.unique
class _Entity(enum.IntEnum):
    TRACE_PACKET_HEADER = 0
    STREAM_PACKET_CONTEXT = 1
    STREAM_EVENT_HEADER = 2
    STREAM_EVENT_CONTEXT = 3
    EVENT_CONTEXT = 4
    EVENT_PAYLOAD = 5


# A validator which validates the configured metadata for barectf
# specific needs.
#
# barectf needs:
#
# * The alignments of all header/context field types are at least 8.
#
# * There are no nested structure or array field types, except the
#   packet header field type's `uuid` field.
class _BarectfMetadataValidator:
    def __init__(self):
        self._type_to_validate_type_func = {
            _Struct: self._validate_struct_type,
            _Array: self._validate_array_type,
        }

    def _validate_struct_type(self, t, entity_root):
        if not entity_root:
            raise _ConfigParseError('Structure field type',
                                    'Inner structure field types are not supported as of this version')

        for field_name, field_type in t.fields.items():
            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
                if field_name == 'uuid':
                    # allow
                    continue

            try:
                self._validate_type(field_type, False)
            except _ConfigParseError as exc:
                _append_error_ctx(exc,
                                  f'Structure field type\'s field `{field_name}`')

    def _validate_array_type(self, t, entity_root):
        raise _ConfigParseError('Array field type',
                                'Not supported as of this version')

    def _validate_type(self, t, entity_root):
        func = self._type_to_validate_type_func.get(type(t))

        if func is not None:
            func(t, entity_root)

    def _validate_entity(self, t):
        if t is None:
            return

        # make sure the root field type has a real alignment of at least 8
        if t.real_align < 8:
            raise _ConfigParseError('Root field type',
                                    f'Effective alignment must be at least 8 (got {t.real_align})')

        assert type(t) is _Struct

        # validate field types
        self._validate_type(t, True)

    def _validate_event_entities_and_names(self, stream, ev):
        try:
            _validate_identifier(ev.name, 'Event type', 'event type name')

            self._cur_entity = _Entity.EVENT_CONTEXT

            try:
                self._validate_entity(ev.context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid context field type')

            self._cur_entity = _Entity.EVENT_PAYLOAD

            try:
                self._validate_entity(ev.payload_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid payload field type')

            if stream.is_event_empty(ev):
                raise _ConfigParseError('Event type', 'Empty')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, f'Event type `{ev.name}`')

    def _validate_stream_entities_and_names(self, stream):
        try:
            _validate_identifier(stream.name, 'Stream type', 'stream type name')
            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT

            try:
                self._validate_entity(stream.packet_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid packet context field type')

            self._cur_entity = _Entity.STREAM_EVENT_HEADER

            try:
                self._validate_entity(stream.event_header_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event header field type')

            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT

            try:
                self._validate_entity(stream.event_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event context field type')

            for ev in stream.events.values():
                self._validate_event_entities_and_names(stream, ev)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, f'Stream type `{stream.name}`')

    def _validate_entities_and_names(self, meta):
        self._cur_entity = _Entity.TRACE_PACKET_HEADER

        try:
            self._validate_entity(meta.trace.packet_header_type)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type',
                              'Invalid packet header field type')

        for stream in meta.streams.values():
            self._validate_stream_entities_and_names(stream)

    def _validate_default_stream(self, meta):
        if meta.default_stream_name is not None:
            if meta.default_stream_name not in meta.streams.keys():
                msg = f'Default stream type name (`{meta.default_stream_name}`) does not name an existing stream type'
                raise _ConfigParseError('Metadata', msg)

    def validate(self, meta):
        try:
            self._validate_entities_and_names(meta)
            self._validate_default_stream(meta)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'barectf metadata')


# A validator which validates special fields of trace, stream, and
# event types.
class _MetadataSpecialFieldsValidator:
    # Validates the packet header field type `t`.
    def _validate_trace_packet_header_type(self, t):
        ctx_obj_name = '`packet-header-type` property'

        # If there's more than one stream type, then the `stream_id`
        # (stream type ID) field is required.
        if len(self._meta.streams) > 1:
            if t is None:
                raise _ConfigParseError('Trace type',
                                        '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')

            if 'stream_id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`stream_id` field is required (because there\'s more than one stream type)')

        if t is None:
            return

        # The `magic` field type must be the first one.
        #
        # The `stream_id` field type's size (bits) must be large enough
        # to accommodate any stream type ID.
        for i, (field_name, field_type) in enumerate(t.fields.items()):
            if field_name == 'magic':
                if i != 0:
                    raise _ConfigParseError(ctx_obj_name,
                                            '`magic` field must be the first packet header field type\'s field')
            elif field_name == 'stream_id':
                if len(self._meta.streams) > (1 << field_type.size):
                    raise _ConfigParseError(ctx_obj_name,
                                            f'`stream_id` field\'s size is too small to accommodate {len(self._meta.streams)} stream types')

    # Validates the trace type of the metadata object `meta`.
    def _validate_trace(self, meta):
        self._validate_trace_packet_header_type(meta.trace.packet_header_type)

    # Validates the packet context field type of the stream type
    # `stream`.
    def _validate_stream_packet_context(self, stream):
        ctx_obj_name = '`packet-context-type` property'
        t = stream.packet_context_type
        assert t is not None

        # The `timestamp_begin` and `timestamp_end` field types must be
        # mapped to the `value` property of the same clock.
        ts_begin = t.fields.get('timestamp_begin')
        ts_end = t.fields.get('timestamp_end')

        if ts_begin is not None and ts_end is not None:
            if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
                raise _ConfigParseError(ctx_obj_name,
                                        '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')

        # The `packet_size` field type's size must be greater than or
        # equal to the `content_size` field type's size.
        if t.fields['content_size'].size > t.fields['packet_size'].size:
            raise _ConfigParseError(ctx_obj_name,
                                    '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')

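    # For example (illustrative YAML; the exact node layout depends on
    # the schema version in use), a packet context field type which
    # satisfies both rules above maps both timestamp fields to the
    # same clock and keeps `content_size` no larger than `packet_size`:
    #
    #     packet-context-type:
    #       class: struct
    #       fields:
    #         timestamp_begin:
    #           class: int
    #           size: 64
    #           property-mappings:
    #             - type: clock
    #               name: default
    #               property: value
    #         timestamp_end:
    #           class: int
    #           size: 64
    #           property-mappings:
    #             - type: clock
    #               name: default
    #               property: value
    #         packet_size:
    #           class: int
    #           size: 32
    #         content_size:
    #           class: int
    #           size: 32
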
    # Validates the event header field type of the stream type `stream`.
    def _validate_stream_event_header(self, stream):
        ctx_obj_name = '`event-header-type` property'
        t = stream.event_header_type

        # If there's more than one event type, then the `id` (event
        # type ID) field is required.
        if len(stream.events) > 1:
            if t is None:
                raise _ConfigParseError('Stream type',
                                        '`id` field is required (because there\'s more than one event type), but event header field type is missing')

            if 'id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`id` field is required (because there\'s more than one event type)')

        if t is None:
            return

        # The `id` field type's size (bits) must be large enough to
        # accommodate any event type ID.
        eid = t.fields.get('id')

        if eid is not None:
            if len(stream.events) > (1 << eid.size):
                raise _ConfigParseError(ctx_obj_name,
                                        f'`id` field\'s size is too small to accommodate {len(stream.events)} event types')

    # Validates the stream type `stream`.
    def _validate_stream(self, stream):
        self._validate_stream_packet_context(stream)
        self._validate_stream_event_header(stream)

    # Validates the trace and stream types of the metadata object
    # `meta`.
    def validate(self, meta):
        self._meta = meta

        try:
            try:
                self._validate_trace(meta)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Trace type')

            for stream in meta.streams.values():
                try:
                    self._validate_stream(stream)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, f'Stream type `{stream.name}`')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Metadata')


# A barectf YAML configuration parser.
#
# When you build such a parser, it parses the configuration file and
# creates a corresponding `config.Config` object which you can get with
# the `config` property.
#
# See the comments of _parse() for more implementation details about
# the parsing stages and general strategy.
class _YamlConfigParser:
    # Builds a barectf YAML configuration parser and parses the
    # configuration file having the path `path`.
    #
    # The parser considers the inclusion directories `include_dirs`,
    # ignores nonexistent inclusion files if `ignore_include_not_found`
    # is `True`, and dumps the effective configuration (as YAML) if
    # `dump_config` is `True`.
    #
    # Raises `_ConfigParseError` on parsing error.
    def __init__(self, path, include_dirs, ignore_include_not_found,
                 dump_config):
        self._root_path = path
        self._class_name_to_create_field_type_func = {
            'int': self._create_integer_field_type,
            'integer': self._create_integer_field_type,
            'flt': self._create_float_field_type,
            'float': self._create_float_field_type,
            'floating-point': self._create_float_field_type,
            'enum': self._create_enum_field_type,
            'enumeration': self._create_enum_field_type,
            'str': self._create_string_field_type,
            'string': self._create_string_field_type,
            'struct': self._create_struct_field_type,
            'structure': self._create_struct_field_type,
            'array': self._create_array_field_type,
        }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
        self._schema_validator = _SchemaValidator()
        self._parse()

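    # For example (hypothetical usage; this class is internal to
    # barectf), parsing a configuration file and getting the resulting
    # `config.Config` object through the `config` property:
    #
    #     parser = _YamlConfigParser('config.yaml', ['/usr/share/barectf'],
    #                                False, False)
    #     cfg = parser.config
    #
    # Construction either succeeds (the `config` property is then
    # available) or raises `_ConfigParseError`.
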
    # Sets the default byte order as found in the `metadata_node` node.
    def _set_byte_order(self, metadata_node):
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None

    # Sets the clock value property mapping of the pseudo integer field
    # type object `int_obj` as found in the `prop_mapping_node` node.
    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
        clock_name = prop_mapping_node['name']
        clock = self._clocks.get(clock_name)

        if clock is None:
            exc = _ConfigParseError('`property-mappings` property',
                                    f'Clock type `{clock_name}` does not exist')
            exc.append_ctx('Integer field type')
            raise exc

        prop_mapping = _PropertyMapping()
        prop_mapping.object = clock
        prop_mapping.prop = 'value'
        int_obj.property_mappings.append(prop_mapping)

    # Creates a pseudo integer field type from the node `node` and
    # returns it.
    def _create_integer_field_type(self, node):
        obj = _Integer()
        obj.size = node['size']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Integer field type')
            obj.align = align_node

        signed_node = node.get('signed')

        if signed_node is not None:
            obj.signed = signed_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        base_node = node.get('base')

        if base_node is not None:
            if base_node == 'bin':
                obj.base = 2
            elif base_node == 'oct':
                obj.base = 8
            elif base_node == 'dec':
                obj.base = 10
            else:
                assert base_node == 'hex'
                obj.base = 16

        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        pm_node = node.get('property-mappings')

        if pm_node is not None:
            assert len(pm_node) == 1
            self._set_int_clock_prop_mapping(obj, pm_node[0])

        return obj

    # Creates a pseudo floating point number field type from the node
    # `node` and returns it.
    def _create_float_field_type(self, node):
        obj = _FloatingPoint()
        size_node = node['size']
        obj.exp_size = size_node['exp']
        obj.mant_size = size_node['mant']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Floating point number field type')
            obj.align = align_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        return obj

    # Creates a pseudo enumeration field type from the node `node` and
    # returns it.
    def _create_enum_field_type(self, node):
        ctx_obj_name = 'Enumeration field type'
        obj = _Enum()

        # value (integer) field type
        try:
            obj.value_type = self._create_type(node['value-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, ctx_obj_name,
                              'Cannot create value (integer) field type')

        # members
        members_node = node.get('members')

        if members_node is not None:
            if obj.value_type.signed:
                value_min = -(1 << obj.value_type.size - 1)
                value_max = (1 << (obj.value_type.size - 1)) - 1
            else:
                value_min = 0
                value_max = (1 << obj.value_type.size) - 1

            cur = 0

            for m_node in members_node:
                if type(m_node) is str:
                    label = m_node
                    value = (cur, cur)
                    cur += 1
                else:
                    assert type(m_node) is collections.OrderedDict
                    label = m_node['label']
                    value = m_node['value']

                    if type(value) is int:
                        cur = value + 1
                        value = (value, value)
                    else:
                        assert type(value) is list
                        assert len(value) == 2
                        mn = value[0]
                        mx = value[1]

                        if mn > mx:
                            exc = _ConfigParseError(ctx_obj_name)
                            exc.append_ctx(f'Member `{label}`',
                                           f'Invalid integral range ({mn} > {mx})')
                            raise exc

                        value = (mn, mx)
                        cur = mx + 1

                # Make sure that all the integral values of the range
                # fit the enumeration field type's integer value field
                # type depending on its size (bits).
                member_obj_name = f'Member `{label}`'
                msg = f'Value {value[0]} is outside the value type range [{value_min}, {value_max}]'

                try:
                    if value[0] < value_min or value[0] > value_max:
                        raise _ConfigParseError(member_obj_name, msg)

                    if value[1] < value_min or value[1] > value_max:
                        raise _ConfigParseError(member_obj_name, msg)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name)

                obj.members[label] = value

        return obj

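    # For example (illustrative YAML), a member node can be a plain
    # string (its value is then the next implicit value, starting at
    # 0), or a mapping with an explicit value or integral range:
    #
    #     members:
    #       - RUNNING            # value 0 (implicit)
    #       - WAITING            # value 1 (implicit)
    #       - label: ERROR
    #         value: 255
    #       - label: RESERVED
    #         value: [256, 511]
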
    # Creates a pseudo string field type from the node `node` and
    # returns it.
    def _create_string_field_type(self, node):
        obj = _String()
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        return obj

    # Creates a pseudo structure field type from the node `node` and
    # returns it.
    def _create_struct_field_type(self, node):
        ctx_obj_name = 'Structure field type'
        obj = _Struct()
        min_align_node = node.get('min-align')

        if min_align_node is not None:
            _validate_alignment(min_align_node, ctx_obj_name)
            obj.min_align = min_align_node

        fields_node = node.get('fields')

        if fields_node is not None:
            for field_name, field_node in fields_node.items():
                _validate_identifier(field_name, ctx_obj_name, 'field name')

                try:
                    obj.fields[field_name] = self._create_type(field_node)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name,
                                      f'Cannot create field `{field_name}`')

        return obj

    # Creates a pseudo array field type from the node `node` and
    # returns it.
    def _create_array_field_type(self, node):
        obj = _Array()
        obj.length = node['length']

        try:
            obj.element_type = self._create_type(node['element-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Array field type',
                              'Cannot create element field type')

        return obj

    # Creates a pseudo field type from the node `node` and returns it.
    #
    # This method checks the `class` property of `node` to determine
    # which function of `self._class_name_to_create_field_type_func` to
    # call to create the corresponding pseudo field type.
    def _create_type(self, type_node):
        return self._class_name_to_create_field_type_func[type_node['class']](type_node)

    # Creates a pseudo clock type from the node `node` and returns it.
    def _create_clock(self, node):
        clock = _Clock()
        uuid_node = node.get('uuid')

        if uuid_node is not None:
            try:
                clock.uuid = uuid.UUID(uuid_node)
            except ValueError as exc:
                raise _ConfigParseError('Clock type',
                                        f'Malformed UUID `{uuid_node}`: {exc}')

        descr_node = node.get('description')

        if descr_node is not None:
            clock.description = descr_node

        freq_node = node.get('freq')

        if freq_node is not None:
            clock.freq = freq_node

        error_cycles_node = node.get('error-cycles')

        if error_cycles_node is not None:
            clock.error_cycles = error_cycles_node

        offset_node = node.get('offset')

        if offset_node is not None:
            offset_cycles_node = offset_node.get('cycles')

            if offset_cycles_node is not None:
                clock.offset_cycles = offset_cycles_node

            offset_seconds_node = offset_node.get('seconds')

            if offset_seconds_node is not None:
                clock.offset_seconds = offset_seconds_node

        absolute_node = node.get('absolute')

        if absolute_node is not None:
            clock.absolute = absolute_node

        return_ctype_node = node.get('$return-ctype')

        if return_ctype_node is None:
            # barectf 2.1: `return-ctype` property was renamed to
            # `$return-ctype`
            return_ctype_node = node.get('return-ctype')

        if return_ctype_node is not None:
            clock.return_ctype = return_ctype_node

        return clock

    # Registers all the clock types of the metadata node
    # `metadata_node`, creating pseudo clock types during the process,
    # within this parser.
    #
    # The pseudo clock types in `self._clocks` are then accessible when
    # creating a pseudo integer field type (see
    # _create_integer_field_type() and _set_int_clock_prop_mapping()).
    def _register_clocks(self, metadata_node):
        self._clocks = collections.OrderedDict()
        clocks_node = metadata_node.get('clocks')

        if clocks_node is None:
            return

        for clock_name, clock_node in clocks_node.items():
            _validate_identifier(clock_name, 'Metadata', 'clock type name')
            assert clock_name not in self._clocks

            try:
                clock = self._create_clock(clock_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  f'Cannot create clock type `{clock_name}`')

            clock.name = clock_name
            self._clocks[clock_name] = clock

    # Creates an environment object (`collections.OrderedDict`) from
    # the metadata node `metadata_node` and returns it.
    def _create_env(self, metadata_node):
        env_node = metadata_node.get('env')

        if env_node is None:
            return collections.OrderedDict()

        for env_name, env_value in env_node.items():
            _validate_identifier(env_name, 'Metadata',
                                 'environment variable name')

        return copy.deepcopy(env_node)

    # Creates a pseudo trace type from the metadata node
    # `metadata_node` and returns it.
    def _create_trace(self, metadata_node):
        ctx_obj_name = 'Trace type'
        trace = _Trace()
        trace_node = metadata_node['trace']
        trace.byte_order = self._bo
        uuid_node = trace_node.get('uuid')

        if uuid_node is not None:
            # The `uuid` property of the trace type node can be `auto`
            # to make barectf generate a UUID.
            if uuid_node == 'auto':
                trace.uuid = uuid.uuid1()
            else:
                try:
                    trace.uuid = uuid.UUID(uuid_node)
                except ValueError as exc:
                    raise _ConfigParseError(ctx_obj_name,
                                            f'Malformed UUID `{uuid_node}`: {exc}')

        pht_node = trace_node.get('packet-header-type')

        if pht_node is not None:
            try:
                trace.packet_header_type = self._create_type(pht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet header field type')

        return trace

    # Creates a pseudo event type from the event node `event_node` and
    # returns it.
    def _create_event(self, event_node):
        ctx_obj_name = 'Event type'
        event = _Event()
        log_level_node = event_node.get('log-level')

        if log_level_node is not None:
            assert type(log_level_node) is int
            event.log_level = metadata.LogLevel(None, log_level_node)

        ct_node = event_node.get('context-type')

        if ct_node is not None:
            try:
                event.context_type = self._create_type(ct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create context field type')

        pt_node = event_node.get('payload-type')

        if pt_node is not None:
            try:
                event.payload_type = self._create_type(pt_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create payload field type')

        return event

    # Creates a pseudo stream type named `stream_name` from the stream
    # node `stream_node` and returns it.
    def _create_stream(self, stream_name, stream_node):
        ctx_obj_name = 'Stream type'
        stream = _Stream()
        pct_node = stream_node.get('packet-context-type')

        if pct_node is not None:
            try:
                stream.packet_context_type = self._create_type(pct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet context field type')

        eht_node = stream_node.get('event-header-type')

        if eht_node is not None:
            try:
                stream.event_header_type = self._create_type(eht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event header field type')

        ect_node = stream_node.get('event-context-type')

        if ect_node is not None:
            try:
                stream.event_context_type = self._create_type(ect_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event context field type')

        events_node = stream_node['events']
        cur_id = 0

        for ev_name, ev_node in events_node.items():
            try:
                ev = self._create_event(ev_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  f'Cannot create event type `{ev_name}`')

            ev.id = cur_id
            ev.name = ev_name
            stream.events[ev_name] = ev
            cur_id += 1

        default_node = stream_node.get('$default')

        if default_node is not None:
            if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
                msg = f'Cannot specify more than one default stream type (default stream type already set to `{self._meta.default_stream_name}`)'
                raise _ConfigParseError('Stream type', msg)

            self._meta.default_stream_name = stream_name

        return stream

PP
1288 # Creates a `collections.OrderedDict` object where keys are stream
1289 # type names and values are pseudo stream types from the metadata
1290 # node `metadata_node` and returns it.
7f4429f2
PP
1291 def _create_streams(self, metadata_node):
1292 streams = collections.OrderedDict()
7f4429f2 1293 streams_node = metadata_node['streams']
7f4429f2
PP
1294 cur_id = 0
1295
1296 for stream_name, stream_node in streams_node.items():
1297 try:
1298 stream = self._create_stream(stream_name, stream_node)
9fb5657f 1299 except _ConfigParseError as exc:
131d409a 1300 _append_error_ctx(exc, 'Metadata',
c74b9b04 1301 f'Cannot create stream type `{stream_name}`')
7f4429f2
PP
1302
1303 stream.id = cur_id
6839ffba 1304 stream.name = stream_name
7f4429f2
PP
1305 streams[stream_name] = stream
1306 cur_id += 1
1307
1308 return streams
1309
aad8e5e8
PP
1310 # Creates a pseudo metadata object from the configuration node
1311 # `root` and returns it.
7f4429f2
PP
1312 def _create_metadata(self, root):
1313 self._meta = _Metadata()
7f4429f2
PP
1314 metadata_node = root['metadata']
1315
7f4429f2
PP
1316 if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
1317 default_stream_node = metadata_node['$default-stream']
7f4429f2
PP
1318 self._meta.default_stream_name = default_stream_node
1319
1320 self._set_byte_order(metadata_node)
1321 self._register_clocks(metadata_node)
1322 self._meta.clocks = self._clocks
7f4429f2
PP
1323 self._meta.env = self._create_env(metadata_node)
1324 self._meta.trace = self._create_trace(metadata_node)
7f4429f2
PP
1325 self._meta.streams = self._create_streams(metadata_node)
1326
aad8e5e8 1327 # validate the pseudo metadata object
ace614f2
PP
1328 _MetadataSpecialFieldsValidator().validate(self._meta)
1329 _BarectfMetadataValidator().validate(self._meta)
7f4429f2
PP
1330
1331 return self._meta
1332
aad8e5e8
PP
1333 # Gets and validates the tracing prefix as found in the
1334 # configuration node `config_node` and returns it.
6839ffba
PP
1335 def _get_prefix(self, config_node):
1336 prefix = config_node.get('prefix', 'barectf_')
1bf9d86d 1337 _validate_identifier(prefix, '`prefix` property', 'prefix')
6839ffba 1338 return prefix
7f4429f2 1339
aad8e5e8
PP
1340 # Gets the options as found in the configuration node `config_node`
1341 # and returns a corresponding `config.ConfigOptions` object.
6839ffba
PP
1342 def _get_options(self, config_node):
1343 gen_prefix_def = False
1344 gen_default_stream_def = False
1345 options_node = config_node.get('options')
7f4429f2 1346
6839ffba
PP
1347 if options_node is not None:
1348 gen_prefix_def = options_node.get('gen-prefix-def',
1349 gen_prefix_def)
1350 gen_default_stream_def = options_node.get('gen-default-stream-def',
1351 gen_default_stream_def)
7f4429f2
PP
1352
1353 return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)
1354
aad8e5e8
PP
1355 # Returns the last included file name from the parser's inclusion
1356 # file name stack.
7f4429f2
PP
1357 def _get_last_include_file(self):
1358 if self._include_stack:
1359 return self._include_stack[-1]
1360
1f2c551a 1361 return self._root_path
7f4429f2 1362
aad8e5e8
PP
1363 # Loads the inclusion file having the path `yaml_path` and returns
1364 # its content as a `collections.OrderedDict` object.
7f4429f2
PP
1365 def _load_include(self, yaml_path):
1366 for inc_dir in self._include_dirs:
6839ffba
PP
1367 # Current inclusion dir + file name path.
1368 #
1369 # Note: os.path.join() only takes the last argument if it's
1370 # absolute.
7f4429f2
PP
1371 inc_path = os.path.join(inc_dir, yaml_path)
1372
1373 # real path (symbolic links resolved)
1374 real_path = os.path.realpath(inc_path)
1375
1376 # normalized path (weird stuff removed!)
1377 norm_path = os.path.normpath(real_path)
1378
1379 if not os.path.isfile(norm_path):
6839ffba 1380 # file doesn't exist: skip
7f4429f2
PP
1381 continue
1382
1383 if norm_path in self._include_stack:
1384 base_path = self._get_last_include_file()
c74b9b04
PP
1385 raise _ConfigParseError(f'File `{base_path}`',
1386 f'Cannot recursively include file `{norm_path}`')
7f4429f2
PP
1387
1388 self._include_stack.append(norm_path)
1389
1390 # load raw content
1391 return self._yaml_ordered_load(norm_path)
1392
1393 if not self._ignore_include_not_found:
1394 base_path = self._get_last_include_file()
c74b9b04
PP
1395 raise _ConfigParseError(f'File `{base_path}`',
1396 f'Cannot include file `{yaml_path}`: file not found in inclusion directories')
8dfc91b0 1397
aad8e5e8
PP
1398 # Returns a list of all the inclusion file paths as found in the
1399 # inclusion node `include_node`.
7f4429f2
PP
1400 def _get_include_paths(self, include_node):
1401 if include_node is None:
6839ffba 1402 # none
7f4429f2
PP
1403 return []
1404
6839ffba
PP
1405 if type(include_node) is str:
1406 # wrap as array
7f4429f2
PP
1407 return [include_node]
1408
6839ffba
PP
1409 # already an array
1410 assert type(include_node) is list
1411 return include_node
7f4429f2 1412
aad8e5e8
PP
1413 # Updates the node `base_node` with an overlay node `overlay_node`.
1414 #
1415 # Both the inclusion and field type inheritance features use this
1416 # update mechanism.
7f4429f2
PP
1417 def _update_node(self, base_node, overlay_node):
1418 for olay_key, olay_value in overlay_node.items():
1419 if olay_key in base_node:
1420 base_value = base_node[olay_key]
1421
6839ffba 1422 if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
aad8e5e8 1423 # merge both objects
7f4429f2 1424 self._update_node(base_value, olay_value)
6839ffba 1425 elif type(olay_value) is list and type(base_value) is list:
7f4429f2
PP
1426 # append extension array items to base items
1427 base_value += olay_value
1428 else:
aad8e5e8 1429 # fall back to replacing base property
7f4429f2
PP
1430 base_node[olay_key] = olay_value
1431 else:
aad8e5e8 1432 # set base property from overlay property
7f4429f2
PP
1433 base_node[olay_key] = olay_value
1434
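    # For example (illustrative values; plain `dict` objects shown for
    # readability, whereas the real nodes are `collections.OrderedDict`
    # objects), with
    #
    #     base_node = {'a': {'x': 1}, 'b': [1], 'c': 'old'}
    #     overlay_node = {'a': {'y': 2}, 'b': [2], 'c': 'new', 'd': True}
    #
    # updating leaves `base_node` as
    #
    #     {'a': {'x': 1, 'y': 2}, 'b': [1, 2], 'c': 'new', 'd': True}
    #
    # that is, mappings are merged recursively, arrays are
    # concatenated, and scalars are replaced.
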
    # Processes inclusions using `last_overlay_node` as the last
    # overlay node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, f'File `{cur_base_path}`')

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `base_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

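    # For example (illustrative YAML), with an inclusion file
    # `base-event.yaml` containing
    #
    #     payload-type:
    #       class: struct
    #       fields:
    #         msg:
    #           class: string
    #
    # an event type node such as
    #
    #     $include: [base-event.yaml]
    #     log-level: 13
    #
    # resolves to the content of `base-event.yaml` patched with the
    # remaining `log-level` property.
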
aad8e5e8
PP
1505 # Process the inclusions of the event type node `event_node`,
1506 # returning the effective node.
7f4429f2 1507 def _process_event_include(self, event_node):
aad8e5e8 1508 # Make sure the event type node is valid for the inclusion
6839ffba
PP
1509 # processing stage.
1510 self._schema_validator.validate(event_node,
1511 '2/config/event-pre-include')
1512
1513 # process inclusions
1514 return self._process_node_include(event_node,
7f4429f2
PP
1515 self._process_event_include)
1516
aad8e5e8
PP
1517 # Process the inclusions of the stream type node `stream_node`,
1518 # returning the effective node.
7f4429f2
PP
1519 def _process_stream_include(self, stream_node):
1520 def process_children_include(stream_node):
1521 if 'events' in stream_node:
1522 events_node = stream_node['events']
1523
6839ffba
PP
1524 for key in list(events_node):
1525 events_node[key] = self._process_event_include(events_node[key])
7f4429f2 1526
aad8e5e8 1527 # Make sure the stream type node is valid for the inclusion
6839ffba
PP
1528 # processing stage.
1529 self._schema_validator.validate(stream_node,
1530 '2/config/stream-pre-include')
7f4429f2 1531
6839ffba
PP
1532 # process inclusions
1533 return self._process_node_include(stream_node,
7f4429f2
PP
1534 self._process_stream_include,
1535 process_children_include)
1536
aad8e5e8
PP
1537 # Process the inclusions of the trace type node `trace_node`,
1538 # returning the effective node.
7f4429f2 1539 def _process_trace_include(self, trace_node):
aad8e5e8 1540 # Make sure the trace type node is valid for the inclusion
6839ffba
PP
1541 # processing stage.
1542 self._schema_validator.validate(trace_node,
1543 '2/config/trace-pre-include')
1544
1545 # process inclusions
1546 return self._process_node_include(trace_node,
7f4429f2
PP
1547 self._process_trace_include)
1548
aad8e5e8
PP
1549 # Process the inclusions of the clock type node `clock_node`,
1550 # returning the effective node.
7f4429f2 1551 def _process_clock_include(self, clock_node):
aad8e5e8 1552 # Make sure the clock type node is valid for the inclusion
6839ffba
PP
1553 # processing stage.
1554 self._schema_validator.validate(clock_node,
1555 '2/config/clock-pre-include')
1556
1557 # process inclusions
1558 return self._process_node_include(clock_node,
7f4429f2
PP
1559 self._process_clock_include)
1560
aad8e5e8
PP
1561 # Process the inclusions of the metadata node `metadata_node`,
1562 # returning the effective node.
7f4429f2
PP
1563 def _process_metadata_include(self, metadata_node):
1564 def process_children_include(metadata_node):
1565 if 'trace' in metadata_node:
1566 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
1567
1568 if 'clocks' in metadata_node:
1569 clocks_node = metadata_node['clocks']
1570
6839ffba
PP
1571 for key in list(clocks_node):
1572 clocks_node[key] = self._process_clock_include(clocks_node[key])
7f4429f2
PP
1573
1574 if 'streams' in metadata_node:
1575 streams_node = metadata_node['streams']
1576
6839ffba
PP
1577 for key in list(streams_node):
1578 streams_node[key] = self._process_stream_include(streams_node[key])
7f4429f2 1579
aad8e5e8 1580 # Make sure the metadata node is valid for the inclusion
6839ffba
PP
1581 # processing stage.
1582 self._schema_validator.validate(metadata_node,
1583 '2/config/metadata-pre-include')
7f4429f2 1584
6839ffba
PP
1585 # process inclusions
1586 return self._process_node_include(metadata_node,
7f4429f2
PP
1587 self._process_metadata_include,
1588 process_children_include)
1589
aad8e5e8
PP
1590 # Process the inclusions of the configuration node `config_node`,
1591 # returning the effective node.
6839ffba
PP
1592 def _process_config_includes(self, config_node):
1593 # Process inclusions in this order:
1594 #
aad8e5e8
PP
 1595 # 1. Clock type nodes, event type nodes, and the trace type node
1596 # (the order between those is not important).
6839ffba 1597 #
aad8e5e8 1598 # 2. Stream type nodes.
6839ffba 1599 #
aad8e5e8 1600 # 3. Metadata node.
7f4429f2 1601 #
6839ffba 1602 # This is because:
7f4429f2 1603 #
aad8e5e8
PP
1604 # * A metadata node can include clock type nodes, a trace type
1605 # node, stream type nodes, and event type nodes (indirectly).
7f4429f2 1606 #
aad8e5e8 1607 # * A stream type node can include event type nodes.
7f4429f2 1608 #
6839ffba
PP
1609 # We keep a stack of absolute paths to included files
1610 # (`self._include_stack`) to detect recursion.
1611 #
1612 # First, make sure the configuration object itself is valid for
1613 # the inclusion processing stage.
1614 self._schema_validator.validate(config_node,
1615 '2/config/config-pre-include')
1616
aad8e5e8 1617 # Process metadata node inclusions.
6839ffba
PP
1618 #
1619 # self._process_metadata_include() returns a new (or the same)
aad8e5e8 1620 # metadata node without any `$include` property in it,
6839ffba
PP
1621 # recursively.
1622 config_node['metadata'] = self._process_metadata_include(config_node['metadata'])
1623
1624 return config_node
7f4429f2 1625
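# A hypothetical root configuration showing what this method resolves
# (the file name and property values are assumptions for the example):
# once _process_config_includes() returns, the `metadata` node is the
# merge of `base-metadata.yaml` with the local properties below, and
# no `$include` property remains anywhere in the configuration node.

_example_config_yaml = '''
version: '2.2'
metadata:
  $include: base-metadata.yaml
  streams:
    my_stream:
      events:
        my_event:
          payload-type: my_payload
'''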
aad8e5e8
PP
1626 # Expands the field type aliases found in the metadata node
1627 # `metadata_node` using the aliases of the `type_aliases_node` node.
1628 #
1629 # This method modifies `metadata_node`.
1630 #
1631 # When this method returns:
1632 #
1633 # * Any field type alias is replaced with its full field type
1634 # equivalent.
1635 #
1636 # * The `type-aliases` property of `metadata_node` is removed.
6839ffba
PP
1637 def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
1638 def resolve_field_type_aliases(parent_node, key, from_descr,
1639 alias_set=None):
1640 if key not in parent_node:
1641 return
1642
1643 # This set holds all the aliases we need to expand,
1644 # recursively. This is used to detect cycles.
1645 if alias_set is None:
1646 alias_set = set()
1647
1648 node = parent_node[key]
1649
1650 if node is None:
1651 return
1652
1653 if type(node) is str:
1654 alias = node
1655
1656 if alias not in resolved_aliases:
1657 # Only check for a field type alias cycle when we
1658 # didn't resolve the alias yet, as a given node can
1659 # refer to the same field type alias more than once.
1660 if alias in alias_set:
c74b9b04
PP
1661 msg = f'Cycle detected during the `{alias}` field type alias resolution'
1662 raise _ConfigParseError(from_descr, msg)
6839ffba
PP
1663
1664 # try to load field type alias node named `alias`
1665 if alias not in type_aliases_node:
9fb5657f 1666 raise _ConfigParseError(from_descr,
c74b9b04 1667 f'Field type alias `{alias}` does not exist')
6839ffba
PP
1668
1669 # resolve it
1670 alias_set.add(alias)
1671 resolve_field_type_aliases(type_aliases_node, alias,
1672 from_descr, alias_set)
1673 resolved_aliases.add(alias)
1674
1675 parent_node[key] = copy.deepcopy(type_aliases_node[node])
1676 return
1677
1678 # traverse, resolving field type aliases as needed
1679 for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
1680 resolve_field_type_aliases(node, pkey, from_descr, alias_set)
1681
1682 # structure field type fields
1683 pkey = 'fields'
1684
1685 if pkey in node:
1686 assert type(node[pkey]) is collections.OrderedDict
1687
1688 for field_name in node[pkey]:
1689 resolve_field_type_aliases(node[pkey], field_name,
1690 from_descr, alias_set)
1691
ace614f2
PP
1692 def resolve_field_type_aliases_from(parent_node, key):
1693 resolve_field_type_aliases(parent_node, key,
c74b9b04 1694 f'`{key}` property')
6839ffba
PP
1695
1696 # set of resolved field type aliases
1697 resolved_aliases = set()
1698
aad8e5e8
PP
1699 # Expand field type aliases within trace, stream, and event
1700 # types now.
ace614f2
PP
1701 try:
1702 resolve_field_type_aliases_from(metadata_node['trace'],
1703 'packet-header-type')
1704 except _ConfigParseError as exc:
1705 _append_error_ctx(exc, 'Trace type')
6839ffba
PP
1706
1707 for stream_name, stream in metadata_node['streams'].items():
6839ffba 1708 try:
ace614f2
PP
1709 resolve_field_type_aliases_from(stream, 'packet-context-type')
1710 resolve_field_type_aliases_from(stream, 'event-header-type')
1711 resolve_field_type_aliases_from(stream, 'event-context-type')
1712
6839ffba 1713 for event_name, event in stream['events'].items():
ace614f2
PP
1714 try:
1715 resolve_field_type_aliases_from(event, 'context-type')
1716 resolve_field_type_aliases_from(event, 'payload-type')
1717 except _ConfigParseError as exc:
c74b9b04 1718 _append_error_ctx(exc, f'Event type `{event_name}`')
9fb5657f 1719 except _ConfigParseError as exc:
c74b9b04 1720 _append_error_ctx(exc, f'Stream type `{stream_name}`')
6839ffba 1721
aad8e5e8 1722 # remove the (now unneeded) `type-aliases` node
6839ffba
PP
1723 del metadata_node['type-aliases']
1724
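# A worked example of the expansion above; the field type properties
# follow the barectf 2 YAML format and are illustrative. Given the
# alias:
#
#     type-aliases:
#       uint32:
#         class: int
#         size: 32
#
# an event type node property such as `payload-type: uint32` is
# replaced by a deep copy of the full field type, equivalent to:

expanded_payload_type = {'class': 'int', 'size': 32}

# Cyclic aliases and references to undefined aliases are reported as
# _ConfigParseError exceptions carrying the offending property's
# context.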
aad8e5e8
PP
1725 # Applies field type inheritance to all field types found in
1726 # `metadata_node`.
1727 #
1728 # This method modifies `metadata_node`.
1729 #
1730 # When this method returns, no field type node has an `$inherit` or
1731 # `inherit` property.
6839ffba
PP
1732 def _expand_field_type_inheritance(self, metadata_node):
1733 def apply_inheritance(parent_node, key):
1734 if key not in parent_node:
1735 return
1736
1737 node = parent_node[key]
1738
1739 if node is None:
1740 return
1741
1742 # process children first
1743 for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
1744 apply_inheritance(node, pkey)
1745
1746 # structure field type fields
1747 pkey = 'fields'
1748
1749 if pkey in node:
1750 assert type(node[pkey]) is collections.OrderedDict
1751
 1752 for field_name in node[pkey]:
1753 apply_inheritance(node[pkey], field_name)
1754
1755 # apply inheritance of this node
1756 if 'inherit' in node:
1757 # barectf 2.1: `inherit` property was renamed to `$inherit`
1758 assert '$inherit' not in node
1759 node['$inherit'] = node['inherit']
1760 del node['inherit']
1761
1762 inherit_key = '$inherit'
1763
1764 if inherit_key in node:
1765 assert type(node[inherit_key]) is collections.OrderedDict
1766
1767 # apply inheritance below
1768 apply_inheritance(node, inherit_key)
1769
1770 # `node` is an overlay on the `$inherit` node
1771 base_node = node[inherit_key]
1772 del node[inherit_key]
1773 self._update_node(base_node, node)
1774
1775 # set updated base node as this node
1776 parent_node[key] = base_node
1777
1778 apply_inheritance(metadata_node['trace'], 'packet-header-type')
1779
1780 for stream in metadata_node['streams'].values():
1781 apply_inheritance(stream, 'packet-context-type')
1782 apply_inheritance(stream, 'event-header-type')
1783 apply_inheritance(stream, 'event-context-type')
1784
1785 for event in stream['events'].values():
1786 apply_inheritance(event, 'context-type')
1787 apply_inheritance(event, 'payload-type')
1788
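# A small worked example of one inheritance step, using plain dicts
# for brevity (the real nodes are collections.OrderedDict objects and
# the property names follow the barectf 2 YAML format): the inheriting
# node acts as an overlay on its `$inherit` base, so its properties
# win while the base's other properties are kept.

base_ft = {'class': 'int', 'size': 32, 'signed': False}
child_ft = {'$inherit': base_ft, 'size': 64}

# After inheritance is applied, the effective field type is
# equivalent to:
effective_ft = {'class': 'int', 'size': 64, 'signed': False}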
aad8e5e8
PP
1789 # Calls _expand_field_type_aliases() and
1790 # _expand_field_type_inheritance() if the metadata node
1791 # `metadata_node` has a `type-aliases` property.
6839ffba
PP
1792 def _expand_field_types(self, metadata_node):
1793 type_aliases_node = metadata_node.get('type-aliases')
1794
1795 if type_aliases_node is None:
 1796 # If there's no `type-aliases` node, then there are no field
 1797 # type aliases and therefore no possible inheritance.
1798 return
1799
1800 # first, expand field type aliases
1801 self._expand_field_type_aliases(metadata_node, type_aliases_node)
1802
1803 # next, apply inheritance to create effective field types
1804 self._expand_field_type_inheritance(metadata_node)
1805
aad8e5e8
PP
1806 # Replaces the textual log levels in event type nodes of the
1807 # metadata node `metadata_node` with their numeric equivalent (as
1808 # found in the `$log-levels` or `log-levels` node of
1809 # `metadata_node`).
1810 #
1811 # This method modifies `metadata_node`.
1812 #
 1813 # When this method returns, the `$log-levels` or `log-levels`
1814 # property of `metadata_node` is removed.
6839ffba
PP
1815 def _expand_log_levels(self, metadata_node):
1816 if 'log-levels' in metadata_node:
aad8e5e8
PP
1817 # barectf 2.1: `log-levels` property was renamed to
1818 # `$log-levels`
8dfc91b0
PP
1819 assert '$log-levels' not in metadata_node
1820 metadata_node['$log-levels'] = metadata_node['log-levels']
1821 del metadata_node['log-levels']
6839ffba
PP
1822
1823 log_levels_key = '$log-levels'
1824 log_levels_node = metadata_node.get(log_levels_key)
1825
1826 if log_levels_node is None:
1827 # no log level aliases
1828 return
1829
1830 # not needed anymore
1831 del metadata_node[log_levels_key]
1832
1833 for stream_name, stream in metadata_node['streams'].items():
1834 try:
1835 for event_name, event in stream['events'].items():
1836 prop_name = 'log-level'
1837 ll_node = event.get(prop_name)
1838
1839 if ll_node is None:
1840 continue
1841
1842 if type(ll_node) is str:
1843 if ll_node not in log_levels_node:
ace614f2 1844 exc = _ConfigParseError('`log-level` property',
c74b9b04
PP
1845 f'Log level alias `{ll_node}` does not exist')
1846 exc.append_ctx(f'Event type `{event_name}`')
ace614f2 1847 raise exc
6839ffba
PP
1848
1849 event[prop_name] = log_levels_node[ll_node]
9fb5657f 1850 except _ConfigParseError as exc:
c74b9b04 1851 _append_error_ctx(exc, f'Stream type `{stream_name}`')
7f4429f2 1852
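# A short example of this expansion (the alias name and numeric value
# are arbitrary): with a `$log-levels` node mapping `warning` to 4, an
# event type node carrying `log-level: warning` ends up with
# `log-level: 4`, and the `$log-levels` node itself is removed. In
# plain Python terms:

log_levels_node = {'warning': 4}
event_node = {'log-level': 'warning'}
event_node['log-level'] = log_levels_node[event_node['log-level']]
assert event_node['log-level'] == 4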
aad8e5e8 1853 # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
7f4429f2
PP
1854 def _yaml_ordered_dump(self, node, **kwds):
1855 class ODumper(yaml.Dumper):
1856 pass
1857
1858 def dict_representer(dumper, node):
1859 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
1860 node.items())
1861
1862 ODumper.add_representer(collections.OrderedDict, dict_representer)
1863
6839ffba 1864 # Python -> YAML
7f4429f2
PP
1865 return yaml.dump(node, Dumper=ODumper, **kwds)
1866
aad8e5e8
PP
1867 # Loads the content of the YAML file having the path `yaml_path` as
1868 # a Python object.
1869 #
1870 # All YAML maps are loaded as `collections.OrderedDict` objects.
7f4429f2
PP
1871 def _yaml_ordered_load(self, yaml_path):
1872 class OLoader(yaml.Loader):
1873 pass
1874
1875 def construct_mapping(loader, node):
1876 loader.flatten_mapping(node)
1877
1878 return collections.OrderedDict(loader.construct_pairs(node))
1879
1880 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
1881 construct_mapping)
1882
1883 # YAML -> Python
1884 try:
1885 with open(yaml_path, 'r') as f:
1886 node = yaml.load(f, OLoader)
ace614f2 1887 except OSError as exc:
c74b9b04
PP
1888 raise _ConfigParseError(f'File `{yaml_path}`',
1889 f'Cannot open file: {exc}')
7f4429f2 1890
ace614f2 1891 assert type(node) is collections.OrderedDict
7f4429f2
PP
1892 return node
1893
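# Declaration order matters here: for example, the order of a
# structure field type's `fields` entries becomes the order of the
# fields in the generated CTF metadata, so a plain dict (unordered
# before CPython 3.7) isn't enough. A self-contained check of the
# same constructor trick:

import collections
import yaml

class _DemoLoader(yaml.Loader):
    pass

def _demo_construct_mapping(loader, node):
    loader.flatten_mapping(node)
    return collections.OrderedDict(loader.construct_pairs(node))

_DemoLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                            _demo_construct_mapping)

doc = yaml.load('b: 1\na: 2\n', _DemoLoader)
assert list(doc) == ['b', 'a']  # YAML declaration order is preserved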
c8270369 1894 def _parse(self):
7f4429f2
PP
1895 self._version = None
1896 self._include_stack = []
1897
6839ffba 1898 # load the configuration object as is from the root YAML file
7f4429f2 1899 try:
1f2c551a 1900 config_node = self._yaml_ordered_load(self._root_path)
9fb5657f 1901 except _ConfigParseError as exc:
131d409a 1902 _append_error_ctx(exc, 'Configuration',
c74b9b04 1903 f'Cannot parse YAML file `{self._root_path}`')
7f4429f2 1904
6839ffba
PP
1905 # Make sure the configuration object is minimally valid, that
1906 # is, it contains a valid `version` property.
1907 #
1908 # This step does not validate the whole configuration object
1909 # yet because we don't have an effective configuration object;
1910 # we still need to:
1911 #
1912 # * Process inclusions.
1913 # * Expand field types (inheritance and aliases).
1914 self._schema_validator.validate(config_node, 'config/config-min')
7f4429f2 1915
6839ffba
PP
1916 # Process configuration object inclusions.
1917 #
1918 # self._process_config_includes() returns a new (or the same)
1919 # configuration object without any `$include` property in it,
1920 # recursively.
1921 config_node = self._process_config_includes(config_node)
7f4429f2 1922
6839ffba
PP
1923 # Make sure that the current configuration object is valid
1924 # considering field types are not expanded yet.
1925 self._schema_validator.validate(config_node,
1926 '2/config/config-pre-field-type-expansion')
7f4429f2 1927
6839ffba
PP
1928 # Expand field types.
1929 #
1930 # This process:
1931 #
1932 # 1. Replaces field type aliases with "effective" field
1933 # types, recursively.
1934 #
1935 # After this step, the `type-aliases` property of the
1936 # `metadata` node is gone.
1937 #
aad8e5e8 1938 # 2. Applies inheritance, following the `$inherit`/`inherit`
6839ffba
PP
1939 # properties.
1940 #
1941 # After this step, field type objects do not contain
1942 # `$inherit` or `inherit` properties.
1943 #
1944 # This is done blindly, in that the process _doesn't_ validate
1945 # field type objects at this point.
1946 self._expand_field_types(config_node['metadata'])
7f4429f2 1947
6839ffba
PP
1948 # Make sure that the current configuration object is valid
1949 # considering log levels are not expanded yet.
1950 self._schema_validator.validate(config_node,
1951 '2/config/config-pre-log-level-expansion')
7f4429f2 1952
6839ffba
PP
1953 # Expand log levels, that is, replace log level strings with
1954 # their equivalent numeric values.
1955 self._expand_log_levels(config_node['metadata'])
7f4429f2 1956
6839ffba
PP
1957 # validate the whole, effective configuration object
1958 self._schema_validator.validate(config_node, '2/config/config')
7f4429f2
PP
1959
1960 # dump config if required
1961 if self._dump_config:
6839ffba 1962 print(self._yaml_ordered_dump(config_node, indent=2,
7f4429f2
PP
1963 default_flow_style=False))
1964
6839ffba
PP
1965 # get prefix, options, and metadata pseudo-object
1966 prefix = self._get_prefix(config_node)
1967 opts = self._get_options(config_node)
1968 pseudo_meta = self._create_metadata(config_node)
7f4429f2 1969
6839ffba 1970 # create public configuration
c8270369
PP
1971 self._config = config.Config(pseudo_meta.to_public(), prefix, opts)
1972
1973 @property
1974 def config(self):
1975 return self._config
7f4429f2
PP
1976
1977
1978def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
1979 try:
c8270369
PP
1980 return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
1981 dump_config).config
9fb5657f 1982 except _ConfigParseError as exc:
131d409a 1983 _append_error_ctx(exc, 'Configuration',
c74b9b04 1984 f'Cannot create configuration from YAML file `{path}`')
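# Hypothetical usage of this entry point (the argument values are
# assumptions for illustration; the public barectf API may wrap this
# function differently):

if __name__ == '__main__':
    cfg = _from_file('config.yaml', include_dirs=['.'],
                     ignore_include_not_found=False,
                     dump_config=False)

# On failure, the _ConfigParseError raised above carries its full
# context chain, from the most specific node (first) to the whole
# configuration (last), which a caller can format into a readable
# error message.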