config_parse.py: fix PEP 8 errors, as reported by `flake8`
# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from barectf import metadata
from barectf import config
import pkg_resources
import collections
import jsonschema
import os.path
import enum
import yaml
import uuid
import copy
import os

# The context of a configuration parsing error.
#
# Such a context object has a name and, optionally, a message.
class _ConfigParseErrorCtx:
    def __init__(self, name, msg=None):
        self._name = name
        self._msg = msg

    @property
    def name(self):
        return self._name

    @property
    def msg(self):
        return self._msg


# Appends the context having the object name `obj_name` and the
# (optional) message `msg` to the `_ConfigParseError` exception `exc`,
# and then raises `exc` again.
def _append_error_ctx(exc, obj_name, msg=None):
    exc.append_ctx(obj_name, msg)
    raise


# A configuration parsing error.
#
# Such an error object contains a list of contexts (`ctx` property).
#
# The first context of this list is the most specific context, while
# the last is the most general.
#
# Use append_ctx() to append a context to an existing configuration
# parsing error when you catch it before raising it again. You can use
# _append_error_ctx() to do exactly this in a single call.
class _ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        return self._ctx

    def append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigParseErrorCtx(name, msg))

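
# For example, a caller which catches a `_ConfigParseError` while
# parsing a stream type node can append its own context before
# re-raising (a minimal sketch; `_parse_stream_node()` is a
# hypothetical helper, not part of this module):
#
#     try:
#         _parse_stream_node(node)
#     except _ConfigParseError as exc:
#         _append_error_ctx(exc, 'Stream type `my_stream`')
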
def _opt_to_public(obj):
    if obj is None:
        return

    return obj.to_public()


# Pseudo object base class.
#
# A concrete pseudo object contains the same data as its public
# version, but it's mutable.
#
# The to_public() method converts the pseudo object to an equivalent
# public, immutable object, caching the result so as to always return
# the same Python object.
class _PseudoObj:
    def __init__(self):
        self._public = None

    def to_public(self):
        if self._public is None:
            self._public = self._to_public()

        return self._public

    def _to_public(self):
        raise NotImplementedError

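
# For example, for any concrete pseudo object `pseudo`, the caching in
# to_public() above guarantees that repeated conversions return the
# same public object (illustrative):
#
#     pub_a = pseudo.to_public()
#     pub_b = pseudo.to_public()
#     assert pub_a is pub_b
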
class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)


class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.size = None
        self.byte_order = None
        self.align = None
        self.signed = False
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)

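
# For example, given the default alignment rule of `_Integer.real_align`
# above, a 32-bit integer field type has an effective alignment of 8,
# while a 5-bit one has an effective alignment of 1 (a minimal sketch
# with illustrative sizes):
#
#     i = _Integer()
#     i.size = 32
#     assert i.real_align == 8
#     i.size = 5
#     assert i.real_align == 1
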

class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)


class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.value_type = None
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)


class _String(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)


class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.element_type = None
        self.length = None

    @property
    def real_align(self):
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)


class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.min_align = 1
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))


class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))


class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description, self.freq,
                              self.error_cycles, self.offset_seconds,
                              self.offset_cycles, self.absolute,
                              self.return_ctype)


class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))


class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None
        self.events = collections.OrderedDict()

    def is_event_empty(self, event):
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))


class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.trace = None
        self.env = None
        self.clocks = None
        self.streams = None
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)


# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        raise RuntimeError('Missing local schema with URI `{}`'.format(uri))


# Schema validator which considers all the schemas found in the
# barectf package's `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self):
        subdirs = ['config', os.path.join('2', 'config')]
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    @staticmethod
    def _dict_from_ordered_dict(o_dict):
        dct = {}

        for k, v in o_dict.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)

            dct[k] = new_v

        return dct

    def _validate(self, instance, schema_short_id):
        # retrieve full schema ID from short ID
        schema_id = 'https://barectf.org/schemas/{}.json'.format(schema_short_id)
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigParseError` object, hiding any `jsonschema`
    # exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = 'Element {}'.format(elem)
                else:
                    ctx = '`{}` property'.format(elem)

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(), the method
                # returns a
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = ': {}'.format(msgs)

            new_exc = _ConfigParseError(contexts.pop(),
                                        '{}{} (from schema `{}`)'.format(exc.message,
                                                                         schema_ctx,
                                                                         schema_short_id))

            for ctx in reversed(contexts):
                new_exc.append_ctx(ctx)

            raise new_exc

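
# For example, the short ID `2/config/config-pre-include` used later in
# this module maps to the full schema URI
# `https://barectf.org/schemas/2/config/config-pre-include.json`.
# A minimal, hypothetical use of the validator:
#
#     validator = _SchemaValidator()
#     validator.validate(config_node, '2/config/config-pre-include')
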
# Converts the byte order string `bo_str` to a `metadata.ByteOrder`
# enumerator.
def _byte_order_str_to_bo(bo_str):
    bo_str = bo_str.lower()

    if bo_str == 'le':
        return metadata.ByteOrder.LE
    elif bo_str == 'be':
        return metadata.ByteOrder.BE


# Converts the encoding string `encoding_str` to a `metadata.Encoding`
# enumerator.
def _encoding_str_to_encoding(encoding_str):
    encoding_str = encoding_str.lower()

    if encoding_str == 'utf-8' or encoding_str == 'utf8':
        return metadata.Encoding.UTF8
    elif encoding_str == 'ascii':
        return metadata.Encoding.ASCII
    elif encoding_str == 'none':
        return metadata.Encoding.NONE


# Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` and `prop` to format the message if
# it's invalid.
def _validate_identifier(iden, ctx_obj_name, prop):
    assert type(iden) is str
    ctf_keywords = {
        'align',
        'callsite',
        'clock',
        'enum',
        'env',
        'event',
        'floating_point',
        'integer',
        'stream',
        'string',
        'struct',
        'trace',
        'typealias',
        'typedef',
        'variant',
    }

    if iden in ctf_keywords:
        fmt = 'Invalid {} (not a valid identifier): `{}`'
        raise _ConfigParseError(ctx_obj_name, fmt.format(prop, iden))


# Validates the alignment `align`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` if it's invalid.
def _validate_alignment(align, ctx_obj_name):
    assert align >= 1

    if (align & (align - 1)) != 0:
        raise _ConfigParseError(ctx_obj_name,
                                'Invalid alignment (not a power of two): {}'.format(align))

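
# The power-of-two test in _validate_alignment() relies on a common bit
# trick: a power of two has exactly one bit set, so
# `align & (align - 1)` is 0 only for powers of two. For example,
# 8 & 7 is 0b1000 & 0b0111 == 0, while 6 & 5 is 0b110 & 0b101 == 0b100,
# which is nonzero.
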
# Entities.
#
# Order of values is important here.
@enum.unique
class _Entity(enum.IntEnum):
    TRACE_PACKET_HEADER = 0
    STREAM_PACKET_CONTEXT = 1
    STREAM_EVENT_HEADER = 2
    STREAM_EVENT_CONTEXT = 3
    EVENT_CONTEXT = 4
    EVENT_PAYLOAD = 5


# A validator which validates the configured metadata for
# barectf-specific needs.
#
# barectf needs:
#
# * The alignments of all header/context field types are at least 8.
#
# * There are no nested structure or array field types, except the
#   packet header field type's `uuid` field.
class _BarectfMetadataValidator:
    def __init__(self):
        self._type_to_validate_type_func = {
            _Struct: self._validate_struct_type,
            _Array: self._validate_array_type,
        }

    def _validate_struct_type(self, t, entity_root):
        if not entity_root:
            raise _ConfigParseError('Structure field type',
                                    'Inner structure field types are not supported as of this version')

        for field_name, field_type in t.fields.items():
            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
                if field_name == 'uuid':
                    # allow
                    continue

            try:
                self._validate_type(field_type, False)
            except _ConfigParseError as exc:
                _append_error_ctx(exc,
                                  'Structure field type\'s field `{}`'.format(field_name))

    def _validate_array_type(self, t, entity_root):
        raise _ConfigParseError('Array field type',
                                'Not supported as of this version')

    def _validate_type(self, t, entity_root):
        func = self._type_to_validate_type_func.get(type(t))

        if func is not None:
            func(t, entity_root)

    def _validate_entity(self, t):
        if t is None:
            return

        # make sure root field type has a real alignment of at least 8
        if t.real_align < 8:
            raise _ConfigParseError('Root field type',
                                    'Effective alignment must be at least 8 (got {})'.format(t.real_align))

        assert type(t) is _Struct

        # validate field types
        self._validate_type(t, True)

    def _validate_event_entities_and_names(self, stream, ev):
        try:
            _validate_identifier(ev.name, 'Event type', 'event type name')

            self._cur_entity = _Entity.EVENT_CONTEXT

            try:
                self._validate_entity(ev.context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid context field type')

            self._cur_entity = _Entity.EVENT_PAYLOAD

            try:
                self._validate_entity(ev.payload_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid payload field type')

            if stream.is_event_empty(ev):
                raise _ConfigParseError('Event type', 'Empty')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Event type `{}`'.format(ev.name))

    def _validate_stream_entities_and_names(self, stream):
        try:
            _validate_identifier(stream.name, 'Stream type', 'stream type name')
            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT

            try:
                self._validate_entity(stream.packet_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid packet context field type')

            self._cur_entity = _Entity.STREAM_EVENT_HEADER

            try:
                self._validate_entity(stream.event_header_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event header field type')

            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT

            try:
                self._validate_entity(stream.event_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event context field type')

            for ev in stream.events.values():
                self._validate_event_entities_and_names(stream, ev)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))

    def _validate_entities_and_names(self, meta):
        self._cur_entity = _Entity.TRACE_PACKET_HEADER

        try:
            self._validate_entity(meta.trace.packet_header_type)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type',
                              'Invalid packet header field type')

        for stream in meta.streams.values():
            self._validate_stream_entities_and_names(stream)

    def _validate_default_stream(self, meta):
        if meta.default_stream_name is not None:
            if meta.default_stream_name not in meta.streams.keys():
                fmt = 'Default stream type name (`{}`) does not name an existing stream type'
                raise _ConfigParseError('Metadata',
                                        fmt.format(meta.default_stream_name))

    def validate(self, meta):
        try:
            self._validate_entities_and_names(meta)
            self._validate_default_stream(meta)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'barectf metadata')


aad8e5e8 694# A validator which validates special fields of trace, stream, and event
6839ffba 695# types.
7f4429f2 696class _MetadataSpecialFieldsValidator:
aad8e5e8 697 # Validates the packet header field type `t`.
7f4429f2 698 def _validate_trace_packet_header_type(self, t):
ace614f2
PP
699 ctx_obj_name = '`packet-header-type` property'
700
aad8e5e8
PP
701 # If there's more than one stream type, then the `stream_id`
702 # (stream type ID) field is required.
7f4429f2 703 if len(self._meta.streams) > 1:
7f4429f2 704 if t is None:
ace614f2
PP
705 raise _ConfigParseError('Trace type',
706 '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')
7f4429f2 707
7f4429f2 708 if 'stream_id' not in t.fields:
ace614f2
PP
709 raise _ConfigParseError(ctx_obj_name,
710 '`stream_id` field is required (because there\'s more than one stream type)')
7f4429f2 711
750374a1 712 if t is None:
7f4429f2
PP
713 return
714
aad8e5e8
PP
715 # The `magic` field type must be the first one.
716 #
717 # The `stream_id` field type's size (bits) must be large enough
718 # to accomodate any stream type ID.
7f4429f2
PP
719 for i, (field_name, field_type) in enumerate(t.fields.items()):
720 if field_name == 'magic':
7f4429f2 721 if i != 0:
ace614f2
PP
722 raise _ConfigParseError(ctx_obj_name,
723 '`magic` field must be the first packet header field type\'s field')
7f4429f2 724 elif field_name == 'stream_id':
7f4429f2 725 if len(self._meta.streams) > (1 << field_type.size):
ace614f2
PP
726 raise _ConfigParseError(ctx_obj_name,
727 '`stream_id` field\'s size is too small to accomodate {} stream types'.format(len(self._meta.streams)))
7f4429f2 728
aad8e5e8 729 # Validates the trace type of the metadata object `meta`.
7f4429f2
PP
730 def _validate_trace(self, meta):
731 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
732
aad8e5e8
PP
733 # Validates the packet context field type of the stream type
734 # `stream`.
7f4429f2 735 def _validate_stream_packet_context(self, stream):
ace614f2 736 ctx_obj_name = '`packet-context-type` property'
7f4429f2 737 t = stream.packet_context_type
750374a1 738 assert t is not None
7f4429f2 739
aad8e5e8
PP
740 # The `timestamp_begin` and `timestamp_end` field types must be
741 # mapped to the `value` property of the same clock.
750374a1
PP
742 ts_begin = t.fields.get('timestamp_begin')
743 ts_end = t.fields.get('timestamp_end')
7f4429f2 744
7f4429f2
PP
745 if ts_begin is not None and ts_end is not None:
746 if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
ace614f2
PP
747 raise _ConfigParseError(ctx_obj_name,
748 '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')
7f4429f2 749
aad8e5e8
PP
750 # The `packet_size` field type's size must be greater than or
751 # equal to the `content_size` field type's size.
750374a1 752 if t.fields['content_size'].size > t.fields['packet_size'].size:
ace614f2
PP
753 raise _ConfigParseError(ctx_obj_name,
754 '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')
7f4429f2 755
aad8e5e8 756 # Validates the event header field type of the stream type `stream`.
7f4429f2 757 def _validate_stream_event_header(self, stream):
ace614f2 758 ctx_obj_name = '`event-header-type` property'
7f4429f2
PP
759 t = stream.event_header_type
760
aad8e5e8
PP
761 # If there's more than one event type, then the `id` (event type
762 # ID) field is required.
7f4429f2 763 if len(stream.events) > 1:
7f4429f2 764 if t is None:
ace614f2
PP
765 raise _ConfigParseError('Stream type',
766 '`id` field is required (because there\'s more than one event type), but event header field type is missing')
7f4429f2 767
7f4429f2 768 if 'id' not in t.fields:
ace614f2
PP
769 raise _ConfigParseError(ctx_obj_name,
770 '`id` field is required (because there\'s more than one event type)')
7f4429f2 771
750374a1 772 if t is None:
7f4429f2
PP
773 return
774
aad8e5e8
PP
775 # The `id` field type's size (bits) must be large enough to
776 # accomodate any event type ID.
750374a1 777 eid = t.fields.get('id')
7f4429f2 778
750374a1 779 if eid is not None:
7f4429f2 780 if len(stream.events) > (1 << eid.size):
ace614f2
PP
781 raise _ConfigParseError(ctx_obj_name,
782 '`id` field\'s size is too small to accomodate {} event types'.format(len(stream.events)))
7f4429f2 783
aad8e5e8 784 # Validates the stream type `stream`.
7f4429f2
PP
785 def _validate_stream(self, stream):
786 self._validate_stream_packet_context(stream)
787 self._validate_stream_event_header(stream)
788
aad8e5e8
PP
789 # Validates the trace and stream types of the metadata object
790 # `meta`.
7f4429f2
PP
791 def validate(self, meta):
792 self._meta = meta
7f4429f2 793
ace614f2 794 try:
7f4429f2 795 try:
ace614f2 796 self._validate_trace(meta)
9fb5657f 797 except _ConfigParseError as exc:
ace614f2
PP
798 _append_error_ctx(exc, 'Trace type')
799
800 for stream in meta.streams.values():
801 try:
802 self._validate_stream(stream)
803 except _ConfigParseError as exc:
804 _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))
805 except _ConfigParseError as exc:
806 _append_error_ctx(exc, 'Metadata')
7f4429f2
PP
807
808
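
# For example, with the `stream_id` size check above, a 4-bit
# `stream_id` packet header field can accommodate at most 16 stream
# types; a 17th stream type would make
# `len(self._meta.streams) > (1 << 4)` true and raise a
# `_ConfigParseError` (illustrative numbers).
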
# A barectf YAML configuration parser.
#
# When you build such a parser, it parses the configuration file and
# creates a corresponding `config.Config` object which you can get with
# the `config` property.
#
# See the comments of _parse() for more implementation details about
# the parsing stages and general strategy.
class _YamlConfigParser:
    # Builds a barectf YAML configuration parser and parses the
    # configuration file having the path `path`.
    #
    # The parser considers the inclusion directories `include_dirs`,
    # ignores nonexistent inclusion files if `ignore_include_not_found`
    # is `True`, and dumps the effective configuration (as YAML) if
    # `dump_config` is `True`.
    #
    # Raises `_ConfigParseError` on parsing error.
    def __init__(self, path, include_dirs, ignore_include_not_found,
                 dump_config):
        self._root_path = path
        self._class_name_to_create_field_type_func = {
            'int': self._create_integer_field_type,
            'integer': self._create_integer_field_type,
            'flt': self._create_float_field_type,
            'float': self._create_float_field_type,
            'floating-point': self._create_float_field_type,
            'enum': self._create_enum_field_type,
            'enumeration': self._create_enum_field_type,
            'str': self._create_string_field_type,
            'string': self._create_string_field_type,
            'struct': self._create_struct_field_type,
            'structure': self._create_struct_field_type,
            'array': self._create_array_field_type,
        }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
        self._schema_validator = _SchemaValidator()
        self._parse()

    # Sets the default byte order as found in the `metadata_node` node.
    def _set_byte_order(self, metadata_node):
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None

    # Sets the clock value property mapping of the pseudo integer field
    # type object `int_obj` as found in the `prop_mapping_node` node.
    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
        clock_name = prop_mapping_node['name']
        clock = self._clocks.get(clock_name)

        if clock is None:
            exc = _ConfigParseError('`property-mappings` property',
                                    'Clock type `{}` does not exist'.format(clock_name))
            exc.append_ctx('Integer field type')
            raise exc

        prop_mapping = _PropertyMapping()
        prop_mapping.object = clock
        prop_mapping.prop = 'value'
        int_obj.property_mappings.append(prop_mapping)

    # Creates a pseudo integer field type from the node `node` and
    # returns it.
    def _create_integer_field_type(self, node):
        obj = _Integer()
        obj.size = node['size']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Integer field type')
            obj.align = align_node

        signed_node = node.get('signed')

        if signed_node is not None:
            obj.signed = signed_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        base_node = node.get('base')

        if base_node is not None:
            if base_node == 'bin':
                obj.base = 2
            elif base_node == 'oct':
                obj.base = 8
            elif base_node == 'dec':
                obj.base = 10
            else:
                assert base_node == 'hex'
                obj.base = 16

        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        pm_node = node.get('property-mappings')

        if pm_node is not None:
            assert len(pm_node) == 1
            self._set_int_clock_prop_mapping(obj, pm_node[0])

        return obj

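    # For example, _create_integer_field_type() typically handles an
    # integer field type node like the following (YAML, illustrative
    # values only):
    #
    #     class: int
    #     size: 32
    #     align: 8
    #     signed: false
    #     base: hex
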
    # Creates a pseudo floating point number field type from the node
    # `node` and returns it.
    def _create_float_field_type(self, node):
        obj = _FloatingPoint()
        size_node = node['size']
        obj.exp_size = size_node['exp']
        obj.mant_size = size_node['mant']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Floating point number field type')
            obj.align = align_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        return obj

    # Creates a pseudo enumeration field type from the node `node` and
    # returns it.
    def _create_enum_field_type(self, node):
        ctx_obj_name = 'Enumeration field type'
        obj = _Enum()

        # value (integer) field type
        try:
            obj.value_type = self._create_type(node['value-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, ctx_obj_name,
                              'Cannot create value (integer) field type')

        # members
        members_node = node.get('members')

        if members_node is not None:
            if obj.value_type.signed:
                value_min = -(1 << obj.value_type.size - 1)
                value_max = (1 << (obj.value_type.size - 1)) - 1
            else:
                value_min = 0
                value_max = (1 << obj.value_type.size) - 1

            cur = 0

            for m_node in members_node:
                if type(m_node) is str:
                    label = m_node
                    value = (cur, cur)
                    cur += 1
                else:
                    assert type(m_node) is collections.OrderedDict
                    label = m_node['label']
                    value = m_node['value']

                    if type(value) is int:
                        cur = value + 1
                        value = (value, value)
                    else:
                        assert type(value) is list
                        assert len(value) == 2
                        mn = value[0]
                        mx = value[1]

                        if mn > mx:
                            exc = _ConfigParseError(ctx_obj_name)
                            exc.append_ctx('Member `{}`'.format(label),
                                           'Invalid integral range ({} > {})'.format(mn, mx))
                            raise exc

                        value = (mn, mx)
                        cur = mx + 1

                # Make sure that all the integral values of the range
                # fit the enumeration field type's integer value field
                # type depending on its size (bits).
                member_obj_name = 'Member `{}`'.format(label)
                msg_fmt = 'Value {} is outside the value type range [{}, {}]'
                msg = msg_fmt.format(value[0], value_min, value_max)

                try:
                    if value[0] < value_min or value[0] > value_max:
                        raise _ConfigParseError(member_obj_name, msg)

                    if value[1] < value_min or value[1] > value_max:
                        raise _ConfigParseError(member_obj_name, msg)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name)

                obj.members[label] = value

        return obj

    # Creates a pseudo string field type from the node `node` and
    # returns it.
    def _create_string_field_type(self, node):
        obj = _String()
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        return obj

    # Creates a pseudo structure field type from the node `node` and
    # returns it.
    def _create_struct_field_type(self, node):
        ctx_obj_name = 'Structure field type'
        obj = _Struct()
        min_align_node = node.get('min-align')

        if min_align_node is not None:
            _validate_alignment(min_align_node, ctx_obj_name)
            obj.min_align = min_align_node

        fields_node = node.get('fields')

        if fields_node is not None:
            for field_name, field_node in fields_node.items():
                _validate_identifier(field_name, ctx_obj_name, 'field name')

                try:
                    obj.fields[field_name] = self._create_type(field_node)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name,
                                      'Cannot create field `{}`'.format(field_name))

        return obj

    # Creates a pseudo array field type from the node `node` and
    # returns it.
    def _create_array_field_type(self, node):
        obj = _Array()
        obj.length = node['length']

        try:
            obj.element_type = self._create_type(node['element-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Array field type',
                              'Cannot create element field type')

        return obj

    # Creates a pseudo field type from the node `node` and returns it.
    #
    # This method checks the `class` property of `node` to determine
    # which function of `self._class_name_to_create_field_type_func` to
    # call to create the corresponding pseudo field type.
    def _create_type(self, type_node):
        return self._class_name_to_create_field_type_func[type_node['class']](type_node)

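    # For example, a field type node with `class: struct` is dispatched
    # to _create_struct_field_type(), while one with `class: int` is
    # dispatched to _create_integer_field_type() (see the mapping built
    # in __init__()).
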
    # Creates a pseudo clock type from the node `node` and returns it.
    def _create_clock(self, node):
        clock = _Clock()
        uuid_node = node.get('uuid')

        if uuid_node is not None:
            try:
                clock.uuid = uuid.UUID(uuid_node)
            except ValueError as exc:
                raise _ConfigParseError('Clock type',
                                        'Malformed UUID `{}`: {}'.format(uuid_node, exc))

        descr_node = node.get('description')

        if descr_node is not None:
            clock.description = descr_node

        freq_node = node.get('freq')

        if freq_node is not None:
            clock.freq = freq_node

        error_cycles_node = node.get('error-cycles')

        if error_cycles_node is not None:
            clock.error_cycles = error_cycles_node

        offset_node = node.get('offset')

        if offset_node is not None:
            offset_cycles_node = offset_node.get('cycles')

            if offset_cycles_node is not None:
                clock.offset_cycles = offset_cycles_node

            offset_seconds_node = offset_node.get('seconds')

            if offset_seconds_node is not None:
                clock.offset_seconds = offset_seconds_node

        absolute_node = node.get('absolute')

        if absolute_node is not None:
            clock.absolute = absolute_node

        return_ctype_node = node.get('$return-ctype')

        if return_ctype_node is None:
            # barectf 2.1: `return-ctype` property was renamed to
            # `$return-ctype`
            return_ctype_node = node.get('return-ctype')

        if return_ctype_node is not None:
            clock.return_ctype = return_ctype_node

        return clock

    # Registers all the clock types of the metadata node
    # `metadata_node`, creating pseudo clock types during the process,
    # within this parser.
    #
    # The pseudo clock types in `self._clocks` are then accessible when
    # creating a pseudo integer field type (see
    # _create_integer_field_type() and _set_int_clock_prop_mapping()).
    def _register_clocks(self, metadata_node):
        self._clocks = collections.OrderedDict()
        clocks_node = metadata_node.get('clocks')

        if clocks_node is None:
            return

        for clock_name, clock_node in clocks_node.items():
            _validate_identifier(clock_name, 'Metadata', 'clock type name')
            assert clock_name not in self._clocks

            try:
                clock = self._create_clock(clock_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create clock type `{}`'.format(clock_name))

            clock.name = clock_name
            self._clocks[clock_name] = clock

    # Creates an environment object (`collections.OrderedDict`) from
    # the metadata node `metadata_node` and returns it.
    def _create_env(self, metadata_node):
        env_node = metadata_node.get('env')

        if env_node is None:
            return collections.OrderedDict()

        for env_name, env_value in env_node.items():
            _validate_identifier(env_name, 'Metadata',
                                 'environment variable name')

        return copy.deepcopy(env_node)

    # Creates a pseudo trace type from the metadata node
    # `metadata_node` and returns it.
    def _create_trace(self, metadata_node):
        ctx_obj_name = 'Trace type'
        trace = _Trace()
        trace_node = metadata_node['trace']
        trace.byte_order = self._bo
        uuid_node = trace_node.get('uuid')

        if uuid_node is not None:
            # The `uuid` property of the trace type node can be `auto`
            # to make barectf generate a UUID.
            if uuid_node == 'auto':
                trace.uuid = uuid.uuid1()
            else:
                try:
                    trace.uuid = uuid.UUID(uuid_node)
                except ValueError as exc:
                    raise _ConfigParseError(ctx_obj_name,
                                            'Malformed UUID `{}`: {}'.format(uuid_node, exc))

        pht_node = trace_node.get('packet-header-type')

        if pht_node is not None:
            try:
                trace.packet_header_type = self._create_type(pht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet header field type')

        return trace

    # Creates a pseudo event type from the event node `event_node` and
    # returns it.
    def _create_event(self, event_node):
        ctx_obj_name = 'Event type'
        event = _Event()
        log_level_node = event_node.get('log-level')

        if log_level_node is not None:
            assert type(log_level_node) is int
            event.log_level = metadata.LogLevel(None, log_level_node)

        ct_node = event_node.get('context-type')

        if ct_node is not None:
            try:
                event.context_type = self._create_type(ct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create context field type')

        pt_node = event_node.get('payload-type')

        if pt_node is not None:
            try:
                event.payload_type = self._create_type(pt_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create payload field type')

        return event

    # Creates a pseudo stream type named `stream_name` from the stream
    # node `stream_node` and returns it.
    def _create_stream(self, stream_name, stream_node):
        ctx_obj_name = 'Stream type'
        stream = _Stream()
        pct_node = stream_node.get('packet-context-type')

        if pct_node is not None:
            try:
                stream.packet_context_type = self._create_type(pct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet context field type')

        eht_node = stream_node.get('event-header-type')

        if eht_node is not None:
            try:
                stream.event_header_type = self._create_type(eht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event header field type')

        ect_node = stream_node.get('event-context-type')

        if ect_node is not None:
            try:
                stream.event_context_type = self._create_type(ect_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event context field type')

        events_node = stream_node['events']
        cur_id = 0

        for ev_name, ev_node in events_node.items():
            try:
                ev = self._create_event(ev_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event type `{}`'.format(ev_name))

            ev.id = cur_id
            ev.name = ev_name
            stream.events[ev_name] = ev
            cur_id += 1

        default_node = stream_node.get('$default')

        if default_node is not None:
            if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
                fmt = 'Cannot specify more than one default stream type (default stream type already set to `{}`)'
                raise _ConfigParseError('Stream type',
                                        fmt.format(self._meta.default_stream_name))

            self._meta.default_stream_name = stream_name

        return stream

    # Creates a `collections.OrderedDict` object where keys are stream
    # type names and values are pseudo stream types from the metadata
    # node `metadata_node` and returns it.
    def _create_streams(self, metadata_node):
        streams = collections.OrderedDict()
        streams_node = metadata_node['streams']
        cur_id = 0

        for stream_name, stream_node in streams_node.items():
            try:
                stream = self._create_stream(stream_name, stream_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create stream type `{}`'.format(stream_name))

            stream.id = cur_id
            stream.name = stream_name
            streams[stream_name] = stream
            cur_id += 1

        return streams

    # Creates a pseudo metadata object from the configuration node
    # `root` and returns it.
    def _create_metadata(self, root):
        self._meta = _Metadata()
        metadata_node = root['metadata']

        if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
            default_stream_node = metadata_node['$default-stream']
            self._meta.default_stream_name = default_stream_node

        self._set_byte_order(metadata_node)
        self._register_clocks(metadata_node)
        self._meta.clocks = self._clocks
        self._meta.env = self._create_env(metadata_node)
        self._meta.trace = self._create_trace(metadata_node)
        self._meta.streams = self._create_streams(metadata_node)

        # validate the pseudo metadata object
        _MetadataSpecialFieldsValidator().validate(self._meta)
        _BarectfMetadataValidator().validate(self._meta)

        return self._meta

    # Gets and validates the tracing prefix as found in the
    # configuration node `config_node` and returns it.
    def _get_prefix(self, config_node):
        prefix = config_node.get('prefix', 'barectf_')
        _validate_identifier(prefix, '`prefix` property', 'prefix')
        return prefix

    # Gets the options as found in the configuration node `config_node`
    # and returns a corresponding `config.ConfigOptions` object.
    def _get_options(self, config_node):
        gen_prefix_def = False
        gen_default_stream_def = False
        options_node = config_node.get('options')

        if options_node is not None:
            gen_prefix_def = options_node.get('gen-prefix-def',
                                              gen_prefix_def)
            gen_default_stream_def = options_node.get('gen-default-stream-def',
                                                      gen_default_stream_def)

        return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)

    # Returns the last included file name from the parser's inclusion
    # file name stack, or the root configuration file's path if the
    # stack is empty.
    def _get_last_include_file(self):
        if self._include_stack:
            return self._include_stack[-1]

        return self._root_path

    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object.
    def _load_include(self, yaml_path):
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() only takes the last argument if it's
            # absolute.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise _ConfigParseError('File `{}`'.format(base_path),
                                        'Cannot recursively include file `{}`'.format(norm_path))

            self._include_stack.append(norm_path)

            # load raw content
            return self._yaml_ordered_load(norm_path)

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise _ConfigParseError('File `{}`'.format(base_path),
                                    'Cannot include file `{}`: file not found in inclusion directories'.format(yaml_path))

    # Returns a list of all the inclusion file paths as found in the
    # inclusion node `include_node`.
    def _get_include_paths(self, include_node):
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [include_node]

        # already an array
        assert type(include_node) is list
        return include_node

    # Updates the node `base_node` with an overlay node `overlay_node`.
    #
    # Both the inclusion and field type inheritance features use this
    # update mechanism.
    def _update_node(self, base_node, overlay_node):
        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge both objects
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    # append extension array items to base items
                    base_value += olay_value
                else:
                    # fall back to replacing base property
                    base_node[olay_key] = olay_value
            else:
                # set base property from overlay property
                base_node[olay_key] = olay_value

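    # For example, updating this base node (illustrative values)
    #
    #     {a: {x: 1}, b: [1, 2], c: 'hello'}
    #
    # with this overlay node
    #
    #     {a: {y: 2}, b: [3], c: 'world'}
    #
    # yields:
    #
    #     {a: {x: 1, y: 2}, b: [1, 2, 3], c: 'world'}
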
    # Processes inclusions using `last_overlay_node` as the last
    # overlay node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'File `{}`'.format(cur_base_path))

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `overlay_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

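    # For example, a stream type node such as the following (YAML,
    # illustrative file names) is patched, in order, with the contents
    # of `base.yaml` and `extra.yaml`, and then its own remaining
    # properties are applied on top:
    #
    #     $include: [base.yaml, extra.yaml]
    #     packet-context-type: ...
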
    # Processes the inclusions of the event type node `event_node`,
    # returning the effective node.
    def _process_event_include(self, event_node):
        # Make sure the event type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(event_node,
                                        '2/config/event-pre-include')

        # process inclusions
        return self._process_node_include(event_node,
                                          self._process_event_include)

    # Processes the inclusions of the stream type node `stream_node`,
    # returning the effective node.
    def _process_stream_include(self, stream_node):
        def process_children_include(stream_node):
            if 'events' in stream_node:
                events_node = stream_node['events']

                for key in list(events_node):
                    events_node[key] = self._process_event_include(events_node[key])

        # Make sure the stream type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(stream_node,
                                        '2/config/stream-pre-include')

        # process inclusions
        return self._process_node_include(stream_node,
                                          self._process_stream_include,
                                          process_children_include)

    # Processes the inclusions of the trace type node `trace_node`,
    # returning the effective node.
    def _process_trace_include(self, trace_node):
        # Make sure the trace type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(trace_node,
                                        '2/config/trace-pre-include')

        # process inclusions
        return self._process_node_include(trace_node,
                                          self._process_trace_include)

    # Processes the inclusions of the clock type node `clock_node`,
    # returning the effective node.
    def _process_clock_include(self, clock_node):
        # Make sure the clock type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(clock_node,
                                        '2/config/clock-pre-include')

        # process inclusions
        return self._process_node_include(clock_node,
                                          self._process_clock_include)

    # Processes the inclusions of the metadata node `metadata_node`,
    # returning the effective node.
    def _process_metadata_include(self, metadata_node):
        def process_children_include(metadata_node):
            if 'trace' in metadata_node:
                metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])

            if 'clocks' in metadata_node:
                clocks_node = metadata_node['clocks']

                for key in list(clocks_node):
                    clocks_node[key] = self._process_clock_include(clocks_node[key])

            if 'streams' in metadata_node:
                streams_node = metadata_node['streams']

                for key in list(streams_node):
                    streams_node[key] = self._process_stream_include(streams_node[key])

        # Make sure the metadata node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(metadata_node,
                                        '2/config/metadata-pre-include')

        # process inclusions
        return self._process_node_include(metadata_node,
                                          self._process_metadata_include,
                                          process_children_include)


    # Process the inclusions of the configuration node `config_node`,
    # returning the effective node.
    def _process_config_includes(self, config_node):
        # Process inclusions in this order:
        #
        # 1. Clock type nodes, event type nodes, and the trace type
        #    node (the order between those is not important).
        #
        # 2. Stream type nodes.
        #
        # 3. Metadata node.
        #
        # This is because:
        #
        # * A metadata node can include clock type nodes, a trace type
        #   node, stream type nodes, and event type nodes (indirectly).
        #
        # * A stream type node can include event type nodes.
        #
        # We keep a stack of absolute paths to included files
        # (`self._include_stack`) to detect recursion.
        #
        # First, make sure the configuration object itself is valid for
        # the inclusion processing stage.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-include')

        # Process metadata node inclusions.
        #
        # self._process_metadata_include() returns a new (or the same)
        # metadata node without any `$include` property in it,
        # recursively.
        config_node['metadata'] = self._process_metadata_include(config_node['metadata'])

        return config_node
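
    # Illustrative example (hypothetical file and names): with an
    # include directory containing `base-stream.yaml`, a stream type
    # node can pull in a shared definition and override parts of it:
    #
    #     streams:
    #       my_stream:
    #         $include: base-stream.yaml
    #         events:
    #           my_event:
    #             payload-type: my-payload
    #
    # _process_config_includes() resolves every such `$include`
    # property, depth first, before any further validation happens.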

    # Expands the field type aliases found in the metadata node
    # `metadata_node` using the aliases of the `type_aliases_node` node.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns:
    #
    # * Any field type alias is replaced with its full field type
    #   equivalent.
    #
    # * The `type-aliases` property of `metadata_node` is removed.
    def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
        def resolve_field_type_aliases(parent_node, key, from_descr,
                                       alias_set=None):
            if key not in parent_node:
                return

            # This set holds all the aliases we need to expand,
            # recursively. This is used to detect cycles.
            if alias_set is None:
                alias_set = set()

            node = parent_node[key]

            if node is None:
                return

            if type(node) is str:
                alias = node

                if alias not in resolved_aliases:
                    # Only check for a field type alias cycle when we
                    # didn't resolve the alias yet, as a given node can
                    # refer to the same field type alias more than once.
                    if alias in alias_set:
                        fmt = 'Cycle detected during the `{}` field type alias resolution'
                        raise _ConfigParseError(from_descr, fmt.format(alias))

                    # try to load field type alias node named `alias`
                    if alias not in type_aliases_node:
                        raise _ConfigParseError(from_descr,
                                                'Field type alias `{}` does not exist'.format(alias))

                    # resolve it
                    alias_set.add(alias)
                    resolve_field_type_aliases(type_aliases_node, alias,
                                               from_descr, alias_set)
                    resolved_aliases.add(alias)

                parent_node[key] = copy.deepcopy(type_aliases_node[node])
                return

            # traverse, resolving field type aliases as needed
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                resolve_field_type_aliases(node, pkey, from_descr, alias_set)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    resolve_field_type_aliases(node[pkey], field_name,
                                               from_descr, alias_set)

        def resolve_field_type_aliases_from(parent_node, key):
            resolve_field_type_aliases(parent_node, key,
                                       '`{}` property'.format(key))

        # set of resolved field type aliases
        resolved_aliases = set()

        # Expand field type aliases within trace, stream, and event
        # types now.
        try:
            resolve_field_type_aliases_from(metadata_node['trace'],
                                            'packet-header-type')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type')

        for stream_name, stream in metadata_node['streams'].items():
            try:
                resolve_field_type_aliases_from(stream, 'packet-context-type')
                resolve_field_type_aliases_from(stream, 'event-header-type')
                resolve_field_type_aliases_from(stream, 'event-context-type')

                for event_name, event in stream['events'].items():
                    try:
                        resolve_field_type_aliases_from(event, 'context-type')
                        resolve_field_type_aliases_from(event, 'payload-type')
                    except _ConfigParseError as exc:
                        _append_error_ctx(exc,
                                          'Event type `{}`'.format(event_name))
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))

        # remove the (now unneeded) `type-aliases` node
        del metadata_node['type-aliases']
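
    # Illustrative example (hypothetical names): given a metadata node
    # such as:
    #
    #     type-aliases:
    #       uint32:
    #         class: int
    #         size: 32
    #     streams:
    #       my_stream:
    #         events:
    #           my_event:
    #             payload-type:
    #               class: struct
    #               fields:
    #                 msg_id: uint32
    #
    # _expand_field_type_aliases() replaces the `msg_id: uint32` string
    # with a deep copy of the `uint32` alias node, then removes the
    # whole `type-aliases` property.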

    # Applies field type inheritance to all field types found in
    # `metadata_node`.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, no field type node has an `$inherit` or
    # `inherit` property.
    def _expand_field_type_inheritance(self, metadata_node):
        def apply_inheritance(parent_node, key):
            if key not in parent_node:
                return

            node = parent_node[key]

            if node is None:
                return

            # process children first
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                apply_inheritance(node, pkey)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name, field_type in node[pkey].items():
                    apply_inheritance(node[pkey], field_name)

            # apply inheritance of this node
            if 'inherit' in node:
                # barectf 2.1: `inherit` property was renamed to `$inherit`
                assert '$inherit' not in node
                node['$inherit'] = node['inherit']
                del node['inherit']

            inherit_key = '$inherit'

            if inherit_key in node:
                assert type(node[inherit_key]) is collections.OrderedDict

                # apply inheritance below
                apply_inheritance(node, inherit_key)

                # `node` is an overlay on the `$inherit` node
                base_node = node[inherit_key]
                del node[inherit_key]
                self._update_node(base_node, node)

                # set updated base node as this node
                parent_node[key] = base_node

        apply_inheritance(metadata_node['trace'], 'packet-header-type')

        for stream in metadata_node['streams'].values():
            apply_inheritance(stream, 'packet-context-type')
            apply_inheritance(stream, 'event-header-type')
            apply_inheritance(stream, 'event-context-type')

            for event in stream['events'].values():
                apply_inheritance(event, 'context-type')
                apply_inheritance(event, 'payload-type')
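
    # Illustrative example (hypothetical values): once its alias is
    # expanded, a field type node's `$inherit` property holds a full
    # field type node, for example:
    #
    #     $inherit:
    #       class: int
    #       size: 32
    #       align: 8
    #     size: 64
    #
    # apply_inheritance() treats the node itself as an overlay on its
    # `$inherit` node, producing the effective field type
    # `{class: int, size: 64, align: 8}`.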

    # Calls _expand_field_type_aliases() and
    # _expand_field_type_inheritance() if the metadata node
    # `metadata_node` has a `type-aliases` property.
    def _expand_field_types(self, metadata_node):
        type_aliases_node = metadata_node.get('type-aliases')

        if type_aliases_node is None:
            # If there's no `type-aliases` node, then there are no
            # field type aliases and therefore no possible inheritance.
            return

        # first, expand field type aliases
        self._expand_field_type_aliases(metadata_node, type_aliases_node)

        # next, apply inheritance to create effective field types
        self._expand_field_type_inheritance(metadata_node)

    # Replaces the textual log levels in event type nodes of the
    # metadata node `metadata_node` with their numeric equivalent (as
    # found in the `$log-levels` or `log-levels` node of
    # `metadata_node`).
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, the `$log-levels` or `log-levels`
    # property of `metadata_node` is removed.
    def _expand_log_levels(self, metadata_node):
        if 'log-levels' in metadata_node:
            # barectf 2.1: `log-levels` property was renamed to
            # `$log-levels`
            assert '$log-levels' not in metadata_node
            metadata_node['$log-levels'] = metadata_node['log-levels']
            del metadata_node['log-levels']

        log_levels_key = '$log-levels'
        log_levels_node = metadata_node.get(log_levels_key)

        if log_levels_node is None:
            # no log level aliases
            return

        # not needed anymore
        del metadata_node[log_levels_key]

        for stream_name, stream in metadata_node['streams'].items():
            try:
                for event_name, event in stream['events'].items():
                    prop_name = 'log-level'
                    ll_node = event.get(prop_name)

                    if ll_node is None:
                        continue

                    if type(ll_node) is str:
                        if ll_node not in log_levels_node:
                            exc = _ConfigParseError('`log-level` property',
                                                    'Log level alias `{}` does not exist'.format(ll_node))
                            exc.append_ctx('Event type `{}`'.format(event_name))
                            raise exc

                        event[prop_name] = log_levels_node[ll_node]
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))
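
    # Illustrative example (hypothetical names): given a metadata node
    # with:
    #
    #     $log-levels:
    #       WARNING: 4
    #     streams:
    #       my_stream:
    #         events:
    #           my_event:
    #             log-level: WARNING
    #
    # _expand_log_levels() rewrites the `log-level` property of
    # `my_event` to the numeric value 4 and removes the `$log-levels`
    # property.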

    # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
    def _yaml_ordered_dump(self, node, **kwds):
        class ODumper(yaml.Dumper):
            pass

        def dict_representer(dumper, node):
            return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                            node.items())

        ODumper.add_representer(collections.OrderedDict, dict_representer)

        # Python -> YAML
        return yaml.dump(node, Dumper=ODumper, **kwds)
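
    # Note (illustrative): without dict_representer, PyYAML would
    # serialize a `collections.OrderedDict` with a Python-specific tag
    # (`!!python/object/apply:collections.OrderedDict`). Registering
    # the representer makes it dump as a plain YAML mapping, so
    # `collections.OrderedDict([('a', 1)])` becomes `a: 1`.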

    # Loads the content of the YAML file having the path `yaml_path` as
    # a Python object.
    #
    # All YAML maps are loaded as `collections.OrderedDict` objects.
    def _yaml_ordered_load(self, yaml_path):
        class OLoader(yaml.Loader):
            pass

        def construct_mapping(loader, node):
            loader.flatten_mapping(node)

            return collections.OrderedDict(loader.construct_pairs(node))

        OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                construct_mapping)

        # YAML -> Python
        try:
            with open(yaml_path, 'r') as f:
                node = yaml.load(f, OLoader)
        except (OSError, IOError) as exc:
            raise _ConfigParseError('File `{}`'.format(yaml_path),
                                    'Cannot open file: {}'.format(exc))

        assert type(node) is collections.OrderedDict
        return node
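
    # Illustrative example: construct_mapping() above does two things.
    # loader.flatten_mapping() resolves YAML merge keys (`<<`) before
    # the pairs are read, and construct_pairs() preserves the document
    # order of the keys. Given:
    #
    #     base: &base
    #       a: 1
    #     derived:
    #       <<: *base
    #       b: 2
    #
    # the `derived` node loads as `OrderedDict([('a', 1), ('b', 2)])`.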

    def _parse(self):
        self._version = None
        self._include_stack = []

        # load the configuration object as is from the root YAML file
        try:
            config_node = self._yaml_ordered_load(self._root_path)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Configuration',
                              'Cannot parse YAML file `{}`'.format(self._root_path))

        # Make sure the configuration object is minimally valid, that
        # is, it contains a valid `version` property.
        #
        # This step does not validate the whole configuration object
        # yet because we don't have an effective configuration object;
        # we still need to:
        #
        # * Process inclusions.
        # * Expand field types (inheritance and aliases).
        self._schema_validator.validate(config_node, 'config/config-min')

        # Process configuration object inclusions.
        #
        # self._process_config_includes() returns a new (or the same)
        # configuration object without any `$include` property in it,
        # recursively.
        config_node = self._process_config_includes(config_node)

        # Make sure that the current configuration object is valid
        # considering field types are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-field-type-expansion')

        # Expand field types.
        #
        # This process:
        #
        # 1. Replaces field type aliases with "effective" field
        #    types, recursively.
        #
        #    After this step, the `type-aliases` property of the
        #    `metadata` node is gone.
        #
        # 2. Applies inheritance, following the `$inherit`/`inherit`
        #    properties.
        #
        #    After this step, field type objects do not contain
        #    `$inherit` or `inherit` properties.
        #
        # This is done blindly, in that the process _doesn't_ validate
        # field type objects at this point.
        self._expand_field_types(config_node['metadata'])

        # Make sure that the current configuration object is valid
        # considering log levels are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-log-level-expansion')

        # Expand log levels, that is, replace log level strings with
        # their equivalent numeric values.
        self._expand_log_levels(config_node['metadata'])

        # validate the whole, effective configuration object
        self._schema_validator.validate(config_node, '2/config/config')

        # dump config if required
        if self._dump_config:
            print(self._yaml_ordered_dump(config_node, indent=2,
                                          default_flow_style=False))

        # get prefix, options, and metadata pseudo-object
        prefix = self._get_prefix(config_node)
        opts = self._get_options(config_node)
        pseudo_meta = self._create_metadata(config_node)

        # create public configuration
        self._config = config.Config(pseudo_meta.to_public(), prefix, opts)

    @property
    def config(self):
        return self._config


def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
                                 dump_config).config
    except _ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file `{}`'.format(path))
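

# Illustrative example (hypothetical path and include directory): a
# caller such as barectf's command-line interface can obtain a
# `config.Config` object with:
#
#     cfg = _from_file('config.yaml', ['/usr/share/barectf'],
#                      ignore_include_not_found=False,
#                      dump_config=False)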