_YamlConfigParser: rename field type creation methods (more explicit)
barectf/config_parse.py
# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from barectf import metadata
from barectf import config
import pkg_resources
import collections
import jsonschema
import datetime
import barectf
import os.path
import enum
import yaml
import uuid
import copy
import re
import os


class _ConfigParseErrorCtx:
    def __init__(self, name, msg=None):
        self._name = name
        self._msg = msg

    @property
    def name(self):
        return self._name

    @property
    def msg(self):
        return self._msg


class _ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        return self._ctx

    def append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigParseErrorCtx(name, msg))


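# Usage sketch for `_ConfigParseError` above (illustrative only, not
# part of the original file): parsing code enriches and rethrows such
# an error so that its context list grows from the innermost to the
# outermost object:
#
#     try:
#         validate_something()
#     except _ConfigParseError as exc:
#         exc.append_ctx('Stream type `default`', 'Invalid event type')
#         raise
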
def _opt_to_public(obj):
    if obj is None:
        return

    return obj.to_public()


# Pseudo object base class.
#
# A concrete pseudo object contains the same data as its public
# version, but it's mutable.
#
# The to_public() method converts the pseudo object to an equivalent
# public, immutable object, caching the result so as to always return
# the same Python object.
class _PseudoObj:
    def __init__(self):
        self._public = None

    def to_public(self):
        if self._public is None:
            self._public = self._to_public()

        return self._public

    def _to_public(self):
        raise NotImplementedError


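# Caching sketch for _PseudoObj.to_public() above (illustrative only):
# calling to_public() twice on the same pseudo object returns the very
# same public object:
#
#     pseudo_str = _String()
#     assert pseudo_str.to_public() is pseudo_str.to_public()
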
class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)


class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.size = None
        self.byte_order = None
        self.align = None
        self.signed = False
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

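    # Note on real_align above (illustrative only): with no explicit
    # alignment, a 32-bit integer field type has a real alignment of 8,
    # while a 12-bit one has a real alignment of 1.
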
    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)


class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)


class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.value_type = None
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)


class _String(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)


class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.element_type = None
        self.length = None

    @property
    def real_align(self):
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)


class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.min_align = 1
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

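    # Note on real_align above (illustrative only): a structure field
    # type with `min_align` 1 whose only field is a 32-bit integer
    # field type (real alignment 8) has a real alignment of 8.
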
    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))


class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))


class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description, self.freq,
                              self.error_cycles, self.offset_seconds,
                              self.offset_cycles, self.absolute,
                              self.return_ctype)


class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))


class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None
        self.events = collections.OrderedDict()

    def is_event_empty(self, event):
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))


class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.trace = None
        self.env = None
        self.clocks = None
        self.streams = None
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)


# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        raise RuntimeError('Missing local schema with URI `{}`'.format(uri))


# Schema validator which considers all the schemas found in the barectf
# package's `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self):
        subdirs = ['config', os.path.join('2', 'config')]
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    @staticmethod
    def _dict_from_ordered_dict(o_dict):
        dct = {}

        for k, v in o_dict.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)

            dct[k] = new_v

        return dct

    def _validate(self, instance, schema_short_id):
        # retrieve full schema ID from short ID
        schema_id = 'https://barectf.org/schemas/{}.json'.format(schema_short_id)
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigParseError` object, hiding any `jsonschema`
    # exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = 'Element {}'.format(elem)
                else:
                    ctx = '`{}` property'.format(elem)

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(),
                # the method returns a
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = ': {}'.format(msgs)

            new_exc = _ConfigParseError(contexts.pop(),
                                        '{}{} (from schema `{}`)'.format(exc.message,
                                                                         schema_ctx,
                                                                         schema_short_id))

            for ctx in reversed(contexts):
                new_exc.append_ctx(ctx)

            raise new_exc


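# Short ID sketch (illustrative only): within _SchemaValidator above,
# the short ID `2/config/event-pre-include` maps to the full schema ID
# `https://barectf.org/schemas/2/config/event-pre-include.json`.
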
# Converts the byte order string `bo_str` to a `metadata.ByteOrder`
# enumerator.
def _byte_order_str_to_bo(bo_str):
    bo_str = bo_str.lower()

    if bo_str == 'le':
        return metadata.ByteOrder.LE
    elif bo_str == 'be':
        return metadata.ByteOrder.BE


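# For example (illustrative only): _byte_order_str_to_bo('BE') returns
# metadata.ByteOrder.BE; any other string falls through and the
# function returns `None`, which callers are expected to validate or
# assert against.
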
# Converts the encoding string `encoding_str` to a `metadata.Encoding`
# enumerator.
def _encoding_str_to_encoding(encoding_str):
    encoding_str = encoding_str.lower()

    if encoding_str == 'utf-8' or encoding_str == 'utf8':
        return metadata.Encoding.UTF8
    elif encoding_str == 'ascii':
        return metadata.Encoding.ASCII
    elif encoding_str == 'none':
        return metadata.Encoding.NONE


# Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` and `prop` to format the message if
# it's invalid.
def _validate_identifier(iden, ctx_obj_name, prop):
    assert type(iden) is str
    ctf_keywords = {
        'align',
        'callsite',
        'clock',
        'enum',
        'env',
        'event',
        'floating_point',
        'integer',
        'stream',
        'string',
        'struct',
        'trace',
        'typealias',
        'typedef',
        'variant',
    }

    if iden in ctf_keywords:
        fmt = 'Invalid {} (not a valid identifier): `{}`'
        raise _ConfigParseError(ctx_obj_name, fmt.format(prop, iden))


# Validates the alignment `align`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` if it's invalid.
def _validate_alignment(align, ctx_obj_name):
    assert align >= 1

    if (align & (align - 1)) != 0:
        raise _ConfigParseError(ctx_obj_name,
                                'Invalid alignment (not a power of two): {}'.format(align))


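# For example (illustrative only): _validate_alignment(8, 'Integer
# field type') passes silently, while _validate_alignment(6, ...)
# raises `_ConfigParseError`: the `align & (align - 1)` test above is
# the classic power-of-two check, and 6 is not a power of two.
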
# Appends the context having the object name `obj_name` and the
# (optional) message `msg` to the `_ConfigParseError` exception `exc`
# and then raises `exc` again.
def _append_error_ctx(exc, obj_name, msg=None):
    exc.append_ctx(obj_name, msg)
    raise


# Entities.
#
# Order of values is important here.
@enum.unique
class _Entity(enum.IntEnum):
    TRACE_PACKET_HEADER = 0
    STREAM_PACKET_CONTEXT = 1
    STREAM_EVENT_HEADER = 2
    STREAM_EVENT_CONTEXT = 3
    EVENT_CONTEXT = 4
    EVENT_PAYLOAD = 5


# A validator which validates the configured metadata for
# barectf-specific needs.
#
# barectf needs:
#
# * The alignments of all header/context field types are at least 8.
#
# * There are no nested structure or array field types, except the
#   packet header field type's `uuid` field.
class _BarectfMetadataValidator:
    def __init__(self):
        self._type_to_validate_type_func = {
            _Struct: self._validate_struct_type,
            _Array: self._validate_array_type,
        }

    def _validate_struct_type(self, t, entity_root):
        if not entity_root:
            raise _ConfigParseError('Structure field type',
                                    'Inner structure field types are not supported as of this version')

        for field_name, field_type in t.fields.items():
            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
                if field_name == 'uuid':
                    # allow
                    continue

            try:
                self._validate_type(field_type, False)
            except _ConfigParseError as exc:
                _append_error_ctx(exc,
                                  'Structure field type\'s field `{}`'.format(field_name))

    def _validate_array_type(self, t, entity_root):
        raise _ConfigParseError('Array field type',
                                'Not supported as of this version')

    def _validate_type(self, t, entity_root):
        func = self._type_to_validate_type_func.get(type(t))

        if func is not None:
            func(t, entity_root)

    def _validate_entity(self, t):
        if t is None:
            return

        # make sure root field type has a real alignment of at least 8
        if t.real_align < 8:
            raise _ConfigParseError('Root field type',
                                    'Effective alignment must be at least 8 (got {})'.format(t.real_align))

        assert type(t) is _Struct

        # validate field types
        self._validate_type(t, True)

    def _validate_event_entities_and_names(self, stream, ev):
        try:
            _validate_identifier(ev.name, 'Event type', 'event type name')

            self._cur_entity = _Entity.EVENT_CONTEXT

            try:
                self._validate_entity(ev.context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid context field type')

            self._cur_entity = _Entity.EVENT_PAYLOAD

            try:
                self._validate_entity(ev.payload_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid payload field type')

            if stream.is_event_empty(ev):
                raise _ConfigParseError('Event type', 'Empty')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Event type `{}`'.format(ev.name))

    def _validate_stream_entities_and_names(self, stream):
        try:
            _validate_identifier(stream.name, 'Stream type', 'stream type name')
            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT

            try:
                self._validate_entity(stream.packet_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid packet context field type')

            self._cur_entity = _Entity.STREAM_EVENT_HEADER

            try:
                self._validate_entity(stream.event_header_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event header field type')

            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT

            try:
                self._validate_entity(stream.event_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event context field type')

            for ev in stream.events.values():
                self._validate_event_entities_and_names(stream, ev)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))

    def _validate_entities_and_names(self, meta):
        self._cur_entity = _Entity.TRACE_PACKET_HEADER

        try:
            self._validate_entity(meta.trace.packet_header_type)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type',
                              'Invalid packet header field type')

        for stream in meta.streams.values():
            self._validate_stream_entities_and_names(stream)

    def _validate_default_stream(self, meta):
        if meta.default_stream_name is not None:
            if meta.default_stream_name not in meta.streams.keys():
                fmt = 'Default stream type name (`{}`) does not name an existing stream type'
                raise _ConfigParseError('Metadata',
                                        fmt.format(meta.default_stream_name))

    def validate(self, meta):
        try:
            self._validate_entities_and_names(meta)
            self._validate_default_stream(meta)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'barectf metadata')


# A validator which validates special fields of trace, stream, and
# event types.
class _MetadataSpecialFieldsValidator:
    # Validates the packet header field type `t`.
    def _validate_trace_packet_header_type(self, t):
        ctx_obj_name = '`packet-header-type` property'

        # If there's more than one stream type, then the `stream_id`
        # (stream type ID) field is required.
        if len(self._meta.streams) > 1:
            if t is None:
                raise _ConfigParseError('Trace type',
                                        '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')

            if 'stream_id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`stream_id` field is required (because there\'s more than one stream type)')

        if t is None:
            return

        # The `magic` field type must be the first one.
        #
        # The `stream_id` field type's size (bits) must be large enough
        # to accommodate any stream type ID.
        for i, (field_name, field_type) in enumerate(t.fields.items()):
            if field_name == 'magic':
                if i != 0:
                    raise _ConfigParseError(ctx_obj_name,
                                            '`magic` field must be the first packet header field type\'s field')
            elif field_name == 'stream_id':
                if len(self._meta.streams) > (1 << field_type.size):
                    raise _ConfigParseError(ctx_obj_name,
                                            '`stream_id` field\'s size is too small to accommodate {} stream types'.format(len(self._meta.streams)))

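    # Note on the size check above (illustrative only): a 4-bit
    # `stream_id` field type can accommodate at most 1 << 4 = 16 stream
    # types; a 17th stream type makes this validation fail.
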
    # Validates the trace type of the metadata object `meta`.
    def _validate_trace(self, meta):
        self._validate_trace_packet_header_type(meta.trace.packet_header_type)

    # Validates the packet context field type of the stream type
    # `stream`.
    def _validate_stream_packet_context(self, stream):
        ctx_obj_name = '`packet-context-type` property'
        t = stream.packet_context_type
        assert t is not None

        # The `timestamp_begin` and `timestamp_end` field types must be
        # mapped to the `value` property of the same clock.
        ts_begin = t.fields.get('timestamp_begin')
        ts_end = t.fields.get('timestamp_end')

        if ts_begin is not None and ts_end is not None:
            if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
                raise _ConfigParseError(ctx_obj_name,
                                        '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')

        # The `packet_size` field type's size must be greater than or
        # equal to the `content_size` field type's size.
        if t.fields['content_size'].size > t.fields['packet_size'].size:
            raise _ConfigParseError(ctx_obj_name,
                                    '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')

    # Validates the event header field type of the stream type
    # `stream`.
    def _validate_stream_event_header(self, stream):
        ctx_obj_name = '`event-header-type` property'
        t = stream.event_header_type

        # If there's more than one event type, then the `id` (event
        # type ID) field is required.
        if len(stream.events) > 1:
            if t is None:
                raise _ConfigParseError('Stream type',
                                        '`id` field is required (because there\'s more than one event type), but event header field type is missing')

            if 'id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`id` field is required (because there\'s more than one event type)')

        if t is None:
            return

        # The `id` field type's size (bits) must be large enough to
        # accommodate any event type ID.
        eid = t.fields.get('id')

        if eid is not None:
            if len(stream.events) > (1 << eid.size):
                raise _ConfigParseError(ctx_obj_name,
                                        '`id` field\'s size is too small to accommodate {} event types'.format(len(stream.events)))

    # Validates the stream type `stream`.
    def _validate_stream(self, stream):
        self._validate_stream_packet_context(stream)
        self._validate_stream_event_header(stream)

    # Validates the trace and stream types of the metadata object
    # `meta`.
    def validate(self, meta):
        self._meta = meta

        try:
            try:
                self._validate_trace(meta)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Trace type')

            for stream in meta.streams.values():
                try:
                    self._validate_stream(stream)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Metadata')


# A barectf YAML configuration parser.
#
# When you build such a parser, it parses the configuration file and
# creates a corresponding `config.Config` object which you can get with
# the `config` property.
#
# See the comments of _parse() for more implementation details about
# the parsing stages and general strategy.
class _YamlConfigParser:
    # Builds a barectf YAML configuration parser and parses the
    # configuration file having the path `path`.
    #
    # The parser considers the inclusion directories `include_dirs`,
    # ignores nonexistent inclusion files if `ignore_include_not_found`
    # is `True`, and dumps the effective configuration (as YAML) if
    # `dump_config` is `True`.
    #
    # Raises `_ConfigParseError` on parsing error.
    def __init__(self, path, include_dirs, ignore_include_not_found,
                 dump_config):
        self._root_path = path
        self._class_name_to_create_field_type_func = {
            'int': self._create_integer_field_type,
            'integer': self._create_integer_field_type,
            'flt': self._create_float_field_type,
            'float': self._create_float_field_type,
            'floating-point': self._create_float_field_type,
            'enum': self._create_enum_field_type,
            'enumeration': self._create_enum_field_type,
            'str': self._create_string_field_type,
            'string': self._create_string_field_type,
            'struct': self._create_struct_field_type,
            'structure': self._create_struct_field_type,
            'array': self._create_array_field_type,
        }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
        self._schema_validator = _SchemaValidator()
        self._parse()

    # Sets the default byte order as found in the `metadata_node` node.
    def _set_byte_order(self, metadata_node):
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None

    # Sets the clock value property mapping of the pseudo integer field
    # type object `int_obj` as found in the `prop_mapping_node` node.
    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
        clock_name = prop_mapping_node['name']
        clock = self._clocks.get(clock_name)

        if clock is None:
            exc = _ConfigParseError('`property-mappings` property',
                                    'Clock type `{}` does not exist'.format(clock_name))
            exc.append_ctx('Integer field type')
            raise exc

        prop_mapping = _PropertyMapping()
        prop_mapping.object = clock
        prop_mapping.prop = 'value'
        int_obj.property_mappings.append(prop_mapping)

    # Creates a pseudo integer field type from the node `node` and
    # returns it.
    def _create_integer_field_type(self, node):
        obj = _Integer()
        obj.size = node['size']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Integer field type')
            obj.align = align_node

        signed_node = node.get('signed')

        if signed_node is not None:
            obj.signed = signed_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        base_node = node.get('base')

        if base_node is not None:
            if base_node == 'bin':
                obj.base = 2
            elif base_node == 'oct':
                obj.base = 8
            elif base_node == 'dec':
                obj.base = 10
            else:
                assert base_node == 'hex'
                obj.base = 16

        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        pm_node = node.get('property-mappings')

        if pm_node is not None:
            assert len(pm_node) == 1
            self._set_int_clock_prop_mapping(obj, pm_node[0])

        return obj

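    # For example (illustrative only), an integer field type node, as
    # handled by _create_integer_field_type() above, may look like this
    # in YAML:
    #
    #     class: int
    #     size: 32
    #     signed: true
    #     base: hex
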
    # Creates a pseudo floating point number field type from the node
    # `node` and returns it.
    def _create_float_field_type(self, node):
        obj = _FloatingPoint()
        size_node = node['size']
        obj.exp_size = size_node['exp']
        obj.mant_size = size_node['mant']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Floating point number field type')
            obj.align = align_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        return obj

    # Creates a pseudo enumeration field type from the node `node` and
    # returns it.
    def _create_enum_field_type(self, node):
        ctx_obj_name = 'Enumeration field type'
        obj = _Enum()

        # value (integer) field type
        try:
            obj.value_type = self._create_type(node['value-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, ctx_obj_name,
                              'Cannot create value (integer) field type')

        # members
        members_node = node.get('members')

        if members_node is not None:
            if obj.value_type.signed:
                value_min = -(1 << obj.value_type.size - 1)
                value_max = (1 << (obj.value_type.size - 1)) - 1
            else:
                value_min = 0
                value_max = (1 << obj.value_type.size) - 1

            cur = 0

            for m_node in members_node:
                if type(m_node) is str:
                    label = m_node
                    value = (cur, cur)
                    cur += 1
                else:
                    assert type(m_node) is collections.OrderedDict
                    label = m_node['label']
                    value = m_node['value']

                    if type(value) is int:
                        cur = value + 1
                        value = (value, value)
                    else:
                        assert type(value) is list
                        assert len(value) == 2
                        mn = value[0]
                        mx = value[1]

                        if mn > mx:
                            exc = _ConfigParseError(ctx_obj_name)
                            exc.append_ctx('Member `{}`'.format(label),
                                           'Invalid integral range ({} > {})'.format(mn, mx))
                            raise exc

                        value = (mn, mx)
                        cur = mx + 1

                # Make sure that all the integral values of the range
                # fit the enumeration field type's integer value field
                # type depending on its size (bits).
                member_obj_name = 'Member `{}`'.format(label)
                msg_fmt = 'Value {} is outside the value type range [{}, {}]'

                try:
                    if value[0] < value_min or value[0] > value_max:
                        raise _ConfigParseError(member_obj_name,
                                                msg_fmt.format(value[0], value_min, value_max))

                    if value[1] < value_min or value[1] > value_max:
                        raise _ConfigParseError(member_obj_name,
                                                msg_fmt.format(value[1], value_min, value_max))
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name)

                obj.members[label] = value

        return obj

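    # For example (illustrative only), the `members` property parsed by
    # _create_enum_field_type() above accepts plain labels and
    # label/value mappings:
    #
    #     members:
    #       - RUNNING              # implicit value 0
    #       - label: SUSPENDED
    #         value: 5
    #       - label: RESERVED
    #         value: [10, 20]      # integral range
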
    # Creates a pseudo string field type from the node `node` and
    # returns it.
    def _create_string_field_type(self, node):
        obj = _String()
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        return obj

    # Creates a pseudo structure field type from the node `node` and
    # returns it.
    def _create_struct_field_type(self, node):
        ctx_obj_name = 'Structure field type'
        obj = _Struct()
        min_align_node = node.get('min-align')

        if min_align_node is not None:
            _validate_alignment(min_align_node, ctx_obj_name)
            obj.min_align = min_align_node

        fields_node = node.get('fields')

        if fields_node is not None:
            for field_name, field_node in fields_node.items():
                _validate_identifier(field_name, ctx_obj_name, 'field name')

                try:
                    obj.fields[field_name] = self._create_type(field_node)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name,
                                      'Cannot create field `{}`'.format(field_name))

        return obj

    # Creates a pseudo array field type from the node `node` and
    # returns it.
    def _create_array_field_type(self, node):
        obj = _Array()
        obj.length = node['length']

        try:
            obj.element_type = self._create_type(node['element-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Array field type',
                              'Cannot create element field type')

        return obj

    # Creates a pseudo field type from the node `type_node` and returns
    # it.
    #
    # This method checks the `class` property of `type_node` to
    # determine which function of
    # `self._class_name_to_create_field_type_func` to call to create
    # the corresponding pseudo field type.
    def _create_type(self, type_node):
        return self._class_name_to_create_field_type_func[type_node['class']](type_node)

    # Creates a pseudo clock type from the node `node` and returns it.
    def _create_clock(self, node):
        clock = _Clock()
        uuid_node = node.get('uuid')

        if uuid_node is not None:
            try:
                clock.uuid = uuid.UUID(uuid_node)
            except:
                raise _ConfigParseError('Clock type',
                                        'Malformed UUID `{}`'.format(uuid_node))

        descr_node = node.get('description')

        if descr_node is not None:
            clock.description = descr_node

        freq_node = node.get('freq')

        if freq_node is not None:
            clock.freq = freq_node

        error_cycles_node = node.get('error-cycles')

        if error_cycles_node is not None:
            clock.error_cycles = error_cycles_node

        offset_node = node.get('offset')

        if offset_node is not None:
            offset_cycles_node = offset_node.get('cycles')

            if offset_cycles_node is not None:
                clock.offset_cycles = offset_cycles_node

            offset_seconds_node = offset_node.get('seconds')

            if offset_seconds_node is not None:
                clock.offset_seconds = offset_seconds_node

        absolute_node = node.get('absolute')

        if absolute_node is not None:
            clock.absolute = absolute_node

        return_ctype_node = node.get('$return-ctype')

        if return_ctype_node is None:
            # barectf 2.1: `return-ctype` property was renamed to
            # `$return-ctype`
            return_ctype_node = node.get('return-ctype')

        if return_ctype_node is not None:
            clock.return_ctype = return_ctype_node

        return clock

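    # For example (illustrative only), a clock type node, as handled by
    # _create_clock() above, may look like this in YAML:
    #
    #     freq: 1000000000
    #     offset:
    #       seconds: 1600000000
    #       cycles: 42
    #     $return-ctype: uint64_t
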
    # Registers all the clock types of the metadata node
    # `metadata_node`, creating pseudo clock types during the process,
    # within this parser.
    #
    # The pseudo clock types in `self._clocks` are then accessible when
    # creating a pseudo integer field type (see
    # _create_integer_field_type() and _set_int_clock_prop_mapping()).
    def _register_clocks(self, metadata_node):
        self._clocks = collections.OrderedDict()
        clocks_node = metadata_node.get('clocks')

        if clocks_node is None:
            return

        for clock_name, clock_node in clocks_node.items():
            _validate_identifier(clock_name, 'Metadata', 'clock type name')
            assert clock_name not in self._clocks

            try:
                clock = self._create_clock(clock_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create clock type `{}`'.format(clock_name))

            clock.name = clock_name
            self._clocks[clock_name] = clock

    # Creates an environment object (`collections.OrderedDict`) from
    # the metadata node `metadata_node` and returns it.
    def _create_env(self, metadata_node):
        env_node = metadata_node.get('env')

        if env_node is None:
            return collections.OrderedDict()

        for env_name, env_value in env_node.items():
            _validate_identifier(env_name, 'Metadata',
                                 'environment variable name')

        return copy.deepcopy(env_node)

    # Creates a pseudo trace type from the metadata node
    # `metadata_node` and returns it.
    def _create_trace(self, metadata_node):
        ctx_obj_name = 'Trace type'
        trace = _Trace()
        trace_node = metadata_node['trace']
        trace.byte_order = self._bo
        uuid_node = trace_node.get('uuid')

        if uuid_node is not None:
            # The `uuid` property of the trace type node can be `auto`
            # to make barectf generate a UUID.
            if uuid_node == 'auto':
                trace.uuid = uuid.uuid1()
            else:
                try:
                    trace.uuid = uuid.UUID(uuid_node)
                except:
                    raise _ConfigParseError(ctx_obj_name,
                                            'Malformed UUID `{}`'.format(uuid_node))

        pht_node = trace_node.get('packet-header-type')

        if pht_node is not None:
            try:
                trace.packet_header_type = self._create_type(pht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet header field type')

        return trace

    # Creates a pseudo event type from the event node `event_node` and
    # returns it.
    def _create_event(self, event_node):
        ctx_obj_name = 'Event type'
        event = _Event()
        log_level_node = event_node.get('log-level')

        if log_level_node is not None:
            assert type(log_level_node) is int
            event.log_level = metadata.LogLevel(None, log_level_node)

        ct_node = event_node.get('context-type')

        if ct_node is not None:
            try:
                event.context_type = self._create_type(ct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create context field type')

        pt_node = event_node.get('payload-type')

        if pt_node is not None:
            try:
                event.payload_type = self._create_type(pt_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create payload field type')

        return event

    # Creates a pseudo stream type named `stream_name` from the stream
    # node `stream_node` and returns it.
    def _create_stream(self, stream_name, stream_node):
        ctx_obj_name = 'Stream type'
        stream = _Stream()
        pct_node = stream_node.get('packet-context-type')

        if pct_node is not None:
            try:
                stream.packet_context_type = self._create_type(pct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet context field type')

        eht_node = stream_node.get('event-header-type')

        if eht_node is not None:
            try:
                stream.event_header_type = self._create_type(eht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event header field type')

        ect_node = stream_node.get('event-context-type')

        if ect_node is not None:
            try:
                stream.event_context_type = self._create_type(ect_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event context field type')

        events_node = stream_node['events']
        cur_id = 0

        for ev_name, ev_node in events_node.items():
            try:
                ev = self._create_event(ev_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event type `{}`'.format(ev_name))

            ev.id = cur_id
            ev.name = ev_name
            stream.events[ev_name] = ev
            cur_id += 1

        default_node = stream_node.get('$default')

        if default_node is not None:
            if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
                fmt = 'Cannot specify more than one default stream type (default stream type already set to `{}`)'
                raise _ConfigParseError('Stream type',
                                        fmt.format(self._meta.default_stream_name))

            self._meta.default_stream_name = stream_name

        return stream

    # Creates a `collections.OrderedDict` object where keys are stream
    # type names and values are pseudo stream types from the metadata
    # node `metadata_node` and returns it.
    def _create_streams(self, metadata_node):
        streams = collections.OrderedDict()
        streams_node = metadata_node['streams']
        cur_id = 0

        for stream_name, stream_node in streams_node.items():
            try:
                stream = self._create_stream(stream_name, stream_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create stream type `{}`'.format(stream_name))

            stream.id = cur_id
            stream.name = stream_name
            streams[stream_name] = stream
            cur_id += 1

        return streams

    # Creates a pseudo metadata object from the configuration node
    # `root` and returns it.
    def _create_metadata(self, root):
        self._meta = _Metadata()
        metadata_node = root['metadata']

        if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
            default_stream_node = metadata_node['$default-stream']
            self._meta.default_stream_name = default_stream_node

        self._set_byte_order(metadata_node)
        self._register_clocks(metadata_node)
        self._meta.clocks = self._clocks
        self._meta.env = self._create_env(metadata_node)
        self._meta.trace = self._create_trace(metadata_node)
        self._meta.streams = self._create_streams(metadata_node)

        # validate the pseudo metadata object
        _MetadataSpecialFieldsValidator().validate(self._meta)
        _BarectfMetadataValidator().validate(self._meta)

        return self._meta

    # Gets and validates the tracing prefix as found in the
    # configuration node `config_node` and returns it.
    def _get_prefix(self, config_node):
        prefix = config_node.get('prefix', 'barectf_')
        _validate_identifier(prefix, '`prefix` property', 'prefix')
        return prefix

    # Gets the options as found in the configuration node `config_node`
    # and returns a corresponding `config.ConfigOptions` object.
    def _get_options(self, config_node):
        gen_prefix_def = False
        gen_default_stream_def = False
        options_node = config_node.get('options')

        if options_node is not None:
            gen_prefix_def = options_node.get('gen-prefix-def',
                                              gen_prefix_def)
            gen_default_stream_def = options_node.get('gen-default-stream-def',
                                                      gen_default_stream_def)

        return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)

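    # For example (illustrative only), the options node read by
    # _get_options() above may look like this in YAML (both options
    # default to false):
    #
    #     options:
    #       gen-prefix-def: true
    #       gen-default-stream-def: true
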
    # Returns the last included file name from the parser's inclusion
    # file name stack.
    def _get_last_include_file(self):
        if self._include_stack:
            return self._include_stack[-1]

        return self._root_path

    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object.
    def _load_include(self, yaml_path):
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() ignores the other arguments if the
            # last one is an absolute path.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise _ConfigParseError('File `{}`'.format(base_path),
                                        'Cannot recursively include file `{}`'.format(norm_path))

            self._include_stack.append(norm_path)

            # load raw content
            return self._yaml_ordered_load(norm_path)

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise _ConfigParseError('File `{}`'.format(base_path),
                                    'Cannot include file `{}`: file not found in inclusion directories'.format(yaml_path))

    # Returns a list of all the inclusion file paths as found in the
    # inclusion node `include_node`.
    def _get_include_paths(self, include_node):
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [include_node]

        # already an array
        assert type(include_node) is list
        return include_node

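    # For example (illustrative only): _get_include_paths() above maps
    # `None` to `[]`, `'base.yaml'` to `['base.yaml']`, and returns a
    # list such as `['a.yaml', 'b.yaml']` as is.
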
    # Updates the node `base_node` with an overlay node `overlay_node`.
    #
    # Both the inclusion and field type inheritance features use this
    # update mechanism.
    def _update_node(self, base_node, overlay_node):
        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge both objects
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    # append extension array items to base items
                    base_value += olay_value
                else:
                    # fall back to replacing base property
                    base_node[olay_key] = olay_value
            else:
                # set base property from overlay property
                base_node[olay_key] = olay_value

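    # For example (illustrative only): with _update_node() above,
    # updating the base node
    #
    #     {'fields': {'a': ...}, 'tags': [1], 'size': 32}
    #
    # with the overlay node
    #
    #     {'fields': {'b': ...}, 'tags': [2], 'align': 8}
    #
    # recursively merges `fields` (keeping both `a` and `b`), appends
    # to `tags` (giving `[1, 2]`), keeps `size`, and adds `align`.
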
    # Processes inclusions using `last_overlay_node` as the last
    # overlay node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'File `{}`'.format(cur_base_path))

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `overlay_node` is fully resolved (it does
            # not contain any `$include` property), so `base_node`
            # stays fully resolved too.
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

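    # For example (illustrative only), a stream type node processed by
    # _process_node_include() above may include other files like this:
    #
    #     $include:
    #       - base-stream.yaml
    #       - project-stream.yaml
    #
    # in which case the content of `base-stream.yaml` is patched with
    # the content of `project-stream.yaml`, and the result is then
    # patched with the remaining properties of the stream type node
    # itself.
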
    # Processes the inclusions of the event type node `event_node`,
    # returning the effective node.
    def _process_event_include(self, event_node):
        # Make sure the event type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(event_node,
                                        '2/config/event-pre-include')

        # process inclusions
        return self._process_node_include(event_node,
                                          self._process_event_include)

    # Processes the inclusions of the stream type node `stream_node`,
    # returning the effective node.
    def _process_stream_include(self, stream_node):
        def process_children_include(stream_node):
            if 'events' in stream_node:
                events_node = stream_node['events']

                for key in list(events_node):
                    events_node[key] = self._process_event_include(events_node[key])

        # Make sure the stream type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(stream_node,
                                        '2/config/stream-pre-include')

        # process inclusions
        return self._process_node_include(stream_node,
                                          self._process_stream_include,
                                          process_children_include)

    # Processes the inclusions of the trace type node `trace_node`,
    # returning the effective node.
    def _process_trace_include(self, trace_node):
        # Make sure the trace type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(trace_node,
                                        '2/config/trace-pre-include')

        # process inclusions
        return self._process_node_include(trace_node,
                                          self._process_trace_include)

    # Processes the inclusions of the clock type node `clock_node`,
    # returning the effective node.
    def _process_clock_include(self, clock_node):
        # Make sure the clock type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(clock_node,
                                        '2/config/clock-pre-include')

        # process inclusions
        return self._process_node_include(clock_node,
                                          self._process_clock_include)

aad8e5e8
PP
1555 # Process the inclusions of the metadata node `metadata_node`,
1556 # returning the effective node.
7f4429f2
PP
1557 def _process_metadata_include(self, metadata_node):
1558 def process_children_include(metadata_node):
1559 if 'trace' in metadata_node:
1560 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
1561
1562 if 'clocks' in metadata_node:
1563 clocks_node = metadata_node['clocks']
1564
6839ffba
PP
1565 for key in list(clocks_node):
1566 clocks_node[key] = self._process_clock_include(clocks_node[key])
7f4429f2
PP
1567
1568 if 'streams' in metadata_node:
1569 streams_node = metadata_node['streams']
1570
6839ffba
PP
1571 for key in list(streams_node):
1572 streams_node[key] = self._process_stream_include(streams_node[key])
7f4429f2 1573
aad8e5e8 1574 # Make sure the metadata node is valid for the inclusion
6839ffba
PP
1575 # processing stage.
1576 self._schema_validator.validate(metadata_node,
1577 '2/config/metadata-pre-include')
7f4429f2 1578
6839ffba
PP
1579 # process inclusions
1580 return self._process_node_include(metadata_node,
7f4429f2
PP
1581 self._process_metadata_include,
1582 process_children_include)

    # Process the inclusions of the configuration node `config_node`,
    # returning the effective node.
    def _process_config_includes(self, config_node):
        # Process inclusions in this order:
        #
        # 1. Clock type nodes, event type nodes, and the trace type
        #    node (the order among those is not important).
        #
        # 2. Stream type nodes.
        #
        # 3. Metadata node.
        #
        # This is because:
        #
        # * A metadata node can include clock type nodes, a trace type
        #   node, stream type nodes, and event type nodes (indirectly).
        #
        # * A stream type node can include event type nodes.
        #
        # We keep a stack of absolute paths to included files
        # (`self._include_stack`) to detect recursion.
        #
        # First, make sure the configuration object itself is valid for
        # the inclusion processing stage.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-include')

        # Process metadata node inclusions.
        #
        # self._process_metadata_include() returns a new (or the same)
        # metadata node without any `$include` property in it,
        # recursively.
        config_node['metadata'] = self._process_metadata_include(config_node['metadata'])

        return config_node
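
    # Hypothetical example (illustration only): after this stage, a
    # configuration node like
    #
    #     version: '2.2'
    #     metadata:
    #       $include: base-metadata.yaml
    #       streams:
    #         my_stream:
    #           $include: base-stream.yaml
    #
    # is a single, effective node with no `$include` property left
    # anywhere, ready for field type expansion.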

    # Expands the field type aliases found in the metadata node
    # `metadata_node` using the aliases of the `type_aliases_node` node.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns:
    #
    # * Any field type alias is replaced with its full field type
    #   equivalent.
    #
    # * The `type-aliases` property of `metadata_node` is removed.
    def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
        def resolve_field_type_aliases(parent_node, key, from_descr,
                                       alias_set=None):
            if key not in parent_node:
                return

            # This set holds all the aliases we need to expand,
            # recursively. This is used to detect cycles.
            if alias_set is None:
                alias_set = set()

            node = parent_node[key]

            if node is None:
                return

            if type(node) is str:
                alias = node

                if alias not in resolved_aliases:
                    # Only check for a field type alias cycle when we
                    # didn't resolve the alias yet, as a given node can
                    # refer to the same field type alias more than once.
                    if alias in alias_set:
                        fmt = 'Cycle detected during the `{}` field type alias resolution'
                        raise _ConfigParseError(from_descr, fmt.format(alias))

                    # try to load the field type alias node named `alias`
                    if alias not in type_aliases_node:
                        raise _ConfigParseError(from_descr,
                                                'Field type alias `{}` does not exist'.format(alias))

                    # resolve it
                    alias_set.add(alias)
                    resolve_field_type_aliases(type_aliases_node, alias,
                                               from_descr, alias_set)
                    resolved_aliases.add(alias)

                parent_node[key] = copy.deepcopy(type_aliases_node[node])
                return

            # traverse, resolving field type aliases as needed
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                resolve_field_type_aliases(node, pkey, from_descr, alias_set)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    resolve_field_type_aliases(node[pkey], field_name,
                                               from_descr, alias_set)

        def resolve_field_type_aliases_from(parent_node, key):
            resolve_field_type_aliases(parent_node, key,
                                       '`{}` property'.format(key))

        # set of resolved field type aliases
        resolved_aliases = set()

        # Expand field type aliases within the trace, stream, and event
        # type nodes now.
        try:
            resolve_field_type_aliases_from(metadata_node['trace'],
                                            'packet-header-type')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type')

        for stream_name, stream in metadata_node['streams'].items():
            try:
                resolve_field_type_aliases_from(stream, 'packet-context-type')
                resolve_field_type_aliases_from(stream, 'event-header-type')
                resolve_field_type_aliases_from(stream, 'event-context-type')

                for event_name, event in stream['events'].items():
                    try:
                        resolve_field_type_aliases_from(event, 'context-type')
                        resolve_field_type_aliases_from(event, 'payload-type')
                    except _ConfigParseError as exc:
                        _append_error_ctx(exc,
                                          'Event type `{}`'.format(event_name))
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))

        # remove the (now unneeded) `type-aliases` node
        del metadata_node['type-aliases']
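
    # Hypothetical example (illustration only): given a `type-aliases`
    # node such as
    #
    #     type-aliases:
    #       my-int:
    #         class: int
    #         size: 32
    #
    # any property which this method considers (for example a
    # `payload-type` property whose value is the string `my-int`) is
    # replaced with a deep copy of the aliased node above.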

    # Applies field type inheritance to all field types found in
    # `metadata_node`.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, no field type node has an `$inherit` or
    # `inherit` property.
    def _expand_field_type_inheritance(self, metadata_node):
        def apply_inheritance(parent_node, key):
            if key not in parent_node:
                return

            node = parent_node[key]

            if node is None:
                return

            # process children first
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                apply_inheritance(node, pkey)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    apply_inheritance(node[pkey], field_name)

            # apply inheritance to this node
            if 'inherit' in node:
                # barectf 2.1: the `inherit` property was renamed to
                # `$inherit`
                assert '$inherit' not in node
                node['$inherit'] = node['inherit']
                del node['inherit']

            inherit_key = '$inherit'

            if inherit_key in node:
                assert type(node[inherit_key]) is collections.OrderedDict

                # apply inheritance below
                apply_inheritance(node, inherit_key)

                # `node` is an overlay on the `$inherit` node
                base_node = node[inherit_key]
                del node[inherit_key]
                self._update_node(base_node, node)

                # set the updated base node as this node
                parent_node[key] = base_node

        apply_inheritance(metadata_node['trace'], 'packet-header-type')

        for stream in metadata_node['streams'].values():
            apply_inheritance(stream, 'packet-context-type')
            apply_inheritance(stream, 'event-header-type')
            apply_inheritance(stream, 'event-context-type')

            for event in stream['events'].values():
                apply_inheritance(event, 'context-type')
                apply_inheritance(event, 'payload-type')
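
    # Hypothetical example (illustration only): once aliases are
    # expanded, a field type node like
    #
    #     $inherit:
    #       class: int
    #       size: 32
    #     signed: true
    #
    # becomes the effective node
    #
    #     class: int
    #     size: 32
    #     signed: true
    #
    # because the node acts as an overlay on its `$inherit` base.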

    # Calls _expand_field_type_aliases() and
    # _expand_field_type_inheritance() if the metadata node
    # `metadata_node` has a `type-aliases` property.
    def _expand_field_types(self, metadata_node):
        type_aliases_node = metadata_node.get('type-aliases')

        if type_aliases_node is None:
            # If there's no `type-aliases` node, then there are no
            # field type aliases and therefore no possible inheritance.
            return

        # first, expand field type aliases
        self._expand_field_type_aliases(metadata_node, type_aliases_node)

        # next, apply inheritance to create effective field types
        self._expand_field_type_inheritance(metadata_node)

    # Replaces the textual log levels in the event type nodes of the
    # metadata node `metadata_node` with their numeric equivalents (as
    # found in the `$log-levels` or `log-levels` node of
    # `metadata_node`).
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, the `$log-levels` or `log-levels`
    # property of `metadata_node` is removed.
    def _expand_log_levels(self, metadata_node):
        if 'log-levels' in metadata_node:
            # barectf 2.1: the `log-levels` property was renamed to
            # `$log-levels`
            assert '$log-levels' not in metadata_node
            metadata_node['$log-levels'] = metadata_node['log-levels']
            del metadata_node['log-levels']

        log_levels_key = '$log-levels'
        log_levels_node = metadata_node.get(log_levels_key)

        if log_levels_node is None:
            # no log level aliases
            return

        # not needed anymore
        del metadata_node[log_levels_key]

        for stream_name, stream in metadata_node['streams'].items():
            try:
                for event_name, event in stream['events'].items():
                    prop_name = 'log-level'
                    ll_node = event.get(prop_name)

                    if ll_node is None:
                        continue

                    if type(ll_node) is str:
                        if ll_node not in log_levels_node:
                            exc = _ConfigParseError('`log-level` property',
                                                    'Log level alias `{}` does not exist'.format(ll_node))
                            exc.append_ctx('Event type `{}`'.format(event_name))
                            raise exc

                        event[prop_name] = log_levels_node[ll_node]
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))
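
    # Hypothetical example (illustration only): given
    #
    #     $log-levels:
    #       WARNING: 4
    #
    # an event type node having `log-level: WARNING` has
    # `log-level: 4` once this method returns.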

    # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
    def _yaml_ordered_dump(self, node, **kwds):
        class ODumper(yaml.Dumper):
            pass

        def dict_representer(dumper, node):
            return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                            node.items())

        ODumper.add_representer(collections.OrderedDict, dict_representer)

        # Python -> YAML
        return yaml.dump(node, Dumper=ODumper, **kwds)
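
    # Usage sketch (illustration only): dumping a
    # `collections.OrderedDict` node keeps its insertion order instead
    # of sorting keys:
    #
    #     yaml_text = self._yaml_ordered_dump(node, indent=2,
    #                                         default_flow_style=False)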

    # Loads the content of the YAML file having the path `yaml_path` as
    # a Python object.
    #
    # All YAML maps are loaded as `collections.OrderedDict` objects.
    def _yaml_ordered_load(self, yaml_path):
        class OLoader(yaml.Loader):
            pass

        def construct_mapping(loader, node):
            loader.flatten_mapping(node)

            return collections.OrderedDict(loader.construct_pairs(node))

        OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                construct_mapping)

        # YAML -> Python
        try:
            with open(yaml_path, 'r') as f:
                node = yaml.load(f, OLoader)
        except OSError as exc:
            raise _ConfigParseError('File `{}`'.format(yaml_path),
                                    'Cannot open file: {}'.format(exc))

        assert type(node) is collections.OrderedDict
        return node
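
    # Usage sketch (illustration only): loading a YAML file containing
    #
    #     b: 2
    #     a: 1
    #
    # yields OrderedDict([('b', 2), ('a', 1)]): the original property
    # order is preserved, which matters for structure field type
    # fields.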

    def _parse(self):
        self._version = None
        self._include_stack = []

        # load the configuration object as is from the root YAML file
        try:
            config_node = self._yaml_ordered_load(self._root_path)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Configuration',
                              'Cannot parse YAML file `{}`'.format(self._root_path))

        # Make sure the configuration object is minimally valid, that
        # is, that it contains a valid `version` property.
        #
        # This step does not validate the whole configuration object
        # yet because we don't have an effective configuration object;
        # we still need to:
        #
        # * Process inclusions.
        # * Expand field types (aliases and inheritance).
        self._schema_validator.validate(config_node, 'config/config-min')

        # Process configuration object inclusions.
        #
        # self._process_config_includes() returns a new (or the same)
        # configuration object without any `$include` property in it,
        # recursively.
        config_node = self._process_config_includes(config_node)

        # Make sure that the current configuration object is valid
        # considering that field types are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-field-type-expansion')

        # Expand field types.
        #
        # This process:
        #
        # 1. Replaces field type aliases with "effective" field
        #    types, recursively.
        #
        #    After this step, the `type-aliases` property of the
        #    `metadata` node is gone.
        #
        # 2. Applies inheritance, following the `$inherit`/`inherit`
        #    properties.
        #
        #    After this step, field type objects do not contain
        #    `$inherit` or `inherit` properties.
        #
        # This is done blindly, in that the process _doesn't_ validate
        # field type objects at this point.
        self._expand_field_types(config_node['metadata'])

        # Make sure that the current configuration object is valid
        # considering that log levels are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-log-level-expansion')

        # Expand log levels, that is, replace log level strings with
        # their equivalent numeric values.
        self._expand_log_levels(config_node['metadata'])

        # validate the whole, effective configuration object
        self._schema_validator.validate(config_node, '2/config/config')

        # dump the configuration if required
        if self._dump_config:
            print(self._yaml_ordered_dump(config_node, indent=2,
                                          default_flow_style=False))

        # get the prefix, options, and metadata pseudo-object
        prefix = self._get_prefix(config_node)
        opts = self._get_options(config_node)
        pseudo_meta = self._create_metadata(config_node)

        # create the public configuration
        self._config = config.Config(pseudo_meta.to_public(), prefix, opts)

    @property
    def config(self):
        return self._config


def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
                                 dump_config).config
    except _ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file `{}`'.format(path))
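
# Usage sketch (illustration only, hypothetical arguments): callers
# within barectf are expected to use _from_file() like this:
#
#     cfg = _from_file('config.yaml', ['include'],
#                      ignore_include_not_found=False, dump_config=False)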