# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from barectf import metadata
from barectf import config
import pkg_resources
import collections
import jsonschema
import datetime
import barectf
import os.path
import enum
import yaml
import uuid
import copy
import re
import os

class _ConfigParseErrorCtx:
    def __init__(self, name, msg=None):
        self._name = name
        self._msg = msg

    @property
    def name(self):
        return self._name

    @property
    def msg(self):
        return self._msg


class _ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        return self._ctx

    def append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigParseErrorCtx(name, msg))

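# Illustrative flow (hypothetical names): if creating an event type's
# payload field type fails, the innermost code raises, for example,
# _ConfigParseError('Structure field type', ...), and each caller
# appends its own context on the way up, so the final exception's
# `ctx` list reads, from first (innermost) to last (outermost):
# `Structure field type`, `Event type`, `Stream type `my-stream``,
# `Metadata`.
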
def _opt_to_public(obj):
    if obj is None:
        return

    return obj.to_public()


# Pseudo object base class.
#
# A concrete pseudo object contains the same data as its public
# version, but it's mutable.
#
# The to_public() method converts the pseudo object to an equivalent
# public, immutable object, caching the result so as to always return
# the same Python object.
class _PseudoObj:
    def __init__(self):
        self._public = None

    def to_public(self):
        if self._public is None:
            self._public = self._to_public()

        return self._public

    def _to_public(self):
        raise NotImplementedError

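# Illustrative use of the caching contract above (attribute values are
# hypothetical; `_Integer` is defined below):
#
#     pseudo_int = _Integer()
#     pseudo_int.size = 32
#     pub = pseudo_int.to_public()           # builds metadata.Integer
#     assert pub is pseudo_int.to_public()   # cached: same object
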
class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)


class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.size = None
        self.byte_order = None
        self.align = None
        self.signed = False
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)

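# Default-alignment examples for _Integer.real_align above
# (illustrative): an 8-, 16-, 32-, or 64-bit integer field type
# without an explicit `align` property gets a real alignment of 8;
# any non-byte-multiple size, such as 5 bits, gets 1.
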
class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)


class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.value_type = None
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)


class _String(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)

class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.element_type = None
        self.length = None

    @property
    def real_align(self):
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)


class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.min_align = 1
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))

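# Illustrative _Struct.real_align computation: a structure field type
# with `min_align` 1 whose fields are a 32-bit integer field type
# (real alignment 8) and a 5-bit integer field type (real alignment 1)
# has a real alignment of max(1, 8, 1) == 8.
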
class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))


class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description, self.freq,
                              self.error_cycles, self.offset_seconds,
                              self.offset_cycles, self.absolute,
                              self.return_ctype)

class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))


class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None
        self.events = collections.OrderedDict()

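    # Returns whether the event type `event` is empty, that is, whether
    # the stream type's event header and event context field types and
    # the event type's own context and payload field types define no
    # fields at all.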
    def is_event_empty(self, event):
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))

class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.trace = None
        self.env = None
        self.clocks = None
        self.streams = None
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)

# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        raise RuntimeError('Missing local schema with URI `{}`'.format(uri))

# Schema validator which considers all the schemas found in the barectf
# package's `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self):
        subdirs = ['config', os.path.join('2', 'config')]
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    @staticmethod
    def _dict_from_ordered_dict(o_dict):
        dct = {}

        for k, v in o_dict.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)

            dct[k] = new_v

        return dct

    def _validate(self, instance, schema_short_id):
        # retrieve full schema ID from short ID
        schema_id = 'https://barectf.org/schemas/{}.json'.format(schema_short_id)
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigParseError` object, hiding any `jsonschema`
    # exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = 'Element {}'.format(elem)
                else:
                    ctx = '`{}` property'.format(elem)

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(),
                # the method returns a
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = ': {}'.format(msgs)

            new_exc = _ConfigParseError(contexts.pop(),
                                        '{}{} (from schema `{}`)'.format(exc.message,
                                                                         schema_ctx,
                                                                         schema_short_id))

            for ctx in reversed(contexts):
                new_exc.append_ctx(ctx)

            raise new_exc

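# Illustrative use of _SchemaValidator (the node variable is
# hypothetical): validating against the schema with short ID
# `2/config/metadata-pre-include` looks up the full ID
# `https://barectf.org/schemas/2/config/metadata-pre-include.json` in
# the store built from the package's `schemas` directory:
#
#     _SchemaValidator().validate(metadata_node,
#                                 '2/config/metadata-pre-include')
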
# Converts the byte order string `bo_str` to a `metadata.ByteOrder`
# enumerator.
def _byte_order_str_to_bo(bo_str):
    bo_str = bo_str.lower()

    if bo_str == 'le':
        return metadata.ByteOrder.LE
    elif bo_str == 'be':
        return metadata.ByteOrder.BE

# Converts the encoding string `encoding_str` to a `metadata.Encoding`
# enumerator.
def _encoding_str_to_encoding(encoding_str):
    encoding_str = encoding_str.lower()

    if encoding_str == 'utf-8' or encoding_str == 'utf8':
        return metadata.Encoding.UTF8
    elif encoding_str == 'ascii':
        return metadata.Encoding.ASCII
    elif encoding_str == 'none':
        return metadata.Encoding.NONE

# Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` and `prop` to format the message if
# it's invalid.
def _validate_identifier(iden, ctx_obj_name, prop):
    assert type(iden) is str
    ctf_keywords = {
        'align',
        'callsite',
        'clock',
        'enum',
        'env',
        'event',
        'floating_point',
        'integer',
        'stream',
        'string',
        'struct',
        'trace',
        'typealias',
        'typedef',
        'variant',
    }

    if iden in ctf_keywords:
        fmt = 'Invalid {} (not a valid identifier): `{}`'
        raise _ConfigParseError(ctx_obj_name, fmt.format(prop, iden))

# Validates the alignment `align`, raising a `_ConfigParseError`
# exception using `ctx_obj_name` if it's invalid.
def _validate_alignment(align, ctx_obj_name):
    assert align >= 1

    # `align` is a power of two iff it has exactly one bit set, that
    # is, iff clearing its lowest set bit (`align & (align - 1)`)
    # yields zero.
    if (align & (align - 1)) != 0:
        raise _ConfigParseError(ctx_obj_name,
                                'Invalid alignment (not a power of two): {}'.format(align))

# Appends the context having the object name `obj_name` and the
# (optional) message `msg` to the `_ConfigParseError` exception `exc`
# and then raises `exc` again.
def _append_error_ctx(exc, obj_name, msg=None):
    exc.append_ctx(obj_name, msg)
    raise

# Entities.
#
# Order of values is important here.
@enum.unique
class _Entity(enum.IntEnum):
    TRACE_PACKET_HEADER = 0
    STREAM_PACKET_CONTEXT = 1
    STREAM_EVENT_HEADER = 2
    STREAM_EVENT_CONTEXT = 3
    EVENT_CONTEXT = 4
    EVENT_PAYLOAD = 5

# A validator which validates the configured metadata for
# barectf-specific needs.
#
# barectf needs:
#
# * The alignment of each header/context field type to be at least 8.
#
# * No nested structure or array field types, except for the packet
#   header field type's `uuid` field.
class _BarectfMetadataValidator:
    def __init__(self):
        self._type_to_validate_type_func = {
            _Struct: self._validate_struct_type,
            _Array: self._validate_array_type,
        }

    def _validate_struct_type(self, t, entity_root):
        if not entity_root:
            raise _ConfigParseError('Structure field type',
                                    'Inner structure field types are not supported as of this version')

        for field_name, field_type in t.fields.items():
            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
                if field_name == 'uuid':
                    # allow
                    continue

            try:
                self._validate_type(field_type, False)
            except _ConfigParseError as exc:
                _append_error_ctx(exc,
                                  'Structure field type\'s field `{}`'.format(field_name))

    def _validate_array_type(self, t, entity_root):
        raise _ConfigParseError('Array field type',
                                'Not supported as of this version')

    def _validate_type(self, t, entity_root):
        func = self._type_to_validate_type_func.get(type(t))

        if func is not None:
            func(t, entity_root)

    def _validate_entity(self, t):
        if t is None:
            return

        # make sure the root field type has a real alignment of at least 8
        if t.real_align < 8:
            raise _ConfigParseError('Root field type',
                                    'Effective alignment must be at least 8 (got {})'.format(t.real_align))

        assert type(t) is _Struct

        # validate field types
        self._validate_type(t, True)

    def _validate_event_entities_and_names(self, stream, ev):
        try:
            _validate_identifier(ev.name, 'Event type', 'event type name')

            self._cur_entity = _Entity.EVENT_CONTEXT

            try:
                self._validate_entity(ev.context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid context field type')

            self._cur_entity = _Entity.EVENT_PAYLOAD

            try:
                self._validate_entity(ev.payload_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Event type',
                                  'Invalid payload field type')

            if stream.is_event_empty(ev):
                raise _ConfigParseError('Event type', 'Empty')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Event type `{}`'.format(ev.name))

    def _validate_stream_entities_and_names(self, stream):
        try:
            _validate_identifier(stream.name, 'Stream type', 'stream type name')

            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT

            try:
                self._validate_entity(stream.packet_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid packet context field type')

            self._cur_entity = _Entity.STREAM_EVENT_HEADER

            try:
                self._validate_entity(stream.event_header_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event header field type')

            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT

            try:
                self._validate_entity(stream.event_context_type)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type',
                                  'Invalid event context field type')

            for ev in stream.events.values():
                self._validate_event_entities_and_names(stream, ev)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))

    def _validate_entities_and_names(self, meta):
        self._cur_entity = _Entity.TRACE_PACKET_HEADER

        try:
            self._validate_entity(meta.trace.packet_header_type)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type',
                              'Invalid packet header field type')

        for stream in meta.streams.values():
            self._validate_stream_entities_and_names(stream)

    def _validate_default_stream(self, meta):
        if meta.default_stream_name is not None:
            if meta.default_stream_name not in meta.streams.keys():
                fmt = 'Default stream type name (`{}`) does not name an existing stream type'
                raise _ConfigParseError('Metadata',
                                        fmt.format(meta.default_stream_name))

    def validate(self, meta):
        try:
            self._validate_entities_and_names(meta)
            self._validate_default_stream(meta)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'barectf metadata')

# A validator which validates special fields of trace, stream, and
# event types.
class _MetadataSpecialFieldsValidator:
    # Validates the packet header field type `t`.
    def _validate_trace_packet_header_type(self, t):
        ctx_obj_name = '`packet-header-type` property'

        # If there's more than one stream type, then the `stream_id`
        # (stream type ID) field is required.
        if len(self._meta.streams) > 1:
            if t is None:
                raise _ConfigParseError('Trace type',
                                        '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')

            if 'stream_id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`stream_id` field is required (because there\'s more than one stream type)')

        if t is None:
            return

        # The `magic` field type must be the first one.
        #
        # The `stream_id` field type's size (bits) must be large enough
        # to accommodate any stream type ID.
        for i, (field_name, field_type) in enumerate(t.fields.items()):
            if field_name == 'magic':
                if i != 0:
                    raise _ConfigParseError(ctx_obj_name,
                                            '`magic` field must be the first packet header field type\'s field')
            elif field_name == 'stream_id':
                if len(self._meta.streams) > (1 << field_type.size):
                    raise _ConfigParseError(ctx_obj_name,
                                            '`stream_id` field\'s size is too small to accommodate {} stream types'.format(len(self._meta.streams)))

    # Validates the trace type of the metadata object `meta`.
    def _validate_trace(self, meta):
        self._validate_trace_packet_header_type(meta.trace.packet_header_type)

    # Validates the packet context field type of the stream type
    # `stream`.
    def _validate_stream_packet_context(self, stream):
        ctx_obj_name = '`packet-context-type` property'
        t = stream.packet_context_type
        assert t is not None

        # The `timestamp_begin` and `timestamp_end` field types must be
        # mapped to the `value` property of the same clock.
        ts_begin = t.fields.get('timestamp_begin')
        ts_end = t.fields.get('timestamp_end')

        if ts_begin is not None and ts_end is not None:
            if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
                raise _ConfigParseError(ctx_obj_name,
                                        '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')

        # The `packet_size` field type's size must be greater than or
        # equal to the `content_size` field type's size.
        if t.fields['content_size'].size > t.fields['packet_size'].size:
            raise _ConfigParseError(ctx_obj_name,
                                    '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')

    # Validates the event header field type of the stream type `stream`.
    def _validate_stream_event_header(self, stream):
        ctx_obj_name = '`event-header-type` property'
        t = stream.event_header_type

        # If there's more than one event type, then the `id` (event
        # type ID) field is required.
        if len(stream.events) > 1:
            if t is None:
                raise _ConfigParseError('Stream type',
                                        '`id` field is required (because there\'s more than one event type), but event header field type is missing')

            if 'id' not in t.fields:
                raise _ConfigParseError(ctx_obj_name,
                                        '`id` field is required (because there\'s more than one event type)')

        if t is None:
            return

        # The `id` field type's size (bits) must be large enough to
        # accommodate any event type ID.
        eid = t.fields.get('id')

        if eid is not None:
            if len(stream.events) > (1 << eid.size):
                raise _ConfigParseError(ctx_obj_name,
                                        '`id` field\'s size is too small to accommodate {} event types'.format(len(stream.events)))

    # Validates the stream type `stream`.
    def _validate_stream(self, stream):
        self._validate_stream_packet_context(stream)
        self._validate_stream_event_header(stream)

    # Validates the trace and stream types of the metadata object
    # `meta`.
    def validate(self, meta):
        self._meta = meta

        try:
            try:
                self._validate_trace(meta)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Trace type')

            for stream in meta.streams.values():
                try:
                    self._validate_stream(stream)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, 'Stream type `{}`'.format(stream.name))
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Metadata')

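# Size-check arithmetic used above (illustrative): with a 4-bit
# `stream_id` field type, the packet header can only accommodate
# 1 << 4 == 16 stream types, so a configuration with 17 stream types
# fails; the event header's `id` field type is checked the same way
# against the number of event types.
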
# A barectf YAML configuration parser.
#
# When you build such a parser, it parses the configuration file and
# creates a corresponding `config.Config` object which you can get with
# the `config` property.
#
# See the comments of _parse() for more implementation details about
# the parsing stages and general strategy.
class _YamlConfigParser:
    # Builds a barectf YAML configuration parser and parses the
    # configuration file having the path `path`.
    #
    # The parser considers the inclusion directories `include_dirs`,
    # ignores nonexistent inclusion files if `ignore_include_not_found`
    # is `True`, and dumps the effective configuration (as YAML) if
    # `dump_config` is `True`.
    def __init__(self, path, include_dirs, ignore_include_not_found,
                 dump_config):
        self._root_yaml_path = path
        self._class_name_to_create_type_func = {
            'int': self._create_integer,
            'integer': self._create_integer,
            'flt': self._create_float,
            'float': self._create_float,
            'floating-point': self._create_float,
            'enum': self._create_enum,
            'enumeration': self._create_enum,
            'str': self._create_string,
            'string': self._create_string,
            'struct': self._create_struct,
            'structure': self._create_struct,
            'array': self._create_array,
        }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
        self._schema_validator = _SchemaValidator()
        self._parse()

    # Sets the default byte order as found in the `metadata_node` node.
    def _set_byte_order(self, metadata_node):
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None

    # Sets the clock value property mapping of the pseudo integer field
    # type object `int_obj` as found in the `prop_mapping_node` node.
    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
        clock_name = prop_mapping_node['name']
        clock = self._clocks.get(clock_name)

        if clock is None:
            exc = _ConfigParseError('`property-mappings` property',
                                    'Clock type `{}` does not exist'.format(clock_name))
            exc.append_ctx('Integer field type')
            raise exc

        prop_mapping = _PropertyMapping()
        prop_mapping.object = clock
        prop_mapping.prop = 'value'
        int_obj.property_mappings.append(prop_mapping)

    # Creates a pseudo integer field type from the node `node` and
    # returns it.
    def _create_integer(self, node):
        obj = _Integer()
        obj.size = node['size']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Integer field type')
            obj.align = align_node

        signed_node = node.get('signed')

        if signed_node is not None:
            obj.signed = signed_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        base_node = node.get('base')

        if base_node is not None:
            if base_node == 'bin':
                obj.base = 2
            elif base_node == 'oct':
                obj.base = 8
            elif base_node == 'dec':
                obj.base = 10
            else:
                assert base_node == 'hex'
                obj.base = 16

        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        pm_node = node.get('property-mappings')

        if pm_node is not None:
            assert len(pm_node) == 1
            self._set_int_clock_prop_mapping(obj, pm_node[0])

        return obj

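    # Illustrative integer field type node (hypothetical values) as
    # handled by _create_integer() above; missing properties keep the
    # `_Integer` defaults and the trace type's byte order:
    #
    #     class: int
    #     size: 32
    #     signed: true
    #     base: hex
    #     property-mappings:
    #       - name: my_clock
    #
    # Only the `name` property of a property mapping entry is read here
    # (see _set_int_clock_prop_mapping()); the actual configuration
    # format may require more properties per entry.
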
    # Creates a pseudo floating point number field type from the node
    # `node` and returns it.
    def _create_float(self, node):
        obj = _FloatingPoint()
        size_node = node['size']
        obj.exp_size = size_node['exp']
        obj.mant_size = size_node['mant']
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Floating point number field type')
            obj.align = align_node

        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        return obj

    # Creates a pseudo enumeration field type from the node `node` and
    # returns it.
    def _create_enum(self, node):
        ctx_obj_name = 'Enumeration field type'
        obj = _Enum()

        # value (integer) field type
        try:
            obj.value_type = self._create_type(node['value-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, ctx_obj_name,
                              'Cannot create value (integer) field type')

        # members
        members_node = node.get('members')

        if members_node is not None:
            if obj.value_type.signed:
                value_min = -(1 << (obj.value_type.size - 1))
                value_max = (1 << (obj.value_type.size - 1)) - 1
            else:
                value_min = 0
                value_max = (1 << obj.value_type.size) - 1

            cur = 0

            for m_node in members_node:
                if type(m_node) is str:
                    label = m_node
                    value = (cur, cur)
                    cur += 1
                else:
                    assert type(m_node) is collections.OrderedDict
                    label = m_node['label']
                    value = m_node['value']

                    if type(value) is int:
                        cur = value + 1
                        value = (value, value)
                    else:
                        assert type(value) is list
                        assert len(value) == 2
                        mn = value[0]
                        mx = value[1]

                        if mn > mx:
                            exc = _ConfigParseError(ctx_obj_name)
                            exc.append_ctx('Member `{}`'.format(label),
                                           'Invalid integral range ({} > {})'.format(mn, mx))
                            raise exc

                        value = (mn, mx)
                        cur = mx + 1

                # Make sure that all the integral values of the range
                # fit the enumeration field type's integer value field
                # type depending on its size (bits).
                member_obj_name = 'Member `{}`'.format(label)
                msg_fmt = 'Value {} is outside the value type range [{}, {}]'

                try:
                    if value[0] < value_min or value[0] > value_max:
                        raise _ConfigParseError(member_obj_name,
                                                msg_fmt.format(value[0], value_min, value_max))

                    if value[1] < value_min or value[1] > value_max:
                        raise _ConfigParseError(member_obj_name,
                                                msg_fmt.format(value[1], value_min, value_max))
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name)

                obj.members[label] = value

        return obj

    # Creates a pseudo string field type from the node `node` and
    # returns it.
    def _create_string(self, node):
        obj = _String()
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        return obj

    # Creates a pseudo structure field type from the node `node` and
    # returns it.
    def _create_struct(self, node):
        ctx_obj_name = 'Structure field type'
        obj = _Struct()
        min_align_node = node.get('min-align')

        if min_align_node is not None:
            _validate_alignment(min_align_node, ctx_obj_name)
            obj.min_align = min_align_node

        fields_node = node.get('fields')

        if fields_node is not None:
            for field_name, field_node in fields_node.items():
                _validate_identifier(field_name, ctx_obj_name, 'field name')

                try:
                    obj.fields[field_name] = self._create_type(field_node)
                except _ConfigParseError as exc:
                    _append_error_ctx(exc, ctx_obj_name,
                                      'Cannot create field `{}`'.format(field_name))

        return obj

    # Creates a pseudo array field type from the node `node` and
    # returns it.
    def _create_array(self, node):
        obj = _Array()
        obj.length = node['length']

        try:
            obj.element_type = self._create_type(node['element-type'])
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Array field type',
                              'Cannot create element field type')

        return obj

    # Creates a pseudo field type from the node `type_node` and returns
    # it.
    #
    # This method checks the `class` property of `type_node` to
    # determine which function of
    # `self._class_name_to_create_type_func` to call to create the
    # corresponding pseudo field type.
    def _create_type(self, type_node):
        return self._class_name_to_create_type_func[type_node['class']](type_node)

    # Creates a pseudo clock type from the node `node` and returns it.
    def _create_clock(self, node):
        clock = _Clock()
        uuid_node = node.get('uuid')

        if uuid_node is not None:
            try:
                clock.uuid = uuid.UUID(uuid_node)
            except:
                raise _ConfigParseError('Clock type',
                                        'Malformed UUID `{}`'.format(uuid_node))

        descr_node = node.get('description')

        if descr_node is not None:
            clock.description = descr_node

        freq_node = node.get('freq')

        if freq_node is not None:
            clock.freq = freq_node

        error_cycles_node = node.get('error-cycles')

        if error_cycles_node is not None:
            clock.error_cycles = error_cycles_node

        offset_node = node.get('offset')

        if offset_node is not None:
            offset_cycles_node = offset_node.get('cycles')

            if offset_cycles_node is not None:
                clock.offset_cycles = offset_cycles_node

            offset_seconds_node = offset_node.get('seconds')

            if offset_seconds_node is not None:
                clock.offset_seconds = offset_seconds_node

        absolute_node = node.get('absolute')

        if absolute_node is not None:
            clock.absolute = absolute_node

        return_ctype_node = node.get('$return-ctype')

        if return_ctype_node is None:
            # barectf 2.1: `return-ctype` property was renamed to
            # `$return-ctype`
            return_ctype_node = node.get('return-ctype')

        if return_ctype_node is not None:
            clock.return_ctype = return_ctype_node

        return clock

    # Registers all the clock types of the metadata node
    # `metadata_node`, creating pseudo clock types during the process,
    # within this parser.
    #
    # The pseudo clock types in `self._clocks` are then accessible when
    # creating a pseudo integer field type (see _create_integer() and
    # _set_int_clock_prop_mapping()).
    def _register_clocks(self, metadata_node):
        self._clocks = collections.OrderedDict()
        clocks_node = metadata_node.get('clocks')

        if clocks_node is None:
            return

        for clock_name, clock_node in clocks_node.items():
            _validate_identifier(clock_name, 'Metadata', 'clock type name')
            assert clock_name not in self._clocks

            try:
                clock = self._create_clock(clock_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create clock type `{}`'.format(clock_name))

            clock.name = clock_name
            self._clocks[clock_name] = clock

    # Creates an environment object (`collections.OrderedDict`) from
    # the metadata node `metadata_node` and returns it.
    def _create_env(self, metadata_node):
        env_node = metadata_node.get('env')

        if env_node is None:
            return collections.OrderedDict()

        for env_name, env_value in env_node.items():
            _validate_identifier(env_name, 'Metadata',
                                 'environment variable name')

        return copy.deepcopy(env_node)

    # Creates a pseudo trace type from the metadata node
    # `metadata_node` and returns it.
    def _create_trace(self, metadata_node):
        ctx_obj_name = 'Trace type'
        trace = _Trace()
        trace_node = metadata_node['trace']
        trace.byte_order = self._bo
        uuid_node = trace_node.get('uuid')

        if uuid_node is not None:
            # The `uuid` property of the trace type node can be `auto`
            # to make barectf generate a UUID.
            if uuid_node == 'auto':
                trace.uuid = uuid.uuid1()
            else:
                try:
                    trace.uuid = uuid.UUID(uuid_node)
                except:
                    raise _ConfigParseError(ctx_obj_name,
                                            'Malformed UUID `{}`'.format(uuid_node))

        pht_node = trace_node.get('packet-header-type')

        if pht_node is not None:
            try:
                trace.packet_header_type = self._create_type(pht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet header field type')

        return trace

    # Creates a pseudo event type from the event node `event_node` and
    # returns it.
    def _create_event(self, event_node):
        ctx_obj_name = 'Event type'
        event = _Event()
        log_level_node = event_node.get('log-level')

        if log_level_node is not None:
            assert type(log_level_node) is int
            event.log_level = metadata.LogLevel(None, log_level_node)

        ct_node = event_node.get('context-type')

        if ct_node is not None:
            try:
                event.context_type = self._create_type(ct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create context field type')

        pt_node = event_node.get('payload-type')

        if pt_node is not None:
            try:
                event.payload_type = self._create_type(pt_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create payload field type')

        return event

    # Creates a pseudo stream type named `stream_name` from the stream
    # node `stream_node` and returns it.
    def _create_stream(self, stream_name, stream_node):
        ctx_obj_name = 'Stream type'
        stream = _Stream()
        pct_node = stream_node.get('packet-context-type')

        if pct_node is not None:
            try:
                stream.packet_context_type = self._create_type(pct_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create packet context field type')

        eht_node = stream_node.get('event-header-type')

        if eht_node is not None:
            try:
                stream.event_header_type = self._create_type(eht_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event header field type')

        ect_node = stream_node.get('event-context-type')

        if ect_node is not None:
            try:
                stream.event_context_type = self._create_type(ect_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event context field type')

        events_node = stream_node['events']
        cur_id = 0

        for ev_name, ev_node in events_node.items():
            try:
                ev = self._create_event(ev_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, ctx_obj_name,
                                  'Cannot create event type `{}`'.format(ev_name))

            ev.id = cur_id
            ev.name = ev_name
            stream.events[ev_name] = ev
            cur_id += 1

        default_node = stream_node.get('$default')

        if default_node is not None:
            if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
                fmt = 'Cannot specify more than one default stream type (default stream type already set to `{}`)'
                raise _ConfigParseError('Stream type',
                                        fmt.format(self._meta.default_stream_name))

            self._meta.default_stream_name = stream_name

        return stream

    # Creates a `collections.OrderedDict` object where keys are stream
    # type names and values are pseudo stream types from the metadata
    # node `metadata_node` and returns it.
    def _create_streams(self, metadata_node):
        streams = collections.OrderedDict()
        streams_node = metadata_node['streams']
        cur_id = 0

        for stream_name, stream_node in streams_node.items():
            try:
                stream = self._create_stream(stream_name, stream_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create stream type `{}`'.format(stream_name))

            stream.id = cur_id
            stream.name = stream_name
            streams[stream_name] = stream
            cur_id += 1

        return streams

    # Creates a pseudo metadata object from the configuration node
    # `root` and returns it.
    def _create_metadata(self, root):
        self._meta = _Metadata()
        metadata_node = root['metadata']

        if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
            default_stream_node = metadata_node['$default-stream']
            self._meta.default_stream_name = default_stream_node

        self._set_byte_order(metadata_node)
        self._register_clocks(metadata_node)
        self._meta.clocks = self._clocks
        self._meta.env = self._create_env(metadata_node)
        self._meta.trace = self._create_trace(metadata_node)
        self._meta.streams = self._create_streams(metadata_node)

        # validate the pseudo metadata object
        _MetadataSpecialFieldsValidator().validate(self._meta)
        _BarectfMetadataValidator().validate(self._meta)

        return self._meta

    # Gets and validates the tracing prefix as found in the
    # configuration node `config_node` and returns it.
    def _get_prefix(self, config_node):
        prefix = config_node.get('prefix', 'barectf_')
        _validate_identifier(prefix, '`prefix` property', 'prefix')
        return prefix

    # Gets the options as found in the configuration node `config_node`
    # and returns a corresponding `config.ConfigOptions` object.
    def _get_options(self, config_node):
        gen_prefix_def = False
        gen_default_stream_def = False
        options_node = config_node.get('options')

        if options_node is not None:
            gen_prefix_def = options_node.get('gen-prefix-def',
                                              gen_prefix_def)
            gen_default_stream_def = options_node.get('gen-default-stream-def',
                                                      gen_default_stream_def)

        return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)

    # Returns the last included file name from the parser's inclusion
    # file name stack.
    def _get_last_include_file(self):
        if self._include_stack:
            return self._include_stack[-1]

        return self._root_yaml_path

    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object.
    def _load_include(self, yaml_path):
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() only takes the last argument if it's
            # absolute.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise _ConfigParseError('File `{}`'.format(base_path),
                                        'Cannot recursively include file `{}`'.format(norm_path))

            self._include_stack.append(norm_path)

            # load raw content
            return self._yaml_ordered_load(norm_path)

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise _ConfigParseError('File `{}`'.format(base_path),
                                    'Cannot include file `{}`: file not found in inclusion directories'.format(yaml_path))

    # Returns a list of all the inclusion file paths as found in the
    # inclusion node `include_node`.
    def _get_include_paths(self, include_node):
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [include_node]

        # already an array
        assert type(include_node) is list
        return include_node

    # Updates the node `base_node` with an overlay node `overlay_node`.
    #
    # Both the inclusion and field type inheritance features use this
    # update mechanism.
    def _update_node(self, base_node, overlay_node):
        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge both objects
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    # append extension array items to base items
                    base_value += olay_value
                else:
                    # fall back to replacing the base property
                    base_node[olay_key] = olay_value
            else:
                # set the base property from the overlay property
                base_node[olay_key] = olay_value

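    # Merge example for _update_node() above (illustrative, written
    # with plain-dict notation for readability; in practice the nodes
    # are `collections.OrderedDict` objects, which is what the mapping
    # branch requires):
    #
    #     base_node:    {a: 1, b: {x: 1}, c: [1]}
    #     overlay_node: {a: 2, b: {y: 2}, c: [2]}
    #
    # mutates `base_node` into
    #
    #     {a: 2, b: {x: 1, y: 2}, c: [1, 2]}
    #
    # (scalars replaced, mappings merged recursively, arrays appended).
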
    # Processes inclusions using `last_overlay_node` as the last
    # overlay node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'File `{}`'.format(cur_base_path))

            # pop the inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `overlay_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

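    # Illustrative inclusion (hypothetical file names): a stream type
    # node such as
    #
    #     $include: [base-stream.yaml, extra-stream.yaml]
    #     packet-context-type: ...
    #
    # is processed by loading `base-stream.yaml`, patching it with
    # `extra-stream.yaml`, and finally patching the result with the
    # node's own properties (everything except `$include`).
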
    # Processes the inclusions of the event type node `event_node`,
    # returning the effective node.
    def _process_event_include(self, event_node):
        # Make sure the event type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(event_node,
                                        '2/config/event-pre-include')

        # process inclusions
        return self._process_node_include(event_node,
                                          self._process_event_include)

    # Processes the inclusions of the stream type node `stream_node`,
    # returning the effective node.
    def _process_stream_include(self, stream_node):
        def process_children_include(stream_node):
            if 'events' in stream_node:
                events_node = stream_node['events']

                for key in list(events_node):
                    events_node[key] = self._process_event_include(events_node[key])

        # Make sure the stream type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(stream_node,
                                        '2/config/stream-pre-include')

        # process inclusions
        return self._process_node_include(stream_node,
                                          self._process_stream_include,
                                          process_children_include)

    # Processes the inclusions of the trace type node `trace_node`,
    # returning the effective node.
    def _process_trace_include(self, trace_node):
        # Make sure the trace type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(trace_node,
                                        '2/config/trace-pre-include')

        # process inclusions
        return self._process_node_include(trace_node,
                                          self._process_trace_include)

    # Processes the inclusions of the clock type node `clock_node`,
    # returning the effective node.
    def _process_clock_include(self, clock_node):
        # Make sure the clock type node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(clock_node,
                                        '2/config/clock-pre-include')

        # process inclusions
        return self._process_node_include(clock_node,
                                          self._process_clock_include)

    # Processes the inclusions of the metadata node `metadata_node`,
    # returning the effective node.
    def _process_metadata_include(self, metadata_node):
        def process_children_include(metadata_node):
            if 'trace' in metadata_node:
                metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])

            if 'clocks' in metadata_node:
                clocks_node = metadata_node['clocks']

                for key in list(clocks_node):
                    clocks_node[key] = self._process_clock_include(clocks_node[key])

            if 'streams' in metadata_node:
                streams_node = metadata_node['streams']

                for key in list(streams_node):
                    streams_node[key] = self._process_stream_include(streams_node[key])

        # Make sure the metadata node is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(metadata_node,
                                        '2/config/metadata-pre-include')

        # process inclusions
        return self._process_node_include(metadata_node,
                                          self._process_metadata_include,
                                          process_children_include)

aad8e5e8
PP
1582 # Process the inclusions of the configuration node `config_node`,
1583 # returning the effective node.
6839ffba
PP
1584 def _process_config_includes(self, config_node):
1585 # Process inclusions in this order:
1586 #
aad8e5e8
PP
1587 # 1. Clock type node, event type nodes, and trace type nodes
1588 # (the order between those is not important).
6839ffba 1589 #
aad8e5e8 1590 # 2. Stream type nodes.
6839ffba 1591 #
aad8e5e8 1592 # 3. Metadata node.
7f4429f2 1593 #
6839ffba 1594 # This is because:
7f4429f2 1595 #
aad8e5e8
PP
1596 # * A metadata node can include clock type nodes, a trace type
1597 # node, stream type nodes, and event type nodes (indirectly).
7f4429f2 1598 #
aad8e5e8 1599 # * A stream type node can include event type nodes.
7f4429f2 1600 #
6839ffba
PP
1601 # We keep a stack of absolute paths to included files
1602 # (`self._include_stack`) to detect recursion.
1603 #
1604 # First, make sure the configuration object itself is valid for
1605 # the inclusion processing stage.
1606 self._schema_validator.validate(config_node,
1607 '2/config/config-pre-include')
1608
aad8e5e8 1609 # Process metadata node inclusions.
6839ffba
PP
1610 #
1611 # self._process_metadata_include() returns a new (or the same)
aad8e5e8 1612 # metadata node without any `$include` property in it,
6839ffba
PP
1613 # recursively.
1614 config_node['metadata'] = self._process_metadata_include(config_node['metadata'])
1615
1616 return config_node
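
    # An illustrative sketch (not part of the parser; the file name and
    # properties below are hypothetical): an `$include` property names
    # one or more partial YAML files, and the including node acts as an
    # overlay on top of the included content. For example, a stream
    # type node written as
    #
    #     $include:
    #       - base-stream.yaml
    #     events:
    #       my-event:
    #         payload-type: my-payload
    #
    # first gets the properties of `base-stream.yaml`, then has its own
    # properties merged over them; the `$include` property itself is
    # gone from the effective node.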

    # Expands the field type aliases found in the metadata node
    # `metadata_node` using the aliases of the `type_aliases_node` node.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns:
    #
    # * Any field type alias is replaced with its full field type
    #   equivalent.
    #
    # * The `type-aliases` property of `metadata_node` is removed.
    def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
        def resolve_field_type_aliases(parent_node, key, from_descr,
                                       alias_set=None):
            if key not in parent_node:
                return

            # This set holds all the aliases we need to expand,
            # recursively. This is used to detect cycles.
            if alias_set is None:
                alias_set = set()

            node = parent_node[key]

            if node is None:
                return

            if type(node) is str:
                alias = node

                if alias not in resolved_aliases:
                    # Only check for a field type alias cycle when we
                    # didn't resolve the alias yet, as a given node can
                    # refer to the same field type alias more than once.
                    if alias in alias_set:
                        fmt = 'Cycle detected during the `{}` field type alias resolution'
                        raise _ConfigParseError(from_descr, fmt.format(alias))

                    # try to load field type alias node named `alias`
                    if alias not in type_aliases_node:
                        raise _ConfigParseError(from_descr,
                                                'Field type alias `{}` does not exist'.format(alias))

                    # resolve it
                    alias_set.add(alias)
                    resolve_field_type_aliases(type_aliases_node, alias,
                                               from_descr, alias_set)
                    resolved_aliases.add(alias)

                parent_node[key] = copy.deepcopy(type_aliases_node[node])
                return

            # traverse, resolving field type aliases as needed
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                resolve_field_type_aliases(node, pkey, from_descr, alias_set)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    resolve_field_type_aliases(node[pkey], field_name,
                                               from_descr, alias_set)

        def resolve_field_type_aliases_from(parent_node, key):
            resolve_field_type_aliases(parent_node, key,
                                       '`{}` property'.format(key))

        # set of resolved field type aliases
        resolved_aliases = set()

        # Expand field type aliases within trace, stream, and event
        # types now.
        try:
            resolve_field_type_aliases_from(metadata_node['trace'],
                                            'packet-header-type')
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace type')

        for stream_name, stream in metadata_node['streams'].items():
            try:
                resolve_field_type_aliases_from(stream, 'packet-context-type')
                resolve_field_type_aliases_from(stream, 'event-header-type')
                resolve_field_type_aliases_from(stream, 'event-context-type')

                for event_name, event in stream['events'].items():
                    try:
                        resolve_field_type_aliases_from(event, 'context-type')
                        resolve_field_type_aliases_from(event, 'payload-type')
                    except _ConfigParseError as exc:
                        _append_error_ctx(exc,
                                          'Event type `{}`'.format(event_name))
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))

        # remove the (now unneeded) `type-aliases` node
        del metadata_node['type-aliases']
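
    # An illustrative sketch (hypothetical alias and property names):
    # given a `type-aliases` node such as
    #
    #     type-aliases:
    #       my-int:
    #         class: int
    #         size: 32
    #
    # a property which expects a field type, for example the
    # `element-type` property of an array field type node, may be the
    # string `my-int`; the expansion above replaces that string with a
    # deep copy of the aliased node, reporting unknown aliases and
    # alias cycles as configuration parsing errors.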

    # Applies field type inheritance to all field types found in
    # `metadata_node`.
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, no field type node has an `$inherit` or
    # `inherit` property.
    def _expand_field_type_inheritance(self, metadata_node):
        def apply_inheritance(parent_node, key):
            if key not in parent_node:
                return

            node = parent_node[key]

            if node is None:
                return

            # process children first
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                apply_inheritance(node, pkey)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    apply_inheritance(node[pkey], field_name)

            # apply inheritance of this node
            if 'inherit' in node:
                # barectf 2.1: `inherit` property was renamed to `$inherit`
                assert '$inherit' not in node
                node['$inherit'] = node['inherit']
                del node['inherit']

            inherit_key = '$inherit'

            if inherit_key in node:
                assert type(node[inherit_key]) is collections.OrderedDict

                # apply inheritance below
                apply_inheritance(node, inherit_key)

                # `node` is an overlay on the `$inherit` node
                base_node = node[inherit_key]
                del node[inherit_key]
                self._update_node(base_node, node)

                # set updated base node as this node
                parent_node[key] = base_node

        apply_inheritance(metadata_node['trace'], 'packet-header-type')

        for stream in metadata_node['streams'].values():
            apply_inheritance(stream, 'packet-context-type')
            apply_inheritance(stream, 'event-header-type')
            apply_inheritance(stream, 'event-context-type')

            for event in stream['events'].values():
                apply_inheritance(event, 'context-type')
                apply_inheritance(event, 'payload-type')

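    # An illustrative sketch (hypothetical nodes): combined with the
    # alias expansion above, a field type node written as
    #
    #     $inherit: my-int
    #     size: 64
    #
    # first has the `my-int` string replaced by its full field type
    # node, then the overlay logic above merges the inheriting
    # properties over it, yielding an effective node where `size` is 64
    # and every other property comes from `my-int`.
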
    # Calls _expand_field_type_aliases() and
    # _expand_field_type_inheritance() if the metadata node
    # `metadata_node` has a `type-aliases` property.
    def _expand_field_types(self, metadata_node):
        type_aliases_node = metadata_node.get('type-aliases')

        if type_aliases_node is None:
            # If there's no `type-aliases` node, then there are no
            # field type aliases and therefore no possible inheritance.
            return

        # first, expand field type aliases
        self._expand_field_type_aliases(metadata_node, type_aliases_node)

        # next, apply inheritance to create effective field types
        self._expand_field_type_inheritance(metadata_node)

    # Replaces the textual log levels in event type nodes of the
    # metadata node `metadata_node` with their numeric equivalent (as
    # found in the `$log-levels` or `log-levels` node of
    # `metadata_node`).
    #
    # This method modifies `metadata_node`.
    #
    # When this method returns, the `$log-levels` or `log-levels`
    # property of `metadata_node` is removed.
    def _expand_log_levels(self, metadata_node):
        if 'log-levels' in metadata_node:
            # barectf 2.1: `log-levels` property was renamed to
            # `$log-levels`
            assert '$log-levels' not in metadata_node
            metadata_node['$log-levels'] = metadata_node['log-levels']
            del metadata_node['log-levels']

        log_levels_key = '$log-levels'
        log_levels_node = metadata_node.get(log_levels_key)

        if log_levels_node is None:
            # no log level aliases
            return

        # not needed anymore
        del metadata_node[log_levels_key]

        for stream_name, stream in metadata_node['streams'].items():
            try:
                for event_name, event in stream['events'].items():
                    prop_name = 'log-level'
                    ll_node = event.get(prop_name)

                    if ll_node is None:
                        continue

                    if type(ll_node) is str:
                        if ll_node not in log_levels_node:
                            exc = _ConfigParseError('`log-level` property',
                                                    'Log level alias `{}` does not exist'.format(ll_node))
                            exc.append_ctx('Event type `{}`'.format(event_name))
                            raise exc

                        event[prop_name] = log_levels_node[ll_node]
            except _ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream type `{}`'.format(stream_name))

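    # An illustrative sketch (hypothetical names and values): given
    #
    #     $log-levels:
    #       warning: 4
    #
    # an event type node having `log-level: warning` ends up with
    # `log-level: 4` after the expansion above, while a name missing
    # from `$log-levels` is reported as a configuration parsing error
    # with its event and stream type context.
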
    # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
    def _yaml_ordered_dump(self, node, **kwds):
        class ODumper(yaml.Dumper):
            pass

        def dict_representer(dumper, node):
            return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                            node.items())

        ODumper.add_representer(collections.OrderedDict, dict_representer)

        # Python -> YAML
        return yaml.dump(node, Dumper=ODumper, **kwds)

    # Loads the content of the YAML file having the path `yaml_path` as
    # a Python object.
    #
    # All YAML maps are loaded as `collections.OrderedDict` objects.
    def _yaml_ordered_load(self, yaml_path):
        class OLoader(yaml.Loader):
            pass

        def construct_mapping(loader, node):
            loader.flatten_mapping(node)

            return collections.OrderedDict(loader.construct_pairs(node))

        OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                construct_mapping)

        # YAML -> Python
        try:
            with open(yaml_path, 'r') as f:
                node = yaml.load(f, OLoader)
        except OSError as exc:
            raise _ConfigParseError('File `{}`'.format(yaml_path),
                                    'Cannot open file: {}'.format(exc))

        assert type(node) is collections.OrderedDict
        return node

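    # An illustrative note on the two methods above: they exist so that
    # a round trip such as
    #
    #     node = self._yaml_ordered_load(self._root_yaml_path)
    #     print(self._yaml_ordered_dump(node, default_flow_style=False))
    #
    # preserves the order of the mapping keys of the original document,
    # which matters because, for example, the order of the `fields`
    # properties of a structure field type determines the order of its
    # members.
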
    def _parse(self):
        self._version = None
        self._include_stack = []

        # load the configuration object as is from the root YAML file
        try:
            config_node = self._yaml_ordered_load(self._root_yaml_path)
        except _ConfigParseError as exc:
            _append_error_ctx(exc, 'Configuration',
                              'Cannot parse YAML file `{}`'.format(self._root_yaml_path))

        # Make sure the configuration object is minimally valid, that
        # is, it contains a valid `version` property.
        #
        # This step does not validate the whole configuration object
        # yet because we don't have an effective configuration object;
        # we still need to:
        #
        # * Process inclusions.
        # * Expand field types (aliases and inheritance).
        self._schema_validator.validate(config_node, 'config/config-min')

        # Process configuration object inclusions.
        #
        # self._process_config_includes() returns a new (or the same)
        # configuration object without any `$include` property in it,
        # recursively.
        config_node = self._process_config_includes(config_node)

        # Make sure that the current configuration object is valid
        # considering that field types are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-field-type-expansion')

        # Expand field types.
        #
        # This process:
        #
        # 1. Replaces field type aliases with "effective" field
        #    types, recursively.
        #
        #    After this step, the `type-aliases` property of the
        #    `metadata` node is gone.
        #
        # 2. Applies inheritance, following the `$inherit`/`inherit`
        #    properties.
        #
        #    After this step, field type objects do not contain
        #    `$inherit` or `inherit` properties.
        #
        # This is done blindly, in that the process _doesn't_ validate
        # field type objects at this point.
        self._expand_field_types(config_node['metadata'])

        # Make sure that the current configuration object is valid
        # considering that log levels are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-log-level-expansion')

        # Expand log levels, that is, replace log level strings with
        # their equivalent numeric values.
        self._expand_log_levels(config_node['metadata'])

        # validate the whole, effective configuration object
        self._schema_validator.validate(config_node, '2/config/config')

        # dump config if required
        if self._dump_config:
            print(self._yaml_ordered_dump(config_node, indent=2,
                                          default_flow_style=False))

        # get prefix, options, and metadata pseudo-object
        prefix = self._get_prefix(config_node)
        opts = self._get_options(config_node)
        pseudo_meta = self._create_metadata(config_node)

        # create public configuration
        self._config = config.Config(pseudo_meta.to_public(), prefix, opts)

    @property
    def config(self):
        return self._config


def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
                                 dump_config).config
    except _ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file `{}`'.format(path))
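
# An illustrative sketch (hypothetical arguments): a caller would
# typically do
#
#     cfg = _from_file('config.yaml', ['include'], False, False)
#
# to get a `config.Config` object; any parsing problem propagates as a
# `_ConfigParseError` whose context chain names the configuration, the
# file, and the offending node.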