# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import collections
import copy
import enum
import os
import uuid

import jsonschema
import pkg_resources
import yaml

from barectf import config
from barectf import metadata
37 # The context of a configuration parsing error.
39 # Such a context object has a name and, optionally, a message.
40 class _ConfigParseErrorCtx
:
41 def __init__(self
, name
, msg
=None):
54 # Appends the context having the object name `obj_name` and the
55 # (optional) message `msg` to the `_ConfigParseError` exception `exc`
56 # and then raises `exc` again.
57 def _append_error_ctx(exc
, obj_name
, msg
=None):
58 exc
.append_ctx(obj_name
, msg
)
62 # A configuration parsing error.
64 # Such an error object contains a list of contexts (`ctx` property).
66 # The first context of this list is the most specific context, while the
67 # last is the more general.
69 # Use append_ctx() to append a context to an existing configuration
70 # parsing error when you catch it before raising it again. You can use
71 # _append_error_ctx() to do exactly this in a single call.
72 class _ConfigParseError(RuntimeError):
73 def __init__(self
, init_ctx_name
, init_ctx_msg
=None):
75 self
.append_ctx(init_ctx_name
, init_ctx_msg
)
81 def append_ctx(self
, name
, msg
=None):
82 self
._ctx
.append(_ConfigParseErrorCtx(name
, msg
))
85 def _opt_to_public(obj
):
89 return obj
.to_public()
92 # Pseudo object base class.
94 # A concrete pseudo object contains the same data as its public version,
97 # The to_public() method converts the pseudo object to an equivalent
98 # public, immutable object, caching the result so as to always return
99 # the same Python object.
105 if self
._public
is None:
106 self
._public
= self
._to
_public
()
110 def _to_public(self
):
111 raise NotImplementedError
# Pseudo property mapping: links an integer field type to a clock
# type's property.
class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): initializers restored (missing from this view).
        # `object` is the pseudo clock type and `prop` the property
        # name ('value'), as assigned by _set_int_clock_prop_mapping()
        # further down this file.
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)
# Pseudo integer field type.
class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `size`, `align`, `signed`, and `base` are
        # restored here (missing from this view); their defaults are
        # inferred from _create_integer_field_type(), which only
        # assigns them when the corresponding YAML property exists —
        # confirm the `base` default against that method.
        self.size = None
        self.align = None
        self.signed = False
        self.byte_order = None
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        # Effective alignment: an explicit `align` wins; otherwise a
        # byte-multiple size gets byte (8-bit) alignment and anything
        # else gets bit (1-bit) alignment.
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)
# Pseudo floating point number field type.
class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `exp_size` and `align` restored (missing from
        # this view); exponent/mantissa sizes come from the YAML `size`
        # mapping (see _create_float_field_type()) — confirm the
        # 8-bit alignment default against that method.
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        # effective alignment is the configured alignment
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)
# Pseudo enumeration field type.
class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()

        # pseudo integer field type holding the enumeration's values
        self.value_type = None

        # label -> (lower, upper) integral range, in declaration order
        # (see _create_enum_field_type() which fills this)
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        # an enumeration is as aligned as its value field type
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)
# Pseudo string field type.
class _String(_PseudoObj):
    def __init__(self):
        super().__init__()

        # strings default to UTF-8 (overridden by the YAML `encoding`
        # property in _create_string_field_type())
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        # NOTE(review): restored body (missing from this view) — a
        # string is a sequence of bytes, hence byte (8-bit) alignment;
        # confirm against the public `metadata.String` type.
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)
# Pseudo (static) array field type.
class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `length` restored (missing from this view);
        # assigned from the YAML `length` property in
        # _create_array_field_type().
        self.length = None
        self.element_type = None

    @property
    def real_align(self):
        # an array is as aligned as its element field type
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)
# Pseudo structure field type.
class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `min_align` restored (missing from this view);
        # 1 (no constraint) is the natural default since real_align
        # below only ever raises it — confirm against
        # _create_struct_field_type()'s `min-align` handling.
        self.min_align = 1

        # field name -> pseudo field type, in declaration order
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        # effective alignment: the largest of `min_align` and of every
        # field's effective alignment
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))
# Pseudo trace type.
class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None

        # NOTE(review): `uuid` restored (missing from this view);
        # assigned by _create_trace() (either a generated or a parsed
        # UUID) and consumed by _to_public() below.
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))
# Pseudo clock type.
class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `name`, `uuid`, and `freq` restored (missing
        # from this view); all three are consumed by _to_public()
        # below. The 1 GHz default frequency is an assumption —
        # confirm against _create_clock()'s `freq` handling.
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description,
                              self.freq, self.error_cycles,
                              self.offset_seconds, self.offset_cycles,
                              self.absolute, self.return_ctype)
# Pseudo event type.
class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `id` and `name` restored (missing from this
        # view); both appear in _to_public() below and are assigned by
        # the stream-building code.
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))
# Pseudo stream type.
class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `id` and `name` restored (missing from this
        # view); both appear in _to_public() below.
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None

        # event type name -> pseudo event type, in declaration order
        self.events = collections.OrderedDict()

    def is_event_empty(self, event):
        # Returns whether or not the pseudo event type `event` is
        # empty, that is, whether its effective total number of fields
        # (stream event header/context + event context/payload) is 0.
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))
# Pseudo metadata (the configuration's root object).
class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()

        # NOTE(review): `trace`, `env`, `clocks`, and `streams`
        # restored (missing from this view); all four are consumed by
        # _to_public() below.
        self.trace = None
        self.env = None
        self.clocks = collections.OrderedDict()
        self.streams = collections.OrderedDict()
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)
# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        # Reaching this point means a schema referenced a URI which is
        # not in the local store: report it as a hard (internal) error.
        msg = 'Missing local schema with URI `{}`'.format(uri)
        raise RuntimeError(msg)
358 # Schema validator which considers all the schemas found in the barectf
359 # package's `schemas` directory.
361 # The only public method is validate() which accepts an instance to
362 # validate as well as a schema short ID.
363 class _SchemaValidator
:
365 subdirs
= ['config', os
.path
.join('2', 'config')]
366 schemas_dir
= pkg_resources
.resource_filename(__name__
, 'schemas')
369 for subdir
in subdirs
:
370 dir = os
.path
.join(schemas_dir
, subdir
)
372 for file_name
in os
.listdir(dir):
373 if not file_name
.endswith('.yaml'):
376 with
open(os
.path
.join(dir, file_name
)) as f
:
377 schema
= yaml
.load(f
, Loader
=yaml
.SafeLoader
)
379 assert '$id' in schema
380 schema_id
= schema
['$id']
381 assert schema_id
not in self
._store
382 self
._store
[schema_id
] = schema
385 def _dict_from_ordered_dict(o_dict
):
388 for k
, v
in o_dict
.items():
391 if type(v
) is collections
.OrderedDict
:
392 new_v
= _SchemaValidator
._dict
_from
_ordered
_dict
(v
)
398 def _validate(self
, instance
, schema_short_id
):
399 # retrieve full schema ID from short ID
400 schema_id
= 'https://barectf.org/schemas/{}.json'.format(schema_short_id
)
401 assert schema_id
in self
._store
403 # retrieve full schema
404 schema
= self
._store
[schema_id
]
406 # Create a reference resolver for this schema using this
407 # validator's schema store.
408 resolver
= _RefResolver(base_uri
=schema_id
, referrer
=schema
,
411 # create a JSON schema validator using this reference resolver
412 validator
= jsonschema
.Draft7Validator(schema
, resolver
=resolver
)
414 # Validate the instance, converting its
415 # `collections.OrderedDict` objects to `dict` objects so as to
416 # make any error message easier to read (because
417 # validator.validate() below uses str() for error messages, and
418 # collections.OrderedDict.__str__() returns a somewhat bulky
420 validator
.validate(self
._dict
_from
_ordered
_dict
(instance
))
422 # Validates `instance` using the schema having the short ID
425 # A schema short ID is the part between `schemas/` and `.json` in
428 # Raises a `_ConfigParseError` object, hiding any `jsonschema`
429 # exception, on validation failure.
430 def validate(self
, instance
, schema_short_id
):
432 self
._validate
(instance
, schema_short_id
)
433 except jsonschema
.ValidationError
as exc
:
434 # convert to barectf `_ConfigParseError` exception
435 contexts
= ['Configuration object']
437 # Each element of the instance's absolute path is either an
438 # integer (array element's index) or a string (object
440 for elem
in exc
.absolute_path
:
441 if type(elem
) is int:
442 ctx
= 'Element {}'.format(elem
)
444 ctx
= '`{}` property'.format(elem
)
450 if len(exc
.context
) > 0:
451 # According to the documentation of
452 # jsonschema.ValidationError.context(),
453 # the method returns a
455 # > list of errors from the subschemas
457 # This contains additional information about the
458 # validation failure which can help the user figure out
459 # what's wrong exactly.
461 # Join each message with `; ` and append this to our
462 # configuration parsing error's message.
463 msgs
= '; '.join([e
.message
for e
in exc
.context
])
464 schema_ctx
= ': {}'.format(msgs
)
466 new_exc
= _ConfigParseError(contexts
.pop(),
467 '{}{} (from schema `{}`)'.format(exc
.message
,
471 for ctx
in reversed(contexts
):
472 new_exc
.append_ctx(ctx
)
477 # Converts the byte order string `bo_str` to a `metadata.ByteOrder`
479 def _byte_order_str_to_bo(bo_str
):
480 bo_str
= bo_str
.lower()
483 return metadata
.ByteOrder
.LE
485 return metadata
.ByteOrder
.BE
488 # Converts the encoding string `encoding_str` to a `metadata.Encoding`
490 def _encoding_str_to_encoding(encoding_str
):
491 encoding_str
= encoding_str
.lower()
493 if encoding_str
== 'utf-8' or encoding_str
== 'utf8':
494 return metadata
.Encoding
.UTF8
495 elif encoding_str
== 'ascii':
496 return metadata
.Encoding
.ASCII
497 elif encoding_str
== 'none':
498 return metadata
.Encoding
.NONE
501 # Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
502 # exception using `ctx_obj_name` and `prop` to format the message if
504 def _validate_identifier(iden
, ctx_obj_name
, prop
):
505 assert type(iden
) is str
524 if iden
in ctf_keywords
:
525 fmt
= 'Invalid {} (not a valid identifier): `{}`'
526 raise _ConfigParseError(ctx_obj_name
, fmt
.format(prop
, iden
))
529 # Validates the alignment `align`, raising a `_ConfigParseError`
530 # exception using `ctx_obj_name` if it's invalid.
531 def _validate_alignment(align
, ctx_obj_name
):
534 if (align
& (align
- 1)) != 0:
535 raise _ConfigParseError(ctx_obj_name
,
536 'Invalid alignment (not a power of two): {}'.format(align
))
541 # Order of values is important here.
543 class _Entity(enum
.IntEnum
):
544 TRACE_PACKET_HEADER
= 0
545 STREAM_PACKET_CONTEXT
= 1
546 STREAM_EVENT_HEADER
= 2
547 STREAM_EVENT_CONTEXT
= 3
552 # A validator which validates the configured metadata for barectf
557 # * The alignments of all header/context field types are at least 8.
559 # * There are no nested structure or array field types, except the
560 # packet header field type's `uuid` field
562 class _BarectfMetadataValidator
:
564 self
._type
_to
_validate
_type
_func
= {
565 _Struct
: self
._validate
_struct
_type
,
566 _Array
: self
._validate
_array
_type
,
569 def _validate_struct_type(self
, t
, entity_root
):
571 raise _ConfigParseError('Structure field type',
572 'Inner structure field types are not supported as of this version')
574 for field_name
, field_type
in t
.fields
.items():
575 if entity_root
and self
._cur
_entity
is _Entity
.TRACE_PACKET_HEADER
:
576 if field_name
== 'uuid':
581 self
._validate
_type
(field_type
, False)
582 except _ConfigParseError
as exc
:
583 _append_error_ctx(exc
,
584 'Structure field type\'s field `{}`'.format(field_name
))
586 def _validate_array_type(self
, t
, entity_root
):
587 raise _ConfigParseError('Array field type',
588 'Not supported as of this version')
590 def _validate_type(self
, t
, entity_root
):
591 func
= self
._type
_to
_validate
_type
_func
.get(type(t
))
596 def _validate_entity(self
, t
):
600 # make sure root field type has a real alignment of at least 8
602 raise _ConfigParseError('Root field type',
603 'Effective alignment must be at least 8 (got {})'.format(t
.real_align
))
605 assert type(t
) is _Struct
607 # validate field types
608 self
._validate
_type
(t
, True)
610 def _validate_event_entities_and_names(self
, stream
, ev
):
612 _validate_identifier(ev
.name
, 'Event type', 'event type name')
614 self
._cur
_entity
= _Entity
.EVENT_CONTEXT
617 self
._validate
_entity
(ev
.context_type
)
618 except _ConfigParseError
as exc
:
619 _append_error_ctx(exc
, 'Event type',
620 'Invalid context field type')
622 self
._cur
_entity
= _Entity
.EVENT_PAYLOAD
625 self
._validate
_entity
(ev
.payload_type
)
626 except _ConfigParseError
as exc
:
627 _append_error_ctx(exc
, 'Event type',
628 'Invalid payload field type')
630 if stream
.is_event_empty(ev
):
631 raise _ConfigParseError('Event type', 'Empty')
632 except _ConfigParseError
as exc
:
633 _append_error_ctx(exc
, 'Event type `{}`'.format(ev
.name
))
635 def _validate_stream_entities_and_names(self
, stream
):
637 _validate_identifier(stream
.name
, 'Stream type', 'stream type name')
638 self
._cur
_entity
= _Entity
.STREAM_PACKET_CONTEXT
641 self
._validate
_entity
(stream
.packet_context_type
)
642 except _ConfigParseError
as exc
:
643 _append_error_ctx(exc
, 'Stream type',
644 'Invalid packet context field type')
646 self
._cur
_entity
= _Entity
.STREAM_EVENT_HEADER
649 self
._validate
_entity
(stream
.event_header_type
)
650 except _ConfigParseError
as exc
:
651 _append_error_ctx(exc
, 'Stream type',
652 'Invalid event header field type')
654 self
._cur
_entity
= _Entity
.STREAM_EVENT_CONTEXT
657 self
._validate
_entity
(stream
.event_context_type
)
658 except _ConfigParseError
as exc
:
659 _append_error_ctx(exc
, 'Stream type',
660 'Invalid event context field type')
662 for ev
in stream
.events
.values():
663 self
._validate
_event
_entities
_and
_names
(stream
, ev
)
664 except _ConfigParseError
as exc
:
665 _append_error_ctx(exc
, 'Stream type `{}`'.format(stream
.name
))
667 def _validate_entities_and_names(self
, meta
):
668 self
._cur
_entity
= _Entity
.TRACE_PACKET_HEADER
671 self
._validate
_entity
(meta
.trace
.packet_header_type
)
672 except _ConfigParseError
as exc
:
673 _append_error_ctx(exc
, 'Trace type',
674 'Invalid packet header field type')
676 for stream
in meta
.streams
.values():
677 self
._validate
_stream
_entities
_and
_names
(stream
)
679 def _validate_default_stream(self
, meta
):
680 if meta
.default_stream_name
is not None:
681 if meta
.default_stream_name
not in meta
.streams
.keys():
682 fmt
= 'Default stream type name (`{}`) does not name an existing stream type'
683 raise _ConfigParseError('Metadata',
684 fmt
.format(meta
.default_stream_name
))
686 def validate(self
, meta
):
688 self
._validate
_entities
_and
_names
(meta
)
689 self
._validate
_default
_stream
(meta
)
690 except _ConfigParseError
as exc
:
691 _append_error_ctx(exc
, 'barectf metadata')
694 # A validator which validates special fields of trace, stream, and event
696 class _MetadataSpecialFieldsValidator
:
697 # Validates the packet header field type `t`.
698 def _validate_trace_packet_header_type(self
, t
):
699 ctx_obj_name
= '`packet-header-type` property'
701 # If there's more than one stream type, then the `stream_id`
702 # (stream type ID) field is required.
703 if len(self
._meta
.streams
) > 1:
705 raise _ConfigParseError('Trace type',
706 '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')
708 if 'stream_id' not in t
.fields
:
709 raise _ConfigParseError(ctx_obj_name
,
710 '`stream_id` field is required (because there\'s more than one stream type)')
715 # The `magic` field type must be the first one.
717 # The `stream_id` field type's size (bits) must be large enough
718 # to accomodate any stream type ID.
719 for i
, (field_name
, field_type
) in enumerate(t
.fields
.items()):
720 if field_name
== 'magic':
722 raise _ConfigParseError(ctx_obj_name
,
723 '`magic` field must be the first packet header field type\'s field')
724 elif field_name
== 'stream_id':
725 if len(self
._meta
.streams
) > (1 << field_type
.size
):
726 raise _ConfigParseError(ctx_obj_name
,
727 '`stream_id` field\'s size is too small to accomodate {} stream types'.format(len(self
._meta
.streams
)))
729 # Validates the trace type of the metadata object `meta`.
730 def _validate_trace(self
, meta
):
731 self
._validate
_trace
_packet
_header
_type
(meta
.trace
.packet_header_type
)
733 # Validates the packet context field type of the stream type
735 def _validate_stream_packet_context(self
, stream
):
736 ctx_obj_name
= '`packet-context-type` property'
737 t
= stream
.packet_context_type
740 # The `timestamp_begin` and `timestamp_end` field types must be
741 # mapped to the `value` property of the same clock.
742 ts_begin
= t
.fields
.get('timestamp_begin')
743 ts_end
= t
.fields
.get('timestamp_end')
745 if ts_begin
is not None and ts_end
is not None:
746 if ts_begin
.property_mappings
[0].object.name
!= ts_end
.property_mappings
[0].object.name
:
747 raise _ConfigParseError(ctx_obj_name
,
748 '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')
750 # The `packet_size` field type's size must be greater than or
751 # equal to the `content_size` field type's size.
752 if t
.fields
['content_size'].size
> t
.fields
['packet_size'].size
:
753 raise _ConfigParseError(ctx_obj_name
,
754 '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')
756 # Validates the event header field type of the stream type `stream`.
757 def _validate_stream_event_header(self
, stream
):
758 ctx_obj_name
= '`event-header-type` property'
759 t
= stream
.event_header_type
761 # If there's more than one event type, then the `id` (event type
762 # ID) field is required.
763 if len(stream
.events
) > 1:
765 raise _ConfigParseError('Stream type',
766 '`id` field is required (because there\'s more than one event type), but event header field type is missing')
768 if 'id' not in t
.fields
:
769 raise _ConfigParseError(ctx_obj_name
,
770 '`id` field is required (because there\'s more than one event type)')
775 # The `id` field type's size (bits) must be large enough to
776 # accomodate any event type ID.
777 eid
= t
.fields
.get('id')
780 if len(stream
.events
) > (1 << eid
.size
):
781 raise _ConfigParseError(ctx_obj_name
,
782 '`id` field\'s size is too small to accomodate {} event types'.format(len(stream
.events
)))
784 # Validates the stream type `stream`.
785 def _validate_stream(self
, stream
):
786 self
._validate
_stream
_packet
_context
(stream
)
787 self
._validate
_stream
_event
_header
(stream
)
789 # Validates the trace and stream types of the metadata object
791 def validate(self
, meta
):
796 self
._validate
_trace
(meta
)
797 except _ConfigParseError
as exc
:
798 _append_error_ctx(exc
, 'Trace type')
800 for stream
in meta
.streams
.values():
802 self
._validate
_stream
(stream
)
803 except _ConfigParseError
as exc
:
804 _append_error_ctx(exc
, 'Stream type `{}`'.format(stream
.name
))
805 except _ConfigParseError
as exc
:
806 _append_error_ctx(exc
, 'Metadata')
809 # A barectf YAML configuration parser.
811 # When you build such a parser, it parses the configuration file and
812 # creates a corresponding `config.Config` object which you can get with
813 # the `config` property.
815 # See the comments of _parse() for more implementation details about the
816 # parsing stages and general strategy.
817 class _YamlConfigParser
:
818 # Builds a barectf YAML configuration parser and parses the
819 # configuration file having the path `path`.
821 # The parser considers the inclusion directories `include_dirs`,
822 # ignores nonexistent inclusion files if `ignore_include_not_found`
823 # is `True`, and dumps the effective configuration (as YAML) if
824 # `dump_config` is `True`.
826 # Raises `_ConfigParseError` on parsing error.
827 def __init__(self
, path
, include_dirs
, ignore_include_not_found
,
829 self
._root
_path
= path
830 self
._class
_name
_to
_create
_field
_type
_func
= {
831 'int': self
._create
_integer
_field
_type
,
832 'integer': self
._create
_integer
_field
_type
,
833 'flt': self
._create
_float
_field
_type
,
834 'float': self
._create
_float
_field
_type
,
835 'floating-point': self
._create
_float
_field
_type
,
836 'enum': self
._create
_enum
_field
_type
,
837 'enumeration': self
._create
_enum
_field
_type
,
838 'str': self
._create
_string
_field
_type
,
839 'string': self
._create
_string
_field
_type
,
840 'struct': self
._create
_struct
_field
_type
,
841 'structure': self
._create
_struct
_field
_type
,
842 'array': self
._create
_array
_field
_type
,
844 self
._include
_dirs
= include_dirs
845 self
._ignore
_include
_not
_found
= ignore_include_not_found
846 self
._dump
_config
= dump_config
847 self
._schema
_validator
= _SchemaValidator()
850 # Sets the default byte order as found in the `metadata_node` node.
851 def _set_byte_order(self
, metadata_node
):
852 self
._bo
= _byte_order_str_to_bo(metadata_node
['trace']['byte-order'])
853 assert self
._bo
is not None
855 # Sets the clock value property mapping of the pseudo integer field
856 # type object `int_obj` as found in the `prop_mapping_node` node.
857 def _set_int_clock_prop_mapping(self
, int_obj
, prop_mapping_node
):
858 clock_name
= prop_mapping_node
['name']
859 clock
= self
._clocks
.get(clock_name
)
862 exc
= _ConfigParseError('`property-mappings` property',
863 'Clock type `{}` does not exist'.format(clock_name
))
864 exc
.append_ctx('Integer field type')
867 prop_mapping
= _PropertyMapping()
868 prop_mapping
.object = clock
869 prop_mapping
.prop
= 'value'
870 int_obj
.property_mappings
.append(prop_mapping
)
872 # Creates a pseudo integer field type from the node `node` and
874 def _create_integer_field_type(self
, node
):
876 obj
.size
= node
['size']
877 align_node
= node
.get('align')
879 if align_node
is not None:
880 _validate_alignment(align_node
, 'Integer field type')
881 obj
.align
= align_node
883 signed_node
= node
.get('signed')
885 if signed_node
is not None:
886 obj
.signed
= signed_node
888 obj
.byte_order
= self
._bo
889 bo_node
= node
.get('byte-order')
891 if bo_node
is not None:
892 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
894 base_node
= node
.get('base')
896 if base_node
is not None:
897 if base_node
== 'bin':
899 elif base_node
== 'oct':
901 elif base_node
== 'dec':
904 assert base_node
== 'hex'
907 encoding_node
= node
.get('encoding')
909 if encoding_node
is not None:
910 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
912 pm_node
= node
.get('property-mappings')
914 if pm_node
is not None:
915 assert len(pm_node
) == 1
916 self
._set
_int
_clock
_prop
_mapping
(obj
, pm_node
[0])
920 # Creates a pseudo floating point number field type from the node
921 # `node` and returns it.
922 def _create_float_field_type(self
, node
):
923 obj
= _FloatingPoint()
924 size_node
= node
['size']
925 obj
.exp_size
= size_node
['exp']
926 obj
.mant_size
= size_node
['mant']
927 align_node
= node
.get('align')
929 if align_node
is not None:
930 _validate_alignment(align_node
, 'Floating point number field type')
931 obj
.align
= align_node
933 obj
.byte_order
= self
._bo
934 bo_node
= node
.get('byte-order')
936 if bo_node
is not None:
937 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
941 # Creates a pseudo enumeration field type from the node `node` and
943 def _create_enum_field_type(self
, node
):
944 ctx_obj_name
= 'Enumeration field type'
947 # value (integer) field type
949 obj
.value_type
= self
._create
_type
(node
['value-type'])
950 except _ConfigParseError
as exc
:
951 _append_error_ctx(exc
, ctx_obj_name
,
952 'Cannot create value (integer) field type')
955 members_node
= node
.get('members')
957 if members_node
is not None:
958 if obj
.value_type
.signed
:
959 value_min
= -(1 << obj
.value_type
.size
- 1)
960 value_max
= (1 << (obj
.value_type
.size
- 1)) - 1
963 value_max
= (1 << obj
.value_type
.size
) - 1
967 for m_node
in members_node
:
968 if type(m_node
) is str:
973 assert type(m_node
) is collections
.OrderedDict
974 label
= m_node
['label']
975 value
= m_node
['value']
977 if type(value
) is int:
979 value
= (value
, value
)
981 assert type(value
) is list
982 assert len(value
) == 2
987 exc
= _ConfigParseError(ctx_obj_name
)
988 exc
.append_ctx('Member `{}`'.format(label
),
989 'Invalid integral range ({} > {})'.format(mn
, mx
))
995 # Make sure that all the integral values of the range
996 # fits the enumeration field type's integer value field
997 # type depending on its size (bits).
998 member_obj_name
= 'Member `{}`'.format(label
)
999 msg_fmt
= 'Value {} is outside the value type range [{}, {}]'
1000 msg
= msg_fmt
.format(value
[0], value_min
, value_max
)
1003 if value
[0] < value_min
or value
[0] > value_max
:
1004 raise _ConfigParseError(member_obj_name
, msg
)
1006 if value
[1] < value_min
or value
[1] > value_max
:
1007 raise _ConfigParseError(member_obj_name
, msg
)
1008 except _ConfigParseError
as exc
:
1009 _append_error_ctx(exc
, ctx_obj_name
)
1011 obj
.members
[label
] = value
1015 # Creates a pseudo string field type from the node `node` and
1017 def _create_string_field_type(self
, node
):
1019 encoding_node
= node
.get('encoding')
1021 if encoding_node
is not None:
1022 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
1026 # Creates a pseudo structure field type from the node `node` and
1028 def _create_struct_field_type(self
, node
):
1029 ctx_obj_name
= 'Structure field type'
1031 min_align_node
= node
.get('min-align')
1033 if min_align_node
is not None:
1034 _validate_alignment(min_align_node
, ctx_obj_name
)
1035 obj
.min_align
= min_align_node
1037 fields_node
= node
.get('fields')
1039 if fields_node
is not None:
1040 for field_name
, field_node
in fields_node
.items():
1041 _validate_identifier(field_name
, ctx_obj_name
, 'field name')
1044 obj
.fields
[field_name
] = self
._create
_type
(field_node
)
1045 except _ConfigParseError
as exc
:
1046 _append_error_ctx(exc
, ctx_obj_name
,
1047 'Cannot create field `{}`'.format(field_name
))
1051 # Creates a pseudo array field type from the node `node` and returns
1053 def _create_array_field_type(self
, node
):
1055 obj
.length
= node
['length']
1058 obj
.element_type
= self
._create
_type
(node
['element-type'])
1059 except _ConfigParseError
as exc
:
1060 _append_error_ctx(exc
, 'Array field type',
1061 'Cannot create element field type')
1065 # Creates a pseudo field type from the node `node` and returns it.
1067 # This method checks the `class` property of `node` to determine
1068 # which function of `self._class_name_to_create_field_type_func` to
1069 # call to create the corresponding pseudo field type.
1070 def _create_type(self
, type_node
):
1071 return self
._class
_name
_to
_create
_field
_type
_func
[type_node
['class']](type_node
)
1073 # Creates a pseudo clock type from the node `node` and returns it.
1074 def _create_clock(self
, node
):
1076 uuid_node
= node
.get('uuid')
1078 if uuid_node
is not None:
1080 clock
.uuid
= uuid
.UUID(uuid_node
)
1081 except ValueError as exc
:
1082 raise _ConfigParseError('Clock type',
1083 'Malformed UUID `{}`: {}'.format(uuid_node
, exc
))
1085 descr_node
= node
.get('description')
1087 if descr_node
is not None:
1088 clock
.description
= descr_node
1090 freq_node
= node
.get('freq')
1092 if freq_node
is not None:
1093 clock
.freq
= freq_node
1095 error_cycles_node
= node
.get('error-cycles')
1097 if error_cycles_node
is not None:
1098 clock
.error_cycles
= error_cycles_node
1100 offset_node
= node
.get('offset')
1102 if offset_node
is not None:
1103 offset_cycles_node
= offset_node
.get('cycles')
1105 if offset_cycles_node
is not None:
1106 clock
.offset_cycles
= offset_cycles_node
1108 offset_seconds_node
= offset_node
.get('seconds')
1110 if offset_seconds_node
is not None:
1111 clock
.offset_seconds
= offset_seconds_node
1113 absolute_node
= node
.get('absolute')
1115 if absolute_node
is not None:
1116 clock
.absolute
= absolute_node
1118 return_ctype_node
= node
.get('$return-ctype')
1120 if return_ctype_node
is None:
1121 # barectf 2.1: `return-ctype` property was renamed to
1123 return_ctype_node
= node
.get('return-ctype')
1125 if return_ctype_node
is not None:
1126 clock
.return_ctype
= return_ctype_node
1130 # Registers all the clock types of the metadata node
1131 # `metadata_node`, creating pseudo clock types during the process,
1132 # within this parser.
1134 # The pseudo clock types in `self._clocks` are then accessible when
1135 # creating a pseudo integer field type (see
1136 # _create_integer_field_type() and _set_int_clock_prop_mapping()).
1137 def _register_clocks(self
, metadata_node
):
1138 self
._clocks
= collections
.OrderedDict()
1139 clocks_node
= metadata_node
.get('clocks')
1141 if clocks_node
is None:
1144 for clock_name
, clock_node
in clocks_node
.items():
1145 _validate_identifier(clock_name
, 'Metadata', 'clock type name')
1146 assert clock_name
not in self
._clocks
1149 clock
= self
._create
_clock
(clock_node
)
1150 except _ConfigParseError
as exc
:
1151 _append_error_ctx(exc
, 'Metadata',
1152 'Cannot create clock type `{}`'.format(clock_name
))
1154 clock
.name
= clock_name
1155 self
._clocks
[clock_name
] = clock
1157 # Creates an environment object (`collections.OrderedDict`) from the
1158 # metadata node `metadata_node` and returns it.
1159 def _create_env(self
, metadata_node
):
1160 env_node
= metadata_node
.get('env')
1162 if env_node
is None:
1163 return collections
.OrderedDict()
1165 for env_name
, env_value
in env_node
.items():
1166 _validate_identifier(env_name
, 'Metadata',
1167 'environment variable name')
1169 return copy
.deepcopy(env_node
)
1171 # Creates a pseudo trace type from the metadata node `metadata_node`
1173 def _create_trace(self
, metadata_node
):
1174 ctx_obj_name
= 'Trace type'
1176 trace_node
= metadata_node
['trace']
1177 trace
.byte_order
= self
._bo
1178 uuid_node
= trace_node
.get('uuid')
1180 if uuid_node
is not None:
1181 # The `uuid` property of the trace type node can be `auto`
1182 # to make barectf generate a UUID.
1183 if uuid_node
== 'auto':
1184 trace
.uuid
= uuid
.uuid1()
1187 trace
.uuid
= uuid
.UUID(uuid_node
)
1188 except ValueError as exc
:
1189 raise _ConfigParseError(ctx_obj_name
,
1190 'Malformed UUID `{}`: {}'.format(uuid_node
, exc
))
1192 pht_node
= trace_node
.get('packet-header-type')
1194 if pht_node
is not None:
1196 trace
.packet_header_type
= self
._create
_type
(pht_node
)
1197 except _ConfigParseError
as exc
:
1198 _append_error_ctx(exc
, ctx_obj_name
,
1199 'Cannot create packet header field type')
1203 # Creates a pseudo event type from the event node `event_node` and
1205 def _create_event(self
, event_node
):
1206 ctx_obj_name
= 'Event type'
1208 log_level_node
= event_node
.get('log-level')
1210 if log_level_node
is not None:
1211 assert type(log_level_node
) is int
1212 event
.log_level
= metadata
.LogLevel(None, log_level_node
)
1214 ct_node
= event_node
.get('context-type')
1216 if ct_node
is not None:
1218 event
.context_type
= self
._create
_type
(ct_node
)
1219 except _ConfigParseError
as exc
:
1220 _append_error_ctx(exc
, ctx_obj_name
,
1221 'Cannot create context field type')
1223 pt_node
= event_node
.get('payload-type')
1225 if pt_node
is not None:
1227 event
.payload_type
= self
._create
_type
(pt_node
)
1228 except _ConfigParseError
as exc
:
1229 _append_error_ctx(exc
, ctx_obj_name
,
1230 'Cannot create payload field type')
1234 # Creates a pseudo stream type named `stream_name` from the stream
1235 # node `stream_node` and returns it.
1236 def _create_stream(self
, stream_name
, stream_node
):
1237 ctx_obj_name
= 'Stream type'
1239 pct_node
= stream_node
.get('packet-context-type')
1241 if pct_node
is not None:
1243 stream
.packet_context_type
= self
._create
_type
(pct_node
)
1244 except _ConfigParseError
as exc
:
1245 _append_error_ctx(exc
, ctx_obj_name
,
1246 'Cannot create packet context field type')
1248 eht_node
= stream_node
.get('event-header-type')
1250 if eht_node
is not None:
1252 stream
.event_header_type
= self
._create
_type
(eht_node
)
1253 except _ConfigParseError
as exc
:
1254 _append_error_ctx(exc
, ctx_obj_name
,
1255 'Cannot create event header field type')
1257 ect_node
= stream_node
.get('event-context-type')
1259 if ect_node
is not None:
1261 stream
.event_context_type
= self
._create
_type
(ect_node
)
1262 except _ConfigParseError
as exc
:
1263 _append_error_ctx(exc
, ctx_obj_name
,
1264 'Cannot create event context field type')
1266 events_node
= stream_node
['events']
1269 for ev_name
, ev_node
in events_node
.items():
1271 ev
= self
._create
_event
(ev_node
)
1272 except _ConfigParseError
as exc
:
1273 _append_error_ctx(exc
, ctx_obj_name
,
1274 'Cannot create event type `{}`'.format(ev_name
))
1278 stream
.events
[ev_name
] = ev
1281 default_node
= stream_node
.get('$default')
1283 if default_node
is not None:
1284 if self
._meta
.default_stream_name
is not None and self
._meta
.default_stream_name
!= stream_name
:
1285 fmt
= 'Cannot specify more than one default stream type (default stream type already set to `{}`)'
1286 raise _ConfigParseError('Stream type',
1287 fmt
.format(self
._meta
.default_stream_name
))
1289 self
._meta
.default_stream_name
= stream_name
1293 # Creates a `collections.OrderedDict` object where keys are stream
1294 # type names and values are pseudo stream types from the metadata
1295 # node `metadata_node` and returns it.
1296 def _create_streams(self
, metadata_node
):
1297 streams
= collections
.OrderedDict()
1298 streams_node
= metadata_node
['streams']
1301 for stream_name
, stream_node
in streams_node
.items():
1303 stream
= self
._create
_stream
(stream_name
, stream_node
)
1304 except _ConfigParseError
as exc
:
1305 _append_error_ctx(exc
, 'Metadata',
1306 'Cannot create stream type `{}`'.format(stream_name
))
1309 stream
.name
= stream_name
1310 streams
[stream_name
] = stream
1315 # Creates a pseudo metadata object from the configuration node
1316 # `root` and returns it.
1317 def _create_metadata(self
, root
):
1318 self
._meta
= _Metadata()
1319 metadata_node
= root
['metadata']
1321 if '$default-stream' in metadata_node
and metadata_node
['$default-stream'] is not None:
1322 default_stream_node
= metadata_node
['$default-stream']
1323 self
._meta
.default_stream_name
= default_stream_node
1325 self
._set
_byte
_order
(metadata_node
)
1326 self
._register
_clocks
(metadata_node
)
1327 self
._meta
.clocks
= self
._clocks
1328 self
._meta
.env
= self
._create
_env
(metadata_node
)
1329 self
._meta
.trace
= self
._create
_trace
(metadata_node
)
1330 self
._meta
.streams
= self
._create
_streams
(metadata_node
)
1332 # validate the pseudo metadata object
1333 _MetadataSpecialFieldsValidator().validate(self
._meta
)
1334 _BarectfMetadataValidator().validate(self
._meta
)
1338 # Gets and validates the tracing prefix as found in the
1339 # configuration node `config_node` and returns it.
1340 def _get_prefix(self
, config_node
):
1341 prefix
= config_node
.get('prefix', 'barectf_')
1342 _validate_identifier(prefix
, '`prefix` property', 'prefix')
1345 # Gets the options as found in the configuration node `config_node`
1346 # and returns a corresponding `config.ConfigOptions` object.
1347 def _get_options(self
, config_node
):
1348 gen_prefix_def
= False
1349 gen_default_stream_def
= False
1350 options_node
= config_node
.get('options')
1352 if options_node
is not None:
1353 gen_prefix_def
= options_node
.get('gen-prefix-def',
1355 gen_default_stream_def
= options_node
.get('gen-default-stream-def',
1356 gen_default_stream_def
)
1358 return config
.ConfigOptions(gen_prefix_def
, gen_default_stream_def
)
1360 # Returns the last included file name from the parser's inclusion
1362 def _get_last_include_file(self
):
1363 if self
._include
_stack
:
1364 return self
._include
_stack
[-1]
1366 return self
._root
_path
1368 # Loads the inclusion file having the path `yaml_path` and returns
1369 # its content as a `collections.OrderedDict` object.
1370 def _load_include(self
, yaml_path
):
1371 for inc_dir
in self
._include
_dirs
:
1372 # Current inclusion dir + file name path.
1374 # Note: os.path.join() only takes the last argument if it's
1376 inc_path
= os
.path
.join(inc_dir
, yaml_path
)
1378 # real path (symbolic links resolved)
1379 real_path
= os
.path
.realpath(inc_path
)
1381 # normalized path (weird stuff removed!)
1382 norm_path
= os
.path
.normpath(real_path
)
1384 if not os
.path
.isfile(norm_path
):
1385 # file doesn't exist: skip
1388 if norm_path
in self
._include
_stack
:
1389 base_path
= self
._get
_last
_include
_file
()
1390 raise _ConfigParseError('File `{}`'.format(base_path
),
1391 'Cannot recursively include file `{}`'.format(norm_path
))
1393 self
._include
_stack
.append(norm_path
)
1396 return self
._yaml
_ordered
_load
(norm_path
)
1398 if not self
._ignore
_include
_not
_found
:
1399 base_path
= self
._get
_last
_include
_file
()
1400 raise _ConfigParseError('File `{}`'.format(base_path
),
1401 'Cannot include file `{}`: file not found in inclusion directories'.format(yaml_path
))
1403 # Returns a list of all the inclusion file paths as found in the
1404 # inclusion node `include_node`.
1405 def _get_include_paths(self
, include_node
):
1406 if include_node
is None:
1410 if type(include_node
) is str:
1412 return [include_node
]
1415 assert type(include_node
) is list
1418 # Updates the node `base_node` with an overlay node `overlay_node`.
1420 # Both the inclusion and field type inheritance features use this
1422 def _update_node(self
, base_node
, overlay_node
):
1423 for olay_key
, olay_value
in overlay_node
.items():
1424 if olay_key
in base_node
:
1425 base_value
= base_node
[olay_key
]
1427 if type(olay_value
) is collections
.OrderedDict
and type(base_value
) is collections
.OrderedDict
:
1428 # merge both objects
1429 self
._update
_node
(base_value
, olay_value
)
1430 elif type(olay_value
) is list and type(base_value
) is list:
1431 # append extension array items to base items
1432 base_value
+= olay_value
1434 # fall back to replacing base property
1435 base_node
[olay_key
] = olay_value
1437 # set base property from overlay property
1438 base_node
[olay_key
] = olay_value
1440 # Processes inclusions using `last_overlay_node` as the last overlay
1441 # node to use to "patch" the node.
1443 # If `last_overlay_node` contains an `$include` property, then this
1444 # method patches the current base node (initially empty) in order
1445 # using the content of the inclusion files (recursively).
1447 # At the end, this method removes the `$include` of
1448 # `last_overlay_node` and then patches the current base node with
1449 # its other properties before returning the result (always a deep
1451 def _process_node_include(self
, last_overlay_node
,
1452 process_base_include_cb
,
1453 process_children_include_cb
=None):
1454 # process children inclusions first
1455 if process_children_include_cb
is not None:
1456 process_children_include_cb(last_overlay_node
)
1458 incl_prop_name
= '$include'
1460 if incl_prop_name
in last_overlay_node
:
1461 include_node
= last_overlay_node
[incl_prop_name
]
1464 return last_overlay_node
1466 include_paths
= self
._get
_include
_paths
(include_node
)
1467 cur_base_path
= self
._get
_last
_include
_file
()
1470 # keep the inclusion paths and remove the `$include` property
1471 include_paths
= copy
.deepcopy(include_paths
)
1472 del last_overlay_node
[incl_prop_name
]
1474 for include_path
in include_paths
:
1475 # load raw YAML from included file
1476 overlay_node
= self
._load
_include
(include_path
)
1478 if overlay_node
is None:
1479 # Cannot find inclusion file, but we're ignoring those
1480 # errors, otherwise _load_include() itself raises a
1484 # recursively process inclusions
1486 overlay_node
= process_base_include_cb(overlay_node
)
1487 except _ConfigParseError
as exc
:
1488 _append_error_ctx(exc
, 'File `{}`'.format(cur_base_path
))
1490 # pop inclusion stack now that we're done including
1491 del self
._include
_stack
[-1]
1493 # At this point, `base_node` is fully resolved (does not
1494 # contain any `$include` property).
1495 if base_node
is None:
1496 base_node
= overlay_node
1498 self
._update
_node
(base_node
, overlay_node
)
1500 # Finally, update the latest base node with our last overlay
1502 if base_node
is None:
1503 # Nothing was included, which is possible when we're
1504 # ignoring inclusion errors.
1505 return last_overlay_node
1507 self
._update
_node
(base_node
, last_overlay_node
)
1510 # Process the inclusions of the event type node `event_node`,
1511 # returning the effective node.
1512 def _process_event_include(self
, event_node
):
1513 # Make sure the event type node is valid for the inclusion
1515 self
._schema
_validator
.validate(event_node
,
1516 '2/config/event-pre-include')
1518 # process inclusions
1519 return self
._process
_node
_include
(event_node
,
1520 self
._process
_event
_include
)
1522 # Process the inclusions of the stream type node `stream_node`,
1523 # returning the effective node.
1524 def _process_stream_include(self
, stream_node
):
1525 def process_children_include(stream_node
):
1526 if 'events' in stream_node
:
1527 events_node
= stream_node
['events']
1529 for key
in list(events_node
):
1530 events_node
[key
] = self
._process
_event
_include
(events_node
[key
])
1532 # Make sure the stream type node is valid for the inclusion
1534 self
._schema
_validator
.validate(stream_node
,
1535 '2/config/stream-pre-include')
1537 # process inclusions
1538 return self
._process
_node
_include
(stream_node
,
1539 self
._process
_stream
_include
,
1540 process_children_include
)
1542 # Process the inclusions of the trace type node `trace_node`,
1543 # returning the effective node.
1544 def _process_trace_include(self
, trace_node
):
1545 # Make sure the trace type node is valid for the inclusion
1547 self
._schema
_validator
.validate(trace_node
,
1548 '2/config/trace-pre-include')
1550 # process inclusions
1551 return self
._process
_node
_include
(trace_node
,
1552 self
._process
_trace
_include
)
1554 # Process the inclusions of the clock type node `clock_node`,
1555 # returning the effective node.
1556 def _process_clock_include(self
, clock_node
):
1557 # Make sure the clock type node is valid for the inclusion
1559 self
._schema
_validator
.validate(clock_node
,
1560 '2/config/clock-pre-include')
1562 # process inclusions
1563 return self
._process
_node
_include
(clock_node
,
1564 self
._process
_clock
_include
)
1566 # Process the inclusions of the metadata node `metadata_node`,
1567 # returning the effective node.
1568 def _process_metadata_include(self
, metadata_node
):
1569 def process_children_include(metadata_node
):
1570 if 'trace' in metadata_node
:
1571 metadata_node
['trace'] = self
._process
_trace
_include
(metadata_node
['trace'])
1573 if 'clocks' in metadata_node
:
1574 clocks_node
= metadata_node
['clocks']
1576 for key
in list(clocks_node
):
1577 clocks_node
[key
] = self
._process
_clock
_include
(clocks_node
[key
])
1579 if 'streams' in metadata_node
:
1580 streams_node
= metadata_node
['streams']
1582 for key
in list(streams_node
):
1583 streams_node
[key
] = self
._process
_stream
_include
(streams_node
[key
])
1585 # Make sure the metadata node is valid for the inclusion
1587 self
._schema
_validator
.validate(metadata_node
,
1588 '2/config/metadata-pre-include')
1590 # process inclusions
1591 return self
._process
_node
_include
(metadata_node
,
1592 self
._process
_metadata
_include
,
1593 process_children_include
)
1595 # Process the inclusions of the configuration node `config_node`,
1596 # returning the effective node.
1597 def _process_config_includes(self
, config_node
):
1598 # Process inclusions in this order:
1600 # 1. Clock type node, event type nodes, and trace type nodes
1601 # (the order between those is not important).
1603 # 2. Stream type nodes.
1609 # * A metadata node can include clock type nodes, a trace type
1610 # node, stream type nodes, and event type nodes (indirectly).
1612 # * A stream type node can include event type nodes.
1614 # We keep a stack of absolute paths to included files
1615 # (`self._include_stack`) to detect recursion.
1617 # First, make sure the configuration object itself is valid for
1618 # the inclusion processing stage.
1619 self
._schema
_validator
.validate(config_node
,
1620 '2/config/config-pre-include')
1622 # Process metadata node inclusions.
1624 # self._process_metadata_include() returns a new (or the same)
1625 # metadata node without any `$include` property in it,
1627 config_node
['metadata'] = self
._process
_metadata
_include
(config_node
['metadata'])
1631 # Expands the field type aliases found in the metadata node
1632 # `metadata_node` using the aliases of the `type_aliases_node` node.
1634 # This method modifies `metadata_node`.
1636 # When this method returns:
1638 # * Any field type alias is replaced with its full field type
1641 # * The `type-aliases` property of `metadata_node` is removed.
1642 def _expand_field_type_aliases(self
, metadata_node
, type_aliases_node
):
1643 def resolve_field_type_aliases(parent_node
, key
, from_descr
,
1645 if key
not in parent_node
:
1648 # This set holds all the aliases we need to expand,
1649 # recursively. This is used to detect cycles.
1650 if alias_set
is None:
1653 node
= parent_node
[key
]
1658 if type(node
) is str:
1661 if alias
not in resolved_aliases
:
1662 # Only check for a field type alias cycle when we
1663 # didn't resolve the alias yet, as a given node can
1664 # refer to the same field type alias more than once.
1665 if alias
in alias_set
:
1666 fmt
= 'Cycle detected during the `{}` field type alias resolution'
1667 raise _ConfigParseError(from_descr
, fmt
.format(alias
))
1669 # try to load field type alias node named `alias`
1670 if alias
not in type_aliases_node
:
1671 raise _ConfigParseError(from_descr
,
1672 'Field type alias `{}` does not exist'.format(alias
))
1675 alias_set
.add(alias
)
1676 resolve_field_type_aliases(type_aliases_node
, alias
,
1677 from_descr
, alias_set
)
1678 resolved_aliases
.add(alias
)
1680 parent_node
[key
] = copy
.deepcopy(type_aliases_node
[node
])
1683 # traverse, resolving field type aliases as needed
1684 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1685 resolve_field_type_aliases(node
, pkey
, from_descr
, alias_set
)
1687 # structure field type fields
1691 assert type(node
[pkey
]) is collections
.OrderedDict
1693 for field_name
in node
[pkey
]:
1694 resolve_field_type_aliases(node
[pkey
], field_name
,
1695 from_descr
, alias_set
)
1697 def resolve_field_type_aliases_from(parent_node
, key
):
1698 resolve_field_type_aliases(parent_node
, key
,
1699 '`{}` property'.format(key
))
1701 # set of resolved field type aliases
1702 resolved_aliases
= set()
1704 # Expand field type aliases within trace, stream, and event
1707 resolve_field_type_aliases_from(metadata_node
['trace'],
1708 'packet-header-type')
1709 except _ConfigParseError
as exc
:
1710 _append_error_ctx(exc
, 'Trace type')
1712 for stream_name
, stream
in metadata_node
['streams'].items():
1714 resolve_field_type_aliases_from(stream
, 'packet-context-type')
1715 resolve_field_type_aliases_from(stream
, 'event-header-type')
1716 resolve_field_type_aliases_from(stream
, 'event-context-type')
1718 for event_name
, event
in stream
['events'].items():
1720 resolve_field_type_aliases_from(event
, 'context-type')
1721 resolve_field_type_aliases_from(event
, 'payload-type')
1722 except _ConfigParseError
as exc
:
1723 _append_error_ctx(exc
,
1724 'Event type `{}`'.format(event_name
))
1725 except _ConfigParseError
as exc
:
1726 _append_error_ctx(exc
, 'Stream type `{}`'.format(stream_name
))
1728 # remove the (now unneeded) `type-aliases` node
1729 del metadata_node
['type-aliases']
1731 # Applies field type inheritance to all field types found in
1734 # This method modifies `metadata_node`.
1736 # When this method returns, no field type node has an `$inherit` or
1737 # `inherit` property.
1738 def _expand_field_type_inheritance(self
, metadata_node
):
1739 def apply_inheritance(parent_node
, key
):
1740 if key
not in parent_node
:
1743 node
= parent_node
[key
]
1748 # process children first
1749 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1750 apply_inheritance(node
, pkey
)
1752 # structure field type fields
1756 assert type(node
[pkey
]) is collections
.OrderedDict
1758 for field_name
, field_type
in node
[pkey
].items():
1759 apply_inheritance(node
[pkey
], field_name
)
1761 # apply inheritance of this node
1762 if 'inherit' in node
:
1763 # barectf 2.1: `inherit` property was renamed to `$inherit`
1764 assert '$inherit' not in node
1765 node
['$inherit'] = node
['inherit']
1768 inherit_key
= '$inherit'
1770 if inherit_key
in node
:
1771 assert type(node
[inherit_key
]) is collections
.OrderedDict
1773 # apply inheritance below
1774 apply_inheritance(node
, inherit_key
)
1776 # `node` is an overlay on the `$inherit` node
1777 base_node
= node
[inherit_key
]
1778 del node
[inherit_key
]
1779 self
._update
_node
(base_node
, node
)
1781 # set updated base node as this node
1782 parent_node
[key
] = base_node
1784 apply_inheritance(metadata_node
['trace'], 'packet-header-type')
1786 for stream
in metadata_node
['streams'].values():
1787 apply_inheritance(stream
, 'packet-context-type')
1788 apply_inheritance(stream
, 'event-header-type')
1789 apply_inheritance(stream
, 'event-context-type')
1791 for event
in stream
['events'].values():
1792 apply_inheritance(event
, 'context-type')
1793 apply_inheritance(event
, 'payload-type')
1795 # Calls _expand_field_type_aliases() and
1796 # _expand_field_type_inheritance() if the metadata node
1797 # `metadata_node` has a `type-aliases` property.
1798 def _expand_field_types(self
, metadata_node
):
1799 type_aliases_node
= metadata_node
.get('type-aliases')
1801 if type_aliases_node
is None:
1802 # If there's no `type-aliases` node, then there's no field
1803 # type aliases and therefore no possible inheritance.
1806 # first, expand field type aliases
1807 self
._expand
_field
_type
_aliases
(metadata_node
, type_aliases_node
)
1809 # next, apply inheritance to create effective field types
1810 self
._expand
_field
_type
_inheritance
(metadata_node
)
1812 # Replaces the textual log levels in event type nodes of the
1813 # metadata node `metadata_node` with their numeric equivalent (as
1814 # found in the `$log-levels` or `log-levels` node of
1817 # This method modifies `metadata_node`.
1819 # When this method returns, the `$log-levels` or `log-level`
1820 # property of `metadata_node` is removed.
1821 def _expand_log_levels(self
, metadata_node
):
1822 if 'log-levels' in metadata_node
:
1823 # barectf 2.1: `log-levels` property was renamed to
1825 assert '$log-levels' not in metadata_node
1826 metadata_node
['$log-levels'] = metadata_node
['log-levels']
1827 del metadata_node
['log-levels']
1829 log_levels_key
= '$log-levels'
1830 log_levels_node
= metadata_node
.get(log_levels_key
)
1832 if log_levels_node
is None:
1833 # no log level aliases
1836 # not needed anymore
1837 del metadata_node
[log_levels_key
]
1839 for stream_name
, stream
in metadata_node
['streams'].items():
1841 for event_name
, event
in stream
['events'].items():
1842 prop_name
= 'log-level'
1843 ll_node
= event
.get(prop_name
)
1848 if type(ll_node
) is str:
1849 if ll_node
not in log_levels_node
:
1850 exc
= _ConfigParseError('`log-level` property',
1851 'Log level alias `{}` does not exist'.format(ll_node
))
1852 exc
.append_ctx('Event type `{}`'.format(event_name
))
1855 event
[prop_name
] = log_levels_node
[ll_node
]
1856 except _ConfigParseError
as exc
:
1857 _append_error_ctx(exc
, 'Stream type `{}`'.format(stream_name
))
1859 # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
1860 def _yaml_ordered_dump(self
, node
, **kwds
):
1861 class ODumper(yaml
.Dumper
):
1864 def dict_representer(dumper
, node
):
1865 return dumper
.represent_mapping(yaml
.resolver
.BaseResolver
.DEFAULT_MAPPING_TAG
,
1868 ODumper
.add_representer(collections
.OrderedDict
, dict_representer
)
1871 return yaml
.dump(node
, Dumper
=ODumper
, **kwds
)
1873 # Loads the content of the YAML file having the path `yaml_path` as
1876 # All YAML maps are loaded as `collections.OrderedDict` objects.
1877 def _yaml_ordered_load(self
, yaml_path
):
1878 class OLoader(yaml
.Loader
):
1881 def construct_mapping(loader
, node
):
1882 loader
.flatten_mapping(node
)
1884 return collections
.OrderedDict(loader
.construct_pairs(node
))
1886 OLoader
.add_constructor(yaml
.resolver
.BaseResolver
.DEFAULT_MAPPING_TAG
,
1891 with
open(yaml_path
, 'r') as f
:
1892 node
= yaml
.load(f
, OLoader
)
1893 except (OSError, IOError) as exc
:
1894 raise _ConfigParseError('File `{}`'.format(yaml_path
),
1895 'Cannot open file: {}'.format(exc
))
1897 assert type(node
) is collections
.OrderedDict
1901 self
._version
= None
1902 self
._include
_stack
= []
1904 # load the configuration object as is from the root YAML file
1906 config_node
= self
._yaml
_ordered
_load
(self
._root
_path
)
1907 except _ConfigParseError
as exc
:
1908 _append_error_ctx(exc
, 'Configuration',
1909 'Cannot parse YAML file `{}`'.format(self
._root
_path
))
1911 # Make sure the configuration object is minimally valid, that
1912 # is, it contains a valid `version` property.
1914 # This step does not validate the whole configuration object
1915 # yet because we don't have an effective configuration object;
1918 # * Process inclusions.
1919 # * Expand field types (inheritance and aliases).
1920 self
._schema
_validator
.validate(config_node
, 'config/config-min')
1922 # Process configuration object inclusions.
1924 # self._process_config_includes() returns a new (or the same)
1925 # configuration object without any `$include` property in it,
1927 config_node
= self
._process
_config
_includes
(config_node
)
1929 # Make sure that the current configuration object is valid
1930 # considering field types are not expanded yet.
1931 self
._schema
_validator
.validate(config_node
,
1932 '2/config/config-pre-field-type-expansion')
1934 # Expand field types.
1938 # 1. Replaces field type aliases with "effective" field
1939 # types, recursively.
1941 # After this step, the `type-aliases` property of the
1942 # `metadata` node is gone.
1944 # 2. Applies inheritance, following the `$inherit`/`inherit`
1947 # After this step, field type objects do not contain
1948 # `$inherit` or `inherit` properties.
1950 # This is done blindly, in that the process _doesn't_ validate
1951 # field type objects at this point.
1952 self
._expand
_field
_types
(config_node
['metadata'])
1954 # Make sure that the current configuration object is valid
1955 # considering log levels are not expanded yet.
1956 self
._schema
_validator
.validate(config_node
,
1957 '2/config/config-pre-log-level-expansion')
1959 # Expand log levels, that is, replace log level strings with
1960 # their equivalent numeric values.
1961 self
._expand
_log
_levels
(config_node
['metadata'])
1963 # validate the whole, effective configuration object
1964 self
._schema
_validator
.validate(config_node
, '2/config/config')
1966 # dump config if required
1967 if self
._dump
_config
:
1968 print(self
._yaml
_ordered
_dump
(config_node
, indent
=2,
1969 default_flow_style
=False))
1971 # get prefix, options, and metadata pseudo-object
1972 prefix
= self
._get
_prefix
(config_node
)
1973 opts
= self
._get
_options
(config_node
)
1974 pseudo_meta
= self
._create
_metadata
(config_node
)
1976 # create public configuration
1977 self
._config
= config
.Config(pseudo_meta
.to_public(), prefix
, opts
)
# Creates a `config.Config` object from the YAML configuration file
# having the path `path` and returns it.
#
# `include_dirs` is a list of inclusion directory paths;
# `ignore_include_not_found` makes a missing inclusion file non-fatal;
# `dump_config` prints the effective configuration object.
#
# Raises `_ConfigParseError` (with added context) on any parsing error.
def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        # NOTE(review): the `.config` continuation line was missing from
        # the damaged source and is reconstructed — confirm upstream.
        return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
                                 dump_config).config
    except _ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file `{}`'.format(path))