1 # The MIT License (MIT)
3 # Copyright (c) 2015-2016 Philippe Proulx <pproulx@efficios.com>
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 from barectf
import metadata
24 from barectf
import config
39 class _ConfigParseErrorCtx
:
40 def __init__(self
, name
, msg
=None):
53 class ConfigParseError(RuntimeError):
54 def __init__(self
, init_ctx_name
, init_ctx_msg
=None):
56 self
.append_ctx(init_ctx_name
, init_ctx_msg
)
    def append_ctx(self, name, msg=None):
        """Append a new error context with name `name` and optional message `msg`."""
        self._ctx.append(_ConfigParseErrorCtx(name, msg))
66 def _opt_to_public(obj
):
70 return obj
.to_public()
78 if self
._public
is None:
79 self
._public
= self
._to
_public
()
84 raise NotImplementedError
87 class _PropertyMapping(_PseudoObj
):
94 return metadata
.PropertyMapping(self
.object.to_public(), self
.prop
)
97 class _Integer(_PseudoObj
):
101 self
.byte_order
= None
105 self
.encoding
= metadata
.Encoding
.NONE
106 self
.property_mappings
= []
109 def real_align(self
):
110 if self
.align
is None:
111 if self
.size
% 8 == 0:
118 def _to_public(self
):
119 prop_mappings
= [pm
.to_public() for pm
in self
.property_mappings
]
120 return metadata
.Integer(self
.size
, self
.byte_order
, self
.align
,
121 self
.signed
, self
.base
, self
.encoding
,
125 class _FloatingPoint(_PseudoObj
):
129 self
.mant_size
= None
130 self
.byte_order
= None
134 def real_align(self
):
137 def _to_public(self
):
138 return metadata
.FloatingPoint(self
.exp_size
, self
.mant_size
,
139 self
.byte_order
, self
.align
)
142 class _Enum(_PseudoObj
):
145 self
.value_type
= None
146 self
.members
= collections
.OrderedDict()
149 def real_align(self
):
150 return self
.value_type
.real_align
152 def _to_public(self
):
153 return metadata
.Enum(self
.value_type
.to_public(), self
.members
)
156 class _String(_PseudoObj
):
159 self
.encoding
= metadata
.Encoding
.UTF8
162 def real_align(self
):
165 def _to_public(self
):
166 return metadata
.String(self
.encoding
)
169 class _Array(_PseudoObj
):
172 self
.element_type
= None
176 def real_align(self
):
177 return self
.element_type
.real_align
179 def _to_public(self
):
180 return metadata
.Array(self
.element_type
.to_public(), self
.length
)
183 class _Struct(_PseudoObj
):
187 self
.fields
= collections
.OrderedDict()
190 def real_align(self
):
191 align
= self
.min_align
193 for pseudo_field
in self
.fields
.values():
194 if pseudo_field
.real_align
> align
:
195 align
= pseudo_field
.real_align
199 def _to_public(self
):
202 for name
, pseudo_field
in self
.fields
.items():
203 fields
.append((name
, pseudo_field
.to_public()))
205 return metadata
.Struct(self
.min_align
, collections
.OrderedDict(fields
))
208 class _Trace(_PseudoObj
):
211 self
.byte_order
= None
213 self
.packet_header_type
= None
215 def _to_public(self
):
216 return metadata
.Trace(self
.byte_order
, self
.uuid
,
217 _opt_to_public(self
.packet_header_type
))
220 class _Clock(_PseudoObj
):
225 self
.description
= None
227 self
.error_cycles
= 0
228 self
.offset_seconds
= 0
229 self
.offset_cycles
= 0
230 self
.absolute
= False
231 self
.return_ctype
= 'uint32_t'
233 def _to_public(self
):
234 return metadata
.Clock(self
.name
, self
.uuid
, self
.description
, self
.freq
,
235 self
.error_cycles
, self
.offset_seconds
,
236 self
.offset_cycles
, self
.absolute
,
240 class _Event(_PseudoObj
):
245 self
.log_level
= None
246 self
.payload_type
= None
247 self
.context_type
= None
249 def _to_public(self
):
250 return metadata
.Event(self
.id, self
.name
, self
.log_level
,
251 _opt_to_public(self
.payload_type
),
252 _opt_to_public(self
.context_type
))
255 class _Stream(_PseudoObj
):
260 self
.packet_context_type
= None
261 self
.event_header_type
= None
262 self
.event_context_type
= None
263 self
.events
= collections
.OrderedDict()
265 def is_event_empty(self
, event
):
268 if self
.event_header_type
is not None:
269 total_fields
+= len(self
.event_header_type
.fields
)
271 if self
.event_context_type
is not None:
272 total_fields
+= len(self
.event_context_type
.fields
)
274 if event
.context_type
is not None:
275 total_fields
+= len(event
.context_type
.fields
)
277 if event
.payload_type
is not None:
278 total_fields
+= len(event
.payload_type
.fields
)
280 return total_fields
== 0
282 def _to_public(self
):
285 for name
, pseudo_ev
in self
.events
.items():
286 events
.append((name
, pseudo_ev
.to_public()))
288 return metadata
.Stream(self
.id, self
.name
,
289 _opt_to_public(self
.packet_context_type
),
290 _opt_to_public(self
.event_header_type
),
291 _opt_to_public(self
.event_context_type
),
292 collections
.OrderedDict(events
))
295 class _Metadata(_PseudoObj
):
302 self
.default_stream_name
= None
304 def _to_public(self
):
307 for name
, pseudo_clock
in self
.clocks
.items():
308 clocks
.append((name
, pseudo_clock
.to_public()))
312 for name
, pseudo_stream
in self
.streams
.items():
313 streams
.append((name
, pseudo_stream
.to_public()))
315 return metadata
.Metadata(self
.trace
.to_public(), self
.env
,
316 collections
.OrderedDict(clocks
),
317 collections
.OrderedDict(streams
),
318 self
.default_stream_name
)
321 # This JSON schema reference resolver only serves to detect when it
322 # needs to resolve a remote URI.
324 # This must never happen in barectf because all our schemas are local;
325 # it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    """JSON schema reference resolver which detects remote URI resolution.

    All barectf schemas are local, so a remote resolution attempt means a
    programming or schema error.
    """

    def resolve_remote(self, uri):
        # this must never happen: all our schemas are local
        raise RuntimeError('Missing local schema with URI "{}"'.format(uri))
332 # Schema validator which considers all the schemas found in the barectf
333 # package's `schemas` directory.
335 # The only public method is validate() which accepts an instance to
336 # validate as well as a schema short ID.
337 class _SchemaValidator
:
339 subdirs
= ['config', os
.path
.join('2', 'config')]
340 schemas_dir
= pkg_resources
.resource_filename(__name__
, 'schemas')
343 for subdir
in subdirs
:
344 dir = os
.path
.join(schemas_dir
, subdir
)
346 for file_name
in os
.listdir(dir):
347 if not file_name
.endswith('.yaml'):
350 with
open(os
.path
.join(dir, file_name
)) as f
:
351 schema
= yaml
.load(f
, Loader
=yaml
.SafeLoader
)
353 assert '$id' in schema
354 schema_id
= schema
['$id']
355 assert schema_id
not in self
._store
356 self
._store
[schema_id
] = schema
359 def _dict_from_ordered_dict(o_dict
):
362 for k
, v
in o_dict
.items():
365 if type(v
) is collections
.OrderedDict
:
366 new_v
= _SchemaValidator
._dict
_from
_ordered
_dict
(v
)
372 def _validate(self
, instance
, schema_short_id
):
373 # retrieve full schema ID from short ID
374 schema_id
= 'https://barectf.org/schemas/{}.json'.format(schema_short_id
)
375 assert schema_id
in self
._store
377 # retrieve full schema
378 schema
= self
._store
[schema_id
]
380 # Create a reference resolver for this schema using this
381 # validator's schema store.
382 resolver
= _RefResolver(base_uri
=schema_id
, referrer
=schema
,
385 # create a JSON schema validator using this reference resolver
386 validator
= jsonschema
.Draft7Validator(schema
, resolver
=resolver
)
388 # Validate the instance, converting its
389 # `collections.OrderedDict` objects to `dict` objects so as to
390 # make any error message easier to read (because
391 # validator.validate() below uses str() for error messages, and
392 # collections.OrderedDict.__str__() is bulky).
393 validator
.validate(self
._dict
_from
_ordered
_dict
(instance
))
395 # Validates `instance` using the schema having the short ID
398 # A schema short ID is the part between `schemas/` and `.json` in
401 # Raises a `ConfigParseError` object, hiding any `jsonschema`
402 # exception, on validation failure.
403 def validate(self
, instance
, schema_short_id
):
405 self
._validate
(instance
, schema_short_id
)
406 except jsonschema
.ValidationError
as exc
:
407 # convert to barectf `ConfigParseError` exception
408 contexts
= ['Configuration object']
409 contexts
+= ['"{}" property'.format(p
) for p
in exc
.absolute_path
]
412 if len(exc
.context
) > 0:
413 msgs
= '; '.join([e
.message
for e
in exc
.context
])
414 schema_ctx
= ': {}'.format(msgs
)
416 new_exc
= ConfigParseError(contexts
.pop(),
417 '{}{} (from schema "{}")'.format(exc
.message
,
421 for ctx
in reversed(contexts
):
422 new_exc
.append_ctx(ctx
)
427 def _byte_order_str_to_bo(bo_str
):
428 bo_str
= bo_str
.lower()
431 return metadata
.ByteOrder
.LE
433 return metadata
.ByteOrder
.BE
def _encoding_str_to_encoding(encoding_str):
    """Return the metadata.Encoding value for the encoding string
    `encoding_str` (case-insensitive), or None if unknown."""
    known_encodings = {
        'utf-8': metadata.Encoding.UTF8,
        'utf8': metadata.Encoding.UTF8,
        'ascii': metadata.Encoding.ASCII,
        'none': metadata.Encoding.NONE,
    }
    return known_encodings.get(encoding_str.lower())
447 def _validate_identifier(iden
, ctx_obj_name
, prop
):
448 assert type(iden
) is str
467 if iden
in ctf_keywords
:
468 fmt
= 'Invalid {} (not a valid identifier): "{}"'
469 raise ConfigParseError(ctx_obj_name
, fmt
.format(prop
, iden
))
472 def _validate_alignment(align
, ctx_obj_name
):
475 if (align
& (align
- 1)) != 0:
476 raise ConfigParseError(ctx_obj_name
,
477 'Invalid alignment: {}'.format(align
))
480 def _append_error_ctx(exc
, obj_name
, msg
=None):
481 exc
.append_ctx(obj_name
, msg
)
487 # Order of values is important here.
489 class _Entity(enum
.IntEnum
):
490 TRACE_PACKET_HEADER
= 0
491 STREAM_PACKET_CONTEXT
= 1
492 STREAM_EVENT_HEADER
= 2
493 STREAM_EVENT_CONTEXT
= 3
498 # This validator validates the configured metadata for barectf specific
503 # * All header/contexts are at least byte-aligned.
504 # * No nested structures or arrays.
505 class _BarectfMetadataValidator
:
507 self
._type
_to
_validate
_type
_func
= {
508 _Struct
: self
._validate
_struct
_type
,
509 _Array
: self
._validate
_array
_type
,
512 def _validate_struct_type(self
, t
, entity_root
):
514 raise ConfigParseError('Structure type',
515 'Inner structure types are not supported as of this version')
517 for field_name
, field_type
in t
.fields
.items():
518 if entity_root
and self
._cur
_entity
is _Entity
.TRACE_PACKET_HEADER
:
519 if field_name
== 'uuid':
524 self
._validate
_type
(field_type
, False)
525 except ConfigParseError
as exc
:
526 _append_error_ctx(exc
, 'Structure type\'s field "{}"'.format(field_name
))
    def _validate_array_type(self, t, entity_root):
        """Reject the array type `t`: arrays are unsupported by this version."""
        raise ConfigParseError('Array type', 'Not supported as of this version')
531 def _validate_type(self
, t
, entity_root
):
532 func
= self
._type
_to
_validate
_type
_func
.get(type(t
))
537 def _validate_entity(self
, t
):
541 # make sure entity is byte-aligned
543 raise ConfigParseError('Root type',
544 'Alignment must be at least 8')
546 assert type(t
) is _Struct
549 self
._validate
_type
(t
, True)
551 def _validate_entities_and_names(self
, meta
):
552 self
._cur
_entity
= _Entity
.TRACE_PACKET_HEADER
555 self
._validate
_entity
(meta
.trace
.packet_header_type
)
556 except ConfigParseError
as exc
:
557 _append_error_ctx(exc
, 'Trace', 'Invalid packet header type')
559 for stream_name
, stream
in meta
.streams
.items():
560 _validate_identifier(stream_name
, 'Trace', 'stream name')
561 self
._cur
_entity
= _Entity
.STREAM_PACKET_CONTEXT
564 self
._validate
_entity
(stream
.packet_context_type
)
565 except ConfigParseError
as exc
:
566 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
),
567 'Invalid packet context type')
569 self
._cur
_entity
= _Entity
.STREAM_EVENT_HEADER
572 self
._validate
_entity
(stream
.event_header_type
)
573 except ConfigParseError
as exc
:
574 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
),
575 'Invalid event header type')
577 self
._cur
_entity
= _Entity
.STREAM_EVENT_CONTEXT
580 self
._validate
_entity
(stream
.event_context_type
)
581 except ConfigParseError
as exc
:
582 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
),
583 'Invalid event context type'.format(stream_name
))
586 for ev_name
, ev
in stream
.events
.items():
587 _validate_identifier(ev_name
,
588 'Stream "{}"'.format(stream_name
),
591 self
._cur
_entity
= _Entity
.EVENT_CONTEXT
594 self
._validate
_entity
(ev
.context_type
)
595 except ConfigParseError
as exc
:
596 _append_error_ctx(exc
, 'Event "{}"'.format(ev_name
),
597 'Invalid context type')
599 self
._cur
_entity
= _Entity
.EVENT_PAYLOAD
602 self
._validate
_entity
(ev
.payload_type
)
603 except ConfigParseError
as exc
:
604 _append_error_ctx(exc
, 'Event "{}"'.format(ev_name
),
605 'Invalid payload type')
607 if stream
.is_event_empty(ev
):
608 raise ConfigParseError('Event "{}"'.format(ev_name
), 'Empty')
609 except ConfigParseError
as exc
:
610 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
))
612 def _validate_default_stream(self
, meta
):
613 if meta
.default_stream_name
:
614 if meta
.default_stream_name
not in meta
.streams
.keys():
615 fmt
= 'Default stream name ("{}") does not exist'
616 raise ConfigParseError('barectf metadata',
617 fmt
.format(meta
.default_stream_name
))
    def validate(self, meta):
        """Validate the pseudo-metadata object `meta`: entities/names first,
        then the default stream name."""
        self._validate_entities_and_names(meta)
        self._validate_default_stream(meta)
624 # This validator validates special fields of trace, stream, and event
627 # For example, it checks that the "stream_id" field exists in the trace
628 # packet header if there's more than one stream, and much more.
629 class _MetadataSpecialFieldsValidator
:
630 def _validate_trace_packet_header_type(self
, t
):
631 # needs "stream_id" field?
632 if len(self
._meta
.streams
) > 1:
635 raise ConfigParseError('"packet-header-type" property',
636 'Need "stream_id" field (more than one stream), but trace packet header type is missing')
638 if type(t
) is not _Struct
:
639 raise ConfigParseError('"packet-header-type" property',
640 'Need "stream_id" field (more than one stream), but trace packet header type is not a structure type')
642 if 'stream_id' not in t
.fields
:
643 raise ConfigParseError('"packet-header-type" property',
644 'Need "stream_id" field (more than one stream)')
646 # validate "magic" and "stream_id" types
647 if type(t
) is not _Struct
:
650 for i
, (field_name
, field_type
) in enumerate(t
.fields
.items()):
651 if field_name
== 'magic':
652 if type(field_type
) is not _Integer
:
653 raise ConfigParseError('"packet-header-type" property',
654 '"magic" field must be an integer type')
656 if field_type
.signed
or field_type
.size
!= 32:
657 raise ConfigParseError('"packet-header-type" property',
658 '"magic" field must be a 32-bit unsigned integer type')
661 raise ConfigParseError('"packet-header-type" property',
662 '"magic" field must be the first trace packet header type\'s field')
663 elif field_name
== 'stream_id':
664 if type(field_type
) is not _Integer
:
665 raise ConfigParseError('"packet-header-type" property',
666 '"stream_id" field must be an integer type')
668 if field_type
.signed
:
669 raise ConfigParseError('"packet-header-type" property',
670 '"stream_id" field must be an unsigned integer type')
672 # "id" size can fit all event IDs
673 if len(self
._meta
.streams
) > (1 << field_type
.size
):
674 raise ConfigParseError('"packet-header-type" property',
675 '"stream_id" field\' size is too small for the number of trace streams')
676 elif field_name
== 'uuid':
677 if self
._meta
.trace
.uuid
is None:
678 raise ConfigParseError('"packet-header-type" property',
679 '"uuid" field specified, but no trace UUID provided')
681 if type(field_type
) is not _Array
:
682 raise ConfigParseError('"packet-header-type" property',
683 '"uuid" field must be an array')
685 if field_type
.length
!= 16:
686 raise ConfigParseError('"packet-header-type" property',
687 '"uuid" field must be an array of 16 bytes')
689 element_type
= field_type
.element_type
691 if type(element_type
) is not _Integer
:
692 raise ConfigParseError('"packet-header-type" property',
693 '"uuid" field must be an array of 16 unsigned bytes')
695 if element_type
.size
!= 8:
696 raise ConfigParseError('"packet-header-type" property',
697 '"uuid" field must be an array of 16 unsigned bytes')
699 if element_type
.signed
:
700 raise ConfigParseError('"packet-header-type" property',
701 '"uuid" field must be an array of 16 unsigned bytes')
703 if element_type
.real_align
!= 8:
704 raise ConfigParseError('"packet-header-type" property',
705 '"uuid" field must be an array of 16 unsigned, byte-aligned bytes')
    def _validate_trace(self, meta):
        """Validate the special fields of `meta`'s trace packet header type."""
        self._validate_trace_packet_header_type(meta.trace.packet_header_type)
710 def _validate_stream_packet_context(self
, stream
):
711 t
= stream
.packet_context_type
714 raise ConfigParseError('Stream',
715 'Missing "packet-context-type" property')
717 if type(t
) is not _Struct
:
718 raise ConfigParseError('"packet-context-type" property',
719 'Expecting a structure type')
721 # "timestamp_begin", if exists, is an unsigned integer type,
725 if 'timestamp_begin' in t
.fields
:
726 ts_begin
= t
.fields
['timestamp_begin']
728 if type(ts_begin
) is not _Integer
:
729 raise ConfigParseError('"packet-context-type" property',
730 '"timestamp_begin" field must be an integer type')
733 raise ConfigParseError('"packet-context-type" property',
734 '"timestamp_begin" field must be an unsigned integer type')
736 if not ts_begin
.property_mappings
:
737 raise ConfigParseError('"packet-context-type" property',
738 '"timestamp_begin" field must be mapped to a clock')
740 # "timestamp_end", if exists, is an unsigned integer type,
744 if 'timestamp_end' in t
.fields
:
745 ts_end
= t
.fields
['timestamp_end']
747 if type(ts_end
) is not _Integer
:
748 raise ConfigParseError('"packet-context-type" property',
749 '"timestamp_end" field must be an integer type')
752 raise ConfigParseError('"packet-context-type" property',
753 '"timestamp_end" field must be an unsigned integer type')
755 if not ts_end
.property_mappings
:
756 raise ConfigParseError('"packet-context-type" property',
757 '"timestamp_end" field must be mapped to a clock')
759 # "timestamp_begin" and "timestamp_end" exist together
760 if (('timestamp_begin' in t
.fields
) ^
('timestamp_end' in t
.fields
)):
761 raise ConfigParseError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
763 # "timestamp_begin" and "timestamp_end" are mapped to the same clock
764 if ts_begin
is not None and ts_end
is not None:
765 if ts_begin
.property_mappings
[0].object.name
!= ts_end
.property_mappings
[0].object.name
:
766 raise ConfigParseError('"timestamp_begin" and "timestamp_end" fields must be mapped to the same clock object in stream packet context type')
768 # "events_discarded", if exists, is an unsigned integer type
769 if 'events_discarded' in t
.fields
:
770 events_discarded
= t
.fields
['events_discarded']
772 if type(events_discarded
) is not _Integer
:
773 raise ConfigParseError('"packet-context-type" property',
774 '"events_discarded" field must be an integer type')
776 if events_discarded
.signed
:
777 raise ConfigParseError('"packet-context-type" property',
778 '"events_discarded" field must be an unsigned integer type')
780 # "packet_size" and "content_size" must exist
781 if 'packet_size' not in t
.fields
:
782 raise ConfigParseError('"packet-context-type" property',
783 'Missing "packet_size" field in stream packet context type')
785 packet_size
= t
.fields
['packet_size']
787 # "content_size" and "content_size" must exist
788 if 'content_size' not in t
.fields
:
789 raise ConfigParseError('"packet-context-type" property',
790 'Missing "content_size" field in stream packet context type')
792 content_size
= t
.fields
['content_size']
794 # "packet_size" is an unsigned integer type
795 if type(packet_size
) is not _Integer
:
796 raise ConfigParseError('"packet-context-type" property',
797 '"packet_size" field in stream packet context type must be an integer type')
799 if packet_size
.signed
:
800 raise ConfigParseError('"packet-context-type" property',
801 '"packet_size" field in stream packet context type must be an unsigned integer type')
803 # "content_size" is an unsigned integer type
804 if type(content_size
) is not _Integer
:
805 raise ConfigParseError('"packet-context-type" property',
806 '"content_size" field in stream packet context type must be an integer type')
808 if content_size
.signed
:
809 raise ConfigParseError('"packet-context-type" property',
810 '"content_size" field in stream packet context type must be an unsigned integer type')
812 # "packet_size" size should be greater than or equal to "content_size" size
813 if content_size
.size
> packet_size
.size
:
814 raise ConfigParseError('"packet-context-type" property',
815 '"content_size" field size must be lesser than or equal to "packet_size" field size')
817 def _validate_stream_event_header(self
, stream
):
818 t
= stream
.event_header_type
821 if len(stream
.events
) > 1:
824 raise ConfigParseError('"event-header-type" property',
825 'Need "id" field (more than one event), but stream event header type is missing')
827 if type(t
) is not _Struct
:
828 raise ConfigParseError('"event-header-type" property',
829 'Need "id" field (more than one event), but stream event header type is not a structure type')
831 if 'id' not in t
.fields
:
832 raise ConfigParseError('"event-header-type" property',
833 'Need "id" field (more than one event)')
835 # validate "id" and "timestamp" types
836 if type(t
) is not _Struct
:
839 # "timestamp", if exists, is an unsigned integer type,
841 if 'timestamp' in t
.fields
:
842 ts
= t
.fields
['timestamp']
844 if type(ts
) is not _Integer
:
845 raise ConfigParseError('"event-header-type" property',
846 '"timestamp" field must be an integer type')
849 raise ConfigParseError('"event-header-type" property',
850 '"timestamp" field must be an unsigned integer type')
852 if not ts
.property_mappings
:
853 raise ConfigParseError('"event-header-type" property',
854 '"timestamp" field must be mapped to a clock')
859 # "id" is an unsigned integer type
860 if type(eid
) is not _Integer
:
861 raise ConfigParseError('"event-header-type" property',
862 '"id" field must be an integer type')
865 raise ConfigParseError('"event-header-type" property',
866 '"id" field must be an unsigned integer type')
868 # "id" size can fit all event IDs
869 if len(stream
.events
) > (1 << eid
.size
):
870 raise ConfigParseError('"event-header-type" property',
871 '"id" field\' size is too small for the number of stream events')
    def _validate_stream(self, stream):
        """Validate the special fields of the pseudo-stream `stream`
        (packet context, then event header)."""
        self._validate_stream_packet_context(stream)
        self._validate_stream_event_header(stream)
877 def validate(self
, meta
):
879 self
._validate
_trace
(meta
)
881 for stream
in meta
.streams
.values():
883 self
._validate
_stream
(stream
)
884 except ConfigParseError
as exc
:
885 _append_error_ctx(exc
, 'Stream "{}"'.format(stream
.name
), 'Invalid')
888 class _YamlConfigParser
:
889 def __init__(self
, include_dirs
, ignore_include_not_found
, dump_config
):
890 self
._class
_name
_to
_create
_type
_func
= {
891 'int': self
._create
_integer
,
892 'integer': self
._create
_integer
,
893 'flt': self
._create
_float
,
894 'float': self
._create
_float
,
895 'floating-point': self
._create
_float
,
896 'enum': self
._create
_enum
,
897 'enumeration': self
._create
_enum
,
898 'str': self
._create
_string
,
899 'string': self
._create
_string
,
900 'struct': self
._create
_struct
,
901 'structure': self
._create
_struct
,
902 'array': self
._create
_array
,
904 self
._include
_dirs
= include_dirs
905 self
._ignore
_include
_not
_found
= ignore_include_not_found
906 self
._dump
_config
= dump_config
907 self
._schema
_validator
= _SchemaValidator()
    def _set_byte_order(self, metadata_node):
        """Record the trace's native byte order from `metadata_node`.

        NOTE(review): the assert presumes the `byte-order` property was
        already constrained to a known value by schema validation — confirm.
        """
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None
913 def _set_int_clock_prop_mapping(self
, int_obj
, prop_mapping_node
):
914 clock_name
= prop_mapping_node
['name']
915 clock
= self
._clocks
.get(clock_name
)
918 raise ConfigParseError('Integer type\'s clock property mapping',
919 'Invalid clock name "{}"'.format(clock_name
))
921 prop_mapping
= _PropertyMapping()
922 prop_mapping
.object = clock
923 prop_mapping
.prop
= 'value'
924 int_obj
.property_mappings
.append(prop_mapping
)
926 def _create_integer(self
, node
):
930 obj
.size
= node
['size']
933 align_node
= node
.get('align')
935 if align_node
is not None:
936 _validate_alignment(align_node
, 'Integer type')
937 obj
.align
= align_node
940 signed_node
= node
.get('signed')
942 if signed_node
is not None:
943 obj
.signed
= signed_node
946 obj
.byte_order
= self
._bo
947 bo_node
= node
.get('byte-order')
949 if bo_node
is not None:
950 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
953 base_node
= node
.get('base')
955 if base_node
is not None:
956 if base_node
== 'bin':
958 elif base_node
== 'oct':
960 elif base_node
== 'dec':
963 assert base_node
== 'hex'
967 encoding_node
= node
.get('encoding')
969 if encoding_node
is not None:
970 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
973 pm_node
= node
.get('property-mappings')
975 if pm_node
is not None:
976 assert len(pm_node
) == 1
977 self
._set
_int
_clock
_prop
_mapping
(obj
, pm_node
[0])
981 def _create_float(self
, node
):
982 obj
= _FloatingPoint()
985 size_node
= node
['size']
986 obj
.exp_size
= size_node
['exp']
987 obj
.mant_size
= size_node
['mant']
990 align_node
= node
.get('align')
992 if align_node
is not None:
993 _validate_alignment(align_node
, 'Floating point number type')
994 obj
.align
= align_node
997 obj
.byte_order
= self
._bo
998 bo_node
= node
.get('byte-order')
1000 if bo_node
is not None:
1001 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
1005 def _create_enum(self
, node
):
1010 obj
.value_type
= self
._create
_type
(node
['value-type'])
1011 except ConfigParseError
as exc
:
1012 _append_error_ctx(exc
, 'Enumeration type',
1013 'Cannot create integer type')
1016 members_node
= node
.get('members')
1018 if members_node
is not None:
1019 if obj
.value_type
.signed
:
1020 value_min
= -(1 << obj
.value_type
.size
- 1)
1021 value_max
= (1 << (obj
.value_type
.size
- 1)) - 1
1024 value_max
= (1 << obj
.value_type
.size
) - 1
1028 for m_node
in members_node
:
1029 if type(m_node
) is str:
1034 assert type(m_node
) is collections
.OrderedDict
1035 label
= m_node
['label']
1036 value
= m_node
['value']
1038 if type(value
) is int:
1040 value
= (value
, value
)
1042 assert type(value
) is list
1043 assert len(value
) == 2
1048 raise ConfigParseError('Enumeration type',
1049 'Invalid member ("{}"): invalid range ({} > {})'.format(label
, mn
, mx
))
1054 name_fmt
= 'Enumeration type\'s member "{}"'
1055 msg_fmt
= 'Value {} is outside the value type range [{}, {}]'
1057 if value
[0] < value_min
or value
[0] > value_max
:
1058 raise ConfigParseError(name_fmt
.format(label
),
1059 msg_fmt
.format(value
[0],
1063 if value
[1] < value_min
or value
[1] > value_max
:
1064 raise ConfigParseError(name_fmt
.format(label
),
1065 msg_fmt
.format(value
[0],
1069 obj
.members
[label
] = value
1073 def _create_string(self
, node
):
1077 encoding_node
= node
.get('encoding')
1079 if encoding_node
is not None:
1080 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
1084 def _create_struct(self
, node
):
1088 min_align_node
= node
.get('min-align')
1090 if min_align_node
is not None:
1091 _validate_alignment(min_align_node
, 'Structure type')
1092 obj
.min_align
= min_align_node
1095 fields_node
= node
.get('fields')
1097 if fields_node
is not None:
1098 for field_name
, field_node
in fields_node
.items():
1099 _validate_identifier(field_name
, 'Structure type', 'field name')
1102 obj
.fields
[field_name
] = self
._create
_type
(field_node
)
1103 except ConfigParseError
as exc
:
1104 _append_error_ctx(exc
, 'Structure type',
1105 'Cannot create field "{}"'.format(field_name
))
1109 def _create_array(self
, node
):
1113 obj
.length
= node
['length']
1117 obj
.element_type
= self
._create
_type
(node
['element-type'])
1118 except ConfigParseError
as exc
:
1119 _append_error_ctx(exc
, 'Array type', 'Cannot create element type')
1123 def _create_type(self
, type_node
):
1124 return self
._class
_name
_to
_create
_type
_func
[type_node
['class']](type_node
)
1126 def _create_clock(self
, node
):
1127 # create clock object
1131 uuid_node
= node
.get('uuid')
1133 if uuid_node
is not None:
1135 clock
.uuid
= uuid
.UUID(uuid_node
)
1137 raise ConfigParseError('Clock', 'Malformed UUID: "{}"'.format(uuid_node
))
1140 descr_node
= node
.get('description')
1142 if descr_node
is not None:
1143 clock
.description
= descr_node
1146 freq_node
= node
.get('freq')
1148 if freq_node
is not None:
1149 clock
.freq
= freq_node
1152 error_cycles_node
= node
.get('error-cycles')
1154 if error_cycles_node
is not None:
1155 clock
.error_cycles
= error_cycles_node
1158 offset_node
= node
.get('offset')
1160 if offset_node
is not None:
1162 offset_cycles_node
= offset_node
.get('cycles')
1164 if offset_cycles_node
is not None:
1165 clock
.offset_cycles
= offset_cycles_node
1168 offset_seconds_node
= offset_node
.get('seconds')
1170 if offset_seconds_node
is not None:
1171 clock
.offset_seconds
= offset_seconds_node
1174 absolute_node
= node
.get('absolute')
1176 if absolute_node
is not None:
1177 clock
.absolute
= absolute_node
1179 return_ctype_node
= node
.get('$return-ctype')
1181 if return_ctype_node
is None:
1182 return_ctype_node
= node
.get('return-ctype')
1184 if return_ctype_node
is not None:
1185 clock
.return_ctype
= return_ctype_node
1189 def _register_clocks(self
, metadata_node
):
1190 self
._clocks
= collections
.OrderedDict()
1191 clocks_node
= metadata_node
.get('clocks')
1193 if clocks_node
is None:
1196 for clock_name
, clock_node
in clocks_node
.items():
1197 _validate_identifier(clock_name
, 'Metadata', 'clock name')
1198 assert clock_name
not in self
._clocks
1201 clock
= self
._create
_clock
(clock_node
)
1202 except ConfigParseError
as exc
:
1203 _append_error_ctx(exc
, 'Metadata',
1204 'Cannot create clock "{}"'.format(clock_name
))
1206 clock
.name
= clock_name
1207 self
._clocks
[clock_name
] = clock
1209 def _create_env(self
, metadata_node
):
1210 env_node
= metadata_node
.get('env')
1212 if env_node
is None:
1213 return collections
.OrderedDict()
1215 for env_name
, env_value
in env_node
.items():
1216 _validate_identifier(env_name
, 'Metadata',
1217 'environment variable name')
1219 return copy
.deepcopy(env_node
)
def _create_trace(self, metadata_node):
    """Create and return the pseudo trace object described by the
    `trace` property of `metadata_node`.

    Raises ConfigParseError (with added context) on a malformed UUID or
    packet header type.
    """
    # create trace object
    trace = _Trace()
    trace_node = metadata_node['trace']

    # set byte order (already parsed)
    trace.byte_order = self._bo

    # UUID: `auto` generates one, otherwise parse the given string
    uuid_node = trace_node.get('uuid')

    if uuid_node is not None:
        if uuid_node == 'auto':
            trace.uuid = uuid.uuid1()
        else:
            try:
                trace.uuid = uuid.UUID(uuid_node)
            except ValueError:
                # NOTE(review): the original exception clause was lost in
                # transcription; uuid.UUID() raises ValueError on a bad
                # string — confirm against upstream.
                raise ConfigParseError('Trace',
                                       'Malformed UUID: "{}"'.format(uuid_node))

    # packet header type
    pht_node = trace_node.get('packet-header-type')

    if pht_node is not None:
        try:
            trace.packet_header_type = self._create_type(pht_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace',
                              'Cannot create packet header type')

    return trace
def _create_event(self, event_node):
    """Create and return the pseudo event object described by
    `event_node` (log level, context type, payload type).

    Raises ConfigParseError (with added context) on a malformed context
    or payload type.
    """
    # create event object
    event = _Event()

    log_level_node = event_node.get('log-level')

    if log_level_node is not None:
        # log levels are numeric at this point (aliases already expanded)
        assert type(log_level_node) is int
        event.log_level = metadata.LogLevel(None, log_level_node)

    ct_node = event_node.get('context-type')

    if ct_node is not None:
        try:
            event.context_type = self._create_type(ct_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Event',
                              'Cannot create context type object')

    pt_node = event_node.get('payload-type')

    if pt_node is not None:
        try:
            event.payload_type = self._create_type(pt_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Event',
                              'Cannot create payload type object')

    return event
def _create_stream(self, stream_name, stream_node):
    """Create and return the pseudo stream object described by
    `stream_node` (packet context, event header/context, events,
    `$default` flag).

    Raises ConfigParseError (with added context) on malformed types,
    events, or a conflicting default stream.
    """
    # create stream object
    stream = _Stream()

    pct_node = stream_node.get('packet-context-type')

    if pct_node is not None:
        try:
            stream.packet_context_type = self._create_type(pct_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream',
                              'Cannot create packet context type object')

    eht_node = stream_node.get('event-header-type')

    if eht_node is not None:
        try:
            stream.event_header_type = self._create_type(eht_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream',
                              'Cannot create event header type object')

    ect_node = stream_node.get('event-context-type')

    if ect_node is not None:
        try:
            stream.event_context_type = self._create_type(ect_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream',
                              'Cannot create event context type object')

    # event objects (`events` is a required property)
    events_node = stream_node['events']

    for ev_name, ev_node in events_node.items():
        try:
            ev = self._create_event(ev_node)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Stream',
                              'Cannot create event "{}"'.format(ev_name))

        # NOTE(review): one or two lines were lost in transcription here
        # (likely an event ID assignment) — confirm against upstream.
        ev.name = ev_name
        stream.events[ev_name] = ev

    # `$default` property: at most one stream may be the default stream
    default_node = stream_node.get('$default')

    if default_node is not None:
        if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
            fmt = 'Cannot specify more than one default stream (default stream already set to "{}")'
            raise ConfigParseError('Stream',
                                   fmt.format(self._meta.default_stream_name))

        self._meta.default_stream_name = stream_name

    return stream
1343 def _create_streams(self
, metadata_node
):
1344 streams
= collections
.OrderedDict()
1345 streams_node
= metadata_node
['streams']
1348 for stream_name
, stream_node
in streams_node
.items():
1350 stream
= self
._create
_stream
(stream_name
, stream_node
)
1351 except ConfigParseError
as exc
:
1352 _append_error_ctx(exc
, 'Metadata',
1353 'Cannot create stream "{}"'.format(stream_name
))
1356 stream
.name
= stream_name
1357 streams
[stream_name
] = stream
def _create_metadata(self, root):
    """Create and return the complete pseudo metadata object from the
    `metadata` property of the effective configuration node `root`.

    Builds clocks, environment, trace, and streams, then runs the
    special-field and barectf-specific validators on the result.
    """
    self._meta = _Metadata()
    metadata_node = root['metadata']

    # optional explicit default stream name
    if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
        default_stream_node = metadata_node['$default-stream']
        self._meta.default_stream_name = default_stream_node

    # byte order first: type parsing below depends on it
    self._set_byte_order(metadata_node)
    self._register_clocks(metadata_node)
    self._meta.clocks = self._clocks
    self._meta.env = self._create_env(metadata_node)
    self._meta.trace = self._create_trace(metadata_node)
    self._meta.streams = self._create_streams(metadata_node)

    try:
        _MetadataSpecialFieldsValidator().validate(self._meta)
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'Metadata')

    try:
        _BarectfMetadataValidator().validate(self._meta)
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'barectf metadata')

    return self._meta
def _get_prefix(self, config_node):
    """Return the validated identifier prefix from `config_node`
    (default: `barectf_`)."""
    prefix = config_node.get('prefix', 'barectf_')
    _validate_identifier(prefix, '"prefix" property', 'prefix')
    return prefix
def _get_options(self, config_node):
    """Return a config.ConfigOptions built from the optional `options`
    property of `config_node`.

    Both options default to False when absent.
    """
    gen_prefix_def = False
    gen_default_stream_def = False
    options_node = config_node.get('options')

    if options_node is not None:
        gen_prefix_def = options_node.get('gen-prefix-def',
                                          gen_prefix_def)
        gen_default_stream_def = options_node.get('gen-default-stream-def',
                                                  gen_default_stream_def)

    return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)
1408 def _get_last_include_file(self
):
1409 if self
._include
_stack
:
1410 return self
._include
_stack
[-1]
1412 return self
._root
_yaml
_path
1414 def _load_include(self
, yaml_path
):
1415 for inc_dir
in self
._include
_dirs
:
1416 # Current inclusion dir + file name path.
1418 # Note: os.path.join() only takes the last argument if it's
1420 inc_path
= os
.path
.join(inc_dir
, yaml_path
)
1422 # real path (symbolic links resolved)
1423 real_path
= os
.path
.realpath(inc_path
)
1425 # normalized path (weird stuff removed!)
1426 norm_path
= os
.path
.normpath(real_path
)
1428 if not os
.path
.isfile(norm_path
):
1429 # file doesn't exist: skip
1432 if norm_path
in self
._include
_stack
:
1433 base_path
= self
._get
_last
_include
_file
()
1434 raise ConfigParseError('In "{}"',
1435 'Cannot recursively include file "{}"'.format(base_path
,
1438 self
._include
_stack
.append(norm_path
)
1441 return self
._yaml
_ordered
_load
(norm_path
)
1443 if not self
._ignore
_include
_not
_found
:
1444 base_path
= self
._get
_last
_include
_file
()
1445 raise ConfigParseError('In "{}"',
1446 'Cannot include file "{}": file not found in include directories'.format(base_path
,
1449 def _get_include_paths(self
, include_node
):
1450 if include_node
is None:
1454 if type(include_node
) is str:
1456 return [include_node
]
1459 assert type(include_node
) is list
1462 def _update_node(self
, base_node
, overlay_node
):
1463 for olay_key
, olay_value
in overlay_node
.items():
1464 if olay_key
in base_node
:
1465 base_value
= base_node
[olay_key
]
1467 if type(olay_value
) is collections
.OrderedDict
and type(base_value
) is collections
.OrderedDict
:
1468 # merge dictionaries
1469 self
._update
_node
(base_value
, olay_value
)
1470 elif type(olay_value
) is list and type(base_value
) is list:
1471 # append extension array items to base items
1472 base_value
+= olay_value
1474 # fall back to replacing
1475 base_node
[olay_key
] = olay_value
1477 base_node
[olay_key
] = olay_value
1479 def _process_node_include(self
, last_overlay_node
,
1480 process_base_include_cb
,
1481 process_children_include_cb
=None):
1482 # process children inclusions first
1483 if process_children_include_cb
is not None:
1484 process_children_include_cb(last_overlay_node
)
1486 incl_prop_name
= '$include'
1488 if incl_prop_name
in last_overlay_node
:
1489 include_node
= last_overlay_node
[incl_prop_name
]
1492 return last_overlay_node
1494 include_paths
= self
._get
_include
_paths
(include_node
)
1495 cur_base_path
= self
._get
_last
_include
_file
()
1498 # keep the inclusion paths and remove the `$include` property
1499 include_paths
= copy
.deepcopy(include_paths
)
1500 del last_overlay_node
[incl_prop_name
]
1502 for include_path
in include_paths
:
1503 # load raw YAML from included file
1504 overlay_node
= self
._load
_include
(include_path
)
1506 if overlay_node
is None:
1507 # Cannot find inclusion file, but we're ignoring those
1508 # errors, otherwise _load_include() itself raises a
1512 # recursively process inclusions
1514 overlay_node
= process_base_include_cb(overlay_node
)
1515 except ConfigParseError
as exc
:
1516 _append_error_ctx(exc
, 'In "{}"'.format(cur_base_path
))
1518 # pop inclusion stack now that we're done including
1519 del self
._include
_stack
[-1]
1521 # At this point, `base_node` is fully resolved (does not
1522 # contain any `$include` property).
1523 if base_node
is None:
1524 base_node
= overlay_node
1526 self
._update
_node
(base_node
, overlay_node
)
1528 # Finally, update the latest base node with our last overlay
1530 if base_node
is None:
1531 # Nothing was included, which is possible when we're
1532 # ignoring inclusion errors.
1533 return last_overlay_node
1535 self
._update
_node
(base_node
, last_overlay_node
)
1538 def _process_event_include(self
, event_node
):
1539 # Make sure the event object is valid for the inclusion
1541 self
._schema
_validator
.validate(event_node
,
1542 '2/config/event-pre-include')
1544 # process inclusions
1545 return self
._process
_node
_include
(event_node
,
1546 self
._process
_event
_include
)
1548 def _process_stream_include(self
, stream_node
):
1549 def process_children_include(stream_node
):
1550 if 'events' in stream_node
:
1551 events_node
= stream_node
['events']
1553 for key
in list(events_node
):
1554 events_node
[key
] = self
._process
_event
_include
(events_node
[key
])
1556 # Make sure the stream object is valid for the inclusion
1558 self
._schema
_validator
.validate(stream_node
,
1559 '2/config/stream-pre-include')
1561 # process inclusions
1562 return self
._process
_node
_include
(stream_node
,
1563 self
._process
_stream
_include
,
1564 process_children_include
)
1566 def _process_trace_include(self
, trace_node
):
1567 # Make sure the trace object is valid for the inclusion
1569 self
._schema
_validator
.validate(trace_node
,
1570 '2/config/trace-pre-include')
1572 # process inclusions
1573 return self
._process
_node
_include
(trace_node
,
1574 self
._process
_trace
_include
)
1576 def _process_clock_include(self
, clock_node
):
1577 # Make sure the clock object is valid for the inclusion
1579 self
._schema
_validator
.validate(clock_node
,
1580 '2/config/clock-pre-include')
1582 # process inclusions
1583 return self
._process
_node
_include
(clock_node
,
1584 self
._process
_clock
_include
)
1586 def _process_metadata_include(self
, metadata_node
):
1587 def process_children_include(metadata_node
):
1588 if 'trace' in metadata_node
:
1589 metadata_node
['trace'] = self
._process
_trace
_include
(metadata_node
['trace'])
1591 if 'clocks' in metadata_node
:
1592 clocks_node
= metadata_node
['clocks']
1594 for key
in list(clocks_node
):
1595 clocks_node
[key
] = self
._process
_clock
_include
(clocks_node
[key
])
1597 if 'streams' in metadata_node
:
1598 streams_node
= metadata_node
['streams']
1600 for key
in list(streams_node
):
1601 streams_node
[key
] = self
._process
_stream
_include
(streams_node
[key
])
1603 # Make sure the metadata object is valid for the inclusion
1605 self
._schema
_validator
.validate(metadata_node
,
1606 '2/config/metadata-pre-include')
1608 # process inclusions
1609 return self
._process
_node
_include
(metadata_node
,
1610 self
._process
_metadata
_include
,
1611 process_children_include
)
1613 def _process_config_includes(self
, config_node
):
1614 # Process inclusions in this order:
1616 # 1. Clock object, event objects, and trace objects (the order
1617 # between those is not important).
1619 # 2. Stream objects.
1621 # 3. Metadata object.
1625 # * A metadata object can include clock objects, a trace object,
1626 # stream objects, and event objects (indirectly).
1628 # * A stream object can include event objects.
1630 # We keep a stack of absolute paths to included files
1631 # (`self._include_stack`) to detect recursion.
1633 # First, make sure the configuration object itself is valid for
1634 # the inclusion processing stage.
1635 self
._schema
_validator
.validate(config_node
,
1636 '2/config/config-pre-include')
1638 # Process metadata object inclusions.
1640 # self._process_metadata_include() returns a new (or the same)
1641 # metadata object without any `$include` property in it,
1643 config_node
['metadata'] = self
._process
_metadata
_include
(config_node
['metadata'])
1647 def _expand_field_type_aliases(self
, metadata_node
, type_aliases_node
):
1648 def resolve_field_type_aliases(parent_node
, key
, from_descr
,
1650 if key
not in parent_node
:
1653 # This set holds all the aliases we need to expand,
1654 # recursively. This is used to detect cycles.
1655 if alias_set
is None:
1658 node
= parent_node
[key
]
1663 if type(node
) is str:
1666 if alias
not in resolved_aliases
:
1667 # Only check for a field type alias cycle when we
1668 # didn't resolve the alias yet, as a given node can
1669 # refer to the same field type alias more than once.
1670 if alias
in alias_set
:
1671 fmt
= 'Cycle detected during the "{}" type alias resolution'
1672 raise ConfigParseError(from_descr
, fmt
.format(alias
))
1674 # try to load field type alias node named `alias`
1675 if alias
not in type_aliases_node
:
1676 raise ConfigParseError(from_descr
,
1677 'Type alias "{}" does not exist'.format(alias
))
1680 alias_set
.add(alias
)
1681 resolve_field_type_aliases(type_aliases_node
, alias
,
1682 from_descr
, alias_set
)
1683 resolved_aliases
.add(alias
)
1685 parent_node
[key
] = copy
.deepcopy(type_aliases_node
[node
])
1688 # traverse, resolving field type aliases as needed
1689 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1690 resolve_field_type_aliases(node
, pkey
, from_descr
, alias_set
)
1692 # structure field type fields
1696 assert type(node
[pkey
]) is collections
.OrderedDict
1698 for field_name
in node
[pkey
]:
1699 resolve_field_type_aliases(node
[pkey
], field_name
,
1700 from_descr
, alias_set
)
1702 def resolve_field_type_aliases_from(parent_node
, key
, parent_node_type_name
,
1703 parent_node_name
=None):
1704 from_descr
= '"{}" property of {}'.format(key
,
1705 parent_node_type_name
)
1707 if parent_node_name
is not None:
1708 from_descr
+= ' "{}"'.format(parent_node_name
)
1710 resolve_field_type_aliases(parent_node
, key
, from_descr
)
1712 # set of resolved field type aliases
1713 resolved_aliases
= set()
1715 # expand field type aliases within trace, streams, and events now
1716 resolve_field_type_aliases_from(metadata_node
['trace'],
1717 'packet-header-type', 'trace')
1719 for stream_name
, stream
in metadata_node
['streams'].items():
1720 resolve_field_type_aliases_from(stream
, 'packet-context-type',
1721 'stream', stream_name
)
1722 resolve_field_type_aliases_from(stream
, 'event-header-type',
1723 'stream', stream_name
)
1724 resolve_field_type_aliases_from(stream
, 'event-context-type',
1725 'stream', stream_name
)
1728 for event_name
, event
in stream
['events'].items():
1729 resolve_field_type_aliases_from(event
, 'context-type', 'event',
1731 resolve_field_type_aliases_from(event
, 'payload-type', 'event',
1733 except ConfigParseError
as exc
:
1734 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
))
1736 # we don't need the `type-aliases` node anymore
1737 del metadata_node
['type-aliases']
1739 def _expand_field_type_inheritance(self
, metadata_node
):
1740 def apply_inheritance(parent_node
, key
):
1741 if key
not in parent_node
:
1744 node
= parent_node
[key
]
1749 # process children first
1750 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1751 apply_inheritance(node
, pkey
)
1753 # structure field type fields
1757 assert type(node
[pkey
]) is collections
.OrderedDict
1759 for field_name
, field_type
in node
[pkey
].items():
1760 apply_inheritance(node
[pkey
], field_name
)
1762 # apply inheritance of this node
1763 if 'inherit' in node
:
1764 # barectf 2.1: `inherit` property was renamed to `$inherit`
1765 assert '$inherit' not in node
1766 node
['$inherit'] = node
['inherit']
1769 inherit_key
= '$inherit'
1771 if inherit_key
in node
:
1772 assert type(node
[inherit_key
]) is collections
.OrderedDict
1774 # apply inheritance below
1775 apply_inheritance(node
, inherit_key
)
1777 # `node` is an overlay on the `$inherit` node
1778 base_node
= node
[inherit_key
]
1779 del node
[inherit_key
]
1780 self
._update
_node
(base_node
, node
)
1782 # set updated base node as this node
1783 parent_node
[key
] = base_node
1785 apply_inheritance(metadata_node
['trace'], 'packet-header-type')
1787 for stream
in metadata_node
['streams'].values():
1788 apply_inheritance(stream
, 'packet-context-type')
1789 apply_inheritance(stream
, 'event-header-type')
1790 apply_inheritance(stream
, 'event-context-type')
1792 for event
in stream
['events'].values():
1793 apply_inheritance(event
, 'context-type')
1794 apply_inheritance(event
, 'payload-type')
1796 def _expand_field_types(self
, metadata_node
):
1797 type_aliases_node
= metadata_node
.get('type-aliases')
1799 if type_aliases_node
is None:
1800 # If there's no `type-aliases` node, then there's no field
1801 # type aliases and therefore no possible inheritance.
1804 # first, expand field type aliases
1805 self
._expand
_field
_type
_aliases
(metadata_node
, type_aliases_node
)
1807 # next, apply inheritance to create effective field types
1808 self
._expand
_field
_type
_inheritance
(metadata_node
)
1810 def _expand_log_levels(self
, metadata_node
):
1811 if 'log-levels' in metadata_node
:
1812 # barectf 2.1: `log-levels` property was renamed to `$log-levels`
1813 assert '$log-levels' not in node
1814 node
['$log-levels'] = node
['log-levels']
1815 del node
['log-levels']
1817 log_levels_key
= '$log-levels'
1818 log_levels_node
= metadata_node
.get(log_levels_key
)
1820 if log_levels_node
is None:
1821 # no log level aliases
1824 # not needed anymore
1825 del metadata_node
[log_levels_key
]
1827 for stream_name
, stream
in metadata_node
['streams'].items():
1829 for event_name
, event
in stream
['events'].items():
1830 prop_name
= 'log-level'
1831 ll_node
= event
.get(prop_name
)
1836 if type(ll_node
) is str:
1837 if ll_node
not in log_levels_node
:
1838 raise ConfigParseError('Event "{}"'.format(event_name
),
1839 'Log level "{}" does not exist'.format(ll_node
))
1841 event
[prop_name
] = log_levels_node
[ll_node
]
1842 except ConfigParseError
as exc
:
1843 _append_error_ctx(exc
, 'Stream "{}"'.format(stream_name
))
def _yaml_ordered_dump(self, node, **kwds):
    """Serialize `node` to a YAML string, emitting ordered dicts as
    plain YAML mappings (key order preserved).

    Extra keyword arguments are forwarded to yaml.dump().
    """
    class ODumper(yaml.Dumper):
        pass

    def dict_representer(dumper, node):
        # represent an OrderedDict as a regular YAML mapping
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        node.items())

    ODumper.add_representer(collections.OrderedDict, dict_representer)

    # Python -> YAML
    return yaml.dump(node, Dumper=ODumper, **kwds)
def _yaml_ordered_load(self, yaml_path):
    """Load the YAML file at `yaml_path` and return its root node,
    constructing every mapping as a collections.OrderedDict.

    Raises ConfigParseError if the file cannot be opened, cannot be
    parsed, or its root is not an associative array.
    """
    class OLoader(yaml.Loader):
        pass

    def construct_mapping(loader, node):
        loader.flatten_mapping(node)

        return collections.OrderedDict(loader.construct_pairs(node))

    OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                            construct_mapping)

    # YAML -> Python
    try:
        with open(yaml_path, 'r') as f:
            node = yaml.load(f, OLoader)
    except (OSError, IOError) as e:
        # chain the original error for easier debugging
        raise ConfigParseError('Configuration',
                               'Cannot open file "{}"'.format(yaml_path)) from e
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Unknown error while trying to load file "{}"'.format(yaml_path))

    # loaded node must be an associative array
    if type(node) is not collections.OrderedDict:
        raise ConfigParseError('Configuration',
                               'Root of YAML file "{}" must be an associative array'.format(yaml_path))

    return node
1889 self
._version
= None
1890 self
._include
_stack
= []
def parse(self, yaml_path):
    """Parse the barectf YAML configuration file at `yaml_path` and
    return the corresponding config.Config object.

    Pipeline: load YAML -> minimal validation -> inclusion processing
    -> field type expansion -> log level expansion -> full validation
    -> metadata/prefix/options extraction.
    """
    # NOTE(review): a line was lost in transcription here (possibly a
    # call resetting parser state) — confirm against upstream.
    self._root_yaml_path = yaml_path

    # load the configuration object as is from the root YAML file
    try:
        config_node = self._yaml_ordered_load(yaml_path)
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot parse YAML file "{}"'.format(yaml_path))

    # Make sure the configuration object is minimally valid, that
    # is, it contains a valid `version` property.
    #
    # This step does not validate the whole configuration object
    # yet because we don't have an effective configuration object;
    # we still need to:
    #
    # * Process inclusions.
    # * Expand field types (inheritance and aliases).
    self._schema_validator.validate(config_node, 'config/config-min')

    # Process configuration object inclusions.
    #
    # self._process_config_includes() returns a new (or the same)
    # configuration object without any `$include` property in it.
    config_node = self._process_config_includes(config_node)

    # Make sure that the current configuration object is valid
    # considering field types are not expanded yet.
    self._schema_validator.validate(config_node,
                                    '2/config/config-pre-field-type-expansion')

    # Expand field types.
    #
    # 1. Replaces field type aliases with "effective" field
    #    types, recursively.
    #
    #    After this step, the `type-aliases` property of the
    #    `metadata` node is gone.
    #
    # 2. Applies inheritance following the `$inherit`/`inherit`
    #    properties.
    #
    #    After this step, field type objects do not contain
    #    `$inherit` or `inherit` properties.
    #
    # This is done blindly, in that the process _doesn't_ validate
    # field type objects at this point.
    self._expand_field_types(config_node['metadata'])

    # Make sure that the current configuration object is valid
    # considering log levels are not expanded yet.
    self._schema_validator.validate(config_node,
                                    '2/config/config-pre-log-level-expansion')

    # Expand log levels, that is, replace log level strings with
    # their equivalent numeric values.
    self._expand_log_levels(config_node['metadata'])

    # validate the whole, effective configuration object
    self._schema_validator.validate(config_node, '2/config/config')

    # dump config if required
    if self._dump_config:
        print(self._yaml_ordered_dump(config_node, indent=2,
                                      default_flow_style=False))

    # get prefix, options, and metadata pseudo-object
    prefix = self._get_prefix(config_node)
    opts = self._get_options(config_node)
    pseudo_meta = self._create_metadata(config_node)

    # create public configuration
    return config.Config(pseudo_meta.to_public(), prefix, opts)
def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    """Create and return a config.Config from the YAML configuration
    file at `path`.

    `include_dirs` is the list of inclusion search directories;
    `ignore_include_not_found` silences missing-inclusion errors;
    `dump_config` prints the effective configuration while parsing.
    """
    try:
        parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
                                   dump_config)
        return parser.parse(path)
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file "{}"'.format(path))