config: make event's log-level property accept integer
barectf/config.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45 class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
69 env = meta.env
70
71 env['domain'] = 'bare'
72 env['tracer_name'] = 'barectf'
73 version_tuple = barectf.get_version_tuple()
74 env['tracer_major'] = version_tuple[0]
75 env['tracer_minor'] = version_tuple[1]
76 env['tracer_patch'] = version_tuple[2]
77 env['barectf_gen_date'] = str(datetime.datetime.now().isoformat())
78
79 @property
80 def version(self):
81 return self._version
82
83 @version.setter
84 def version(self, value):
85 self._version = value
86
87 @property
88 def metadata(self):
89 return self._metadata
90
91 @metadata.setter
92 def metadata(self, value):
93 self._validate_metadata(value)
94 self._augment_metadata_env(value)
95 self._metadata = value
96
97 @property
98 def prefix(self):
99 return self._prefix
100
101 @prefix.setter
102 def prefix(self, value):
103 if not is_valid_identifier(value):
104 raise ConfigError('prefix must be a valid C identifier')
105
106 self._prefix = value
107
108
109 def _is_assoc_array_prop(node):
110 return isinstance(node, dict)
111
112
113 def _is_array_prop(node):
114 return isinstance(node, list)
115
116
117 def _is_int_prop(node):
118 return type(node) is int
119
120
121 def _is_str_prop(node):
122 return type(node) is str
123
124
125 def _is_bool_prop(node):
126 return type(node) is bool
127
128
129 def _is_valid_alignment(align):
130 return ((align & (align - 1)) == 0) and align > 0
131
132
133 def _byte_order_str_to_bo(bo_str):
134 bo_str = bo_str.lower()
135
136 if bo_str == 'le':
137 return metadata.ByteOrder.LE
138 elif bo_str == 'be':
139 return metadata.ByteOrder.BE
140
141
142 def _encoding_str_to_encoding(encoding_str):
143 encoding_str = encoding_str.lower()
144
145 if encoding_str == 'utf-8' or encoding_str == 'utf8':
146 return metadata.Encoding.UTF8
147 elif encoding_str == 'ascii':
148 return metadata.Encoding.ASCII
149 elif encoding_str == 'none':
150 return metadata.Encoding.NONE
151
152
153 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
154 _ctf_keywords = set([
155 'align',
156 'callsite',
157 'clock',
158 'enum',
159 'env',
160 'event',
161 'floating_point',
162 'integer',
163 'stream',
164 'string',
165 'struct',
166 'trace',
167 'typealias',
168 'typedef',
169 'variant',
170 ])
171
172
173 def is_valid_identifier(iden):
174 if not _re_iden.match(iden):
175 return False
176
177 if iden in _ctf_keywords:
178 return False
179
180 return True
181
182
183 def _get_first_unknown_prop(node, known_props):
184 for prop_name in node:
185 if prop_name in known_props:
186 continue
187
188 return prop_name
189
190
191 # This validator validates the configured metadata for barectf-specific
192 # needs.
193 #
194 # barectf needs:
195 #
196 # * all headers/contexts to be at least byte-aligned
197 # * all integer and floating point number sizes to be <= 64 bits
198 # * no inner structures, arrays, or variants
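#
# Illustration only (hypothetical field names; YAML shape as parsed by
# _YamlConfigParser below): an event payload type that satisfies these
# constraints could look like
#
#     class: struct
#     fields:
#         msg_id:
#             class: int
#             size: 32
#             align: 8
#         value:
#             class: int
#             size: 64
#             align: 8
#
# whereas a payload containing a nested structure, array, or variant field
# is rejected by this validator.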
199 class _BarectfMetadataValidator:
200 def __init__(self):
201 self._type_to_validate_type_func = {
202 metadata.Integer: self._validate_int_type,
203 metadata.FloatingPoint: self._validate_float_type,
204 metadata.Enum: self._validate_enum_type,
205 metadata.String: self._validate_string_type,
206 metadata.Struct: self._validate_struct_type,
207 metadata.Array: self._validate_array_type,
208 metadata.Variant: self._validate_variant_type,
209 }
210
211 def _validate_int_type(self, t, entity_root):
212 if t.size > 64:
213 raise ConfigError('integer type\'s size must be less than or equal to 64 bits')
214
215 def _validate_float_type(self, t, entity_root):
216 if t.size > 64:
217 raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')
218
219 def _validate_enum_type(self, t, entity_root):
220 if t.value_type.size > 64:
221 raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
222
223 def _validate_string_type(self, t, entity_root):
224 pass
225
226 def _validate_struct_type(self, t, entity_root):
227 if not entity_root:
228 raise ConfigError('inner structure types are not supported as of this version')
229
230 for field_name, field_type in t.fields.items():
231 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
232 if field_name == 'uuid':
233 # allow
234 continue
235
236 try:
237 self._validate_type(field_type, False)
238 except Exception as e:
239 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
240
241 def _validate_array_type(self, t, entity_root):
242 raise ConfigError('array types are not supported as of this version')
243
244 def _validate_variant_type(self, t, entity_root):
245 raise ConfigError('variant types are not supported as of this version')
246
247 def _validate_type(self, t, entity_root):
248 self._type_to_validate_type_func[type(t)](t, entity_root)
249
250 def _validate_entity(self, t):
251 if t is None:
252 return
253
254 # make sure entity is byte-aligned
255 if t.align < 8:
256 raise ConfigError('type must be at least byte-aligned')
257
258 # make sure entity is a structure
259 if type(t) is not metadata.Struct:
260 raise ConfigError('expecting a structure type')
261
262 # validate types
263 self._validate_type(t, True)
264
265 def _validate_entities_and_names(self, meta):
266 self._cur_entity = _Entity.TRACE_PACKET_HEADER
267
268 try:
269 self._validate_entity(meta.trace.packet_header_type)
270 except Exception as e:
271 raise ConfigError('invalid trace packet header type', e)
272
273 for stream_name, stream in meta.streams.items():
274 if not is_valid_identifier(stream_name):
275 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
276
277 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
278
279 try:
280 self._validate_entity(stream.packet_context_type)
281 except Exception as e:
282 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
283
284 self._cur_entity = _Entity.STREAM_EVENT_HEADER
285
286 try:
287 self._validate_entity(stream.event_header_type)
288 except Exception as e:
289 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
290
291 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
292
293 try:
294 self._validate_entity(stream.event_context_type)
295 except Exception as e:
296 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
297
298 try:
299 for ev_name, ev in stream.events.items():
300 if not is_valid_identifier(ev_name):
301 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
302
303 self._cur_entity = _Entity.EVENT_CONTEXT
304
305 try:
306 self._validate_entity(ev.context_type)
307 except Exception as e:
308 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
309
310 self._cur_entity = _Entity.EVENT_PAYLOAD
311
312 if ev.payload_type is None:
313 raise ConfigError('missing payload type in event "{}"'.format(ev_name))
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if not ev.payload_type.fields:
321 raise ConfigError('empty payload type in event "{}"'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
329 # This validator validates special fields of trace, stream, and event
330 # types. For example, it checks that the "stream_id" field exists in the
331 # trace packet header type if there's more than one stream, among other checks.
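#
# In short, the checks below enforce:
#
# * trace packet header: optional "magic" (first field, 32-bit unsigned
#   integer), optional "uuid" (array of 16 byte-aligned bytes, requires a
#   trace UUID), and "stream_id" (unsigned integer, mandatory when the
#   trace has more than one stream)
# * stream packet context: mandatory "packet_size" and "content_size"
#   (unsigned integers); optional "timestamp_begin"/"timestamp_end"
#   (unsigned integers mapped to a clock, defined together); optional
#   "events_discarded" (unsigned integer)
# * stream event header: "id" (unsigned integer, mandatory when the stream
#   has more than one event) and optional "timestamp" (unsigned integer
#   mapped to a clock)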
332 class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
337 if t is None:
338 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is missing')
339
340 if type(t) is not metadata.Struct:
341 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is not a structure type')
342
343 if 'stream_id' not in t.fields:
344 raise ConfigError('need "stream_id" field in trace packet header type')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
366 elif field_name == 'uuid':
367 if self._meta.trace.uuid is None:
368 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
369
370 if type(field_type) is not metadata.Array:
371 raise ConfigError('"uuid" field in trace packet header type must be an array')
372
373 if field_type.length != 16:
374 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
375
376 element_type = field_type.element_type
377
378 if type(element_type) is not metadata.Integer:
379 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
380
381 if element_type.size != 8:
382 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
383
384 if element_type.align != 8:
385 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 byte-aligned bytes')
386
387 def _validate_trace(self, meta):
388 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
389
390 def _validate_stream_packet_context(self, stream):
391 t = stream.packet_context_type
392
393 if t is None:
394 return
395
396 if type(t) is not metadata.Struct:
397 return
398
399 # "timestamp_begin", if exists, is an unsigned integer type,
400 # mapped to a clock
401 if 'timestamp_begin' in t.fields:
402 ts_begin = t.fields['timestamp_begin']
403
404 if type(ts_begin) is not metadata.Integer:
405 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
406
407 if ts_begin.signed:
408 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
409
410 if not ts_begin.property_mappings:
411 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
412
413 # "timestamp_end", if exists, is an unsigned integer type,
414 # mapped to a clock
415 if 'timestamp_end' in t.fields:
416 ts_end = t.fields['timestamp_end']
417
418 if type(ts_end) is not metadata.Integer:
419 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
420
421 if ts_end.signed:
422 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
423
424 if not ts_end.property_mappings:
425 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
426
427 # "timestamp_begin" and "timestamp_end" exist together
428 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
429 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
430
431 # "events_discarded", if exists, is an unsigned integer type
432 if 'events_discarded' in t.fields:
433 events_discarded = t.fields['events_discarded']
434
435 if type(events_discarded) is not metadata.Integer:
436 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
437
438 if events_discarded.signed:
439 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
440
441 # "packet_size" and "content_size" must exist
442 if 'packet_size' not in t.fields:
443 raise ConfigError('missing "packet_size" field in stream packet context type')
444
445 packet_size = t.fields['packet_size']
446
447 # "content_size" and "content_size" must exist
448 if 'content_size' not in t.fields:
449 raise ConfigError('missing "content_size" field in stream packet context type')
450
451 content_size = t.fields['content_size']
452
453 # "packet_size" is an unsigned integer type
454 if type(packet_size) is not metadata.Integer:
455 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
456
457 if packet_size.signed:
458 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
459
460 # "content_size" is an unsigned integer type
461 if type(content_size) is not metadata.Integer:
462 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
463
464 if content_size.signed:
465 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
466
467 def _validate_stream_event_header(self, stream):
468 t = stream.event_header_type
469
470 # needs "id" field?
471 if len(stream.events) > 1:
472 # yes
473 if t is None:
474 raise ConfigError('need "id" field in stream event header type, but stream event header type is missing')
475
476 if type(t) is not metadata.Struct:
477 raise ConfigError('need "id" field in stream event header type, but stream event header type is not a structure type')
478
479 if 'id' not in t.fields:
480 raise ConfigError('need "id" field in stream event header type')
481
482 # validate "id" and "timestamp" types
483 if type(t) is not metadata.Struct:
484 return
485
486 # "timestamp", if exists, is an unsigned integer type,
487 # mapped to a clock
488 if 'timestamp' in t.fields:
489 ts = t.fields['timestamp']
490
491 if type(ts) is not metadata.Integer:
492 raise ConfigError('"ts" field in stream event header type must be an integer type')
493
494 if ts.signed:
495 raise ConfigError('"ts" field in stream event header type must be an unsigned integer type')
496
497 if not ts.property_mappings:
498 raise ConfigError('"ts" field in stream event header type must be mapped to a clock')
499
500 # "id" is an unsigned integer type
501 if 'id' in t.fields:
502 eid = t.fields['id']
503
504 if type(eid) is not metadata.Integer:
505 raise ConfigError('"id" field in stream event header type must be an integer type')
506
507 if eid.signed:
508 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
509
510 def _validate_stream(self, stream):
511 self._validate_stream_packet_context(stream)
512 self._validate_stream_event_header(stream)
513
514 def validate(self, meta):
515 self._meta = meta
516 self._validate_trace(meta)
517
518 for stream in meta.streams.values():
519 try:
520 self._validate_stream(stream)
521 except Exception as e:
522 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
523
524
525 class _MetadataDynamicTypesValidatorStackEntry:
526 def __init__(self, base_t):
527 self._base_t = base_t
528 self._index = 0
529
530 @property
531 def index(self):
532 return self._index
533
534 @index.setter
535 def index(self, value):
536 self._index = value
537
538 @property
539 def base_t(self):
540 return self._base_t
541
542 @base_t.setter
543 def base_t(self, value):
544 self._base_t = value
545
546
547 # Entities. Order of values is important here.
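# The dynamic types validator below compares these values with "<": a type
# being visited in a given entity may only refer (through a dynamic array
# length or a variant tag) to entities that come at or before its own
# position in this order.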
548 @enum.unique
549 class _Entity(enum.IntEnum):
550 TRACE_PACKET_HEADER = 0
551 STREAM_PACKET_CONTEXT = 1
552 STREAM_EVENT_HEADER = 2
553 STREAM_EVENT_CONTEXT = 3
554 EVENT_CONTEXT = 4
555 EVENT_PAYLOAD = 5
556
557
558 # This validator validates dynamic metadata types, that is, it ensures
559 # variable-length array lengths and variant tags actually point to
560 # something that exists. It also checks that variable-length array
561 # lengths point to integer types and variant tags to enumeration types.
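#
# Illustration only (hypothetical field names; YAML shape as parsed by
# _YamlConfigParser below): a dynamic array in an event payload may point
# to a prior length field with a relative, single-part path or with an
# absolute path:
#
#     nr_items:
#         class: int
#         size: 32
#     items:
#         class: array
#         length: nr_items                  # or: event.payload.nr_items
#         element-type:
#             class: int
#             size: 8
#
# (Note that the barectf-specific validator above currently rejects array
# types inside entities; this only illustrates the path resolution done
# here.)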
562 class _MetadataDynamicTypesValidator:
563 def __init__(self):
564 self._type_to_visit_type_func = {
565 metadata.Integer: None,
566 metadata.FloatingPoint: None,
567 metadata.Enum: None,
568 metadata.String: None,
569 metadata.Struct: self._visit_struct_type,
570 metadata.Array: self._visit_array_type,
571 metadata.Variant: self._visit_variant_type,
572 }
573
574 self._cur_trace = None
575 self._cur_stream = None
576 self._cur_event = None
577
578 def _lookup_path_from_base(self, path, parts, base, start_index,
579 base_is_current, from_t):
580 index = start_index
581 cur_t = base
582 found_path = []
583
584 while index < len(parts):
585 part = parts[index]
586 next_t = None
587
588 if type(cur_t) is metadata.Struct:
589 enumerated_items = enumerate(cur_t.fields.items())
590
591 # lookup each field
592 for i, (field_name, field_type) in enumerated_items:
593 if field_name == part:
594 next_t = field_type
595 found_path.append((i, field_type))
596
597 if next_t is None:
598 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
599 elif type(cur_t) is metadata.Variant:
600 enumerated_items = enumerate(cur_t.types.items())
601
602 # lookup each type
603 for i, (type_name, type_type) in enumerated_items:
604 if type_name == part:
605 next_t = type_type
606 found_path.append((i, type_type))
607
608 if next_t is None:
609 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
610 else:
611 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
612
613 cur_t = next_t
614 index += 1
615
616 # make sure that the pointed type is not the pointing type
617 if from_t is cur_t:
618 raise ConfigError('invalid path "{}": pointing to self'.format(path))
619
620 # if we're here, we found the type; however, it could be located
621 # _after_ the variant/VLA looking for it, if the pointing
622 # and pointed types are in the same entity, so compare the
623 # current stack entries indexes to our index path in that case
624 if not base_is_current:
625 return cur_t
626
627 for index, entry in enumerate(self._stack):
628 if index == len(found_path):
629 # end of index path; valid so far
630 break
631
632 if found_path[index][0] > entry.index:
633 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
634
635 # also make sure that both pointed and pointing types share
636 # a common structure ancestor
637 for index, entry in enumerate(self._stack):
638 if index == len(found_path):
639 break
640
641 if entry.base_t is not found_path[index][1]:
642 # found common ancestor
643 if type(entry.base_t) is metadata.Variant:
644 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
645
646 return cur_t
647
648 def _lookup_path_from_top(self, path, parts):
649 if len(parts) != 1:
650 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
651
652 find_name = parts[0]
653 index = len(self._stack) - 1
654 got_struct = False
655
656 # check stack entries in reversed order
657 for entry in reversed(self._stack):
658 # structure base type
659 if type(entry.base_t) is metadata.Struct:
660 got_struct = True
661 enumerated_items = enumerate(entry.base_t.fields.items())
662
663 # lookup each field, until the current visiting index is met
664 for i, (field_name, field_type) in enumerated_items:
665 if i == entry.index:
666 break
667
668 if field_name == find_name:
669 return field_type
670
671 # variant base type
672 elif type(entry.base_t) is metadata.Variant:
673 enumerated_items = enumerate(entry.base_t.types.items())
674
675 # lookup each type, until the current visiting index is met
676 for i, (type_name, type_type) in enumerated_items:
677 if i == entry.index:
678 break
679
680 if type_name == find_name:
681 if not got_struct:
682 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
683
684 return type_type
685
686 # nothing returned here: cannot find type
687 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
688
689 def _lookup_path(self, path, from_t):
690 parts = path.lower().split('.')
691 base = None
692 base_is_current = False
693
694 if len(parts) >= 3:
695 if parts[0] == 'trace':
696 if parts[1] == 'packet' and parts[2] == 'header':
697 # make sure packet header exists
698 if self._cur_trace.packet_header_type is None:
699 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
700
701 base = self._cur_trace.packet_header_type
702
703 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
704 base_is_current = True
705 else:
706 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
707 elif parts[0] == 'stream':
708 if parts[1] == 'packet' and parts[2] == 'context':
709 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
710 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
711
712 if self._cur_stream.packet_context_type is None:
713 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
714
715 base = self._cur_stream.packet_context_type
716
717 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
718 base_is_current = True
719 elif parts[1] == 'event':
720 if parts[2] == 'header':
721 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
722 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
723
724 if self._cur_stream.event_header_type is None:
725 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
726
727 base = self._cur_stream.event_header_type
728
729 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
730 base_is_current = True
731 elif parts[2] == 'context':
732 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
733 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
734
735 if self._cur_stream.event_context_type is None:
736 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
737
738 base = self._cur_stream.event_context_type
739
740 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
741 base_is_current = True
742 else:
743 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
744 else:
745 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
746
747 if base is not None:
748 start_index = 3
749
750 if len(parts) >= 2 and base is None:
751 if parts[0] == 'event':
752 if parts[1] == 'context':
753 if self._cur_entity < _Entity.EVENT_CONTEXT:
754 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
755
756 if self._cur_event.context_type is None:
757 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
758
759 base = self._cur_event.context_type
760
761 if self._cur_entity == _Entity.EVENT_CONTEXT:
762 base_is_current = True
763 elif parts[1] == 'payload' or parts[1] == 'fields':
764 if self._cur_entity < _Entity.EVENT_PAYLOAD:
765 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
766
767 if self._cur_event.payload_type is None:
768 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
769
770 base = self._cur_event.payload_type
771
772 if self._cur_entity == _Entity.EVENT_PAYLOAD:
773 base_is_current = True
774 else:
775 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
776
777 if base is not None:
778 start_index = 2
779
780 if base is not None:
781 return self._lookup_path_from_base(path, parts, base, start_index,
782 base_is_current, from_t)
783 else:
784 return self._lookup_path_from_top(path, parts)
785
786 def _stack_reset(self):
787 self._stack = []
788
789 def _stack_push(self, base_t):
790 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
791 self._stack.append(entry)
792
793 def _stack_pop(self):
794 self._stack.pop()
795
796 def _stack_incr_index(self):
797 self._stack[-1].index += 1
798
799 def _visit_struct_type(self, t):
800 self._stack_push(t)
801
802 for field_name, field_type in t.fields.items():
803 try:
804 self._visit_type(field_type)
805 except Exception as e:
806 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
807
808 self._stack_incr_index()
809
810 self._stack_pop()
811
812 def _visit_array_type(self, t):
813 if not t.is_static:
814 # find length type
815 try:
816 length_type = self._lookup_path(t.length, t)
817 except Exception as e:
818 raise ConfigError('invalid array type\'s length', e)
819
820 # make sure the length type is an unsigned integer
821 if type(length_type) is not metadata.Integer:
822 raise ConfigError('array type\'s length does not point to an integer type')
823
824 if length_type.signed:
825 raise ConfigError('array type\'s length does not point to an unsigned integer type')
826
827 self._visit_type(t.element_type)
828
829 def _visit_variant_type(self, t):
830 # find tag type
831 try:
832 tag_type = self._lookup_path(t.tag, t)
833 except Exception as e:
834 raise ConfigError('invalid variant type\'s tag', e)
835
836 # make sure tag type is an enumeration
837 if type(tag_type) is not metadata.Enum:
838 raise ConfigError('variant type\'s tag does not point to an enumeration type')
839
840 # verify that each variant type's type exists as an enumeration member
841 for tag_name in t.types.keys():
842 if tag_name not in tag_type.members:
843 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
844
845 self._stack_push(t)
846
847 for type_name, type_type in t.types.items():
848 try:
849 self._visit_type(type_type)
850 except Exception as e:
851 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
852
853 self._stack_incr_index()
854
855 self._stack_pop()
856
857 def _visit_type(self, t):
858 if t is None:
859 return
860
861 if type(t) in self._type_to_visit_type_func:
862 func = self._type_to_visit_type_func[type(t)]
863
864 if func is not None:
865 func(t)
866
867 def _visit_event(self, ev):
868 ev_name = ev.name
869
870 # set current event
871 self._cur_event = ev
872
873 # visit event context type
874 self._stack_reset()
875 self._cur_entity = _Entity.EVENT_CONTEXT
876
877 try:
878 self._visit_type(ev.context_type)
879 except Exception as e:
880 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
881
882 # visit event payload type
883 self._stack_reset()
884 self._cur_entity = _Entity.EVENT_PAYLOAD
885
886 try:
887 self._visit_type(ev.payload_type)
888 except Exception as e:
889 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
890
891 def _visit_stream(self, stream):
892 stream_name = stream.name
893
894 # set current stream
895 self._cur_stream = stream
896
897 # reset current event
898 self._cur_event = None
899
900 # visit stream packet context type
901 self._stack_reset()
902 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
903
904 try:
905 self._visit_type(stream.packet_context_type)
906 except Exception as e:
907 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
908
909 # visit stream event header type
910 self._stack_reset()
911 self._cur_entity = _Entity.STREAM_EVENT_HEADER
912
913 try:
914 self._visit_type(stream.event_header_type)
915 except Exception as e:
916 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
917
918 # visit stream event context type
919 self._stack_reset()
920 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
921
922 try:
923 self._visit_type(stream.event_context_type)
924 except Exception as e:
925 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
926
927 # visit events
928 for ev in stream.events.values():
929 try:
930 self._visit_event(ev)
931 except Exception as e:
932 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
933
934 def validate(self, meta):
935 # set current trace
936 self._cur_trace = meta.trace
937
938 # visit trace packet header type
939 self._stack_reset()
940 self._cur_entity = _Entity.TRACE_PACKET_HEADER
941
942 try:
943 self._visit_type(meta.trace.packet_header_type)
944 except Exception as e:
945 raise ConfigError('invalid packet header type in trace', e)
946
947 # visit streams
948 for stream in meta.streams.values():
949 self._visit_stream(stream)
950
951
952 # Since type inheritance allows types to be only partially defined at
953 # any place in the configuration, this validator validates that actual
954 # trace, stream, and event types are all complete and valid.
955 class _MetadataTypesHistologyValidator:
956 def __init__(self):
957 self._type_to_validate_type_histology_func = {
958 metadata.Integer: self._validate_integer_histology,
959 metadata.FloatingPoint: self._validate_float_histology,
960 metadata.Enum: self._validate_enum_histology,
961 metadata.String: self._validate_string_histology,
962 metadata.Struct: self._validate_struct_histology,
963 metadata.Array: self._validate_array_histology,
964 metadata.Variant: self._validate_variant_histology,
965 }
966
967 def _validate_integer_histology(self, t):
968 # size is set
969 if t.size is None:
970 raise ConfigError('missing integer type\'s size')
971
972 def _validate_float_histology(self, t):
973 # exponent digits is set
974 if t.exp_size is None:
975 raise ConfigError('missing floating point number type\'s exponent size')
976
977 # mantissa digits is set
978 if t.mant_size is None:
979 raise ConfigError('missing floating point number type\'s mantissa size')
980
981 # exponent and mantissa sum is a multiple of 8
982 if (t.exp_size + t.mant_size) % 8 != 0:
983 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
984
985 def _validate_enum_histology(self, t):
986 # integer type is set
987 if t.value_type is None:
988 raise ConfigError('missing enumeration type\'s integer type')
989
990 # there's at least one member
991 if not t.members:
992 raise ConfigError('enumeration type needs at least one member')
993
994 # no overlapping values
995 ranges = []
996
997 for label, value in t.members.items():
998 for rg in ranges:
999 if value[0] <= rg[1] and rg[0] <= value[1]:
1000 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1001
1002 ranges.append(value)
1003
1004 def _validate_string_histology(self, t):
1005 # always valid
1006 pass
1007
1008 def _validate_struct_histology(self, t):
1009 # all fields are valid
1010 for field_name, field_type in t.fields.items():
1011 try:
1012 self._validate_type_histology(field_type)
1013 except Exception as e:
1014 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1015
1016 def _validate_array_histology(self, t):
1017 # length is set
1018 if t.length is None:
1019 raise ConfigError('missing array type\'s length')
1020
1021 # element type is set
1022 if t.element_type is None:
1023 raise ConfigError('missing array type\'s element type')
1024
1025 # element type is valid
1026 try:
1027 self._validate_type_histology(t.element_type)
1028 except Exception as e:
1029 raise ConfigError('invalid array type\'s element type', e)
1030
1031 def _validate_variant_histology(self, t):
1032 # tag is set
1033 if t.tag is None:
1034 raise ConfigError('missing variant type\'s tag')
1035
1036 # there's at least one type
1037 if not t.types:
1038 raise ConfigError('variant type needs at least one type')
1039
1040 # all types are valid
1041 for type_name, type_t in t.types.items():
1042 try:
1043 self._validate_type_histology(type_t)
1044 except Exception as e:
1045 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1046
1047 def _validate_type_histology(self, t):
1048 if t is None:
1049 return
1050
1051 self._type_to_validate_type_histology_func[type(t)](t)
1052
1053 def _validate_entity_type_histology(self, t):
1054 if t is None:
1055 return
1056
1057 # entity cannot be an array
1058 if type(t) is metadata.Array:
1059 raise ConfigError('cannot use an array here')
1060
1061 self._validate_type_histology(t)
1062
1063 def _validate_event_types_histology(self, ev):
1064 ev_name = ev.name
1065
1066 # validate event context type
1067 try:
1068 self._validate_entity_type_histology(ev.context_type)
1069 except Exception as e:
1070 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1071
1072 # validate event payload type
1073 if ev.payload_type is None:
1074 raise ConfigError('event payload type must exist in event "{}"'.format(ev_name))
1075
1076 # TODO: also check arrays, sequences, and variants
1077 if type(ev.payload_type) is metadata.Struct:
1078 if not ev.payload_type.fields:
1079 raise ConfigError('event payload type must have at least one field for event "{}"'.format(ev_name))
1080
1081 try:
1082 self._validate_entity_type_histology(ev.payload_type)
1083 except Exception as e:
1084 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1085
1086 def _validate_stream_types_histology(self, stream):
1087 stream_name = stream.name
1088
1089 # validate stream packet context type
1090 try:
1091 self._validate_entity_type_histology(stream.packet_context_type)
1092 except Exception as e:
1093 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1094
1095 # validate stream event header type
1096 try:
1097 self._validate_entity_type_histology(stream.event_header_type)
1098 except Exception as e:
1099 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1100
1101 # validate stream event context type
1102 try:
1103 self._validate_entity_type_histology(stream.event_context_type)
1104 except Exception as e:
1105 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1106
1107 # validate events
1108 for ev in stream.events.values():
1109 try:
1110 self._validate_event_types_histology(ev)
1111 except Exception as e:
1112 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1113
1114 def validate(self, meta):
1115 # validate trace packet header type
1116 try:
1117 self._validate_entity_type_histology(meta.trace.packet_header_type)
1118 except Exception as e:
1119 raise ConfigError('invalid trace packet header type', e)
1120
1121 # validate streams
1122 for stream in meta.streams.values():
1123 self._validate_stream_types_histology(stream)
1124
1125
1126 class _YamlConfigParser:
1127 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1128 self._class_name_to_create_type_func = {
1129 'int': self._create_integer,
1130 'integer': self._create_integer,
1131 'flt': self._create_float,
1132 'float': self._create_float,
1133 'floating-point': self._create_float,
1134 'enum': self._create_enum,
1135 'enumeration': self._create_enum,
1136 'str': self._create_string,
1137 'string': self._create_string,
1138 'struct': self._create_struct,
1139 'structure': self._create_struct,
1140 'array': self._create_array,
1141 'var': self._create_variant,
1142 'variant': self._create_variant,
1143 }
1144 self._type_to_create_type_func = {
1145 metadata.Integer: self._create_integer,
1146 metadata.FloatingPoint: self._create_float,
1147 metadata.Enum: self._create_enum,
1148 metadata.String: self._create_string,
1149 metadata.Struct: self._create_struct,
1150 metadata.Array: self._create_array,
1151 metadata.Variant: self._create_variant,
1152 }
1153 self._include_dirs = include_dirs
1154 self._include_dirs.append(os.getcwd())
1155 self._ignore_include_not_found = ignore_include_not_found
1156 self._dump_config = dump_config
1157
1158 def _set_byte_order(self, metadata_node):
1159 if 'trace' not in metadata_node:
1160 raise ConfigError('missing "trace" property (metadata)')
1161
1162 trace_node = metadata_node['trace']
1163
1164 if not _is_assoc_array_prop(trace_node):
1165 raise ConfigError('"trace" property (metadata) must be an associative array')
1166
1167 if 'byte-order' not in trace_node:
1168 raise ConfigError('missing "byte-order" property (trace)')
1169
1170 self._bo = _byte_order_str_to_bo(trace_node['byte-order'])
1171
1172 if self._bo is None:
1173 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1174
1175 def _lookup_type_alias(self, name):
1176 if name in self._tas:
1177 return copy.deepcopy(self._tas[name])
1178
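# A clock property mapping node, as handled below, is expected to have this
# shape (illustration; "my_clock" is a hypothetical clock that must be
# defined elsewhere in the configuration):
#
#     property-mappings:
#         - type: clock
#           name: my_clock
#           property: value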
1179 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1180 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1181
1182 if unk_prop:
1183 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1184
1185 if 'name' not in prop_mapping_node:
1186 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1187
1188 if 'property' not in prop_mapping_node:
1189 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1190
1191 clock_name = prop_mapping_node['name']
1192 prop = prop_mapping_node['property']
1193
1194 if not _is_str_prop(clock_name):
1195 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1196
1197 if not _is_str_prop(prop):
1198 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1199
1200 if clock_name not in self._clocks:
1201 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1202
1203 if prop != 'value':
1204 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1205
1206 mapped_clock = self._clocks[clock_name]
1207 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1208
1209 def _get_first_unknown_type_prop(self, type_node, known_props):
1210 kp = known_props + ['inherit', 'class']
1211
1212 if self._version >= 201:
1213 kp.append('$inherit')
1214
1215 return _get_first_unknown_prop(type_node, kp)
1216
1217 def _create_integer(self, obj, node):
1218 if obj is None:
1219 # create integer object
1220 obj = metadata.Integer()
1221
1222 unk_prop = self._get_first_unknown_type_prop(node, [
1223 'size',
1224 'align',
1225 'signed',
1226 'byte-order',
1227 'base',
1228 'encoding',
1229 'property-mappings',
1230 ])
1231
1232 if unk_prop:
1233 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1234
1235 # size
1236 if 'size' in node:
1237 size = node['size']
1238
1239 if not _is_int_prop(size):
1240 raise ConfigError('"size" property of integer type object must be an integer')
1241
1242 if size < 1:
1243 raise ConfigError('invalid integer size: {}'.format(size))
1244
1245 obj.size = size
1246
1247 # align
1248 if 'align' in node:
1249 align = node['align']
1250
1251 if not _is_int_prop(align):
1252 raise ConfigError('"align" property of integer type object must be an integer')
1253
1254 if not _is_valid_alignment(align):
1255 raise ConfigError('invalid alignment: {}'.format(align))
1256
1257 obj.align = align
1258
1259 # signed
1260 if 'signed' in node:
1261 signed = node['signed']
1262
1263 if not _is_bool_prop(signed):
1264 raise ConfigError('"signed" property of integer type object must be a boolean')
1265
1266 obj.signed = signed
1267
1268 # byte order
1269 if 'byte-order' in node:
1270 byte_order = node['byte-order']
1271
1272 if not _is_str_prop(byte_order):
1273 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1274
1275 byte_order = _byte_order_str_to_bo(byte_order)
1276
1277 if byte_order is None:
1278 raise ConfigError('invalid "byte-order" property in integer type object')
1279 else:
1280 byte_order = self._bo
1281
1282 obj.byte_order = byte_order
1283
1284 # base
1285 if 'base' in node:
1286 base = node['base']
1287
1288 if not _is_str_prop(base):
1289 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1290
1291 if base == 'bin':
1292 base = 2
1293 elif base == 'oct':
1294 base = 8
1295 elif base == 'dec':
1296 base = 10
1297 elif base == 'hex':
1298 base = 16
1299
1300 obj.base = base
1301
1302 # encoding
1303 if 'encoding' in node:
1304 encoding = node['encoding']
1305
1306 if not _is_str_prop(encoding):
1307 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
1308
1309 encoding = _encoding_str_to_encoding(encoding)
1310
1311 if encoding is None:
1312 raise ConfigError('invalid "encoding" property in integer type object')
1313
1314 obj.encoding = encoding
1315
1316 # property mappings
1317 if 'property-mappings' in node:
1318 prop_mappings = node['property-mappings']
1319
1320 if not _is_array_prop(prop_mappings):
1321 raise ConfigError('"property-mappings" property of integer type object must be an array')
1322
1323 if len(prop_mappings) > 1:
1324 raise ConfigError('length of "property-mappings" array in integer type object must be at most 1')
1325
1326 del obj.property_mappings[:]
1327
1328 for index, prop_mapping in enumerate(prop_mappings):
1329 if not _is_assoc_array_prop(prop_mapping):
1330 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
1331
1332 if 'type' not in prop_mapping:
1333 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
1334
1335 prop_type = prop_mapping['type']
1336
1337 if not _is_str_prop(prop_type):
1338 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
1339
1340 if prop_type == 'clock':
1341 self._set_int_clock_prop_mapping(obj, prop_mapping)
1342 else:
1343 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1344
1345 return obj
1346
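# Floating point number type objects use a composite "size" property whose
# "exp" and "mant" sizes must sum to a multiple of 8 (enforced by the types
# histology validator above). Illustration of a typical 32-bit float:
#
#     class: float
#     size:
#         exp: 8
#         mant: 24
#     align: 32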
1347 def _create_float(self, obj, node):
1348 if obj is None:
1349 # create floating point number object
1350 obj = metadata.FloatingPoint()
1351
1352 unk_prop = self._get_first_unknown_type_prop(node, [
1353 'size',
1354 'align',
1355 'byte-order',
1356 ])
1357
1358 if unk_prop:
1359 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1360
1361 # size
1362 if 'size' in node:
1363 size = node['size']
1364
1365 if not _is_assoc_array_prop(size):
1366 raise ConfigError('"size" property of floating point number type object must be an associative array')
1367
1368 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])

if unk_prop:
raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1369
1370 if 'exp' in size:
1371 exp = size['exp']
1372
1373 if not _is_int_prop(exp):
1374 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1375
1376 if exp < 1:
1377 raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1378
1379 obj.exp_size = exp
1380
1381 if 'mant' in size:
1382 mant = size['mant']
1383
1384 if not _is_int_prop(mant):
1385 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1386
1387 if mant < 1:
1388 raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1389
1390 obj.mant_size = mant
1391
1392 # align
1393 if 'align' in node:
1394 align = node['align']
1395
1396 if not _is_int_prop(align):
1397 raise ConfigError('"align" property of floating point number type object must be an integer')
1398
1399 if not _is_valid_alignment(align):
1400 raise ConfigError('invalid alignment: {}'.format(align))
1401
1402 obj.align = align
1403
1404 # byte order
1405 if 'byte-order' in node:
1406 byte_order = node['byte-order']
1407
1408 if not _is_str_prop(byte_order):
1409 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1410
1411 byte_order = _byte_order_str_to_bo(byte_order)
1412
1413 if byte_order is None:
1414 raise ConfigError('invalid "byte-order" property in floating point number type object')
1415 else:
1416 byte_order = self._bo
1417
1418 obj.byte_order = byte_order
1419
1420 return obj
1421
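# Enumeration type objects: "value-type" is the container integer type and
# "members" is an array of labels and/or label/value mappings; a bare label
# takes the next value, and a value may be a single integer or a [min, max]
# range. Illustration (hypothetical labels):
#
#     class: enum
#     value-type:
#         class: int
#         size: 8
#     members:
#         - ON
#         - OFF
#         - label: ERROR
#           value: [10, 19]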
1422 def _create_enum(self, obj, node):
1423 if obj is None:
1424 # create enumeration object
1425 obj = metadata.Enum()
1426
1427 unk_prop = self._get_first_unknown_type_prop(node, [
1428 'value-type',
1429 'members',
1430 ])
1431
1432 if unk_prop:
1433 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1434
1435 # value type
1436 if 'value-type' in node:
1437 try:
1438 obj.value_type = self._create_type(node['value-type'])
1439 except Exception as e:
1440 raise ConfigError('cannot create enumeration type\'s integer type', e)
1441
1442 # members
1443 if 'members' in node:
1444 members_node = node['members']
1445
1446 if not _is_array_prop(members_node):
1447 raise ConfigError('"members" property of enumeration type object must be an array')
1448
1449 cur = 0
1450
1451 for index, m_node in enumerate(members_node):
1452 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1453 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1454
1455 if _is_str_prop(m_node):
1456 label = m_node
1457 value = (cur, cur)
1458 cur += 1
1459 else:
1460 if 'label' not in m_node:
1461 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1462
1463 label = m_node['label']
1464
1465 if not _is_str_prop(label):
1466 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1467
1468 if 'value' not in m_node:
1469 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1470
1471 value = m_node['value']
1472
1473 if not _is_int_prop(value) and not _is_array_prop(value):
1474 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1475
1476 if _is_int_prop(value):
1477 cur = value + 1
1478 value = (value, value)
1479 else:
1480 if len(value) != 2:
1481 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1482
1483 mn = value[0]
1484 mx = value[1]
1485
1486 if mn > mx:
1487 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1488
1489 value = (mn, mx)
1490 cur = mx + 1
1491
1492 obj.members[label] = value
1493
1494 return obj
1495
1496 def _create_string(self, obj, node):
1497 if obj is None:
1498 # create string object
1499 obj = metadata.String()
1500
1501 unk_prop = self._get_first_unknown_type_prop(node, [
1502 'encoding',
1503 ])
1504
1505 if unk_prop:
1506 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1507
1508 # encoding
1509 if 'encoding' in node:
1510 encoding = node['encoding']
1511
1512 if not _is_str_prop(encoding):
1513 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1514
1515 encoding = _encoding_str_to_encoding(encoding)
1516
1517 if encoding is None:
1518 raise ConfigError('invalid "encoding" property in string type object')
1519
1520 obj.encoding = encoding
1521
1522 return obj
1523
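# Structure type objects: an optional "min-align" and a "fields" associative
# array mapping field names (valid C identifiers) to type objects or type
# alias names. Illustration (hypothetical names; "my_string" would be a
# previously defined type alias):
#
#     class: struct
#     min-align: 8
#     fields:
#         msg_id:
#             class: int
#             size: 16
#         msg: my_string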
1524 def _create_struct(self, obj, node):
1525 if obj is None:
1526 # create structure object
1527 obj = metadata.Struct()
1528
1529 unk_prop = self._get_first_unknown_type_prop(node, [
1530 'min-align',
1531 'fields',
1532 ])
1533
1534 if unk_prop:
1535 raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1536
1537 # minimum alignment
1538 if 'min-align' in node:
1539 min_align = node['min-align']
1540
1541 if not _is_int_prop(min_align):
1542 raise ConfigError('"min-align" property of structure type object must be an integer')
1543
1544 if not _is_valid_alignment(min_align):
1545 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1546
1547 obj.min_align = min_align
1548
1549 # fields
1550 if 'fields' in node:
1551 fields = node['fields']
1552
1553 if not _is_assoc_array_prop(fields):
1554 raise ConfigError('"fields" property of structure type object must be an associative array')
1555
1556 for field_name, field_node in fields.items():
1557 if not is_valid_identifier(field_name):
1558 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1559
1560 try:
1561 obj.fields[field_name] = self._create_type(field_node)
1562 except Exception as e:
1563 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1564
1565 return obj
1566
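# Array type objects: "length" is either a non-negative integer (static
# array) or a string path to a prior unsigned integer field (dynamic array,
# resolved later by the dynamic types validator). Illustration:
#
#     class: array
#     length: 16
#     element-type:
#         class: int
#         size: 8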
1567 def _create_array(self, obj, node):
1568 if obj is None:
1569 # create array object
1570 obj = metadata.Array()
1571
1572 unk_prop = self._get_first_unknown_type_prop(node, [
1573 'length',
1574 'element-type',
1575 ])
1576
1577 if unk_prop:
1578 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1579
1580 # length
1581 if 'length' in node:
1582 length = node['length']
1583
1584 if not _is_int_prop(length) and not _is_str_prop(length):
1585 raise ConfigError('"length" property of array type object must be an integer or a string')
1586
1587 if type(length) is int and length < 0:
1588 raise ConfigError('invalid static array length: {}'.format(length))
1589
1590 obj.length = length
1591
1592 # element type
1593 if 'element-type' in node:
1594 try:
1595 obj.element_type = self._create_type(node['element-type'])
1596 except Exception as e:
1597 raise ConfigError('cannot create array type\'s element type', e)
1598
1599 return obj
1600
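# Variant type objects: "tag" is a path to an enumeration type and every key
# of "types" must match a member of that enumeration (verified in a second
# phase by the dynamic types validator). Illustration (hypothetical names;
# assumes an earlier "state" enumeration field with ON/OFF members):
#
#     class: variant
#     tag: state
#     types:
#         ON:
#             class: int
#             size: 32
#         OFF:
#             class: string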
1601 def _create_variant(self, obj, node):
1602 if obj is None:
1603 # create variant object
1604 obj = metadata.Variant()
1605
1606 unk_prop = self._get_first_unknown_type_prop(node, [
1607 'tag',
1608 'types',
1609 ])
1610
1611 if unk_prop:
1612 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1613
1614 # tag
1615 if 'tag' in node:
1616 tag = node['tag']
1617
1618 if not _is_str_prop(tag):
1619 raise ConfigError('"tag" property of variant type object must be a string')
1620
1621 # do not validate variant tag for the moment; will be done in a
1622 # second phase
1623 obj.tag = tag
1624
1625 # types
1626 if 'types' in node:
1627 types = node['types']
1628
1629 if not _is_assoc_array_prop(types):
1630 raise ConfigError('"types" property of variant type object must be an associative array')
1631
1632 # do not validate type names for the moment; will be done in a
1633 # second phase
1634 for type_name, type_node in types.items():
1635 if not is_valid_identifier(type_name):
1636 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1637
1638 try:
1639 obj.types[type_name] = self._create_type(type_node)
1640 except Exception as e:
1641 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1642
1643 return obj
1644
1645 def _create_type(self, type_node):
1646 if type(type_node) is str:
1647 t = self._lookup_type_alias(type_node)
1648
1649 if t is None:
1650 raise ConfigError('unknown type alias "{}"'.format(type_node))
1651
1652 return t
1653
1654 if not _is_assoc_array_prop(type_node):
1655 raise ConfigError('type objects must be associative arrays')
1656
1657 # inherit:
1658 # v2.0: "inherit"
1659 # v2.1+: "$inherit"
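# A type object either inherits from a previously defined type alias or
# specifies a "class" property, never both; when it inherits, the remaining
# properties of the node refine a deep copy of the alias. Illustration
# (hypothetical alias name):
#
#     $inherit: my-base-int     # "inherit" in v2.0 configurations
#     signed: true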
1660 inherit_node = None
1661
1662 if self._version >= 200:
1663 if 'inherit' in type_node:
1664 inherit_prop = 'inherit'
1665 inherit_node = type_node[inherit_prop]
1666
1667 if self._version >= 201:
1668 if '$inherit' in type_node:
1669 if inherit_node is not None:
1670 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1671
1672 inherit_prop = '$inherit'
1673 inherit_node = type_node[inherit_prop]
1674
1675 if inherit_node is not None and 'class' in type_node:
1676 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1677
1678 if inherit_node is not None:
1679 if not _is_str_prop(inherit_node):
1680 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1681
1682 base = self._lookup_type_alias(inherit_node)
1683
1684 if base is None:
1685 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1686
1687 func = self._type_to_create_type_func[type(base)]
1688 else:
1689 if 'class' not in type_node:
1690 raise ConfigError('type objects which do not inherit must have a "class" property')
1691
1692 class_name = type_node['class']
1693
1694 if type(class_name) is not str:
1695 raise ConfigError('type objects\' "class" property must be a string')
1696
1697 if class_name not in self._class_name_to_create_type_func:
1698 raise ConfigError('unknown type class "{}"'.format(class_name))
1699
1700 base = None
1701 func = self._class_name_to_create_type_func[class_name]
1702
1703 return func(base, type_node)
1704
1705 def _register_type_aliases(self, metadata_node):
1706 self._tas = dict()
1707
1708 if 'type-aliases' not in metadata_node:
1709 return
1710
1711 ta_node = metadata_node['type-aliases']
1712
1713 if not _is_assoc_array_prop(ta_node):
1714 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1715
1716 for ta_name, ta_type in ta_node.items():
1717 if ta_name in self._tas:
1718 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1719
1720 try:
1721 t = self._create_type(ta_type)
1722 except Exception as e:
1723 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1724
1725 self._tas[ta_name] = t
1726
1727 def _create_clock(self, node):
1728 # create clock object
1729 clock = metadata.Clock()
1730
1731 if not _is_assoc_array_prop(node):
1732 raise ConfigError('clock objects must be associative arrays')
1733
1734 known_props = [
1735 'uuid',
1736 'description',
1737 'freq',
1738 'error-cycles',
1739 'offset',
1740 'absolute',
1741 'return-ctype',
1742 ]
1743
1744 if self._version >= 201:
1745 known_props.append('$return-ctype')
1746
1747 unk_prop = _get_first_unknown_prop(node, known_props)
1748
1749 if unk_prop:
1750 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1751
1752 # UUID
1753 if 'uuid' in node:
1754 uuidp = node['uuid']
1755
1756 if not _is_str_prop(uuidp):
1757 raise ConfigError('"uuid" property of clock object must be a string')
1758
1759 try:
1760 uuidp = uuid.UUID(uuidp)
1761 except:
1762 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1763
1764 clock.uuid = uuidp
1765
1766 # description
1767 if 'description' in node:
1768 desc = node['description']
1769
1770 if not _is_str_prop(desc):
1771 raise ConfigError('"description" property of clock object must be a string')
1772
1773 clock.description = desc
1774
1775 # frequency
1776 if 'freq' in node:
1777 freq = node['freq']
1778
1779 if not _is_int_prop(freq):
1780 raise ConfigError('"freq" property of clock object must be an integer')
1781
1782 if freq < 1:
1783 raise ConfigError('invalid clock frequency: {}'.format(freq))
1784
1785 clock.freq = freq
1786
1787 # error cycles
1788 if 'error-cycles' in node:
1789 error_cycles = node['error-cycles']
1790
1791 if not _is_int_prop(error_cycles):
1792 raise ConfigError('"error-cycles" property of clock object must be an integer')
1793
1794 if error_cycles < 0:
1795 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1796
1797 clock.error_cycles = error_cycles
1798
1799 # offset
1800 if 'offset' in node:
1801 offset = node['offset']
1802
1803 if not _is_assoc_array_prop(offset):
1804 raise ConfigError('"offset" property of clock object must be an associative array')
1805
1806 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1807
1808 if unk_prop:
1809 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1810
1811 # cycles
1812 if 'cycles' in offset:
1813 offset_cycles = offset['cycles']
1814
1815 if not _is_int_prop(offset_cycles):
1816 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1817
1818 if offset_cycles < 0:
1819 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1820
1821 clock.offset_cycles = offset_cycles
1822
1823 # seconds
1824 if 'seconds' in offset:
1825 offset_seconds = offset['seconds']
1826
1827 if not _is_int_prop(offset_seconds):
1828 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1829
1830 if offset_seconds < 0:
1831 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1832
1833 clock.offset_seconds = offset_seconds
1834
1835 # absolute
1836 if 'absolute' in node:
1837 absolute = node['absolute']
1838
1839 if not _is_bool_prop(absolute):
1840 raise ConfigError('"absolute" property of clock object must be a boolean')
1841
1842 clock.absolute = absolute
1843
1844 # return C type:
1845 # v2.0: "return-ctype"
1846 # v2.1+: "$return-ctype"
1847 return_ctype_node = None
1848
1849 if self._version >= 200:
1850 if 'return-ctype' in node:
1851 return_ctype_prop = 'return-ctype'
1852 return_ctype_node = node[return_ctype_prop]
1853
1854 if self._version >= 201:
1855 if '$return-ctype' in node:
1856 if return_ctype_node is not None:
1857 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1858
1859 return_ctype_prop = '$return-ctype'
1860 return_ctype_node = node[return_ctype_prop]
1861
1862 if return_ctype_node is not None:
1863 if not _is_str_prop(return_ctype_node):
1864 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1865
1866 clock.return_ctype = return_ctype_node
1867
1868 return clock
1869
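# Illustrative clock object accepted by _create_clock() above; the clock
# name "default" and all the values are assumptions:
#
#     clocks:
#       default:
#         freq: 1000000000
#         description: monotonic system clock
#         error-cycles: 16
#         offset:
#           seconds: 1434072888
#           cycles: 0
#         absolute: false
#         $return-ctype: uint64_t     # "return-ctype" in v2.0
#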
1870 def _register_clocks(self, metadata_node):
1871 self._clocks = collections.OrderedDict()
1872
1873 if 'clocks' not in metadata_node:
1874 return
1875
1876 clocks_node = metadata_node['clocks']
1877
1878 if not _is_assoc_array_prop(clocks_node):
1879 raise ConfigError('"clocks" property (metadata) must be an associative array')
1880
1881 for clock_name, clock_node in clocks_node.items():
1882 if not is_valid_identifier(clock_name):
1883 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
1884
1885 if clock_name in self._clocks:
1886 raise ConfigError('duplicate clock "{}"'.format(clock_name))
1887
1888 try:
1889 clock = self._create_clock(clock_node)
1890 except Exception as e:
1891 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
1892
1893 clock.name = clock_name
1894 self._clocks[clock_name] = clock
1895
1896 def _create_env(self, metadata_node):
1897 env = collections.OrderedDict()
1898
1899 if 'env' not in metadata_node:
1900 return env
1901
1902 env_node = metadata_node['env']
1903
1904 if not _is_assoc_array_prop(env_node):
1905 raise ConfigError('"env" property (metadata) must be an associative array')
1906
1907 for env_name, env_value in env_node.items():
1908 if env_name in env:
1909 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
1910
1911 if not is_valid_identifier(env_name):
1912 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
1913
1914 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
1915 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
1916
1917 env[env_name] = env_value
1918
1919 return env
1920
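# Illustrative "env" section: keys must be valid C identifiers and values
# must be integers or strings (the names and values are assumptions):
#
#     env:
#       my_program_version_major: 1
#       my_system_name: tracing-node
#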
1921 def _register_log_levels(self, metadata_node):
1922 self._log_levels = dict()
1923
1924 # log levels:
1925 # v2.0: "log-levels"
1926 # v2.1+: "$log-levels"
1927 log_levels_node = None
1928
1929 if self._version >= 200:
1930 if 'log-levels' in metadata_node:
1931 log_levels_prop = 'log-levels'
1932 log_levels_node = metadata_node[log_levels_prop]
1933
1934 if self._version >= 201:
1935 if '$log-levels' in metadata_node:
1936 if log_levels_node is not None:
1937 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
1938
1939 log_levels_prop = '$log-levels'
1940 log_levels_node = metadata_node[log_levels_prop]
1941
1942 if log_levels_node is None:
1943 return
1944
1945 if not _is_assoc_array_prop(log_levels_node):
1946 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
1947
1948 for ll_name, ll_value in log_levels_node.items():
1949 if ll_name in self._log_levels:
1950 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
1951
1952 if not _is_int_prop(ll_value):
1953 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
1954
1955 self._log_levels[ll_name] = ll_value
1956
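# Illustrative "$log-levels" section ("log-levels" in v2.0): a mapping of
# log level names to integers (the names and values are assumptions):
#
#     $log-levels:
#       critical: 1
#       error: 2
#       warning: 4
#       info: 6
#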
1957 def _create_trace(self, metadata_node):
1958 # create trace object
1959 trace = metadata.Trace()
1960
1961 if 'trace' not in metadata_node:
1962 raise ConfigError('missing "trace" property (metadata)')
1963
1964 trace_node = metadata_node['trace']
1965
1966 if not _is_assoc_array_prop(trace_node):
1967 raise ConfigError('"trace" property (metadata) must be an associative array')
1968
1969 unk_prop = _get_first_unknown_prop(trace_node, [
1970 'byte-order',
1971 'uuid',
1972 'packet-header-type',
1973 ])
1974
1975 if unk_prop:
1976 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
1977
1978 # set byte order (already parsed)
1979 trace.byte_order = self._bo
1980
1981 # UUID
1982 if 'uuid' in trace_node:
1983 uuidp = trace_node['uuid']
1984
1985 if not _is_str_prop(uuidp):
1986 raise ConfigError('"uuid" property of trace object must be a string')
1987
1988 if uuidp == 'auto':
1989 uuidp = uuid.uuid1()
1990 else:
1991 try:
1992 uuidp = uuid.UUID(uuidp)
1993 except:
1994 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
1995
1996 trace.uuid = uuidp
1997
1998 # packet header type
1999 if 'packet-header-type' in trace_node:
2000 try:
2001 ph_type = self._create_type(trace_node['packet-header-type'])
2002 except Exception as e:
2003 raise ConfigError('cannot create packet header type (trace)', e)
2004
2005 trace.packet_header_type = ph_type
2006
2007 return trace
2008
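# Illustrative trace object; the byte order value and the packet header
# type alias name are assumptions ("byte-order" itself is parsed earlier
# by _set_byte_order()):
#
#     trace:
#       byte-order: le
#       uuid: auto                  # or an explicit UUID string
#       packet-header-type: my-packet-header-alias
#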
2009 def _lookup_log_level(self, ll):
2010 if _is_int_prop(ll):
2011 return ll
2012 elif _is_str_prop(ll) and ll in self._log_levels:
2013 return self._log_levels[ll]
2014
2015 def _create_event(self, event_node):
2016 event = metadata.Event()
2017
2018 if not _is_assoc_array_prop(event_node):
2019 raise ConfigError('event objects must be associative arrays')
2020
2021 unk_prop = _get_first_unknown_prop(event_node, [
2022 'log-level',
2023 'context-type',
2024 'payload-type',
2025 ])
2026
2027 if unk_prop:
2028 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2029
2030 if 'log-level' in event_node:
2031 ll_node = event_node['log-level']
2032
2033 if _is_str_prop(ll_node):
2034 ll = self._lookup_log_level(ll_node)
2035
2036 if ll is None:
2037 raise ConfigError('cannot find log level "{}"'.format(ll_node))
2038 elif _is_int_prop(ll_node):
2039 ll = ll_node
2040 else:
2041 raise ConfigError('"log-level" property must be either a string or an integer')
2042
2043 event.log_level = ll
2044
2045 if 'context-type' in event_node:
2046 try:
2047 t = self._create_type(event_node['context-type'])
2048 except Exception as e:
2049 raise ConfigError('cannot create event\'s context type object', e)
2050
2051 event.context_type = t
2052
2053 if 'payload-type' not in event_node:
2054 raise ConfigError('missing "payload-type" property in event object')
2055
2056 try:
2057 t = self._create_type(event_node['payload-type'])
2058 except Exception as e:
2059 raise ConfigError('cannot create event\'s payload type object', e)
2060
2061 event.payload_type = t
2062
2063 return event
2064
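# Illustrative event object: "log-level" may be a registered log level
# name or a direct integer (see _lookup_log_level() above), and
# "payload-type" is mandatory. The type alias names are assumptions:
#
#     my-event:
#       log-level: warning          # or, e.g., log-level: 4
#       context-type: my-context-alias
#       payload-type: my-payload-alias
#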
2065 def _create_stream(self, stream_node):
2066 stream = metadata.Stream()
2067
2068 if not _is_assoc_array_prop(stream_node):
2069 raise ConfigError('stream objects must be associative arrays')
2070
2071 unk_prop = _get_first_unknown_prop(stream_node, [
2072 'packet-context-type',
2073 'event-header-type',
2074 'event-context-type',
2075 'events',
2076 ])
2077
2078 if unk_prop:
2079 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2080
2081 if 'packet-context-type' in stream_node:
2082 try:
2083 t = self._create_type(stream_node['packet-context-type'])
2084 except Exception as e:
2085 raise ConfigError('cannot create stream\'s packet context type object', e)
2086
2087 stream.packet_context_type = t
2088
2089 if 'event-header-type' in stream_node:
2090 try:
2091 t = self._create_type(stream_node['event-header-type'])
2092 except Exception as e:
2093 raise ConfigError('cannot create stream\'s event header type object', e)
2094
2095 stream.event_header_type = t
2096
2097 if 'event-context-type' in stream_node:
2098 try:
2099 t = self._create_type(stream_node['event-context-type'])
2100 except Exception as e:
2101 raise ConfigError('cannot create stream\'s event context type object', e)
2102
2103 stream.event_context_type = t
2104
2105 if 'events' not in stream_node:
2106 raise ConfigError('missing "events" property in stream object')
2107
2108 events = stream_node['events']
2109
2110 if not _is_assoc_array_prop(events):
2111 raise ConfigError('"events" property of stream object must be an associative array')
2112
2113 if not events:
2114 raise ConfigError('at least one event is needed within a stream object')
2115
2116 cur_id = 0
2117
2118 for ev_name, ev_node in events.items():
2119 try:
2120 ev = self._create_event(ev_node)
2121 except Exception as e:
2122 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
2123
2124 ev.id = cur_id
2125 ev.name = ev_name
2126 stream.events[ev_name] = ev
2127 cur_id += 1
2128
2129 return stream
2130
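# Illustrative stream object: "events" is mandatory and must contain at
# least one event; event IDs are assigned in declaration order starting
# at 0. The type alias names are assumptions:
#
#     my-stream:
#       packet-context-type: my-packet-context-alias
#       event-header-type: my-event-header-alias
#       events:
#         my-event:
#           payload-type: my-payload-alias
#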
2131 def _create_streams(self, metadata_node):
2132 streams = collections.OrderedDict()
2133
2134 if 'streams' not in metadata_node:
2135 raise ConfigError('missing "streams" property (metadata)')
2136
2137 streams_node = metadata_node['streams']
2138
2139 if not _is_assoc_array_prop(streams_node):
2140 raise ConfigError('"streams" property (metadata) must be an associative array')
2141
2142 if not streams_node:
2143 raise ConfigError('at least one stream is needed (metadata)')
2144
2145 cur_id = 0
2146
2147 for stream_name, stream_node in streams_node.items():
2148 try:
2149 stream = self._create_stream(stream_node)
2150 except Exception as e:
2151 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2152
2153 stream.id = cur_id
2154 stream.name = str(stream_name)
2155 streams[stream_name] = stream
2156 cur_id += 1
2157
2158 return streams
2159
2160 def _create_metadata(self, root):
2161 meta = metadata.Metadata()
2162
2163 if 'metadata' not in root:
2164 raise ConfigError('missing "metadata" property (configuration)')
2165
2166 metadata_node = root['metadata']
2167
2168 if not _is_assoc_array_prop(metadata_node):
2169 raise ConfigError('"metadata" property (configuration) must be an associative array')
2170
2171 known_props = [
2172 'type-aliases',
2173 'log-levels',
2174 'trace',
2175 'env',
2176 'clocks',
2177 'streams',
2178 ]
2179
2180 if self._version >= 201:
2181 known_props.append('$log-levels')
2182
2183 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
2184
2185 if unk_prop:
2186 add = ''
2187
2188 if unk_prop == '$include':
2189 add = ' (use version 2.1 or greater)'
2190
2191 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
2192
2193 self._set_byte_order(metadata_node)
2194 self._register_clocks(metadata_node)
2195 meta.clocks = self._clocks
2196 self._register_type_aliases(metadata_node)
2197 meta.env = self._create_env(metadata_node)
2198 meta.trace = self._create_trace(metadata_node)
2199 self._register_log_levels(metadata_node)
2200 meta.streams = self._create_streams(metadata_node)
2201
2202 return meta
2203
2204 def _get_version(self, root):
2205 if 'version' not in root:
2206 raise ConfigError('missing "version" property (configuration)')
2207
2208 version_node = root['version']
2209
2210 if not _is_str_prop(version_node):
2211 raise ConfigError('"version" property (configuration) must be a string')
2212
2213 version_node = version_node.strip()
2214
2215 if version_node not in ['2.0', '2.1']:
2216 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2217
2218 # convert version string to comparable version integer
2219 parts = version_node.split('.')
2220 version = int(parts[0]) * 100 + int(parts[1])
2221
2222 return version
2223
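# Example of the conversion above: '2.0' -> 200 and '2.1' -> 201, so
# checks such as "self._version >= 201" select v2.1+ behaviour.
#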
2224 def _get_prefix(self, root):
2225 if 'prefix' not in root:
2226 return 'barectf_'
2227
2228 prefix_node = root['prefix']
2229
2230 if not _is_str_prop(prefix_node):
2231 raise ConfigError('"prefix" property (configuration) must be a string')
2232
2233 if not is_valid_identifier(prefix_node):
2234 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
2235
2236 return prefix_node
2237
2238 def _get_last_include_file(self):
2239 if self._include_stack:
2240 return self._include_stack[-1]
2241
2242 return self._root_yaml_path
2243
2244 def _load_include(self, yaml_path):
2245 for inc_dir in self._include_dirs:
2246 # current include dir + file name path
2247 # note: os.path.join() returns yaml_path as is if it's an absolute path
2248 inc_path = os.path.join(inc_dir, yaml_path)
2249
2250 # real path (symbolic links resolved)
2251 real_path = os.path.realpath(inc_path)
2252
2253 # normalized path (redundant separators and up-level references removed)
2254 norm_path = os.path.normpath(real_path)
2255
2256 if not os.path.isfile(norm_path):
2257 # file does not exist: skip
2258 continue
2259
2260 if norm_path in self._include_stack:
2261 base_path = self._get_last_include_file()
2262 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2263
2264 self._include_stack.append(norm_path)
2265
2266 # load raw content
2267 return self._yaml_ordered_load(norm_path)
2268
2269 if not self._ignore_include_not_found:
2270 base_path = self._get_last_include_file()
2271 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2272
2273 return None
2274
2275 def _get_include_paths(self, include_node):
2276 if _is_str_prop(include_node):
2277 return [include_node]
2278 elif _is_array_prop(include_node):
2279 for include_path in include_node:
2280 if not _is_str_prop(include_path):
2281 raise ConfigError('invalid include property: expecting array of strings')
2282
2283 return include_node
2284
2285 raise ConfigError('invalid include property: expecting string or array of strings')
2286
2287 def _update_node(self, base_node, overlay_node):
2288 for olay_key, olay_value in overlay_node.items():
2289 if olay_key in base_node:
2290 base_value = base_node[olay_key]
2291
2292 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2293 # merge dictionaries
2294 self._update_node(base_value, olay_value)
2295 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2296 # append extension array items to base items
2297 base_value += olay_value
2298 else:
2299 # fall back to replacing
2300 base_node[olay_key] = olay_value
2301 else:
2302 base_node[olay_key] = olay_value
2303
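# Illustrative merge performed by _update_node() (the property values are
# assumptions): merging the overlay {freq: 2000, offset: {seconds: 12}}
# into the base {freq: 1000, offset: {cycles: 0}} yields
# {freq: 2000, offset: {cycles: 0, seconds: 12}}: associative arrays are
# merged recursively, arrays are concatenated, and any other value is
# replaced by the overlay's value.
#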
2304 def _process_node_include(self, last_overlay_node, name,
2305 process_base_include_cb,
2306 process_children_include_cb=None):
2307 if not _is_assoc_array_prop(last_overlay_node):
2308 raise ConfigError('{} objects must be associative arrays'.format(name))
2309
2310 # process children inclusions first
2311 if process_children_include_cb:
2312 process_children_include_cb(last_overlay_node)
2313
2314 if '$include' in last_overlay_node:
2315 include_node = last_overlay_node['$include']
2316 else:
2317 # no includes!
2318 return last_overlay_node
2319
2320 include_paths = self._get_include_paths(include_node)
2321 cur_base_path = self._get_last_include_file()
2322 base_node = None
2323
2324 # keep the include paths and remove the include property
2325 include_paths = copy.deepcopy(include_paths)
2326 del last_overlay_node['$include']
2327
2328 for include_path in include_paths:
2329 # load raw YAML from included file
2330 overlay_node = self._load_include(include_path)
2331
2332 if overlay_node is None:
2333 # cannot find include file, but we're ignoring those
2334 # errors, otherwise _load_include() itself raises
2335 # a config error
2336 continue
2337
2338 # recursively process includes
2339 try:
2340 overlay_node = process_base_include_cb(overlay_node)
2341 except Exception as e:
2342 raise ConfigError('in "{}"'.format(cur_base_path), e)
2343
2344 # pop include stack now that we're done including
2345 del self._include_stack[-1]
2346
2347 # at this point, base_node is fully resolved (does not
2348 # contain any include property)
2349 if base_node is None:
2350 base_node = overlay_node
2351 else:
2352 self._update_node(base_node, overlay_node)
2353
2354 # finally, we update the latest base node with our last overlay
2355 # node
2356 if base_node is None:
2357 # nothing was included, which is possible when we're
2358 # ignoring include errors
2359 return last_overlay_node
2360
2361 self._update_node(base_node, last_overlay_node)
2362
2363 return base_node
2364
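# Illustrative use of the "$include" property (v2.1+); the file names and
# alias name are assumptions. Included files are merged in order as base
# nodes, and the including object is applied last as the final overlay:
#
#     my-stream:
#       $include:
#         - common-stream.yaml
#         - project-stream.yaml
#       events:
#         my-event:
#           payload-type: my-payload-alias
#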
2365 def _process_event_include(self, event_node):
2366 return self._process_node_include(event_node, 'event',
2367 self._process_event_include)
2368
2369 def _process_stream_include(self, stream_node):
2370 def process_children_include(stream_node):
2371 if 'events' in stream_node:
2372 events_node = stream_node['events']
2373
2374 if not _is_assoc_array_prop(events_node):
2375 raise ConfigError('"events" property must be an associative array')
2376
2377 events_node_keys = list(events_node.keys())
2378
2379 for key in events_node_keys:
2380 event_node = events_node[key]
2381
2382 try:
2383 events_node[key] = self._process_event_include(event_node)
2384 except Exception as e:
2385 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2386
2387 return self._process_node_include(stream_node, 'stream',
2388 self._process_stream_include,
2389 process_children_include)
2390
2391 def _process_trace_include(self, trace_node):
2392 return self._process_node_include(trace_node, 'trace',
2393 self._process_trace_include)
2394
2395 def _process_clock_include(self, clock_node):
2396 return self._process_node_include(clock_node, 'clock',
2397 self._process_clock_include)
2398
2399 def _process_metadata_include(self, metadata_node):
2400 def process_children_include(metadata_node):
2401 if 'trace' in metadata_node:
2402 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2403
2404 if 'clocks' in metadata_node:
2405 clocks_node = metadata_node['clocks']
2406
2407 if not _is_assoc_array_prop(clocks_node):
2408 raise ConfigError('"clocks" property (metadata) must be an associative array')
2409
2410 clocks_node_keys = list(clocks_node.keys())
2411
2412 for key in clocks_node_keys:
2413 clock_node = clocks_node[key]
2414
2415 try:
2416 clocks_node[key] = self._process_clock_include(clock_node)
2417 except Exception as e:
2418 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2419
2420 if 'streams' in metadata_node:
2421 streams_node = metadata_node['streams']
2422
2423 if not _is_assoc_array_prop(streams_node):
2424 raise ConfigError('"streams" property (metadata) must be an associative array')
2425
2426 streams_node_keys = list(streams_node.keys())
2427
2428 for key in streams_node_keys:
2429 stream_node = streams_node[key]
2430
2431 try:
2432 streams_node[key] = self._process_stream_include(stream_node)
2433 except Exception as e:
2434 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2435
2436 return self._process_node_include(metadata_node, 'metadata',
2437 self._process_metadata_include,
2438 process_children_include)
2439
2440 def _process_root_includes(self, root):
2441 # The following config objects support includes:
2442 #
2443 # * Metadata object
2444 # * Trace object
2445 # * Stream object
2446 # * Event object
2447 #
2448 # We need to process the event includes first, then the stream
2449 # includes, then the trace includes, and finally the metadata
2450 # includes.
2451 #
2452 # In each object, only one of the $include and $include-replace
2453 # special properties is allowed.
2454 #
2455 # We keep a stack of absolute paths to included files to detect
2456 # recursion.
2457 if 'metadata' in root:
2458 root['metadata'] = self._process_metadata_include(root['metadata'])
2459
2460 return root
2461
2462 def _yaml_ordered_dump(self, node, **kwds):
2463 class ODumper(yaml.Dumper):
2464 pass
2465
2466 def dict_representer(dumper, node):
2467 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2468 node.items())
2469
2470 ODumper.add_representer(collections.OrderedDict, dict_representer)
2471
2472 return yaml.dump(node, Dumper=ODumper, **kwds)
2473
2474 def _yaml_ordered_load(self, yaml_path):
2475 class OLoader(yaml.Loader):
2476 pass
2477
2478 def construct_mapping(loader, node):
2479 loader.flatten_mapping(node)
2480
2481 return collections.OrderedDict(loader.construct_pairs(node))
2482
2483 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2484 construct_mapping)
2485
2486 # YAML -> Python
2487 try:
2488 with open(yaml_path, 'r') as f:
2489 node = yaml.load(f, OLoader)
2490 except (OSError, IOError) as e:
2491 raise ConfigError('cannot open file "{}"'.format(yaml_path))
2492 except Exception as e:
2493 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2494
2495 # loaded node must be an associative array
2496 if not _is_assoc_array_prop(node):
2497 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2498
2499 return node
2500
2501 def _reset(self):
2502 self._version = None
2503 self._include_stack = []
2504
2505 def parse(self, yaml_path):
2506 self._reset()
2507 self._root_yaml_path = yaml_path
2508
2509 try:
2510 root = self._yaml_ordered_load(yaml_path)
2511 except Exception as e:
2512 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2513
2514 if not _is_assoc_array_prop(root):
2515 raise ConfigError('configuration must be an associative array')
2516
2517 unk_prop = _get_first_unknown_prop(root, [
2518 'version',
2519 'prefix',
2520 'metadata',
2521 ])
2522
2523 if unk_prop:
2524 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
2525
2526 # get the config version
2527 self._version = self._get_version(root)
2528
2529 # process includes if supported
2530 if self._version >= 201:
2531 root = self._process_root_includes(root)
2532
2533 # dump config if required
2534 if self._dump_config:
2535 print(self._yaml_ordered_dump(root, indent=2,
2536 default_flow_style=False))
2537
2538 # get prefix and metadata
2539 prefix = self._get_prefix(root)
2540 meta = self._create_metadata(root)
2541
2542 return Config(self._version, prefix, meta)
2543
2544
2545 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2546 try:
2547 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2548 dump_config)
2549 cfg = parser.parse(path)
2550
2551 return cfg
2552 except Exception as e:
2553 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)
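
# Minimal usage sketch (the file path and include directory are
# assumptions):
#
#     from barectf import config
#
#     cfg = config.from_yaml_file('config.yaml', include_dirs=['.'],
#                                 ignore_include_not_found=False,
#                                 dump_config=False)
#     print(cfg.version, cfg.prefix)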