barectf/config.py (deliverable/barectf.git)
Commit: Add a few provided includable config files
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
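# Informative sketch (not part of the original module): ConfigError objects
# chain their causes through the "prev" property, so a front end could
# unwind and print the whole chain roughly like this (the function name is
# hypothetical):
#
#     def print_config_error(exc):
#         while exc is not None:
#             print(exc)
#             exc = exc.prev if isinstance(exc, ConfigError) else None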
44
45 class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
69 env = meta.env
70
71 env['domain'] = 'bare'
72 env['tracer_name'] = 'barectf'
73 version_tuple = barectf.get_version_tuple()
74 env['tracer_major'] = version_tuple[0]
75 env['tracer_minor'] = version_tuple[1]
76 env['tracer_patch'] = version_tuple[2]
77 env['barectf_gen_date'] = str(datetime.datetime.now().isoformat())
78
79 @property
80 def version(self):
81 return self._version
82
83 @version.setter
84 def version(self, value):
85 self._version = value
86
87 @property
88 def metadata(self):
89 return self._metadata
90
91 @metadata.setter
92 def metadata(self, value):
93 self._validate_metadata(value)
94 self._augment_metadata_env(value)
95 self._metadata = value
96
97 @property
98 def prefix(self):
99 return self._prefix
100
101 @prefix.setter
102 def prefix(self, value):
103 if not is_valid_identifier(value):
104 raise ConfigError('prefix must be a valid C identifier')
105
106 self._prefix = value
107
108
109 def _is_assoc_array_prop(node):
110 return isinstance(node, dict)
111
112
113 def _is_array_prop(node):
114 return isinstance(node, list)
115
116
117 def _is_int_prop(node):
118 return type(node) is int
119
120
121 def _is_str_prop(node):
122 return type(node) is str
123
124
125 def _is_bool_prop(node):
126 return type(node) is bool
127
128
129 def _is_valid_alignment(align):
130 return ((align & (align - 1)) == 0) and align > 0
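# Note (added for clarity): this is the usual power-of-two test. For a
# positive integer n, (n & (n - 1)) == 0 only when exactly one bit is set,
# so _is_valid_alignment(8) is True while _is_valid_alignment(12) is False
# (12 & 11 == 8 != 0).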
131
132
133 def _byte_order_str_to_bo(bo_str):
134 bo_str = bo_str.lower()
135
136 if bo_str == 'le':
137 return metadata.ByteOrder.LE
138 elif bo_str == 'be':
139 return metadata.ByteOrder.BE
140
141
142 def _encoding_str_to_encoding(encoding_str):
143 encoding_str = encoding_str.lower()
144
145 if encoding_str == 'utf-8' or encoding_str == 'utf8':
146 return metadata.Encoding.UTF8
147 elif encoding_str == 'ascii':
148 return metadata.Encoding.ASCII
149 elif encoding_str == 'none':
150 return metadata.Encoding.NONE
151
152
153 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
154 _ctf_keywords = set([
155 'align',
156 'callsite',
157 'clock',
158 'enum',
159 'env',
160 'event',
161 'floating_point',
162 'integer',
163 'stream',
164 'string',
165 'struct',
166 'trace',
167 'typealias',
168 'typedef',
169 'variant',
170 ])
171
172
173 def is_valid_identifier(iden):
174 if not _re_iden.match(iden):
175 return False
176
177 if iden in _ctf_keywords:
178 return False
179
180 return True
181
182
183 def _get_first_unknown_prop(node, known_props):
184 for prop_name in node:
185 if prop_name in known_props:
186 continue
187
188 return prop_name
189
190
191 # This validator validates the configured metadata for barectf-specific
192 # needs.
193 #
194 # barectf needs:
195 #
196 # * all headers/contexts to be at least byte-aligned
197 # * all integer and floating point number sizes to be <= 64 bits
198 # * no inner structures, arrays, or variants
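#
# For example, an event payload field whose type is itself a structure
# (or any array/variant anywhere) is rejected by this validator even
# though it would be valid CTF.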
199 class _BarectfMetadataValidator:
200 def __init__(self):
201 self._type_to_validate_type_func = {
202 metadata.Integer: self._validate_int_type,
203 metadata.FloatingPoint: self._validate_float_type,
204 metadata.Enum: self._validate_enum_type,
205 metadata.String: self._validate_string_type,
206 metadata.Struct: self._validate_struct_type,
207 metadata.Array: self._validate_array_type,
208 metadata.Variant: self._validate_variant_type,
209 }
210
211 def _validate_int_type(self, t, entity_root):
212 if t.size > 64:
213 raise ConfigError('integer type\'s size must be less than or equal to 64 bits')
214
215 def _validate_float_type(self, t, entity_root):
216 if t.size > 64:
217 raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')
218
219 def _validate_enum_type(self, t, entity_root):
220 if t.value_type.size > 64:
221 raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
222
223 def _validate_string_type(self, t, entity_root):
224 pass
225
226 def _validate_struct_type(self, t, entity_root):
227 if not entity_root:
228 raise ConfigError('inner structure types are not supported as of this version')
229
230 for field_name, field_type in t.fields.items():
231 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
232 if field_name == 'uuid':
233 # allow
234 continue
235
236 try:
237 self._validate_type(field_type, False)
238 except Exception as e:
239 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
240
241 def _validate_array_type(self, t, entity_root):
242 raise ConfigError('array types are not supported as of this version')
243
244 def _validate_variant_type(self, t, entity_root):
245 raise ConfigError('variant types are not supported as of this version')
246
247 def _validate_type(self, t, entity_root):
248 self._type_to_validate_type_func[type(t)](t, entity_root)
249
250 def _validate_entity(self, t):
251 if t is None:
252 return
253
254 # make sure entity is byte-aligned
255 if t.align < 8:
256 raise ConfigError('type must be at least byte-aligned (alignment >= 8)')
257
258 # make sure entity is a structure
259 if type(t) is not metadata.Struct:
260 raise ConfigError('expecting a structure type')
261
262 # validate types
263 self._validate_type(t, True)
264
265 def _validate_entities_and_names(self, meta):
266 self._cur_entity = _Entity.TRACE_PACKET_HEADER
267
268 try:
269 self._validate_entity(meta.trace.packet_header_type)
270 except Exception as e:
271 raise ConfigError('invalid trace packet header type', e)
272
273 for stream_name, stream in meta.streams.items():
274 if not is_valid_identifier(stream_name):
275 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
276
277 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
278
279 try:
280 self._validate_entity(stream.packet_context_type)
281 except Exception as e:
282 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
283
284 self._cur_entity = _Entity.STREAM_EVENT_HEADER
285
286 try:
287 self._validate_entity(stream.event_header_type)
288 except Exception as e:
289 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
290
291 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
292
293 try:
294 self._validate_entity(stream.event_context_type)
295 except Exception as e:
296 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
297
298 try:
299 for ev_name, ev in stream.events.items():
300 if not is_valid_identifier(ev_name):
301 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
302
303 self._cur_entity = _Entity.EVENT_CONTEXT
304
305 try:
306 self._validate_entity(ev.context_type)
307 except Exception as e:
308 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
309
310 self._cur_entity = _Entity.EVENT_PAYLOAD
311
312 if ev.payload_type is None:
313 raise ConfigError('missing payload type in event "{}"'.format(ev_name))
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if not ev.payload_type.fields:
321 raise ConfigError('empty payload type in event "{}"'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
329 # This validator validates special fields of trace, stream, and event
330 # types. For example, it checks that the "stream_id" field exists in the
331 # trace packet header if there's more than one stream, among other checks.
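#
# Summary of the special fields checked below:
#
#   trace packet header:   "magic", "uuid", "stream_id"
#   stream packet context: "timestamp_begin", "timestamp_end",
#                          "events_discarded", "packet_size", "content_size"
#   stream event header:   "id", "timestamp"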
332 class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
337 if t is None:
338 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is missing')
339
340 if type(t) is not metadata.Struct:
341 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is not a structure type')
342
343 if 'stream_id' not in t.fields:
344 raise ConfigError('need "stream_id" field in trace packet header type')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
366 elif field_name == 'uuid':
367 if self._meta.trace.uuid is None:
368 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
369
370 if type(field_type) is not metadata.Array:
371 raise ConfigError('"uuid" field in trace packet header type must be an array')
372
373 if field_type.length != 16:
374 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
375
376 element_type = field_type.element_type
377
378 if type(element_type) is not metadata.Integer:
379 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
380
381 if element_type.size != 8:
382 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
383
384 if element_type.signed:
385 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
386
387 if element_type.align != 8:
388 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
389
390 def _validate_trace(self, meta):
391 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
392
393 def _validate_stream_packet_context(self, stream):
394 t = stream.packet_context_type
395
396 if t is None:
397 return
398
399 if type(t) is not metadata.Struct:
400 return
401
402 # "timestamp_begin", if exists, is an unsigned integer type,
403 # mapped to a clock
404 if 'timestamp_begin' in t.fields:
405 ts_begin = t.fields['timestamp_begin']
406
407 if type(ts_begin) is not metadata.Integer:
408 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
409
410 if ts_begin.signed:
411 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
412
413 if not ts_begin.property_mappings:
414 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
415
416 # "timestamp_end", if exists, is an unsigned integer type,
417 # mapped to a clock
418 if 'timestamp_end' in t.fields:
419 ts_end = t.fields['timestamp_end']
420
421 if type(ts_end) is not metadata.Integer:
422 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
423
424 if ts_end.signed:
425 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
426
427 if not ts_end.property_mappings:
428 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
429
430 # "timestamp_begin" and "timestamp_end" exist together
431 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
432 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
433
434 # "events_discarded", if exists, is an unsigned integer type
435 if 'events_discarded' in t.fields:
436 events_discarded = t.fields['events_discarded']
437
438 if type(events_discarded) is not metadata.Integer:
439 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
440
441 if events_discarded.signed:
442 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
443
444 # "packet_size" and "content_size" must exist
445 if 'packet_size' not in t.fields:
446 raise ConfigError('missing "packet_size" field in stream packet context type')
447
448 packet_size = t.fields['packet_size']
449
450 # "content_size" and "content_size" must exist
451 if 'content_size' not in t.fields:
452 raise ConfigError('missing "content_size" field in stream packet context type')
453
454 content_size = t.fields['content_size']
455
456 # "packet_size" is an unsigned integer type
457 if type(packet_size) is not metadata.Integer:
458 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
459
460 if packet_size.signed:
461 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
462
463 # "content_size" is an unsigned integer type
464 if type(content_size) is not metadata.Integer:
465 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
466
467 if content_size.signed:
468 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
469
470 def _validate_stream_event_header(self, stream):
471 t = stream.event_header_type
472
473 # needs "id" field?
474 if len(stream.events) > 1:
475 # yes
476 if t is None:
477 raise ConfigError('need "id" field in stream event header type, but stream event header type is missing')
478
479 if type(t) is not metadata.Struct:
480 raise ConfigError('need "id" field in stream event header type, but stream event header type is not a structure type')
481
482 if 'id' not in t.fields:
483 raise ConfigError('need "id" field in stream event header type')
484
485 # validate "id" and "timestamp" types
486 if type(t) is not metadata.Struct:
487 return
488
489 # "timestamp", if exists, is an unsigned integer type,
490 # mapped to a clock
491 if 'timestamp' in t.fields:
492 ts = t.fields['timestamp']
493
494 if type(ts) is not metadata.Integer:
495 raise ConfigError('"ts" field in stream event header type must be an integer type')
496
497 if ts.signed:
498 raise ConfigError('"ts" field in stream event header type must be an unsigned integer type')
499
500 if not ts.property_mappings:
501 raise ConfigError('"ts" field in stream event header type must be mapped to a clock')
502
503 # "id" is an unsigned integer type
504 if 'id' in t.fields:
505 eid = t.fields['id']
506
507 if type(eid) is not metadata.Integer:
508 raise ConfigError('"id" field in stream event header type must be an integer type')
509
510 if eid.signed:
511 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
512
513 def _validate_stream(self, stream):
514 self._validate_stream_packet_context(stream)
515 self._validate_stream_event_header(stream)
516
517 def validate(self, meta):
518 self._meta = meta
519 self._validate_trace(meta)
520
521 for stream in meta.streams.values():
522 try:
523 self._validate_stream(stream)
524 except Exception as e:
525 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
526
527
528 class _MetadataDynamicTypesValidatorStackEntry:
529 def __init__(self, base_t):
530 self._base_t = base_t
531 self._index = 0
532
533 @property
534 def index(self):
535 return self._index
536
537 @index.setter
538 def index(self, value):
539 self._index = value
540
541 @property
542 def base_t(self):
543 return self._base_t
544
545 @base_t.setter
546 def base_t(self, value):
547 self._base_t = value
548
549
550 # Entities. Order of values is important here.
551 @enum.unique
552 class _Entity(enum.IntEnum):
553 TRACE_PACKET_HEADER = 0
554 STREAM_PACKET_CONTEXT = 1
555 STREAM_EVENT_HEADER = 2
556 STREAM_EVENT_CONTEXT = 3
557 EVENT_CONTEXT = 4
558 EVENT_PAYLOAD = 5
559
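# Note (added for clarity): since _Entity is an IntEnum, entities compare
# with "<" in declaration order; _lookup_path() below relies on this to
# reject, for example, a reference to the stream packet context while
# validating the trace packet header (TRACE_PACKET_HEADER <
# STREAM_PACKET_CONTEXT).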
560
561 # This validator validates dynamic metadata types, that is, it ensures
562 # variable-length array lengths and variant tags actually point to
563 # something that exists. It also checks that variable-length array
564 # lengths point to integer types and variant tags to enumeration types.
565 class _MetadataDynamicTypesValidator:
566 def __init__(self):
567 self._type_to_visit_type_func = {
568 metadata.Integer: None,
569 metadata.FloatingPoint: None,
570 metadata.Enum: None,
571 metadata.String: None,
572 metadata.Struct: self._visit_struct_type,
573 metadata.Array: self._visit_array_type,
574 metadata.Variant: self._visit_variant_type,
575 }
576
577 self._cur_trace = None
578 self._cur_stream = None
579 self._cur_event = None
580
581 def _lookup_path_from_base(self, path, parts, base, start_index,
582 base_is_current, from_t):
583 index = start_index
584 cur_t = base
585 found_path = []
586
587 while index < len(parts):
588 part = parts[index]
589 next_t = None
590
591 if type(cur_t) is metadata.Struct:
592 enumerated_items = enumerate(cur_t.fields.items())
593
594 # lookup each field
595 for i, (field_name, field_type) in enumerated_items:
596 if field_name == part:
597 next_t = field_type
598 found_path.append((i, field_type))
599
600 if next_t is None:
601 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
602 elif type(cur_t) is metadata.Variant:
603 enumerated_items = enumerate(cur_t.types.items())
604
605 # lookup each type
606 for i, (type_name, type_type) in enumerated_items:
607 if type_name == part:
608 next_t = type_type
609 found_path.append((i, type_type))
610
611 if next_t is None:
612 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
613 else:
614 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
615
616 cur_t = next_t
617 index += 1
618
619 # make sure that the pointed type is not the pointing type
620 if from_t is cur_t:
621 raise ConfigError('invalid path "{}": pointing to self'.format(path))
622
623 # if we're here, we found the type; however, it could be located
624 # _after_ the variant/VLA which refers to it when the pointing
625 # and pointed types are in the same entity, so in that case compare
626 # the current stack entries' indexes to our index path
627 if not base_is_current:
628 return cur_t
629
630 for index, entry in enumerate(self._stack):
631 if index == len(found_path):
632 # end of index path; valid so far
633 break
634
635 if found_path[index][0] > entry.index:
636 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
637
638 # also make sure that both pointed and pointing types share
639 # a common structure ancestor
640 for index, entry in enumerate(self._stack):
641 if index == len(found_path):
642 break
643
644 if entry.base_t is not found_path[index][1]:
645 # found common ancestor
646 if type(entry.base_t) is metadata.Variant:
647 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
648
649 return cur_t
650
651 def _lookup_path_from_top(self, path, parts):
652 if len(parts) != 1:
653 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
654
655 find_name = parts[0]
656 index = len(self._stack) - 1
657 got_struct = False
658
659 # check stack entries in reversed order
660 for entry in reversed(self._stack):
661 # structure base type
662 if type(entry.base_t) is metadata.Struct:
663 got_struct = True
664 enumerated_items = enumerate(entry.base_t.fields.items())
665
666 # lookup each field, until the current visiting index is met
667 for i, (field_name, field_type) in enumerated_items:
668 if i == entry.index:
669 break
670
671 if field_name == find_name:
672 return field_type
673
674 # variant base type
675 elif type(entry.base_t) is metadata.Variant:
676 enumerated_items = enumerate(entry.base_t.types.items())
677
678 # lookup each type, until the current visiting index is met
679 for i, (type_name, type_type) in enumerated_items:
680 if i == entry.index:
681 break
682
683 if type_name == find_name:
684 if not got_struct:
685 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
686
687 return type_type
688
689 # nothing returned here: cannot find type
690 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
691
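# Examples of "path" values accepted by _lookup_path() below (informative;
# the field names after the fixed prefixes are only illustrative):
#
#   trace.packet.header.stream_id
#   stream.packet.context.packet_size
#   stream.event.header.id
#   stream.event.context.cpu_id
#   event.context.vpid
#   event.payload.msg_id        ("event.fields." is an accepted synonym)
#   some_field                  (single relative name, resolved against the
#                                current lookup stack)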
692 def _lookup_path(self, path, from_t):
693 parts = path.lower().split('.')
694 base = None
695 base_is_current = False
696
697 if len(parts) >= 3:
698 if parts[0] == 'trace':
699 if parts[1] == 'packet' and parts[2] == 'header':
700 # make sure packet header exists
701 if self._cur_trace.packet_header_type is None:
702 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
703
704 base = self._cur_trace.packet_header_type
705
706 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
707 base_is_current = True
708 else:
709 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
710 elif parts[0] == 'stream':
711 if parts[1] == 'packet' and parts[2] == 'context':
712 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
713 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
714
715 if self._cur_stream.packet_context_type is None:
716 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
717
718 base = self._cur_stream.packet_context_type
719
720 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
721 base_is_current = True
722 elif parts[1] == 'event':
723 if parts[2] == 'header':
724 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
725 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
726
727 if self._cur_stream.event_header_type is None:
728 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
729
730 base = self._cur_stream.event_header_type
731
732 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
733 base_is_current = True
734 elif parts[2] == 'context':
735 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
736 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
737
738 if self._cur_stream.event_context_type is None:
739 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
740
741 base = self._cur_stream.event_context_type
742
743 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
744 base_is_current = True
745 else:
746 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
747 else:
748 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
749
750 if base is not None:
751 start_index = 3
752
753 if len(parts) >= 2 and base is None:
754 if parts[0] == 'event':
755 if parts[1] == 'context':
756 if self._cur_entity < _Entity.EVENT_CONTEXT:
757 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
758
759 if self._cur_event.context_type is None:
760 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
761
762 base = self._cur_event.context_type
763
764 if self._cur_entity == _Entity.EVENT_CONTEXT:
765 base_is_current = True
766 elif parts[1] == 'payload' or parts[1] == 'fields':
767 if self._cur_entity < _Entity.EVENT_PAYLOAD:
768 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
769
770 if self._cur_event.payload_type is None:
771 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
772
773 base = self._cur_event.payload_type
774
775 if self._cur_entity == _Entity.EVENT_PAYLOAD:
776 base_is_current = True
777 else:
778 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
779
780 if base is not None:
781 start_index = 2
782
783 if base is not None:
784 return self._lookup_path_from_base(path, parts, base, start_index,
785 base_is_current, from_t)
786 else:
787 return self._lookup_path_from_top(path, parts)
788
789 def _stack_reset(self):
790 self._stack = []
791
792 def _stack_push(self, base_t):
793 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
794 self._stack.append(entry)
795
796 def _stack_pop(self):
797 self._stack.pop()
798
799 def _stack_incr_index(self):
800 self._stack[-1].index += 1
801
802 def _visit_struct_type(self, t):
803 self._stack_push(t)
804
805 for field_name, field_type in t.fields.items():
806 try:
807 self._visit_type(field_type)
808 except Exception as e:
809 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
810
811 self._stack_incr_index()
812
813 self._stack_pop()
814
815 def _visit_array_type(self, t):
816 if not t.is_static:
817 # find length type
818 try:
819 length_type = self._lookup_path(t.length, t)
820 except Exception as e:
821 raise ConfigError('invalid array type\'s length', e)
822
823 # make sure the length type is an unsigned integer
824 if type(length_type) is not metadata.Integer:
825 raise ConfigError('array type\'s length does not point to an integer type')
826
827 if length_type.signed:
828 raise ConfigError('array type\'s length does not point to an unsigned integer type')
829
830 self._visit_type(t.element_type)
831
832 def _visit_variant_type(self, t):
833 # find tag type
834 try:
835 tag_type = self._lookup_path(t.tag, t)
836 except Exception as e:
837 raise ConfigError('invalid variant type\'s tag', e)
838
839 # make sure tag type is an enumeration
840 if type(tag_type) is not metadata.Enum:
841 raise ConfigError('variant type\'s tag does not point to an enumeration type')
842
843 # verify that each variant type's type exists as an enumeration member
844 for tag_name in t.types.keys():
845 if tag_name not in tag_type.members:
846 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
847
848 self._stack_push(t)
849
850 for type_name, type_type in t.types.items():
851 try:
852 self._visit_type(type_type)
853 except Exception as e:
854 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
855
856 self._stack_incr_index()
857
858 self._stack_pop()
859
860 def _visit_type(self, t):
861 if t is None:
862 return
863
864 if type(t) in self._type_to_visit_type_func:
865 func = self._type_to_visit_type_func[type(t)]
866
867 if func is not None:
868 func(t)
869
870 def _visit_event(self, ev):
871 ev_name = ev.name
872
873 # set current event
874 self._cur_event = ev
875
876 # visit event context type
877 self._stack_reset()
878 self._cur_entity = _Entity.EVENT_CONTEXT
879
880 try:
881 self._visit_type(ev.context_type)
882 except Exception as e:
883 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
884
885 # visit event payload type
886 self._stack_reset()
887 self._cur_entity = _Entity.EVENT_PAYLOAD
888
889 try:
890 self._visit_type(ev.payload_type)
891 except Exception as e:
892 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
893
894 def _visit_stream(self, stream):
895 stream_name = stream.name
896
897 # set current stream
898 self._cur_stream = stream
899
900 # reset current event
901 self._cur_event = None
902
903 # visit stream packet context type
904 self._stack_reset()
905 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
906
907 try:
908 self._visit_type(stream.packet_context_type)
909 except Exception as e:
910 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
911
912 # visit stream event header type
913 self._stack_reset()
914 self._cur_entity = _Entity.STREAM_EVENT_HEADER
915
916 try:
917 self._visit_type(stream.event_header_type)
918 except Exception as e:
919 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
920
921 # visit stream event context type
922 self._stack_reset()
923 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
924
925 try:
926 self._visit_type(stream.event_context_type)
927 except Exception as e:
928 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
929
930 # visit events
931 for ev in stream.events.values():
932 try:
933 self._visit_event(ev)
934 except Exception as e:
935 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
936
937 def validate(self, meta):
938 # set current trace
939 self._cur_trace = meta.trace
940
941 # visit trace packet header type
942 self._stack_reset()
943 self._cur_entity = _Entity.TRACE_PACKET_HEADER
944
945 try:
946 self._visit_type(meta.trace.packet_header_type)
947 except Exception as e:
948 raise ConfigError('invalid packet header type in trace', e)
949
950 # visit streams
951 for stream in meta.streams.values():
952 self._visit_stream(stream)
953
954
955 # Since type inheritance allows types to be only partially defined at
956 # any place in the configuration, this validator validates that actual
957 # trace, stream, and event types are all complete and valid.
958 class _MetadataTypesHistologyValidator:
959 def __init__(self):
960 self._type_to_validate_type_histology_func = {
961 metadata.Integer: self._validate_integer_histology,
962 metadata.FloatingPoint: self._validate_float_histology,
963 metadata.Enum: self._validate_enum_histology,
964 metadata.String: self._validate_string_histology,
965 metadata.Struct: self._validate_struct_histology,
966 metadata.Array: self._validate_array_histology,
967 metadata.Variant: self._validate_variant_histology,
968 }
969
970 def _validate_integer_histology(self, t):
971 # size is set
972 if t.size is None:
973 raise ConfigError('missing integer type\'s size')
974
975 def _validate_float_histology(self, t):
976 # exponent digits is set
977 if t.exp_size is None:
978 raise ConfigError('missing floating point number type\'s exponent size')
979
980 # mantissa digits is set
981 if t.mant_size is None:
982 raise ConfigError('missing floating point number type\'s mantissa size')
983
984 # exponent and mantissa sum is a multiple of 8
985 if (t.exp_size + t.mant_size) % 8 != 0:
986 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
987
988 def _validate_enum_histology(self, t):
989 # integer type is set
990 if t.value_type is None:
991 raise ConfigError('missing enumeration type\'s value type')
992
993 # there's at least one member
994 if not t.members:
995 raise ConfigError('enumeration type needs at least one member')
996
997 # no overlapping values and all values are valid considering
998 # the value type
999 ranges = []
1000
1001 if t.value_type.signed:
1002 value_min = -(1 << t.value_type.size - 1)
1003 value_max = (1 << (t.value_type.size - 1)) - 1
1004 else:
1005 value_min = 0
1006 value_max = (1 << t.value_type.size) - 1
1007
1008 for label, value in t.members.items():
1009 for rg in ranges:
1010 if value[0] <= rg[1] and rg[0] <= value[1]:
1011 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1012
1013 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
1014
1015 if value[0] < value_min or value[0] > value_max:
1016 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
1017
1018 if value[1] < value_min or value[1] > value_max:
1019 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
1020
1021 ranges.append(value)
1022
1023 def _validate_string_histology(self, t):
1024 # always valid
1025 pass
1026
1027 def _validate_struct_histology(self, t):
1028 # all fields are valid
1029 for field_name, field_type in t.fields.items():
1030 try:
1031 self._validate_type_histology(field_type)
1032 except Exception as e:
1033 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1034
1035 def _validate_array_histology(self, t):
1036 # length is set
1037 if t.length is None:
1038 raise ConfigError('missing array type\'s length')
1039
1040 # element type is set
1041 if t.element_type is None:
1042 raise ConfigError('missing array type\'s element type')
1043
1044 # element type is valid
1045 try:
1046 self._validate_type_histology(t.element_type)
1047 except Exception as e:
1048 raise ConfigError('invalid array type\'s element type', e)
1049
1050 def _validate_variant_histology(self, t):
1051 # tag is set
1052 if t.tag is None:
1053 raise ConfigError('missing variant type\'s tag')
1054
1055 # there's at least one type
1056 if not t.types:
1057 raise ConfigError('variant type needs at least one type')
1058
1059 # all types are valid
1060 for type_name, type_t in t.types.items():
1061 try:
1062 self._validate_type_histology(type_t)
1063 except Exception as e:
1064 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1065
1066 def _validate_type_histology(self, t):
1067 if t is None:
1068 return
1069
1070 self._type_to_validate_type_histology_func[type(t)](t)
1071
1072 def _validate_entity_type_histology(self, t):
1073 if t is None:
1074 return
1075
1076 # entity cannot be an array
1077 if type(t) is metadata.Array:
1078 raise ConfigError('cannot use an array here')
1079
1080 self._validate_type_histology(t)
1081
1082 def _validate_event_types_histology(self, ev):
1083 ev_name = ev.name
1084
1085 # validate event context type
1086 try:
1087 self._validate_entity_type_histology(ev.context_type)
1088 except Exception as e:
1089 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1090
1091 # validate event payload type
1092 if ev.payload_type is None:
1093 raise ConfigError('event payload type must exist in event "{}"'.format(ev_name))
1094
1095 # TODO: also check arrays, sequences, and variants
1096 if type(ev.payload_type) is metadata.Struct:
1097 if not ev.payload_type.fields:
1098 raise ConfigError('event payload type must have at least one field for event "{}"'.format(ev_name))
1099
1100 try:
1101 self._validate_entity_type_histology(ev.payload_type)
1102 except Exception as e:
1103 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1104
1105 def _validate_stream_types_histology(self, stream):
1106 stream_name = stream.name
1107
1108 # validate stream packet context type
1109 try:
1110 self._validate_entity_type_histology(stream.packet_context_type)
1111 except Exception as e:
1112 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1113
1114 # validate stream event header type
1115 try:
1116 self._validate_entity_type_histology(stream.event_header_type)
1117 except Exception as e:
1118 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1119
1120 # validate stream event context type
1121 try:
1122 self._validate_entity_type_histology(stream.event_context_type)
1123 except Exception as e:
1124 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1125
1126 # validate events
1127 for ev in stream.events.values():
1128 try:
1129 self._validate_event_types_histology(ev)
1130 except Exception as e:
1131 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1132
1133 def validate(self, meta):
1134 # validate trace packet header type
1135 try:
1136 self._validate_entity_type_histology(meta.trace.packet_header_type)
1137 except Exception as e:
1138 raise ConfigError('invalid trace packet header type', e)
1139
1140 # validate streams
1141 for stream in meta.streams.values():
1142 self._validate_stream_types_histology(stream)
1143
1144
1145 class _YamlConfigParser:
1146 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1147 self._class_name_to_create_type_func = {
1148 'int': self._create_integer,
1149 'integer': self._create_integer,
1150 'flt': self._create_float,
1151 'float': self._create_float,
1152 'floating-point': self._create_float,
1153 'enum': self._create_enum,
1154 'enumeration': self._create_enum,
1155 'str': self._create_string,
1156 'string': self._create_string,
1157 'struct': self._create_struct,
1158 'structure': self._create_struct,
1159 'array': self._create_array,
1160 'var': self._create_variant,
1161 'variant': self._create_variant,
1162 }
1163 self._type_to_create_type_func = {
1164 metadata.Integer: self._create_integer,
1165 metadata.FloatingPoint: self._create_float,
1166 metadata.Enum: self._create_enum,
1167 metadata.String: self._create_string,
1168 metadata.Struct: self._create_struct,
1169 metadata.Array: self._create_array,
1170 metadata.Variant: self._create_variant,
1171 }
1172 self._include_dirs = include_dirs
1173 self._ignore_include_not_found = ignore_include_not_found
1174 self._dump_config = dump_config
1175
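# Informative example: the metadata node handled by _set_byte_order() below
# is expected to contain at least a "trace" object with a "byte-order"
# property, e.g. (YAML):
#
#     trace:
#       byte-order: le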
1176 def _set_byte_order(self, metadata_node):
1177 if 'trace' not in metadata_node:
1178 raise ConfigError('missing "trace" property (metadata)')
1179
1180 trace_node = metadata_node['trace']
1181
1182 if not _is_assoc_array_prop(trace_node):
1183 raise ConfigError('"trace" property (metadata) must be an associative array')
1184
1185 if 'byte-order' not in trace_node:
1186 raise ConfigError('missing "byte-order" property (trace)')
1187
1188 bo_node = trace_node['byte-order']
1189
1190 if not _is_str_prop(bo_node):
1191 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
1192
1193 self._bo = _byte_order_str_to_bo(bo_node)
1194
1195 if self._bo is None:
1196 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1197
1198 def _lookup_type_alias(self, name):
1199 if name in self._tas:
1200 return copy.deepcopy(self._tas[name])
1201
1202 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1203 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1204
1205 if unk_prop:
1206 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1207
1208 if 'name' not in prop_mapping_node:
1209 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1210
1211 if 'property' not in prop_mapping_node:
1212 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1213
1214 clock_name = prop_mapping_node['name']
1215 prop = prop_mapping_node['property']
1216
1217 if not _is_str_prop(clock_name):
1218 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1219
1220 if not _is_str_prop(prop):
1221 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1222
1223 if clock_name not in self._clocks:
1224 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1225
1226 if prop != 'value':
1227 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1228
1229 mapped_clock = self._clocks[clock_name]
1230 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1231
1232 def _get_first_unknown_type_prop(self, type_node, known_props):
1233 kp = known_props + ['inherit', 'class']
1234
1235 if self._version >= 201:
1236 kp.append('$inherit')
1237
1238 return _get_first_unknown_prop(type_node, kp)
1239
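# Informative example of a YAML integer type object accepted by
# _create_integer() below (the clock name "default" is hypothetical):
#
#     class: int
#     size: 32
#     align: 8
#     signed: false
#     byte-order: le
#     base: hex
#     property-mappings:
#       - type: clock
#         name: default
#         property: value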
1240 def _create_integer(self, obj, node):
1241 if obj is None:
1242 # create integer object
1243 obj = metadata.Integer()
1244
1245 unk_prop = self._get_first_unknown_type_prop(node, [
1246 'size',
1247 'align',
1248 'signed',
1249 'byte-order',
1250 'base',
1251 'encoding',
1252 'property-mappings',
1253 ])
1254
1255 if unk_prop:
1256 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1257
1258 # size
1259 if 'size' in node:
1260 size = node['size']
1261
1262 if not _is_int_prop(size):
1263 raise ConfigError('"size" property of integer type object must be an integer')
1264
1265 if size < 1:
1266 raise ConfigError('invalid integer size: {}'.format(size))
1267
1268 obj.size = size
1269
1270 # align
1271 if 'align' in node:
1272 align = node['align']
1273
1274 if not _is_int_prop(align):
1275 raise ConfigError('"align" property of integer type object must be an integer')
1276
1277 if not _is_valid_alignment(align):
1278 raise ConfigError('invalid alignment: {}'.format(align))
1279
1280 obj.align = align
1281
1282 # signed
1283 if 'signed' in node:
1284 signed = node['signed']
1285
1286 if not _is_bool_prop(signed):
1287 raise ConfigError('"signed" property of integer type object must be a boolean')
1288
1289 obj.signed = signed
1290
1291 # byte order
1292 if 'byte-order' in node:
1293 byte_order = node['byte-order']
1294
1295 if not _is_str_prop(byte_order):
1296 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1297
1298 byte_order = _byte_order_str_to_bo(byte_order)
1299
1300 if byte_order is None:
1301 raise ConfigError('invalid "byte-order" property in integer type object')
1302 else:
1303 byte_order = self._bo
1304
1305 obj.byte_order = byte_order
1306
1307 # base
1308 if 'base' in node:
1309 base = node['base']
1310
1311 if not _is_str_prop(base):
1312 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1313
1314 if base == 'bin':
1315 base = 2
1316 elif base == 'oct':
1317 base = 8
1318 elif base == 'dec':
1319 base = 10
1320 elif base == 'hex':
1321 base = 16
1322 else:
1323 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
1324
1325 obj.base = base
1326
1327 # encoding
1328 if 'encoding' in node:
1329 encoding = node['encoding']
1330
1331 if not _is_str_prop(encoding):
1332 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
1333
1334 encoding = _encoding_str_to_encoding(encoding)
1335
1336 if encoding is None:
1337 raise ConfigError('invalid "encoding" property in integer type object')
1338
1339 obj.encoding = encoding
1340
1341 # property mappings
1342 if 'property-mappings' in node:
1343 prop_mappings = node['property-mappings']
1344
1345 if not _is_array_prop(prop_mappings):
1346 raise ConfigError('"property-mappings" property of integer type object must be an array')
1347
1348 if len(prop_mappings) > 1:
1349 raise ConfigError('length of "property-mappings" array in integer type object must be 1')
1350
1351 del obj.property_mappings[:]
1352
1353 for index, prop_mapping in enumerate(prop_mappings):
1354 if not _is_assoc_array_prop(prop_mapping):
1355 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
1356
1357 if 'type' not in prop_mapping:
1358 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
1359
1360 prop_type = prop_mapping['type']
1361
1362 if not _is_str_prop(prop_type):
1363 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
1364
1365 if prop_type == 'clock':
1366 self._set_int_clock_prop_mapping(obj, prop_mapping)
1367 else:
1368 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1369
1370 return obj
1371
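# Informative example of a YAML floating point number type object accepted
# by _create_float() below (the exponent and mantissa sizes must sum to a
# multiple of 8, e.g. 8 + 24 = 32 bits):
#
#     class: float
#     size:
#       exp: 8
#       mant: 24
#     align: 32
#     byte-order: le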
1372 def _create_float(self, obj, node):
1373 if obj is None:
1374 # create floating point number object
1375 obj = metadata.FloatingPoint()
1376
1377 unk_prop = self._get_first_unknown_type_prop(node, [
1378 'size',
1379 'align',
1380 'byte-order',
1381 ])
1382
1383 if unk_prop:
1384 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1385
1386 # size
1387 if 'size' in node:
1388 size = node['size']
1389
1390 if not _is_assoc_array_prop(size):
1391 raise ConfigError('"size" property of floating point number type object must be an associative array')
1392
1393 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1394
1395 if unk_prop:
1396 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1397
1398 if 'exp' in size:
1399 exp = size['exp']
1400
1401 if not _is_int_prop(exp):
1402 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1403
1404 if exp < 1:
1405 raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1406
1407 obj.exp_size = exp
1408
1409 if 'mant' in size:
1410 mant = size['mant']
1411
1412 if not _is_int_prop(mant):
1413 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1414
1415 if mant < 1:
1416 raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1417
1418 obj.mant_size = mant
1419
1420 # align
1421 if 'align' in node:
1422 align = node['align']
1423
1424 if not _is_int_prop(align):
1425 raise ConfigError('"align" property of floating point number type object must be an integer')
1426
1427 if not _is_valid_alignment(align):
1428 raise ConfigError('invalid alignment: {}'.format(align))
1429
1430 obj.align = align
1431
1432 # byte order
1433 if 'byte-order' in node:
1434 byte_order = node['byte-order']
1435
1436 if not _is_str_prop(byte_order):
1437 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1438
1439 byte_order = _byte_order_str_to_bo(byte_order)
1440
1441 if byte_order is None:
1442 raise ConfigError('invalid "byte-order" property in floating point number type object')
1443 else:
1444 byte_order = self._bo
1445
1446 obj.byte_order = byte_order
1447
1448 return obj
1449
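# Informative example of a YAML enumeration type object accepted by
# _create_enum() below; string members get implicit increasing values, and
# explicit values may be integers or [min, max] ranges:
#
#     class: enum
#     value-type:
#       class: int
#       size: 8
#     members:
#       - LOW                 # value 0
#       - HIGH                # value 1
#       - label: ERROR
#         value: [10, 19]     # range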
1450 def _create_enum(self, obj, node):
1451 if obj is None:
1452 # create enumeration object
1453 obj = metadata.Enum()
1454
1455 unk_prop = self._get_first_unknown_type_prop(node, [
1456 'value-type',
1457 'members',
1458 ])
1459
1460 if unk_prop:
1461 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1462
1463 # value type
1464 if 'value-type' in node:
1465 try:
1466 obj.value_type = self._create_type(node['value-type'])
1467 except Exception as e:
1468 raise ConfigError('cannot create enumeration type\'s integer type', e)
1469
1470 # members
1471 if 'members' in node:
1472 members_node = node['members']
1473
1474 if not _is_array_prop(members_node):
1475 raise ConfigError('"members" property of enumeration type object must be an array')
1476
1477 cur = 0
1478
1479 for index, m_node in enumerate(members_node):
1480 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1481 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1482
1483 if _is_str_prop(m_node):
1484 label = m_node
1485 value = (cur, cur)
1486 cur += 1
1487 else:
1488 unk_prop = _get_first_unknown_prop(m_node, [
1489 'label',
1490 'value',
1491 ])
1492
1493 if unk_prop:
1494 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1495
1496 if 'label' not in m_node:
1497 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1498
1499 label = m_node['label']
1500
1501 if not _is_str_prop(label):
1502 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1503
1504 if 'value' not in m_node:
1505 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1506
1507 value = m_node['value']
1508
1509 if not _is_int_prop(value) and not _is_array_prop(value):
1510 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1511
1512 if _is_int_prop(value):
1513 cur = value + 1
1514 value = (value, value)
1515 else:
1516 if len(value) != 2:
1517 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1518
1519 mn = value[0]
1520 mx = value[1]
1521
1522 if mn > mx:
1523 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1524
1525 value = (mn, mx)
1526 cur = mx + 1
1527
1528 obj.members[label] = value
1529
1530 return obj
1531
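# Informative example of a YAML string type object accepted by
# _create_string() below:
#
#     class: string
#     encoding: utf-8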
1532 def _create_string(self, obj, node):
1533 if obj is None:
1534 # create string object
1535 obj = metadata.String()
1536
1537 unk_prop = self._get_first_unknown_type_prop(node, [
1538 'encoding',
1539 ])
1540
1541 if unk_prop:
1542 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1543
1544 # encoding
1545 if 'encoding' in node:
1546 encoding = node['encoding']
1547
1548 if not _is_str_prop(encoding):
1549 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1550
1551 encoding = _encoding_str_to_encoding(encoding)
1552
1553 if encoding is None:
1554 raise ConfigError('invalid "encoding" property in string type object')
1555
1556 obj.encoding = encoding
1557
1558 return obj
1559
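# Informative example of a YAML structure type object accepted by
# _create_struct() below ("uint16" stands for a previously registered type
# alias and is only illustrative):
#
#     class: struct
#     min-align: 8
#     fields:
#       msg_id: uint16
#       value:
#         class: int
#         size: 32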
1560 def _create_struct(self, obj, node):
1561 if obj is None:
1562 # create structure object
1563 obj = metadata.Struct()
1564
1565 unk_prop = self._get_first_unknown_type_prop(node, [
1566 'min-align',
1567 'fields',
1568 ])
1569
1570 if unk_prop:
1571 raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1572
1573 # minimum alignment
1574 if 'min-align' in node:
1575 min_align = node['min-align']
1576
1577 if not _is_int_prop(min_align):
1578 raise ConfigError('"min-align" property of structure type object must be an integer')
1579
1580 if not _is_valid_alignment(min_align):
1581 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1582
1583 obj.min_align = min_align
1584
1585 # fields
1586 if 'fields' in node:
1587 fields = node['fields']
1588
1589 if not _is_assoc_array_prop(fields):
1590 raise ConfigError('"fields" property of structure type object must be an associative array')
1591
1592 for field_name, field_node in fields.items():
1593 if not is_valid_identifier(field_name):
1594 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1595
1596 try:
1597 obj.fields[field_name] = self._create_type(field_node)
1598 except Exception as e:
1599 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1600
1601 return obj
1602
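# Informative example of a YAML array type object accepted by
# _create_array() below; "length" is either a non-negative integer (static
# array) or a path string such as "event.payload.len" (dynamic array, field
# name illustrative):
#
#     class: array
#     length: 16
#     element-type:
#       class: int
#       size: 8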
1603 def _create_array(self, obj, node):
1604 if obj is None:
1605 # create array object
1606 obj = metadata.Array()
1607
1608 unk_prop = self._get_first_unknown_type_prop(node, [
1609 'length',
1610 'element-type',
1611 ])
1612
1613 if unk_prop:
1614 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1615
1616 # length
1617 if 'length' in node:
1618 length = node['length']
1619
1620 if not _is_int_prop(length) and not _is_str_prop(length):
1621 raise ConfigError('"length" property of array type object must be an integer or a string')
1622
1623 if type(length) is int and length < 0:
1624 raise ConfigError('invalid static array length: {}'.format(length))
1625
1626 obj.length = length
1627
1628 # element type
1629 if 'element-type' in node:
1630 try:
1631 obj.element_type = self._create_type(node['element-type'])
1632 except Exception as e:
1633 raise ConfigError('cannot create array type\'s element type', e)
1634
1635 return obj
1636
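# Informative example of a YAML variant type object accepted by
# _create_variant() below; "tag" is a path to an enumeration type and the
# keys of "types" must match that enumeration's member labels (path and
# names illustrative):
#
#     class: variant
#     tag: stream.event.header.msg_type
#     types:
#       LOW:
#         class: int
#         size: 8
#       HIGH:
#         class: string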
1637 def _create_variant(self, obj, node):
1638 if obj is None:
1639 # create variant object
1640 obj = metadata.Variant()
1641
1642 unk_prop = self._get_first_unknown_type_prop(node, [
1643 'tag',
1644 'types',
1645 ])
1646
1647 if unk_prop:
1648 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1649
1650 # tag
1651 if 'tag' in node:
1652 tag = node['tag']
1653
1654 if not _is_str_prop(tag):
1655 raise ConfigError('"tag" property of variant type object must be a string')
1656
1657 # do not validate variant tag for the moment; will be done in a
1658 # second phase
1659 obj.tag = tag
1660
1661 # types
1662 if 'types' in node:
1663 types = node['types']
1664
1665 if not _is_assoc_array_prop(types):
1666 raise ConfigError('"types" property of variant type object must be an associative array')
1667
1668 # do not validate type names for the moment; will be done in a
1669 # second phase
1670 for type_name, type_node in types.items():
1671 if not is_valid_identifier(type_name):
1672 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1673
1674 try:
1675 obj.types[type_name] = self._create_type(type_node)
1676 except Exception as e:
1677 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1678
1679 return obj
1680
1681 def _create_type(self, type_node):
1682 if type(type_node) is str:
1683 t = self._lookup_type_alias(type_node)
1684
1685 if t is None:
1686 raise ConfigError('unknown type alias "{}"'.format(type_node))
1687
1688 return t
1689
1690 if not _is_assoc_array_prop(type_node):
1691 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
1692
1693 # inherit:
1694 # v2.0: "inherit"
1695 # v2.1+: "$inherit"
1696 inherit_node = None
1697
1698 if self._version >= 200:
1699 if 'inherit' in type_node:
1700 inherit_prop = 'inherit'
1701 inherit_node = type_node[inherit_prop]
1702
1703 if self._version >= 201:
1704 if '$inherit' in type_node:
1705 if inherit_node is not None:
1706 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1707
1708 inherit_prop = '$inherit'
1709 inherit_node = type_node[inherit_prop]
1710
1711 if inherit_node is not None and 'class' in type_node:
1712 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1713
1714 if inherit_node is not None:
1715 if not _is_str_prop(inherit_node):
1716 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1717
1718 base = self._lookup_type_alias(inherit_node)
1719
1720 if base is None:
1721 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1722
1723 func = self._type_to_create_type_func[type(base)]
1724 else:
1725 if 'class' not in type_node:
1726 raise ConfigError('type objects which do not inherit must have a "class" property')
1727
1728 class_name = type_node['class']
1729
1730 if type(class_name) is not str:
1731 raise ConfigError('type objects\' "class" property must be a string')
1732
1733 if class_name not in self._class_name_to_create_type_func:
1734 raise ConfigError('unknown type class "{}"'.format(class_name))
1735
1736 base = None
1737 func = self._class_name_to_create_type_func[class_name]
1738
1739 return func(base, type_node)
1740
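# Illustrative "type-aliases" metadata property as handled by
# _register_type_aliases() below (alias names and type properties are
# hypothetical):
#
#     type-aliases:
#       my-int-alias:
#         class: int
#         size: 32
#       my-other-alias: my-int-alias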
1741 def _register_type_aliases(self, metadata_node):
1742 self._tas = dict()
1743
1744 if 'type-aliases' not in metadata_node:
1745 return
1746
1747 ta_node = metadata_node['type-aliases']
1748
1749 if not _is_assoc_array_prop(ta_node):
1750 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1751
1752 for ta_name, ta_type in ta_node.items():
1753 if ta_name in self._tas:
1754 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1755
1756 try:
1757 t = self._create_type(ta_type)
1758 except Exception as e:
1759 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1760
1761 self._tas[ta_name] = t
1762
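# Illustrative clock object as _create_clock() below accepts it; every
# property is optional and all values shown are hypothetical:
#
#     freq: 1000000000
#     description: system monotonic clock
#     uuid: 5b584d52-1e09-4d0b-8277-6a2d61b88fd0
#     error-cycles: 32
#     offset:
#       seconds: 1434072888
#       cycles: 1000
#     absolute: false
#     $return-ctype: uint64_t    # "return-ctype" in v2.0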
1763 def _create_clock(self, node):
1764 # create clock object
1765 clock = metadata.Clock()
1766
1767 if not _is_assoc_array_prop(node):
1768 raise ConfigError('clock objects must be associative arrays')
1769
1770 known_props = [
1771 'uuid',
1772 'description',
1773 'freq',
1774 'error-cycles',
1775 'offset',
1776 'absolute',
1777 'return-ctype',
1778 ]
1779
1780 if self._version >= 201:
1781 known_props.append('$return-ctype')
1782
1783 unk_prop = _get_first_unknown_prop(node, known_props)
1784
1785 if unk_prop:
1786 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1787
1788 # UUID
1789 if 'uuid' in node:
1790 uuidp = node['uuid']
1791
1792 if not _is_str_prop(uuidp):
1793 raise ConfigError('"uuid" property of clock object must be a string')
1794
1795 try:
1796 uuidp = uuid.UUID(uuidp)
1797 except ValueError:
1798 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1799
1800 clock.uuid = uuidp
1801
1802 # description
1803 if 'description' in node:
1804 desc = node['description']
1805
1806 if not _is_str_prop(desc):
1807 raise ConfigError('"description" property of clock object must be a string')
1808
1809 clock.description = desc
1810
1811 # frequency
1812 if 'freq' in node:
1813 freq = node['freq']
1814
1815 if not _is_int_prop(freq):
1816 raise ConfigError('"freq" property of clock object must be an integer')
1817
1818 if freq < 1:
1819 raise ConfigError('invalid clock frequency: {}'.format(freq))
1820
1821 clock.freq = freq
1822
1823 # error cycles
1824 if 'error-cycles' in node:
1825 error_cycles = node['error-cycles']
1826
1827 if not _is_int_prop(error_cycles):
1828 raise ConfigError('"error-cycles" property of clock object must be an integer')
1829
1830 if error_cycles < 0:
1831 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1832
1833 clock.error_cycles = error_cycles
1834
1835 # offset
1836 if 'offset' in node:
1837 offset = node['offset']
1838
1839 if not _is_assoc_array_prop(offset):
1840 raise ConfigError('"offset" property of clock object must be an associative array')
1841
1842 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1843
1844 if unk_prop:
1845 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1846
1847 # cycles
1848 if 'cycles' in offset:
1849 offset_cycles = offset['cycles']
1850
1851 if not _is_int_prop(offset_cycles):
1852 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1853
1854 if offset_cycles < 0:
1855 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1856
1857 clock.offset_cycles = offset_cycles
1858
1859 # seconds
1860 if 'seconds' in offset:
1861 offset_seconds = offset['seconds']
1862
1863 if not _is_int_prop(offset_seconds):
1864 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1865
1866 if offset_seconds < 0:
1867 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1868
1869 clock.offset_seconds = offset_seconds
1870
1871 # absolute
1872 if 'absolute' in node:
1873 absolute = node['absolute']
1874
1875 if not _is_bool_prop(absolute):
1876 raise ConfigError('"absolute" property of clock object must be a boolean')
1877
1878 clock.absolute = absolute
1879
1880 # return C type:
1881 # v2.0: "return-ctype"
1882 # v2.1+: "$return-ctype"
1883 return_ctype_node = None
1884
1885 if self._version >= 200:
1886 if 'return-ctype' in node:
1887 return_ctype_prop = 'return-ctype'
1888 return_ctype_node = node[return_ctype_prop]
1889
1890 if self._version >= 201:
1891 if '$return-ctype' in node:
1892 if return_ctype_node is not None:
1893 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1894
1895 return_ctype_prop = '$return-ctype'
1896 return_ctype_node = node[return_ctype_prop]
1897
1898 if return_ctype_node is not None:
1899 if not _is_str_prop(return_ctype_node):
1900 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1901
1902 clock.return_ctype = return_ctype_node
1903
1904 return clock
1905
1906 def _register_clocks(self, metadata_node):
1907 self._clocks = collections.OrderedDict()
1908
1909 if 'clocks' not in metadata_node:
1910 return
1911
1912 clocks_node = metadata_node['clocks']
1913
1914 if not _is_assoc_array_prop(clocks_node):
1915 raise ConfigError('"clocks" property (metadata) must be an associative array')
1916
1917 for clock_name, clock_node in clocks_node.items():
1918 if not is_valid_identifier(clock_name):
1919 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
1920
1921 if clock_name in self._clocks:
1922 raise ConfigError('duplicate clock "{}"'.format(clock_name))
1923
1924 try:
1925 clock = self._create_clock(clock_node)
1926 except Exception as e:
1927 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
1928
1929 clock.name = clock_name
1930 self._clocks[clock_name] = clock
1931
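# Illustrative "env" metadata property as _create_env() below accepts it:
# keys must be valid C identifiers and values must be integers or strings
# (the entries shown are hypothetical):
#
#     env:
#       my_app_version_major: 2
#       my_app_name: my-app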
1932 def _create_env(self, metadata_node):
1933 env = collections.OrderedDict()
1934
1935 if 'env' not in metadata_node:
1936 return env
1937
1938 env_node = metadata_node['env']
1939
1940 if not _is_assoc_array_prop(env_node):
1941 raise ConfigError('"env" property (metadata) must be an associative array')
1942
1943 for env_name, env_value in env_node.items():
1944 if env_name in env:
1945 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
1946
1947 if not is_valid_identifier(env_name):
1948 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
1949
1950 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
1951 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
1952
1953 env[env_name] = env_value
1954
1955 return env
1956
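# Illustrative log level mapping as _register_log_levels() below accepts
# it ("$log-levels" in v2.1+, "log-levels" in v2.0); names and values are
# hypothetical, but values must be non-negative integers:
#
#     $log-levels:
#       emerg: 0
#       warning: 4
#       debug: 14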
1957 def _register_log_levels(self, metadata_node):
1958 self._log_levels = dict()
1959
1960 # log levels:
1961 # v2.0: "log-levels"
1962 # v2.1+: "$log-levels"
1963 log_levels_node = None
1964
1965 if self._version >= 200:
1966 if 'log-levels' in metadata_node:
1967 log_levels_prop = 'log-levels'
1968 log_levels_node = metadata_node[log_levels_prop]
1969
1970 if self._version >= 201:
1971 if '$log-levels' in metadata_node:
1972 if log_levels_node is not None:
1973 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
1974
1975 log_levels_prop = '$log-levels'
1976 log_levels_node = metadata_node[log_levels_prop]
1977
1978 if log_levels_node is None:
1979 return
1980
1981 if not _is_assoc_array_prop(log_levels_node):
1982 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
1983
1984 for ll_name, ll_value in log_levels_node.items():
1985 if ll_name in self._log_levels:
1986 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
1987
1988 if not _is_int_prop(ll_value):
1989 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
1990
1991 if ll_value < 0:
1992 raise ConfigError('invalid log level entry ("{}"): log level value must not be negative'.format(ll_name))
1993
1994 self._log_levels[ll_name] = ll_value
1995
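# Illustrative "trace" metadata property as _create_trace() below accepts
# it; the property values and the packet header layout are illustrative
# assumptions, not requirements of this parser (the byte order itself is
# parsed elsewhere):
#
#     trace:
#       byte-order: le
#       uuid: auto
#       packet-header-type:
#         class: struct
#         fields:
#           magic: uint32
#           stream_id: uint16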
1996 def _create_trace(self, metadata_node):
1997 # create trace object
1998 trace = metadata.Trace()
1999
2000 if 'trace' not in metadata_node:
2001 raise ConfigError('missing "trace" property (metadata)')
2002
2003 trace_node = metadata_node['trace']
2004
2005 if not _is_assoc_array_prop(trace_node):
2006 raise ConfigError('"trace" property (metadata) must be an associative array')
2007
2008 unk_prop = _get_first_unknown_prop(trace_node, [
2009 'byte-order',
2010 'uuid',
2011 'packet-header-type',
2012 ])
2013
2014 if unk_prop:
2015 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
2016
2017 # set byte order (already parsed)
2018 trace.byte_order = self._bo
2019
2020 # UUID
2021 if 'uuid' in trace_node:
2022 uuidp = trace_node['uuid']
2023
2024 if not _is_str_prop(uuidp):
2025 raise ConfigError('"uuid" property of trace object must be a string')
2026
2027 if uuidp == 'auto':
2028 uuidp = uuid.uuid1()
2029 else:
2030 try:
2031 uuidp = uuid.UUID(uuidp)
2032 except ValueError:
2033 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
2034
2035 trace.uuid = uuidp
2036
2037 # packet header type
2038 if 'packet-header-type' in trace_node:
2039 try:
2040 ph_type = self._create_type(trace_node['packet-header-type'])
2041 except Exception as e:
2042 raise ConfigError('cannot create packet header type (trace)', e)
2043
2044 trace.packet_header_type = ph_type
2045
2046 return trace
2047
2048 def _lookup_log_level(self, ll):
2049 if _is_int_prop(ll):
2050 return ll
2051 elif _is_str_prop(ll) and ll in self._log_levels:
2052 return self._log_levels[ll]
2053
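# Illustrative event object as _create_event() below accepts it:
# "payload-type" is mandatory, "log-level" and "context-type" are
# optional, and the field layouts shown are hypothetical:
#
#     log-level: warning
#     context-type:
#       class: struct
#       fields:
#         my_ctx_field: uint32
#     payload-type:
#       class: struct
#       fields:
#         msg: string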
2054 def _create_event(self, event_node):
2055 event = metadata.Event()
2056
2057 if not _is_assoc_array_prop(event_node):
2058 raise ConfigError('event objects must be associative arrays')
2059
2060 unk_prop = _get_first_unknown_prop(event_node, [
2061 'log-level',
2062 'context-type',
2063 'payload-type',
2064 ])
2065
2066 if unk_prop:
2067 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2068
2069 if 'log-level' in event_node:
2070 ll_node = event_node['log-level']
2071
2072 if _is_str_prop(ll_node):
2073 ll = self._lookup_log_level(event_node['log-level'])
2074
2075 if ll is None:
2076 raise ConfigError('cannot find log level "{}"'.format(ll_node))
2077 elif _is_int_prop(ll_node):
2078 if ll_node < 0:
2079 raise ConfigError('invalid log level value {}: value must not be negative'.format(ll_node))
2080
2081 ll = ll_node
2082 else:
2083 raise ConfigError('"log-level" property must be either a string or an integer')
2084
2085 event.log_level = ll
2086
2087 if 'context-type' in event_node:
2088 try:
2089 t = self._create_type(event_node['context-type'])
2090 except Exception as e:
2091 raise ConfigError('cannot create event\'s context type object', e)
2092
2093 event.context_type = t
2094
2095 if 'payload-type' not in event_node:
2096 raise ConfigError('missing "payload-type" property in event object')
2097
2098 try:
2099 t = self._create_type(event_node['payload-type'])
2100 except Exception as e:
2101 raise ConfigError('cannot create event\'s payload type object', e)
2102
2103 event.payload_type = t
2104
2105 return event
2106
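# Illustrative stream object as _create_stream() below accepts it: the
# "events" property is mandatory and must contain at least one event;
# the type layouts shown are hypothetical:
#
#     packet-context-type:
#       class: struct
#       fields:
#         timestamp_begin: uint64
#         timestamp_end: uint64
#     event-header-type:
#       class: struct
#       fields:
#         id: uint16
#         timestamp: uint64
#     events:
#       my_event:
#         payload-type: my-payload-alias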
2107 def _create_stream(self, stream_node):
2108 stream = metadata.Stream()
2109
2110 if not _is_assoc_array_prop(stream_node):
2111 raise ConfigError('stream objects must be associative arrays')
2112
2113 unk_prop = _get_first_unknown_prop(stream_node, [
2114 'packet-context-type',
2115 'event-header-type',
2116 'event-context-type',
2117 'events',
2118 ])
2119
2120 if unk_prop:
2121 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2122
2123 if 'packet-context-type' in stream_node:
2124 try:
2125 t = self._create_type(stream_node['packet-context-type'])
2126 except Exception as e:
2127 raise ConfigError('cannot create stream\'s packet context type object', e)
2128
2129 stream.packet_context_type = t
2130
2131 if 'event-header-type' in stream_node:
2132 try:
2133 t = self._create_type(stream_node['event-header-type'])
2134 except Exception as e:
2135 raise ConfigError('cannot create stream\'s event header type object', e)
2136
2137 stream.event_header_type = t
2138
2139 if 'event-context-type' in stream_node:
2140 try:
2141 t = self._create_type(stream_node['event-context-type'])
2142 except Exception as e:
2143 raise ConfigError('cannot create stream\'s event context type object', e)
2144
2145 stream.event_context_type = t
2146
2147 if 'events' not in stream_node:
2148 raise ConfigError('missing "events" property in stream object')
2149
2150 events = stream_node['events']
2151
2152 if not _is_assoc_array_prop(events):
2153 raise ConfigError('"events" property of stream object must be an associative array')
2154
2155 if not events:
2156 raise ConfigError('at least one event is needed within a stream object')
2157
2158 cur_id = 0
2159
2160 for ev_name, ev_node in events.items():
2161 try:
2162 ev = self._create_event(ev_node)
2163 except Exception as e:
2164 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
2165
2166 ev.id = cur_id
2167 ev.name = ev_name
2168 stream.events[ev_name] = ev
2169 cur_id += 1
2170
2171 return stream
2172
2173 def _create_streams(self, metadata_node):
2174 streams = collections.OrderedDict()
2175
2176 if 'streams' not in metadata_node:
2177 raise ConfigError('missing "streams" property (metadata)')
2178
2179 streams_node = metadata_node['streams']
2180
2181 if not _is_assoc_array_prop(streams_node):
2182 raise ConfigError('"streams" property (metadata) must be an associative array')
2183
2184 if not streams_node:
2185 raise ConfigError('at least one stream is needed (metadata)')
2186
2187 cur_id = 0
2188
2189 for stream_name, stream_node in streams_node.items():
2190 try:
2191 stream = self._create_stream(stream_node)
2192 except Exception as e:
2193 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2194
2195 stream.id = cur_id
2196 stream.name = str(stream_name)
2197 streams[stream_name] = stream
2198 cur_id += 1
2199
2200 return streams
2201
2202 def _create_metadata(self, root):
2203 meta = metadata.Metadata()
2204
2205 if 'metadata' not in root:
2206 raise ConfigError('missing "metadata" property (configuration)')
2207
2208 metadata_node = root['metadata']
2209
2210 if not _is_assoc_array_prop(metadata_node):
2211 raise ConfigError('"metadata" property (configuration) must be an associative array')
2212
2213 known_props = [
2214 'type-aliases',
2215 'log-levels',
2216 'trace',
2217 'env',
2218 'clocks',
2219 'streams',
2220 ]
2221
2222 if self._version >= 201:
2223 known_props.append('$log-levels')
2224
2225 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
2226
2227 if unk_prop:
2228 add = ''
2229
2230 if unk_prop == '$include':
2231 add = ' (use version 2.1 or greater)'
2232
2233 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
2234
2235 self._set_byte_order(metadata_node)
2236 self._register_clocks(metadata_node)
2237 meta.clocks = self._clocks
2238 self._register_type_aliases(metadata_node)
2239 meta.env = self._create_env(metadata_node)
2240 meta.trace = self._create_trace(metadata_node)
2241 self._register_log_levels(metadata_node)
2242 meta.streams = self._create_streams(metadata_node)
2243
2244 return meta
2245
2246 def _get_version(self, root):
2247 if 'version' not in root:
2248 raise ConfigError('missing "version" property (configuration)')
2249
2250 version_node = root['version']
2251
2252 if not _is_str_prop(version_node):
2253 raise ConfigError('"version" property (configuration) must be a string')
2254
2255 version_node = version_node.strip()
2256
2257 if version_node not in ['2.0', '2.1']:
2258 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2259
2260 # convert version string to comparable version integer
2261 parts = version_node.split('.')
2262 version = int(parts[0]) * 100 + int(parts[1])
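# e.g. (illustrative): '2.0' -> 200, '2.1' -> 201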
2263
2264 return version
2265
2266 def _get_prefix(self, root):
2267 if 'prefix' not in root:
2268 return 'barectf_'
2269
2270 prefix_node = root['prefix']
2271
2272 if not _is_str_prop(prefix_node):
2273 raise ConfigError('"prefix" property (configuration) must be a string')
2274
2275 if not is_valid_identifier(prefix_node):
2276 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
2277
2278 return prefix_node
2279
2280 def _get_last_include_file(self):
2281 if self._include_stack:
2282 return self._include_stack[-1]
2283
2284 return self._root_yaml_path
2285
2286 def _load_include(self, yaml_path):
2287 for inc_dir in self._include_dirs:
2288 # current include dir + file name path
2289 # note: os.path.join() keeps only yaml_path if yaml_path is absolute
2290 inc_path = os.path.join(inc_dir, yaml_path)
2291
2292 # real path (symbolic links resolved)
2293 real_path = os.path.realpath(inc_path)
2294
2295 # normalized path (redundant separators and up-level references removed)
2296 norm_path = os.path.normpath(real_path)
2297
2298 if not os.path.isfile(norm_path):
2299 # file does not exist: skip
2300 continue
2301
2302 if norm_path in self._include_stack:
2303 base_path = self._get_last_include_file()
2304 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2305
2306 self._include_stack.append(norm_path)
2307
2308 # load raw content
2309 return self._yaml_ordered_load(norm_path)
2310
2311 if not self._ignore_include_not_found:
2312 base_path = self._get_last_include_file()
2313 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2314
2315 return None
2316
2317 def _get_include_paths(self, include_node):
2318 if _is_str_prop(include_node):
2319 return [include_node]
2320 elif _is_array_prop(include_node):
2321 for include_path in include_node:
2322 if not _is_str_prop(include_path):
2323 raise ConfigError('invalid include property: expecting array of strings')
2324
2325 return include_node
2326
2327 raise ConfigError('invalid include property: expecting string or array of strings')
2328
2329 def _update_node(self, base_node, overlay_node):
2330 for olay_key, olay_value in overlay_node.items():
2331 if olay_key in base_node:
2332 base_value = base_node[olay_key]
2333
2334 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2335 # merge dictionaries
2336 self._update_node(base_value, olay_value)
2337 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2338 # append extension array items to base items
2339 base_value += olay_value
2340 else:
2341 # fall back to replacing
2342 base_node[olay_key] = olay_value
2343 else:
2344 base_node[olay_key] = olay_value
2345
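# Example of the merge performed by _update_node() above (values are
# illustrative):
#
#     base:    {a: 1, b: {x: 1}, c: [1]}
#     overlay: {b: {y: 2}, c: [2], d: 3}
#     result:  {a: 1, b: {x: 1, y: 2}, c: [1, 2], d: 3}
#
# Mappings are merged recursively, sequences are concatenated, and scalar
# overlay values replace base values.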
2346 def _process_node_include(self, last_overlay_node, name,
2347 process_base_include_cb,
2348 process_children_include_cb=None):
2349 if not _is_assoc_array_prop(last_overlay_node):
2350 raise ConfigError('{} objects must be associative arrays'.format(name))
2351
2352 # process children inclusions first
2353 if process_children_include_cb:
2354 process_children_include_cb(last_overlay_node)
2355
2356 if '$include' in last_overlay_node:
2357 include_node = last_overlay_node['$include']
2358 else:
2359 # no includes!
2360 return last_overlay_node
2361
2362 include_paths = self._get_include_paths(include_node)
2363 cur_base_path = self._get_last_include_file()
2364 base_node = None
2365
2366 # keep the include paths and remove the include property
2367 include_paths = copy.deepcopy(include_paths)
2368 del last_overlay_node['$include']
2369
2370 for include_path in include_paths:
2371 # load raw YAML from included file
2372 overlay_node = self._load_include(include_path)
2373
2374 if overlay_node is None:
2375 # cannot find include file, but we're ignoring those
2376 # errors, otherwise _load_include() itself raises
2377 # a config error
2378 continue
2379
2380 # recursively process includes
2381 try:
2382 overlay_node = process_base_include_cb(overlay_node)
2383 except Exception as e:
2384 raise ConfigError('in "{}"'.format(cur_base_path), e)
2385
2386 # pop include stack now that we're done including
2387 del self._include_stack[-1]
2388
2389 # at this point, base_node is fully resolved (does not
2390 # contain any include property)
2391 if base_node is None:
2392 base_node = overlay_node
2393 else:
2394 self._update_node(base_node, overlay_node)
2395
2396 # finally, we update the latest base node with our last overlay
2397 # node
2398 if base_node is None:
2399 # nothing was included, which is possible when we're
2400 # ignoring include errors
2401 return last_overlay_node
2402
2403 self._update_node(base_node, last_overlay_node)
2404
2405 return base_node
2406
2407 def _process_event_include(self, event_node):
2408 return self._process_node_include(event_node, 'event',
2409 self._process_event_include)
2410
2411 def _process_stream_include(self, stream_node):
2412 def process_children_include(stream_node):
2413 if 'events' in stream_node:
2414 events_node = stream_node['events']
2415
2416 if not _is_assoc_array_prop(events_node):
2417 raise ConfigError('"events" property must be an associative array')
2418
2419 events_node_keys = list(events_node.keys())
2420
2421 for key in events_node_keys:
2422 event_node = events_node[key]
2423
2424 try:
2425 events_node[key] = self._process_event_include(event_node)
2426 except Exception as e:
2427 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2428
2429 return self._process_node_include(stream_node, 'stream',
2430 self._process_stream_include,
2431 process_children_include)
2432
2433 def _process_trace_include(self, trace_node):
2434 return self._process_node_include(trace_node, 'trace',
2435 self._process_trace_include)
2436
2437 def _process_clock_include(self, clock_node):
2438 return self._process_node_include(clock_node, 'clock',
2439 self._process_clock_include)
2440
2441 def _process_metadata_include(self, metadata_node):
2442 def process_children_include(metadata_node):
2443 if 'trace' in metadata_node:
2444 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2445
2446 if 'clocks' in metadata_node:
2447 clocks_node = metadata_node['clocks']
2448
2449 if not _is_assoc_array_prop(clocks_node):
2450 raise ConfigError('"clocks" property (metadata) must be an associative array')
2451
2452 clocks_node_keys = list(clocks_node.keys())
2453
2454 for key in clocks_node_keys:
2455 clock_node = clocks_node[key]
2456
2457 try:
2458 clocks_node[key] = self._process_clock_include(clock_node)
2459 except Exception as e:
2460 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2461
2462 if 'streams' in metadata_node:
2463 streams_node = metadata_node['streams']
2464
2465 if not _is_assoc_array_prop(streams_node):
2466 raise ConfigError('"streams" property (metadata) must be an associative array')
2467
2468 streams_node_keys = list(streams_node.keys())
2469
2470 for key in streams_node_keys:
2471 stream_node = streams_node[key]
2472
2473 try:
2474 streams_node[key] = self._process_stream_include(stream_node)
2475 except Exception as e:
2476 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2477
2478 return self._process_node_include(metadata_node, 'metadata',
2479 self._process_metadata_include,
2480 process_children_include)
2481
2482 def _process_root_includes(self, root):
2483 # The following config objects support includes:
2484 #
2485 # * Metadata object
2486 # * Trace object
2487 # * Stream object
2488 # * Event object
2489 #
2490 # We need to process the event includes first, then the stream
2491 # includes, then the trace includes, and finally the metadata
2492 # includes.
2493 #
2494 # In each object, only one of the $include and $include-replace
2495 # special properties is allowed.
2496 #
2497 # We keep a stack of absolute paths to included files to detect
2498 # recursion.
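# Illustrative use of the "$include" property (v2.1+); the included
# file names are hypothetical:
#
#     metadata:
#       $include: defaults.yaml
#       streams:
#         my_stream:
#           $include:
#             - my-stream-base.yaml
#             - my-stream-extra.yaml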
2499 if 'metadata' in root:
2500 root['metadata'] = self._process_metadata_include(root['metadata'])
2501
2502 return root
2503
2504 def _yaml_ordered_dump(self, node, **kwds):
2505 class ODumper(yaml.Dumper):
2506 pass
2507
2508 def dict_representer(dumper, node):
2509 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2510 node.items())
2511
2512 ODumper.add_representer(collections.OrderedDict, dict_representer)
2513
2514 return yaml.dump(node, Dumper=ODumper, **kwds)
2515
2516 def _yaml_ordered_load(self, yaml_path):
2517 class OLoader(yaml.Loader):
2518 pass
2519
2520 def construct_mapping(loader, node):
2521 loader.flatten_mapping(node)
2522
2523 return collections.OrderedDict(loader.construct_pairs(node))
2524
2525 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2526 construct_mapping)
2527
2528 # YAML -> Python
2529 try:
2530 with open(yaml_path, 'r') as f:
2531 node = yaml.load(f, OLoader)
2532 except (OSError, IOError) as e:
2533 raise ConfigError('cannot open file "{}"'.format(yaml_path), e)
2534 except Exception as e:
2535 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2536
2537 # loaded node must be an associative array
2538 if not _is_assoc_array_prop(node):
2539 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2540
2541 return node
2542
2543 def _reset(self):
2544 self._version = None
2545 self._include_stack = []
2546
2547 def parse(self, yaml_path):
2548 self._reset()
2549 self._root_yaml_path = yaml_path
2550
2551 try:
2552 root = self._yaml_ordered_load(yaml_path)
2553 except Exception as e:
2554 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2555
2556 if not _is_assoc_array_prop(root):
2557 raise ConfigError('configuration must be an associative array')
2558
2559 unk_prop = _get_first_unknown_prop(root, [
2560 'version',
2561 'prefix',
2562 'metadata',
2563 ])
2564
2565 if unk_prop:
2566 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
2567
2568 # get the config version
2569 self._version = self._get_version(root)
2570
2571 # process includes if supported
2572 if self._version >= 201:
2573 root = self._process_root_includes(root)
2574
2575 # dump config if required
2576 if self._dump_config:
2577 print(self._yaml_ordered_dump(root, indent=2,
2578 default_flow_style=False))
2579
2580 # get prefix and metadata
2581 prefix = self._get_prefix(root)
2582 meta = self._create_metadata(root)
2583
2584 return Config(self._version, prefix, meta)
2585
2586
2587 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2588 try:
2589 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2590 dump_config)
2591 cfg = parser.parse(path)
2592
2593 return cfg
2594 except Exception as e:
2595 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)