metadata: add is_dynamic to type objs and fix align
[barectf.git] / barectf / config.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45 class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
69 env = meta.env
70
71 env['domain'] = 'bare'
72 env['tracer_name'] = 'barectf'
73 version_tuple = barectf.get_version_tuple()
74 env['tracer_major'] = version_tuple[0]
75 env['tracer_minor'] = version_tuple[1]
76 env['tracer_patch'] = version_tuple[2]
77 env['barectf_gen_date'] = str(datetime.datetime.now().isoformat())
78
79 @property
80 def version(self):
81 return self._version
82
83 @version.setter
84 def version(self, value):
85 self._version = value
86
87 @property
88 def metadata(self):
89 return self._metadata
90
91 @metadata.setter
92 def metadata(self, value):
93 self._validate_metadata(value)
94 self._augment_metadata_env(value)
95 self._metadata = value
96
97 @property
98 def prefix(self):
99 return self._prefix
100
101 @prefix.setter
102 def prefix(self, value):
103 if not is_valid_identifier(value):
104 raise ConfigError('prefix must be a valid C identifier')
105
106 self._prefix = value
107
108
109 def _is_assoc_array_prop(node):
110 return isinstance(node, dict)
111
112
113 def _is_array_prop(node):
114 return isinstance(node, list)
115
116
117 def _is_int_prop(node):
118 return type(node) is int
119
120
121 def _is_str_prop(node):
122 return type(node) is str
123
124
125 def _is_bool_prop(node):
126 return type(node) is bool
127
128
129 def _is_valid_alignment(align):
130 return ((align & (align - 1)) == 0) and align > 0
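# e.g. the bit trick above rejects non-powers-of-two: 8 & 7 == 0, so 8 is
# a valid alignment, while 6 & 5 == 4 != 0, so 6 is rejected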
131
132
133 def _byte_order_str_to_bo(bo_str):
134 bo_str = bo_str.lower()
135
136 if bo_str == 'le':
137 return metadata.ByteOrder.LE
138 elif bo_str == 'be':
139 return metadata.ByteOrder.BE
140
141
142 def _encoding_str_to_encoding(encoding_str):
143 encoding_str = encoding_str.lower()
144
145 if encoding_str == 'utf-8' or encoding_str == 'utf8':
146 return metadata.Encoding.UTF8
147 elif encoding_str == 'ascii':
148 return metadata.Encoding.ASCII
149 elif encoding_str == 'none':
150 return metadata.Encoding.NONE
151
152
153 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
154 _ctf_keywords = set([
155 'align',
156 'callsite',
157 'clock',
158 'enum',
159 'env',
160 'event',
161 'floating_point',
162 'integer',
163 'stream',
164 'string',
165 'struct',
166 'trace',
167 'typealias',
168 'typedef',
169 'variant',
170 ])
171
172
173 def is_valid_identifier(iden):
174 if not _re_iden.match(iden):
175 return False
176
177 if iden in _ctf_keywords:
178 return False
179
180 return True
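# e.g. "my_event" is accepted, while "2fast" (leading digit) and "enum"
# (a CTF keyword) are rejected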
181
182
183 def _get_first_unknown_prop(node, known_props):
184 for prop_name in node:
185 if prop_name in known_props:
186 continue
187
188 return prop_name
189
190
191 # This validator validates the configured metadata for barectf specific
192 # needs.
193 #
194 # barectf needs:
195 #
196 # * all header/context types to be at least byte-aligned
197 # * all integer and floating point number sizes to be <= 64 bits
198 # * no inner structures, arrays, or variants
199 class _BarectfMetadataValidator:
200 def __init__(self):
201 self._type_to_validate_type_func = {
202 metadata.Integer: self._validate_int_type,
203 metadata.FloatingPoint: self._validate_float_type,
204 metadata.Enum: self._validate_enum_type,
205 metadata.String: self._validate_string_type,
206 metadata.Struct: self._validate_struct_type,
207 metadata.Array: self._validate_array_type,
208 metadata.Variant: self._validate_variant_type,
209 }
210
211 def _validate_int_type(self, t, entity_root):
212 if t.size > 64:
213 raise ConfigError('integer type\'s size must be less than or equal to 64 bits')
214
215 def _validate_float_type(self, t, entity_root):
216 if t.size > 64:
217 raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')
218
219 def _validate_enum_type(self, t, entity_root):
220 if t.value_type.size > 64:
221 raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
222
223 def _validate_string_type(self, t, entity_root):
224 pass
225
226 def _validate_struct_type(self, t, entity_root):
227 if not entity_root:
228 raise ConfigError('inner structure types are not supported as of this version')
229
230 for field_name, field_type in t.fields.items():
231 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
232 if field_name == 'uuid':
233 # allow
234 continue
235
236 try:
237 self._validate_type(field_type, False)
238 except Exception as e:
239 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
240
241 def _validate_array_type(self, t, entity_root):
242 raise ConfigError('array types are not supported as of this version')
243
244 def _validate_variant_type(self, t, entity_root):
245 raise ConfigError('variant types are not supported as of this version')
246
247 def _validate_type(self, t, entity_root):
248 self._type_to_validate_type_func[type(t)](t, entity_root)
249
250 def _validate_entity(self, t):
251 if t is None:
252 return
253
254 # make sure entity is byte-aligned
255 if t.align < 8:
256 raise ConfigError('type must be at least byte-aligned')
257
258 # make sure entity is a structure
259 if type(t) is not metadata.Struct:
260 raise ConfigError('expecting a structure type')
261
262 # validate types
263 self._validate_type(t, True)
264
265 def _validate_entities_and_names(self, meta):
266 self._cur_entity = _Entity.TRACE_PACKET_HEADER
267
268 try:
269 self._validate_entity(meta.trace.packet_header_type)
270 except Exception as e:
271 raise ConfigError('invalid trace packet header type', e)
272
273 for stream_name, stream in meta.streams.items():
274 if not is_valid_identifier(stream_name):
275 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
276
277 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
278
279 try:
280 self._validate_entity(stream.packet_context_type)
281 except Exception as e:
282 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
283
284 self._cur_entity = _Entity.STREAM_EVENT_HEADER
285
286 try:
287 self._validate_entity(stream.event_header_type)
288 except Exception as e:
289 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
290
291 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
292
293 try:
294 self._validate_entity(stream.event_context_type)
295 except Exception as e:
296 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
297
298 try:
299 for ev_name, ev in stream.events.items():
300 if not is_valid_identifier(ev_name):
301 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
302
303 self._cur_entity = _Entity.EVENT_CONTEXT
304
305 try:
306 self._validate_entity(ev.context_type)
307 except Exception as e:
308 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
309
310 self._cur_entity = _Entity.EVENT_PAYLOAD
311
312 if ev.payload_type is None:
313 raise ConfigError('missing payload type in event "{}"'.format(ev_name))
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if not ev.payload_type.fields:
321 raise ConfigError('empty payload type in event "{}"'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
329 # This validator validates special fields of trace, stream, and event
330 # types. For example, it checks that the "stream_id" field exists in the
331 # trace packet header if there's more than one stream, and much more.
332 class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
337 if t is None:
338 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is missing')
339
340 if type(t) is not metadata.Struct:
341 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is not a structure type')
342
343 if 'stream_id' not in t.fields:
344 raise ConfigError('need "stream_id" field in trace packet header type')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
366 elif field_name == 'uuid':
367 if self._meta.trace.uuid is None:
368 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
369
370 if type(field_type) is not metadata.Array:
371 raise ConfigError('"uuid" field in trace packet header type must be an array')
372
373 if field_type.length != 16:
374 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
375
376 element_type = field_type.element_type
377
378 if type(element_type) is not metadata.Integer:
379 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
380
381 if element_type.size != 8:
382 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
383
384 if element_type.signed:
385 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
386
387 if element_type.align != 8:
388 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
389
390 def _validate_trace(self, meta):
391 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
392
393 def _validate_stream_packet_context(self, stream):
394 t = stream.packet_context_type
395
396 if t is None:
397 return
398
399 if type(t) is not metadata.Struct:
400 return
401
402 # "timestamp_begin", if exists, is an unsigned integer type,
403 # mapped to a clock
404 if 'timestamp_begin' in t.fields:
405 ts_begin = t.fields['timestamp_begin']
406
407 if type(ts_begin) is not metadata.Integer:
408 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
409
410 if ts_begin.signed:
411 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
412
413 if not ts_begin.property_mappings:
414 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
415
416 # "timestamp_end", if exists, is an unsigned integer type,
417 # mapped to a clock
418 if 'timestamp_end' in t.fields:
419 ts_end = t.fields['timestamp_end']
420
421 if type(ts_end) is not metadata.Integer:
422 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
423
424 if ts_end.signed:
425 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
426
427 if not ts_end.property_mappings:
428 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
429
430 # "timestamp_begin" and "timestamp_end" exist together
431 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
432 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
433
434 # "events_discarded", if exists, is an unsigned integer type
435 if 'events_discarded' in t.fields:
436 events_discarded = t.fields['events_discarded']
437
438 if type(events_discarded) is not metadata.Integer:
439 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
440
441 if events_discarded.signed:
442 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
443
444 # "packet_size" and "content_size" must exist
445 if 'packet_size' not in t.fields:
446 raise ConfigError('missing "packet_size" field in stream packet context type')
447
448 packet_size = t.fields['packet_size']
449
450 # "content_size" and "content_size" must exist
451 if 'content_size' not in t.fields:
452 raise ConfigError('missing "content_size" field in stream packet context type')
453
454 content_size = t.fields['content_size']
455
456 # "packet_size" is an unsigned integer type
457 if type(packet_size) is not metadata.Integer:
458 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
459
460 if packet_size.signed:
461 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
462
463 # "content_size" is an unsigned integer type
464 if type(content_size) is not metadata.Integer:
465 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
466
467 if content_size.signed:
468 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
469
470 def _validate_stream_event_header(self, stream):
471 t = stream.event_header_type
472
473 # needs "id" field?
474 if len(stream.events) > 1:
475 # yes
476 if t is None:
477 raise ConfigError('need "id" field in stream event header type, but stream event header type is missing')
478
479 if type(t) is not metadata.Struct:
480 raise ConfigError('need "id" field in stream event header type, but stream event header type is not a structure type')
481
482 if 'id' not in t.fields:
483 raise ConfigError('need "id" field in stream event header type')
484
485 # validate "id" and "timestamp" types
486 if type(t) is not metadata.Struct:
487 return
488
489 # "timestamp", if exists, is an unsigned integer type,
490 # mapped to a clock
491 if 'timestamp' in t.fields:
492 ts = t.fields['timestamp']
493
494 if type(ts) is not metadata.Integer:
495 raise ConfigError('"ts" field in stream event header type must be an integer type')
496
497 if ts.signed:
498 raise ConfigError('"ts" field in stream event header type must be an unsigned integer type')
499
500 if not ts.property_mappings:
501 raise ConfigError('"ts" field in stream event header type must be mapped to a clock')
502
503 # "id" is an unsigned integer type
504 if 'id' in t.fields:
505 eid = t.fields['id']
506
507 if type(eid) is not metadata.Integer:
508 raise ConfigError('"id" field in stream event header type must be an integer type')
509
510 if eid.signed:
511 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
512
513 def _validate_stream(self, stream):
514 self._validate_stream_packet_context(stream)
515 self._validate_stream_event_header(stream)
516
517 def validate(self, meta):
518 self._meta = meta
519 self._validate_trace(meta)
520
521 for stream in meta.streams.values():
522 try:
523 self._validate_stream(stream)
524 except Exception as e:
525 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
526
527
528 class _MetadataDynamicTypesValidatorStackEntry:
529 def __init__(self, base_t):
530 self._base_t = base_t
531 self._index = 0
532
533 @property
534 def index(self):
535 return self._index
536
537 @index.setter
538 def index(self, value):
539 self._index = value
540
541 @property
542 def base_t(self):
543 return self._base_t
544
545 @base_t.setter
546 def base_t(self, value):
547 self._base_t = value
548
549
550 # Entities. The order of values is important here: the path lookup code compares entities with "<" to reject references to parts serialized after the current one.
551 @enum.unique
552 class _Entity(enum.IntEnum):
553 TRACE_PACKET_HEADER = 0
554 STREAM_PACKET_CONTEXT = 1
555 STREAM_EVENT_HEADER = 2
556 STREAM_EVENT_CONTEXT = 3
557 EVENT_CONTEXT = 4
558 EVENT_PAYLOAD = 5
559
560
561 # This validator validates dynamic metadata types, that is, it ensures
562 # variable-length array lengths and variant tags actually point to
563 # something that exists. It also checks that variable-length array
564 # lengths point to integer types and variant tags to enumeration types.
565 class _MetadataDynamicTypesValidator:
566 def __init__(self):
567 self._type_to_visit_type_func = {
568 metadata.Integer: None,
569 metadata.FloatingPoint: None,
570 metadata.Enum: None,
571 metadata.String: None,
572 metadata.Struct: self._visit_struct_type,
573 metadata.Array: self._visit_array_type,
574 metadata.Variant: self._visit_variant_type,
575 }
576
577 self._cur_trace = None
578 self._cur_stream = None
579 self._cur_event = None
580
581 def _lookup_path_from_base(self, path, parts, base, start_index,
582 base_is_current, from_t):
583 index = start_index
584 cur_t = base
585 found_path = []
586
587 while index < len(parts):
588 part = parts[index]
589 next_t = None
590
591 if type(cur_t) is metadata.Struct:
592 enumerated_items = enumerate(cur_t.fields.items())
593
594 # lookup each field
595 for i, (field_name, field_type) in enumerated_items:
596 if field_name == part:
597 next_t = field_type
598 found_path.append((i, field_type))
599
600 if next_t is None:
601 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
602 elif type(cur_t) is metadata.Variant:
603 enumerated_items = enumerate(cur_t.types.items())
604
605 # lookup each type
606 for i, (type_name, type_type) in enumerated_items:
607 if type_name == part:
608 next_t = type_type
609 found_path.append((i, type_type))
610
611 if next_t is None:
612 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
613 else:
614 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
615
616 cur_t = next_t
617 index += 1
618
619 # make sure that the pointed type is not the pointing type
620 if from_t is cur_t:
621 raise ConfigError('invalid path "{}": pointing to self'.format(path))
622
623 # if we're here, we found the type; however, it could be located
624 # _after_ the variant/VLA which refers to it when the pointing
625 # and pointed types are in the same entity, so in that case compare
626 # the current stack entries' indexes to our index path
627 if not base_is_current:
628 return cur_t
629
630 for index, entry in enumerate(self._stack):
631 if index == len(found_path):
632 # end of index path; valid so far
633 break
634
635 if found_path[index][0] > entry.index:
636 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
637
638 # also make sure that both pointed and pointing types share
639 # a common structure ancestor
640 for index, entry in enumerate(self._stack):
641 if index == len(found_path):
642 break
643
644 if entry.base_t is not found_path[index][1]:
645 # found common ancestor
646 if type(entry.base_t) is metadata.Variant:
647 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
648
649 return cur_t
650
651 def _lookup_path_from_top(self, path, parts):
652 if len(parts) != 1:
653 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
654
655 find_name = parts[0]
656 index = len(self._stack) - 1
657 got_struct = False
658
659 # check stack entries in reversed order
660 for entry in reversed(self._stack):
661 # structure base type
662 if type(entry.base_t) is metadata.Struct:
663 got_struct = True
664 enumerated_items = enumerate(entry.base_t.fields.items())
665
666 # lookup each field, until the current visiting index is met
667 for i, (field_name, field_type) in enumerated_items:
668 if i == entry.index:
669 break
670
671 if field_name == find_name:
672 return field_type
673
674 # variant base type
675 elif type(entry.base_t) is metadata.Variant:
676 enumerated_items = enumerate(entry.base_t.types.items())
677
678 # lookup each type, until the current visiting index is met
679 for i, (type_name, type_type) in enumerated_items:
680 if i == entry.index:
681 break
682
683 if type_name == find_name:
684 if not got_struct:
685 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
686
687 return type_type
688
689 # nothing returned here: cannot find type
690 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
691
692 def _lookup_path(self, path, from_t):
693 parts = path.lower().split('.')
694 base = None
695 base_is_current = False
696
697 if len(parts) >= 3:
698 if parts[0] == 'trace':
699 if parts[1] == 'packet' and parts[2] == 'header':
700 # make sure packet header exists
701 if self._cur_trace.packet_header_type is None:
702 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
703
704 base = self._cur_trace.packet_header_type
705
706 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
707 base_is_current = True
708 else:
709 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
710 elif parts[0] == 'stream':
711 if parts[1] == 'packet' and parts[2] == 'context':
712 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
713 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
714
715 if self._cur_stream.packet_context_type is None:
716 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
717
718 base = self._cur_stream.packet_context_type
719
720 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
721 base_is_current = True
722 elif parts[1] == 'event':
723 if parts[2] == 'header':
724 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
725 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
726
727 if self._cur_stream.event_header_type is None:
728 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
729
730 base = self._cur_stream.event_header_type
731
732 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
733 base_is_current = True
734 elif parts[2] == 'context':
735 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
736 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
737
738 if self._cur_stream.event_context_type is None:
739 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
740
741 base = self._cur_stream.event_context_type
742
743 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
744 base_is_current = True
745 else:
746 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
747 else:
748 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
749
750 if base is not None:
751 start_index = 3
752
753 if len(parts) >= 2 and base is None:
754 if parts[0] == 'event':
755 if parts[1] == 'context':
756 if self._cur_entity < _Entity.EVENT_CONTEXT:
757 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
758
759 if self._cur_event.context_type is None:
760 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
761
762 base = self._cur_event.context_type
763
764 if self._cur_entity == _Entity.EVENT_CONTEXT:
765 base_is_current = True
766 elif parts[1] == 'payload' or parts[1] == 'fields':
767 if self._cur_entity < _Entity.EVENT_PAYLOAD:
768 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
769
770 if self._cur_event.payload_type is None:
771 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
772
773 base = self._cur_event.payload_type
774
775 if self._cur_entity == _Entity.EVENT_PAYLOAD:
776 base_is_current = True
777 else:
778 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
779
780 if base is not None:
781 start_index = 2
782
783 if base is not None:
784 return self._lookup_path_from_base(path, parts, base, start_index,
785 base_is_current, from_t)
786 else:
787 return self._lookup_path_from_top(path, parts)
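# Illustrative examples (not taken from an actual configuration): a
# variable-length array whose length property is
# "stream.packet.context.my_len" is resolved by _lookup_path_from_base()
# starting at the stream packet context type, while a relative length
# such as "my_len" is resolved by _lookup_path_from_top() against the
# enclosing structure/variant types already on the stack.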
788
789 def _stack_reset(self):
790 self._stack = []
791
792 def _stack_push(self, base_t):
793 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
794 self._stack.append(entry)
795
796 def _stack_pop(self):
797 self._stack.pop()
798
799 def _stack_incr_index(self):
800 self._stack[-1].index += 1
801
802 def _visit_struct_type(self, t):
803 self._stack_push(t)
804
805 for field_name, field_type in t.fields.items():
806 try:
807 self._visit_type(field_type)
808 except Exception as e:
809 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
810
811 self._stack_incr_index()
812
813 self._stack_pop()
814
815 def _visit_array_type(self, t):
816 if t.is_variable_length:
817 # find length type
818 try:
819 length_type = self._lookup_path(t.length, t)
820 except Exception as e:
821 raise ConfigError('invalid array type\'s length', e)
822
823 # make sure length type is an unsigned integer
824 if type(length_type) is not metadata.Integer:
825 raise ConfigError('array type\'s length does not point to an integer type')
826
827 if length_type.signed:
828 raise ConfigError('array type\'s length does not point to an unsigned integer type')
829
830 self._visit_type(t.element_type)
831
832 def _visit_variant_type(self, t):
833 # find tag type
834 try:
835 tag_type = self._lookup_path(t.tag, t)
836 except Exception as e:
837 raise ConfigError('invalid variant type\'s tag', e)
838
839 # make sure tag type is an enumeration
840 if type(tag_type) is not metadata.Enum:
841 raise ConfigError('variant type\'s tag does not point to an enumeration type')
842
843 # verify that each variant type's type exists as an enumeration member
844 for tag_name in t.types.keys():
845 if tag_name not in tag_type.members:
846 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
847
848 self._stack_push(t)
849
850 for type_name, type_type in t.types.items():
851 try:
852 self._visit_type(type_type)
853 except Exception as e:
854 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
855
856 self._stack_incr_index()
857
858 self._stack_pop()
859
860 def _visit_type(self, t):
861 if t is None:
862 return
863
864 if type(t) in self._type_to_visit_type_func:
865 func = self._type_to_visit_type_func[type(t)]
866
867 if func is not None:
868 func(t)
869
870 def _visit_event(self, ev):
871 ev_name = ev.name
872
873 # set current event
874 self._cur_event = ev
875
876 # visit event context type
877 self._stack_reset()
878 self._cur_entity = _Entity.EVENT_CONTEXT
879
880 try:
881 self._visit_type(ev.context_type)
882 except Exception as e:
883 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
884
885 # visit event payload type
886 self._stack_reset()
887 self._cur_entity = _Entity.EVENT_PAYLOAD
888
889 try:
890 self._visit_type(ev.payload_type)
891 except Exception as e:
892 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
893
894 def _visit_stream(self, stream):
895 stream_name = stream.name
896
897 # set current stream
898 self._cur_stream = stream
899
900 # reset current event
901 self._cur_event = None
902
903 # visit stream packet context type
904 self._stack_reset()
905 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
906
907 try:
908 self._visit_type(stream.packet_context_type)
909 except Exception as e:
910 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
911
912 # visit stream event header type
913 self._stack_reset()
914 self._cur_entity = _Entity.STREAM_EVENT_HEADER
915
916 try:
917 self._visit_type(stream.event_header_type)
918 except Exception as e:
919 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
920
921 # visit stream event context type
922 self._stack_reset()
923 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
924
925 try:
926 self._visit_type(stream.event_context_type)
927 except Exception as e:
928 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
929
930 # visit events
931 for ev in stream.events.values():
932 try:
933 self._visit_event(ev)
934 except Exception as e:
935 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
936
937 def validate(self, meta):
938 # set current trace
939 self._cur_trace = meta.trace
940
941 # visit trace packet header type
942 self._stack_reset()
943 self._cur_entity = _Entity.TRACE_PACKET_HEADER
944
945 try:
946 self._visit_type(meta.trace.packet_header_type)
947 except Exception as e:
948 raise ConfigError('invalid packet header type in trace', e)
949
950 # visit streams
951 for stream in meta.streams.values():
952 self._visit_stream(stream)
953
954
955 # Since type inheritance allows types to be only partially defined at
956 # any place in the configuration, this validator validates that actual
957 # trace, stream, and event types are all complete and valid.
958 class _MetadataTypesHistologyValidator:
959 def __init__(self):
960 self._type_to_validate_type_histology_func = {
961 metadata.Integer: self._validate_integer_histology,
962 metadata.FloatingPoint: self._validate_float_histology,
963 metadata.Enum: self._validate_enum_histology,
964 metadata.String: self._validate_string_histology,
965 metadata.Struct: self._validate_struct_histology,
966 metadata.Array: self._validate_array_histology,
967 metadata.Variant: self._validate_variant_histology,
968 }
969
970 def _validate_integer_histology(self, t):
971 # size is set
972 if t.size is None:
973 raise ConfigError('missing integer type\'s size')
974
975 def _validate_float_histology(self, t):
976 # exponent digits is set
977 if t.exp_size is None:
978 raise ConfigError('missing floating point number type\'s exponent size')
979
980 # mantissa digits is set
981 if t.mant_size is None:
982 raise ConfigError('missing floating point number type\'s mantissa size')
983
984 # exponent and mantissa sum is a multiple of 8
985 if (t.exp_size + t.mant_size) % 8 != 0:
986 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
987
988 def _validate_enum_histology(self, t):
989 # integer type is set
990 if t.value_type is None:
991 raise ConfigError('missing enumeration type\'s value type')
992
993 # there's at least one member
994 if not t.members:
995 raise ConfigError('enumeration type needs at least one member')
996
997 # no overlapping values and all values are valid considering
998 # the value type
999 ranges = []
1000
1001 if t.value_type.signed:
1002 value_min = -(1 << (t.value_type.size - 1))
1003 value_max = (1 << (t.value_type.size - 1)) - 1
1004 else:
1005 value_min = 0
1006 value_max = (1 << t.value_type.size) - 1
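# e.g. with an unsigned 8-bit value type the valid range is [0, 255];
# with a signed 8-bit value type it is [-128, 127]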
1007
1008 for label, value in t.members.items():
1009 for rg in ranges:
1010 if value[0] <= rg[1] and rg[0] <= value[1]:
1011 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1012
1013 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
1014
1015 if value[0] < value_min or value[0] > value_max:
1016 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
1017
1018 if value[1] < value_min or value[1] > value_max:
1019 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
1020
1021 ranges.append(value)
1022
1023 def _validate_string_histology(self, t):
1024 # always valid
1025 pass
1026
1027 def _validate_struct_histology(self, t):
1028 # all fields are valid
1029 for field_name, field_type in t.fields.items():
1030 try:
1031 self._validate_type_histology(field_type)
1032 except Exception as e:
1033 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1034
1035 def _validate_array_histology(self, t):
1036 # length is set
1037 if t.length is None:
1038 raise ConfigError('missing array type\'s length')
1039
1040 # element type is set
1041 if t.element_type is None:
1042 raise ConfigError('missing array type\'s element type')
1043
1044 # element type is valid
1045 try:
1046 self._validate_type_histology(t.element_type)
1047 except Exception as e:
1048 raise ConfigError('invalid array type\'s element type', e)
1049
1050 def _validate_variant_histology(self, t):
1051 # tag is set
1052 if t.tag is None:
1053 raise ConfigError('missing variant type\'s tag')
1054
1055 # there's at least one type
1056 if not t.types:
1057 raise ConfigError('variant type needs at least one type')
1058
1059 # all types are valid
1060 for type_name, type_t in t.types.items():
1061 try:
1062 self._validate_type_histology(type_t)
1063 except Exception as e:
1064 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1065
1066 def _validate_type_histology(self, t):
1067 if t is None:
1068 return
1069
1070 self._type_to_validate_type_histology_func[type(t)](t)
1071
1072 def _validate_entity_type_histology(self, t):
1073 if t is None:
1074 return
1075
1076 if type(t) is not metadata.Struct:
1077 raise ConfigError('expecting a structure type')
1078
1079 self._validate_type_histology(t)
1080
1081 def _validate_event_types_histology(self, ev):
1082 ev_name = ev.name
1083
1084 # validate event context type
1085 try:
1086 self._validate_entity_type_histology(ev.context_type)
1087 except Exception as e:
1088 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1089
1090 # validate event payload type
1091 if ev.payload_type is None:
1092 raise ConfigError('event payload type must exist in event "{}"'.format(ev_name))
1093
1094 # TODO: also check arrays, sequences, and variants
1095 if type(ev.payload_type) is metadata.Struct:
1096 if not ev.payload_type.fields:
1097 raise ConfigError('event payload type must have at least one field for event "{}"'.format(ev_name))
1098
1099 try:
1100 self._validate_entity_type_histology(ev.payload_type)
1101 except Exception as e:
1102 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1103
1104 def _validate_stream_types_histology(self, stream):
1105 stream_name = stream.name
1106
1107 # validate stream packet context type
1108 try:
1109 self._validate_entity_type_histology(stream.packet_context_type)
1110 except Exception as e:
1111 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1112
1113 # validate stream event header type
1114 try:
1115 self._validate_entity_type_histology(stream.event_header_type)
1116 except Exception as e:
1117 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1118
1119 # validate stream event context type
1120 try:
1121 self._validate_entity_type_histology(stream.event_context_type)
1122 except Exception as e:
1123 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1124
1125 # validate events
1126 for ev in stream.events.values():
1127 try:
1128 self._validate_event_types_histology(ev)
1129 except Exception as e:
1130 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1131
1132 def validate(self, meta):
1133 # validate trace packet header type
1134 try:
1135 self._validate_entity_type_histology(meta.trace.packet_header_type)
1136 except Exception as e:
1137 raise ConfigError('invalid trace packet header type', e)
1138
1139 # validate streams
1140 for stream in meta.streams.values():
1141 self._validate_stream_types_histology(stream)
1142
1143
1144 class _YamlConfigParser:
1145 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1146 self._class_name_to_create_type_func = {
1147 'int': self._create_integer,
1148 'integer': self._create_integer,
1149 'flt': self._create_float,
1150 'float': self._create_float,
1151 'floating-point': self._create_float,
1152 'enum': self._create_enum,
1153 'enumeration': self._create_enum,
1154 'str': self._create_string,
1155 'string': self._create_string,
1156 'struct': self._create_struct,
1157 'structure': self._create_struct,
1158 'array': self._create_array,
1159 'var': self._create_variant,
1160 'variant': self._create_variant,
1161 }
1162 self._type_to_create_type_func = {
1163 metadata.Integer: self._create_integer,
1164 metadata.FloatingPoint: self._create_float,
1165 metadata.Enum: self._create_enum,
1166 metadata.String: self._create_string,
1167 metadata.Struct: self._create_struct,
1168 metadata.Array: self._create_array,
1169 metadata.Variant: self._create_variant,
1170 }
1171 self._include_dirs = include_dirs
1172 self._ignore_include_not_found = ignore_include_not_found
1173 self._dump_config = dump_config
1174
1175 def _set_byte_order(self, metadata_node):
1176 if 'trace' not in metadata_node:
1177 raise ConfigError('missing "trace" property (metadata)')
1178
1179 trace_node = metadata_node['trace']
1180
1181 if not _is_assoc_array_prop(trace_node):
1182 raise ConfigError('"trace" property (metadata) must be an associative array')
1183
1184 if 'byte-order' not in trace_node:
1185 raise ConfigError('missing "byte-order" property (trace)')
1186
1187 bo_node = trace_node['byte-order']
1188
1189 if not _is_str_prop(bo_node):
1190 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
1191
1192 self._bo = _byte_order_str_to_bo(bo_node)
1193
1194 if self._bo is None:
1195 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1196
1197 def _lookup_type_alias(self, name):
1198 if name in self._tas:
1199 return copy.deepcopy(self._tas[name])
1200
1201 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1202 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1203
1204 if unk_prop:
1205 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1206
1207 if 'name' not in prop_mapping_node:
1208 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1209
1210 if 'property' not in prop_mapping_node:
1211 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1212
1213 clock_name = prop_mapping_node['name']
1214 prop = prop_mapping_node['property']
1215
1216 if not _is_str_prop(clock_name):
1217 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1218
1219 if not _is_str_prop(prop):
1220 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1221
1222 if clock_name not in self._clocks:
1223 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1224
1225 if prop != 'value':
1226 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1227
1228 mapped_clock = self._clocks[clock_name]
1229 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1230
1231 def _get_first_unknown_type_prop(self, type_node, known_props):
1232 kp = known_props + ['inherit', 'class']
1233
1234 if self._version >= 201:
1235 kp.append('$inherit')
1236
1237 return _get_first_unknown_prop(type_node, kp)
1238
1239 def _create_integer(self, obj, node):
1240 if obj is None:
1241 # create integer object
1242 obj = metadata.Integer()
1243
1244 unk_prop = self._get_first_unknown_type_prop(node, [
1245 'size',
1246 'align',
1247 'signed',
1248 'byte-order',
1249 'base',
1250 'encoding',
1251 'property-mappings',
1252 ])
1253
1254 if unk_prop:
1255 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1256
1257 # size
1258 if 'size' in node:
1259 size = node['size']
1260
1261 if not _is_int_prop(size):
1262 raise ConfigError('"size" property of integer type object must be an integer')
1263
1264 if size < 1:
1265 raise ConfigError('invalid integer size: {}'.format(size))
1266
1267 obj.size = size
1268
1269 # align
1270 if 'align' in node:
1271 align = node['align']
1272
1273 if not _is_int_prop(align):
1274 raise ConfigError('"align" property of integer type object must be an integer')
1275
1276 if not _is_valid_alignment(align):
1277 raise ConfigError('invalid alignment: {}'.format(align))
1278
1279 obj.align = align
1280
1281 # signed
1282 if 'signed' in node:
1283 signed = node['signed']
1284
1285 if not _is_bool_prop(signed):
1286 raise ConfigError('"signed" property of integer type object must be a boolean')
1287
1288 obj.signed = signed
1289
1290 # byte order
1291 if 'byte-order' in node:
1292 byte_order = node['byte-order']
1293
1294 if not _is_str_prop(byte_order):
1295 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1296
1297 byte_order = _byte_order_str_to_bo(byte_order)
1298
1299 if byte_order is None:
1300 raise ConfigError('invalid "byte-order" property in integer type object')
1301 else:
1302 byte_order = self._bo
1303
1304 obj.byte_order = byte_order
1305
1306 # base
1307 if 'base' in node:
1308 base = node['base']
1309
1310 if not _is_str_prop(base):
1311 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1312
1313 if base == 'bin':
1314 base = 2
1315 elif base == 'oct':
1316 base = 8
1317 elif base == 'dec':
1318 base = 10
1319 elif base == 'hex':
1320 base = 16
1321 else:
1322 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
1323
1324 obj.base = base
1325
1326 # encoding
1327 if 'encoding' in node:
1328 encoding = node['encoding']
1329
1330 if not _is_str_prop(encoding):
1331 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
1332
1333 encoding = _encoding_str_to_encoding(encoding)
1334
1335 if encoding is None:
1336 raise ConfigError('invalid "encoding" property in integer type object')
1337
1338 obj.encoding = encoding
1339
1340 # property mappings
1341 if 'property-mappings' in node:
1342 prop_mappings = node['property-mappings']
1343
1344 if not _is_array_prop(prop_mappings):
1345 raise ConfigError('"property-mappings" property of integer type object must be an array')
1346
1347 if len(prop_mappings) > 1:
1348 raise ConfigError('length of "property-mappings" array in integer type object must be 1')
1349
1350 del obj.property_mappings[:]
1351
1352 for index, prop_mapping in enumerate(prop_mappings):
1353 if not _is_assoc_array_prop(prop_mapping):
1354 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
1355
1356 if 'type' not in prop_mapping:
1357 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
1358
1359 prop_type = prop_mapping['type']
1360
1361 if not _is_str_prop(prop_type):
1362 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
1363
1364 if prop_type == 'clock':
1365 self._set_int_clock_prop_mapping(obj, prop_mapping)
1366 else:
1367 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1368
1369 return obj
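# An illustrative integer type object accepted by this method (a sketch
# using only the properties handled above; "default" is a hypothetical
# clock name defined under the "clocks" property):
#
#   class: int
#   size: 32
#   align: 32
#   signed: false
#   byte-order: le
#   base: hex
#   property-mappings:
#     - type: clock
#       name: default
#       property: value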
1370
1371 def _create_float(self, obj, node):
1372 if obj is None:
1373 # create floating point number object
1374 obj = metadata.FloatingPoint()
1375
1376 unk_prop = self._get_first_unknown_type_prop(node, [
1377 'size',
1378 'align',
1379 'byte-order',
1380 ])
1381
1382 if unk_prop:
1383 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1384
1385 # size
1386 if 'size' in node:
1387 size = node['size']
1388
1389 if not _is_assoc_array_prop(size):
1390 raise ConfigError('"size" property of floating point number type object must be an associative array')
1391
1392 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1393
1394 if unk_prop:
1395 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1396
1397 if 'exp' in size:
1398 exp = size['exp']
1399
1400 if not _is_int_prop(exp):
1401 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1402
1403 if exp < 1:
1404 raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1405
1406 obj.exp_size = exp
1407
1408 if 'mant' in size:
1409 mant = size['mant']
1410
1411 if not _is_int_prop(mant):
1412 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1413
1414 if mant < 1:
1415 raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1416
1417 obj.mant_size = mant
1418
1419 # align
1420 if 'align' in node:
1421 align = node['align']
1422
1423 if not _is_int_prop(align):
1424 raise ConfigError('"align" property of floating point number type object must be an integer')
1425
1426 if not _is_valid_alignment(align):
1427 raise ConfigError('invalid alignment: {}'.format(align))
1428
1429 obj.align = align
1430
1431 # byte order
1432 if 'byte-order' in node:
1433 byte_order = node['byte-order']
1434
1435 if not _is_str_prop(byte_order):
1436 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1437
1438 byte_order = _byte_order_str_to_bo(byte_order)
1439
1440 if byte_order is None:
1441 raise ConfigError('invalid "byte-order" property in floating point number type object')
1442 else:
1443 byte_order = self._bo
1444
1445 obj.byte_order = byte_order
1446
1447 return obj
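# An illustrative floating point number type object (a sketch; exponent
# and mantissa sizes must sum to a multiple of 8, here 8 + 24 = 32):
#
#   class: float
#   size:
#     exp: 8
#     mant: 24
#   align: 32
#   byte-order: le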
1448
1449 def _create_enum(self, obj, node):
1450 if obj is None:
1451 # create enumeration object
1452 obj = metadata.Enum()
1453
1454 unk_prop = self._get_first_unknown_type_prop(node, [
1455 'value-type',
1456 'members',
1457 ])
1458
1459 if unk_prop:
1460 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1461
1462 # value type
1463 if 'value-type' in node:
1464 try:
1465 obj.value_type = self._create_type(node['value-type'])
1466 except Exception as e:
1467 raise ConfigError('cannot create enumeration type\'s integer type', e)
1468
1469 # members
1470 if 'members' in node:
1471 members_node = node['members']
1472
1473 if not _is_array_prop(members_node):
1474 raise ConfigError('"members" property of enumeration type object must be an array')
1475
1476 cur = 0
1477
1478 for index, m_node in enumerate(members_node):
1479 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1480 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1481
1482 if _is_str_prop(m_node):
1483 label = m_node
1484 value = (cur, cur)
1485 cur += 1
1486 else:
1487 unk_prop = _get_first_unknown_prop(m_node, [
1488 'label',
1489 'value',
1490 ])
1491
1492 if unk_prop:
1493 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1494
1495 if 'label' not in m_node:
1496 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1497
1498 label = m_node['label']
1499
1500 if not _is_str_prop(label):
1501 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1502
1503 if 'value' not in m_node:
1504 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1505
1506 value = m_node['value']
1507
1508 if not _is_int_prop(value) and not _is_array_prop(value):
1509 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1510
1511 if _is_int_prop(value):
1512 cur = value + 1
1513 value = (value, value)
1514 else:
1515 if len(value) != 2:
1516 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1517
1518 mn = value[0]
1519 mx = value[1]
1520
1521 if mn > mx:
1522 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1523
1524 value = (mn, mx)
1525 cur = mx + 1
1526
1527 obj.members[label] = value
1528
1529 return obj
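# An illustrative enumeration type object (a sketch; string members get
# successive values starting at 0, and an explicit value may be an
# integer or a [min, max] range):
#
#   class: enum
#   value-type:
#     class: int
#     size: 8
#   members:
#     - ON
#     - OFF
#     - label: ERROR
#       value: [10, 19]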
1530
1531 def _create_string(self, obj, node):
1532 if obj is None:
1533 # create string object
1534 obj = metadata.String()
1535
1536 unk_prop = self._get_first_unknown_type_prop(node, [
1537 'encoding',
1538 ])
1539
1540 if unk_prop:
1541 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1542
1543 # encoding
1544 if 'encoding' in node:
1545 encoding = node['encoding']
1546
1547 if not _is_str_prop(encoding):
1548 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1549
1550 encoding = _encoding_str_to_encoding(encoding)
1551
1552 if encoding is None:
1553 raise ConfigError('invalid "encoding" property in string type object')
1554
1555 obj.encoding = encoding
1556
1557 return obj
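# An illustrative string type object (a sketch):
#
#   class: string
#   encoding: utf-8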
1558
1559 def _create_struct(self, obj, node):
1560 if obj is None:
1561 # create structure object
1562 obj = metadata.Struct()
1563
1564 unk_prop = self._get_first_unknown_type_prop(node, [
1565 'min-align',
1566 'fields',
1567 ])
1568
1569 if unk_prop:
1570 raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1571
1572 # minimum alignment
1573 if 'min-align' in node:
1574 min_align = node['min-align']
1575
1576 if not _is_int_prop(min_align):
1577 raise ConfigError('"min-align" property of structure type object must be an integer')
1578
1579 if not _is_valid_alignment(min_align):
1580 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1581
1582 obj.min_align = min_align
1583
1584 # fields
1585 if 'fields' in node:
1586 fields = node['fields']
1587
1588 if not _is_assoc_array_prop(fields):
1589 raise ConfigError('"fields" property of structure type object must be an associative array')
1590
1591 for field_name, field_node in fields.items():
1592 if not is_valid_identifier(field_name):
1593 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1594
1595 try:
1596 obj.fields[field_name] = self._create_type(field_node)
1597 except Exception as e:
1598 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1599
1600 return obj
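# An illustrative structure type object (a sketch; "uint16" stands for a
# hypothetical type alias registered earlier):
#
#   class: struct
#   min-align: 8
#   fields:
#     msg_id: uint16
#     msg_size:
#       class: int
#       size: 32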
1601
1602 def _create_array(self, obj, node):
1603 if obj is None:
1604 # create array object
1605 obj = metadata.Array()
1606
1607 unk_prop = self._get_first_unknown_type_prop(node, [
1608 'length',
1609 'element-type',
1610 ])
1611
1612 if unk_prop:
1613 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1614
1615 # length
1616 if 'length' in node:
1617 length = node['length']
1618
1619 if not _is_int_prop(length) and not _is_str_prop(length):
1620 raise ConfigError('"length" property of array type object must be an integer or a string')
1621
1622 if type(length) is int and length < 0:
1623 raise ConfigError('invalid static array length: {}'.format(length))
1624
1625 obj.length = length
1626
1627 # element type
1628 if 'element-type' in node:
1629 try:
1630 obj.element_type = self._create_type(node['element-type'])
1631 except Exception as e:
1632 raise ConfigError('cannot create array type\'s element type', e)
1633
1634 return obj
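# An illustrative array type object (a sketch; "uint8" stands for a
# hypothetical type alias, and a string length such as
# "stream.event.context.my_len" would make the array dynamic):
#
#   class: array
#   length: 16
#   element-type: uint8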
1635
1636 def _create_variant(self, obj, node):
1637 if obj is None:
1638 # create variant object
1639 obj = metadata.Variant()
1640
1641 unk_prop = self._get_first_unknown_type_prop(node, [
1642 'tag',
1643 'types',
1644 ])
1645
1646 if unk_prop:
1647 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1648
1649 # tag
1650 if 'tag' in node:
1651 tag = node['tag']
1652
1653 if not _is_str_prop(tag):
1654 raise ConfigError('"tag" property of variant type object must be a string')
1655
1656 # do not validate variant tag for the moment; will be done in a
1657 # second phase
1658 obj.tag = tag
1659
1660 # types
1661 if 'types' in node:
1662 types = node['types']
1663
1664 if not _is_assoc_array_prop(types):
1665 raise ConfigError('"types" property of variant type object must be an associative array')
1666
1667 # do not validate type names for the moment; will be done in a
1668 # second phase
1669 for type_name, type_node in types.items():
1670 if not is_valid_identifier(type_name):
1671 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1672
1673 try:
1674 obj.types[type_name] = self._create_type(type_node)
1675 except Exception as e:
1676 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1677
1678 return obj
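# An illustrative variant type object (a sketch; the tag path and type
# aliases are hypothetical, and the tag must point to an enumeration
# type whose members match the type names):
#
#   class: variant
#   tag: stream.event.context.my_tag
#   types:
#     a: uint8
#     b: my_string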
1679
1680 def _create_type(self, type_node):
1681 if type(type_node) is str:
1682 t = self._lookup_type_alias(type_node)
1683
1684 if t is None:
1685 raise ConfigError('unknown type alias "{}"'.format(type_node))
1686
1687 return t
1688
1689 if not _is_assoc_array_prop(type_node):
1690 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
1691
1692 # inherit:
1693 # v2.0: "inherit"
1694 # v2.1+: "$inherit"
1695 inherit_node = None
1696
1697 if self._version >= 200:
1698 if 'inherit' in type_node:
1699 inherit_prop = 'inherit'
1700 inherit_node = type_node[inherit_prop]
1701
1702 if self._version >= 201:
1703 if '$inherit' in type_node:
1704 if inherit_node is not None:
1705 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1706
1707 inherit_prop = '$inherit'
1708 inherit_node = type_node[inherit_prop]
1709
1710 if inherit_node is not None and 'class' in type_node:
1711 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1712
1713 if inherit_node is not None:
1714 if not _is_str_prop(inherit_node):
1715 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1716
1717 base = self._lookup_type_alias(inherit_node)
1718
1719 if base is None:
1720 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1721
1722 func = self._type_to_create_type_func[type(base)]
1723 else:
1724 if 'class' not in type_node:
1725 raise ConfigError('type objects which do not inherit must have a "class" property')
1726
1727 class_name = type_node['class']
1728
1729 if type(class_name) is not str:
1730 raise ConfigError('type objects\' "class" property must be a string')
1731
1732 if class_name not in self._class_name_to_create_type_func:
1733 raise ConfigError('unknown type class "{}"'.format(class_name))
1734
1735 base = None
1736 func = self._class_name_to_create_type_func[class_name]
1737
1738 return func(base, type_node)
1739
1740 def _register_type_aliases(self, metadata_node):
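# A sketch of the "type-aliases" metadata section registered here; the alias
# names are arbitrary, the "int" class and its "size" property are assumptions,
# while "length" and "element-type" match the array checks above:
#
#   type-aliases:
#     my-uint32:
#       class: int
#       size: 32
#     my-array:
#       class: array
#       length: 4
#       element-type: my-uint32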
1741 self._tas = dict()
1742
1743 if 'type-aliases' not in metadata_node:
1744 return
1745
1746 ta_node = metadata_node['type-aliases']
1747
1748 if not _is_assoc_array_prop(ta_node):
1749 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1750
1751 for ta_name, ta_type in ta_node.items():
1752 if ta_name in self._tas:
1753 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1754
1755 try:
1756 t = self._create_type(ta_type)
1757 except Exception as e:
1758 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1759
1760 self._tas[ta_name] = t
1761
1762 def _create_clock(self, node):
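# A sketch of a clock object as validated below (this is the value of one
# entry of the metadata "clocks" mapping handled by _register_clocks());
# all values are illustrative and "$return-ctype" needs config version 2.1+:
#
#   freq: 1000000000
#   error-cycles: 16
#   offset:
#     seconds: 1434072888
#     cycles: 0
#   absolute: false
#   $return-ctype: uint64_t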
1763 # create clock object
1764 clock = metadata.Clock()
1765
1766 if not _is_assoc_array_prop(node):
1767 raise ConfigError('clock objects must be associative arrays')
1768
1769 known_props = [
1770 'uuid',
1771 'description',
1772 'freq',
1773 'error-cycles',
1774 'offset',
1775 'absolute',
1776 'return-ctype',
1777 ]
1778
1779 if self._version >= 201:
1780 known_props.append('$return-ctype')
1781
1782 unk_prop = _get_first_unknown_prop(node, known_props)
1783
1784 if unk_prop:
1785 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1786
1787 # UUID
1788 if 'uuid' in node:
1789 uuidp = node['uuid']
1790
1791 if not _is_str_prop(uuidp):
1792 raise ConfigError('"uuid" property of clock object must be a string')
1793
1794 try:
1795 uuidp = uuid.UUID(uuidp)
1796 except ValueError:
1797 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1798
1799 clock.uuid = uuidp
1800
1801 # description
1802 if 'description' in node:
1803 desc = node['description']
1804
1805 if not _is_str_prop(desc):
1806 raise ConfigError('"description" property of clock object must be a string')
1807
1808 clock.description = desc
1809
1810 # frequency
1811 if 'freq' in node:
1812 freq = node['freq']
1813
1814 if not _is_int_prop(freq):
1815 raise ConfigError('"freq" property of clock object must be an integer')
1816
1817 if freq < 1:
1818 raise ConfigError('invalid clock frequency: {}'.format(freq))
1819
1820 clock.freq = freq
1821
1822 # error cycles
1823 if 'error-cycles' in node:
1824 error_cycles = node['error-cycles']
1825
1826 if not _is_int_prop(error_cycles):
1827 raise ConfigError('"error-cycles" property of clock object must be an integer')
1828
1829 if error_cycles < 0:
1830 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1831
1832 clock.error_cycles = error_cycles
1833
1834 # offset
1835 if 'offset' in node:
1836 offset = node['offset']
1837
1838 if not _is_assoc_array_prop(offset):
1839 raise ConfigError('"offset" property of clock object must be an associative array')
1840
1841 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1842
1843 if unk_prop:
1844 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1845
1846 # cycles
1847 if 'cycles' in offset:
1848 offset_cycles = offset['cycles']
1849
1850 if not _is_int_prop(offset_cycles):
1851 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1852
1853 if offset_cycles < 0:
1854 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1855
1856 clock.offset_cycles = offset_cycles
1857
1858 # seconds
1859 if 'seconds' in offset:
1860 offset_seconds = offset['seconds']
1861
1862 if not _is_int_prop(offset_seconds):
1863 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1864
1865 if offset_seconds < 0:
1866 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1867
1868 clock.offset_seconds = offset_seconds
1869
1870 # absolute
1871 if 'absolute' in node:
1872 absolute = node['absolute']
1873
1874 if not _is_bool_prop(absolute):
1875 raise ConfigError('"absolute" property of clock object must be a boolean')
1876
1877 clock.absolute = absolute
1878
1879 # return C type:
1880 # v2.0: "return-ctype"
1881 # v2.1+: "$return-ctype"
1882 return_ctype_node = None
1883
1884 if self._version >= 200:
1885 if 'return-ctype' in node:
1886 return_ctype_prop = 'return-ctype'
1887 return_ctype_node = node[return_ctype_prop]
1888
1889 if self._version >= 201:
1890 if '$return-ctype' in node:
1891 if return_ctype_node is not None:
1892 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1893
1894 return_ctype_prop = '$return-ctype'
1895 return_ctype_node = node[return_ctype_prop]
1896
1897 if return_ctype_node is not None:
1898 if not _is_str_prop(return_ctype_node):
1899 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1900
1901 clock.return_ctype = return_ctype_node
1902
1903 return clock
1904
1905 def _register_clocks(self, metadata_node):
1906 self._clocks = collections.OrderedDict()
1907
1908 if 'clocks' not in metadata_node:
1909 return
1910
1911 clocks_node = metadata_node['clocks']
1912
1913 if not _is_assoc_array_prop(clocks_node):
1914 raise ConfigError('"clocks" property (metadata) must be an associative array')
1915
1916 for clock_name, clock_node in clocks_node.items():
1917 if not is_valid_identifier(clock_name):
1918 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
1919
1920 if clock_name in self._clocks:
1921 raise ConfigError('duplicate clock "{}"'.format(clock_name))
1922
1923 try:
1924 clock = self._create_clock(clock_node)
1925 except Exception as e:
1926 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
1927
1928 clock.name = clock_name
1929 self._clocks[clock_name] = clock
1930
1931 def _create_env(self, metadata_node):
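# A sketch of the "env" metadata section accepted here: a mapping of valid
# C identifiers to integer or string values (names and values illustrative):
#
#   env:
#     my_device_model: 'sensor-x'
#     my_device_version: 2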
1932 env = collections.OrderedDict()
1933
1934 if 'env' not in metadata_node:
1935 return env
1936
1937 env_node = metadata_node['env']
1938
1939 if not _is_assoc_array_prop(env_node):
1940 raise ConfigError('"env" property (metadata) must be an associative array')
1941
1942 for env_name, env_value in env_node.items():
1943 if env_name in env:
1944 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
1945
1946 if not is_valid_identifier(env_name):
1947 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
1948
1949 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
1950 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
1951
1952 env[env_name] = env_value
1953
1954 return env
1955
1956 def _register_log_levels(self, metadata_node):
1957 self._log_levels = dict()
1958
1959 # log levels:
1960 # v2.0: "log-levels"
1961 # v2.1+: "$log-levels"
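# for example (v2.1+), with illustrative level names; values must be
# non-negative integers:
#
#   $log-levels:
#     emerg: 0
#     warning: 4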
1962 log_levels_node = None
1963
1964 if self._version >= 200:
1965 if 'log-levels' in metadata_node:
1966 log_levels_prop = 'log-levels'
1967 log_levels_node = metadata_node[log_levels_prop]
1968
1969 if self._version >= 201:
1970 if '$log-levels' in metadata_node:
1971 if log_levels_node is not None:
1972 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
1973
1974 log_levels_prop = '$log-levels'
1975 log_levels_node = metadata_node[log_levels_prop]
1976
1977 if log_levels_node is None:
1978 return
1979
1980 if not _is_assoc_array_prop(log_levels_node):
1981 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
1982
1983 for ll_name, ll_value in log_levels_node.items():
1984 if ll_name in self._log_levels:
1985 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
1986
1987 if not _is_int_prop(ll_value):
1988 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
1989
1990 if ll_value < 0:
1991 raise ConfigError('invalid log level entry ("{}"): log level value must not be negative'.format(ll_name))
1992
1993 self._log_levels[ll_name] = ll_value
1994
1995 def _create_trace(self, metadata_node):
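# A sketch of the "trace" metadata section validated below; the "le" byte
# order value is an assumption (parsed by _set_byte_order()), and the packet
# header type may be a type object or a type alias name:
#
#   trace:
#     byte-order: le
#     uuid: auto
#     packet-header-type: my-packet-header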
1996 # create trace object
1997 trace = metadata.Trace()
1998
1999 if 'trace' not in metadata_node:
2000 raise ConfigError('missing "trace" property (metadata)')
2001
2002 trace_node = metadata_node['trace']
2003
2004 if not _is_assoc_array_prop(trace_node):
2005 raise ConfigError('"trace" property (metadata) must be an associative array')
2006
2007 unk_prop = _get_first_unknown_prop(trace_node, [
2008 'byte-order',
2009 'uuid',
2010 'packet-header-type',
2011 ])
2012
2013 if unk_prop:
2014 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
2015
2016 # set byte order (already parsed)
2017 trace.byte_order = self._bo
2018
2019 # UUID
2020 if 'uuid' in trace_node:
2021 uuidp = trace_node['uuid']
2022
2023 if not _is_str_prop(uuidp):
2024 raise ConfigError('"uuid" property of trace object must be a string')
2025
2026 if uuidp == 'auto':
2027 uuidp = uuid.uuid1()
2028 else:
2029 try:
2030 uuidp = uuid.UUID(uuidp)
2031 except ValueError:
2032 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
2033
2034 trace.uuid = uuidp
2035
2036 # packet header type
2037 if 'packet-header-type' in trace_node:
2038 try:
2039 ph_type = self._create_type(trace_node['packet-header-type'])
2040 except Exception as e:
2041 raise ConfigError('cannot create packet header type (trace)', e)
2042
2043 trace.packet_header_type = ph_type
2044
2045 return trace
2046
2047 def _lookup_log_level(self, ll):
2048 if _is_int_prop(ll):
2049 return ll
2050 elif _is_str_prop(ll) and ll in self._log_levels:
2051 return self._log_levels[ll]
2052
2053 def _create_event(self, event_node):
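# A sketch of an event object as validated below; "payload-type" is
# mandatory, both types may be type objects or alias names, and the alias
# names and log level name are illustrative:
#
#   log-level: warning
#   context-type: my-event-context
#   payload-type: my-event-payload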
2054 event = metadata.Event()
2055
2056 if not _is_assoc_array_prop(event_node):
2057 raise ConfigError('event objects must be associative arrays')
2058
2059 unk_prop = _get_first_unknown_prop(event_node, [
2060 'log-level',
2061 'context-type',
2062 'payload-type',
2063 ])
2064
2065 if unk_prop:
2066 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2067
2068 if 'log-level' in event_node:
2069 ll_node = event_node['log-level']
2070
2071 if _is_str_prop(ll_node):
2072 ll = self._lookup_log_level(ll_node)
2073
2074 if ll is None:
2075 raise ConfigError('cannot find log level "{}"'.format(ll_node))
2076 elif _is_int_prop(ll_node):
2077 if ll_node < 0:
2078 raise ConfigError('invalid log level value {}: value must not be negative'.format(ll_node))
2079
2080 ll = ll_node
2081 else:
2082 raise ConfigError('"log-level" property must be either a string or an integer')
2083
2084 event.log_level = ll
2085
2086 if 'context-type' in event_node:
2087 try:
2088 t = self._create_type(event_node['context-type'])
2089 except Exception as e:
2090 raise ConfigError('cannot create event\'s context type object', e)
2091
2092 event.context_type = t
2093
2094 if 'payload-type' not in event_node:
2095 raise ConfigError('missing "payload-type" property in event object')
2096
2097 try:
2098 t = self._create_type(event_node['payload-type'])
2099 except Exception as e:
2100 raise ConfigError('cannot create event\'s payload type object', e)
2101
2102 event.payload_type = t
2103
2104 return event
2105
2106 def _create_stream(self, stream_node):
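# A sketch of a stream object as validated below; at least one event is
# required, and event IDs are assigned in declaration order starting at 0
# (names are illustrative):
#
#   packet-context-type: my-packet-context
#   events:
#     my_event:
#       payload-type: my-event-payload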
2107 stream = metadata.Stream()
2108
2109 if not _is_assoc_array_prop(stream_node):
2110 raise ConfigError('stream objects must be associative arrays')
2111
2112 unk_prop = _get_first_unknown_prop(stream_node, [
2113 'packet-context-type',
2114 'event-header-type',
2115 'event-context-type',
2116 'events',
2117 ])
2118
2119 if unk_prop:
2120 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2121
2122 if 'packet-context-type' in stream_node:
2123 try:
2124 t = self._create_type(stream_node['packet-context-type'])
2125 except Exception as e:
2126 raise ConfigError('cannot create stream\'s packet context type object', e)
2127
2128 stream.packet_context_type = t
2129
2130 if 'event-header-type' in stream_node:
2131 try:
2132 t = self._create_type(stream_node['event-header-type'])
2133 except Exception as e:
2134 raise ConfigError('cannot create stream\'s event header type object', e)
2135
2136 stream.event_header_type = t
2137
2138 if 'event-context-type' in stream_node:
2139 try:
2140 t = self._create_type(stream_node['event-context-type'])
2141 except Exception as e:
2142 raise ConfigError('cannot create stream\'s event context type object', e)
2143
2144 stream.event_context_type = t
2145
2146 if 'events' not in stream_node:
2147 raise ConfigError('missing "events" property in stream object')
2148
2149 events = stream_node['events']
2150
2151 if not _is_assoc_array_prop(events):
2152 raise ConfigError('"events" property of stream object must be an associative array')
2153
2154 if not events:
2155 raise ConfigError('at least one event is needed within a stream object')
2156
2157 cur_id = 0
2158
2159 for ev_name, ev_node in events.items():
2160 try:
2161 ev = self._create_event(ev_node)
2162 except Exception as e:
2163 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
2164
2165 ev.id = cur_id
2166 ev.name = ev_name
2167 stream.events[ev_name] = ev
2168 cur_id += 1
2169
2170 return stream
2171
2172 def _create_streams(self, metadata_node):
2173 streams = collections.OrderedDict()
2174
2175 if 'streams' not in metadata_node:
2176 raise ConfigError('missing "streams" property (metadata)')
2177
2178 streams_node = metadata_node['streams']
2179
2180 if not _is_assoc_array_prop(streams_node):
2181 raise ConfigError('"streams" property (metadata) must be an associative array')
2182
2183 if not streams_node:
2184 raise ConfigError('at least one stream is needed (metadata)')
2185
2186 cur_id = 0
2187
2188 for stream_name, stream_node in streams_node.items():
2189 try:
2190 stream = self._create_stream(stream_node)
2191 except Exception as e:
2192 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2193
2194 stream.id = cur_id
2195 stream.name = str(stream_name)
2196 streams[stream_name] = stream
2197 cur_id += 1
2198
2199 return streams
2200
2201 def _create_metadata(self, root):
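# A sketch of the overall "metadata" object assembled here ("$log-levels"
# needs version 2.1+; the contents of each section are illustrative and
# covered by the dedicated methods above):
#
#   metadata:
#     type-aliases: ...
#     $log-levels: ...
#     clocks: ...
#     env: ...
#     trace: ...
#     streams: ...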
2202 meta = metadata.Metadata()
2203
2204 if 'metadata' not in root:
2205 raise ConfigError('missing "metadata" property (configuration)')
2206
2207 metadata_node = root['metadata']
2208
2209 if not _is_assoc_array_prop(metadata_node):
2210 raise ConfigError('"metadata" property (configuration) must be an associative array')
2211
2212 known_props = [
2213 'type-aliases',
2214 'log-levels',
2215 'trace',
2216 'env',
2217 'clocks',
2218 'streams',
2219 ]
2220
2221 if self._version >= 201:
2222 known_props.append('$log-levels')
2223
2224 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
2225
2226 if unk_prop:
2227 add = ''
2228
2229 if unk_prop == '$include':
2230 add = ' (use version 2.1 or greater)'
2231
2232 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
2233
2234 self._set_byte_order(metadata_node)
2235 self._register_clocks(metadata_node)
2236 meta.clocks = self._clocks
2237 self._register_type_aliases(metadata_node)
2238 meta.env = self._create_env(metadata_node)
2239 meta.trace = self._create_trace(metadata_node)
2240 self._register_log_levels(metadata_node)
2241 meta.streams = self._create_streams(metadata_node)
2242
2243 return meta
2244
2245 def _get_version(self, root):
2246 if 'version' not in root:
2247 raise ConfigError('missing "version" property (configuration)')
2248
2249 version_node = root['version']
2250
2251 if not _is_str_prop(version_node):
2252 raise ConfigError('"version" property (configuration) must be a string')
2253
2254 version_node = version_node.strip()
2255
2256 if version_node not in ['2.0', '2.1']:
2257 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2258
2259 # convert version string to comparable version integer
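# e.g. '2.0' -> 200, '2.1' -> 201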
2260 parts = version_node.split('.')
2261 version = int(parts[0]) * 100 + int(parts[1])
2262
2263 return version
2264
2265 def _get_prefix(self, root):
2266 if 'prefix' not in root:
2267 return 'barectf_'
2268
2269 prefix_node = root['prefix']
2270
2271 if not _is_str_prop(prefix_node):
2272 raise ConfigError('"prefix" property (configuration) must be a string')
2273
2274 if not is_valid_identifier(prefix_node):
2275 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
2276
2277 return prefix_node
2278
2279 def _get_last_include_file(self):
2280 if self._include_stack:
2281 return self._include_stack[-1]
2282
2283 return self._root_yaml_path
2284
2285 def _load_include(self, yaml_path):
2286 for inc_dir in self._include_dirs:
2287 # current include dir + file name path
2288 # note: os.path.join() drops inc_dir entirely when yaml_path is an absolute path
2289 inc_path = os.path.join(inc_dir, yaml_path)
2290
2291 # real path (symbolic links resolved)
2292 real_path = os.path.realpath(inc_path)
2293
2294 # normalized path (redundant separators and up-level references removed)
2295 norm_path = os.path.normpath(real_path)
2296
2297 if not os.path.isfile(norm_path):
2298 # file does not exist: skip
2299 continue
2300
2301 if norm_path in self._include_stack:
2302 base_path = self._get_last_include_file()
2303 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2304
2305 self._include_stack.append(norm_path)
2306
2307 # load raw content
2308 return self._yaml_ordered_load(norm_path)
2309
2310 if not self._ignore_include_not_found:
2311 base_path = self._get_last_include_file()
2312 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2313
2314 return None
2315
2316 def _get_include_paths(self, include_node):
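# The "$include" property may be a single path or an array of paths, for
# example (file names are illustrative):
#
#   $include: base-stream.yaml
#
# or:
#
#   $include:
#     - base-defs.yaml
#     - project-defs.yaml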
2317 if _is_str_prop(include_node):
2318 return [include_node]
2319 elif _is_array_prop(include_node):
2320 for include_path in include_node:
2321 if not _is_str_prop(include_path):
2322 raise ConfigError('invalid include property: expecting array of strings')
2323
2324 return include_node
2325
2326 raise ConfigError('invalid include property: expecting string or array of strings')
2327
2328 def _update_node(self, base_node, overlay_node):
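# Worked example of the merge implemented below (hypothetical nodes):
# with base {'a': [1], 'b': {'x': 1}, 'c': 'old'} and overlay
# {'a': [2], 'b': {'y': 2}, 'c': 'new'}, the base becomes
# {'a': [1, 2], 'b': {'x': 1, 'y': 2}, 'c': 'new'}: dictionaries are merged
# recursively, arrays are appended, and anything else is replaced.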
2329 for olay_key, olay_value in overlay_node.items():
2330 if olay_key in base_node:
2331 base_value = base_node[olay_key]
2332
2333 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2334 # merge dictionaries
2335 self._update_node(base_value, olay_value)
2336 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2337 # append extension array items to base items
2338 base_value += olay_value
2339 else:
2340 # fall back to replacing
2341 base_node[olay_key] = olay_value
2342 else:
2343 base_node[olay_key] = olay_value
2344
2345 def _process_node_include(self, last_overlay_node, name,
2346 process_base_include_cb,
2347 process_children_include_cb=None):
2348 if not _is_assoc_array_prop(last_overlay_node):
2349 raise ConfigError('{} objects must be associative arrays'.format(name))
2350
2351 # process children inclusions first
2352 if process_children_include_cb:
2353 process_children_include_cb(last_overlay_node)
2354
2355 if '$include' in last_overlay_node:
2356 include_node = last_overlay_node['$include']
2357 else:
2358 # no includes!
2359 return last_overlay_node
2360
2361 include_paths = self._get_include_paths(include_node)
2362 cur_base_path = self._get_last_include_file()
2363 base_node = None
2364
2365 # keep the include paths and remove the include property
2366 include_paths = copy.deepcopy(include_paths)
2367 del last_overlay_node['$include']
2368
2369 for include_path in include_paths:
2370 # load raw YAML from included file
2371 overlay_node = self._load_include(include_path)
2372
2373 if overlay_node is None:
2374 # cannot find include file, but we're ignoring those
2375 # errors, otherwise _load_include() itself raises
2376 # a config error
2377 continue
2378
2379 # recursively process includes
2380 try:
2381 overlay_node = process_base_include_cb(overlay_node)
2382 except Exception as e:
2383 raise ConfigError('in "{}"'.format(cur_base_path), e)
2384
2385 # pop include stack now that we're done including
2386 del self._include_stack[-1]
2387
2388 # at this point, base_node is fully resolved (does not
2389 # contain any include property)
2390 if base_node is None:
2391 base_node = overlay_node
2392 else:
2393 self._update_node(base_node, overlay_node)
2394
2395 # finally, we update the latest base node with our last overlay
2396 # node
2397 if base_node is None:
2398 # nothing was included, which is possible when we're
2399 # ignoring include errors
2400 return last_overlay_node
2401
2402 self._update_node(base_node, last_overlay_node)
2403
2404 return base_node
2405
2406 def _process_event_include(self, event_node):
2407 return self._process_node_include(event_node, 'event',
2408 self._process_event_include)
2409
2410 def _process_stream_include(self, stream_node):
2411 def process_children_include(stream_node):
2412 if 'events' in stream_node:
2413 events_node = stream_node['events']
2414
2415 if not _is_assoc_array_prop(events_node):
2416 raise ConfigError('"events" property must be an associative array')
2417
2418 events_node_keys = list(events_node.keys())
2419
2420 for key in events_node_keys:
2421 event_node = events_node[key]
2422
2423 try:
2424 events_node[key] = self._process_event_include(event_node)
2425 except Exception as e:
2426 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2427
2428 return self._process_node_include(stream_node, 'stream',
2429 self._process_stream_include,
2430 process_children_include)
2431
2432 def _process_trace_include(self, trace_node):
2433 return self._process_node_include(trace_node, 'trace',
2434 self._process_trace_include)
2435
2436 def _process_clock_include(self, clock_node):
2437 return self._process_node_include(clock_node, 'clock',
2438 self._process_clock_include)
2439
2440 def _process_metadata_include(self, metadata_node):
2441 def process_children_include(metadata_node):
2442 if 'trace' in metadata_node:
2443 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2444
2445 if 'clocks' in metadata_node:
2446 clocks_node = metadata_node['clocks']
2447
2448 if not _is_assoc_array_prop(clocks_node):
2449 raise ConfigError('"clocks" property (metadata) must be an associative array')
2450
2451 clocks_node_keys = list(clocks_node.keys())
2452
2453 for key in clocks_node_keys:
2454 clock_node = clocks_node[key]
2455
2456 try:
2457 clocks_node[key] = self._process_clock_include(clock_node)
2458 except Exception as e:
2459 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2460
2461 if 'streams' in metadata_node:
2462 streams_node = metadata_node['streams']
2463
2464 if not _is_assoc_array_prop(streams_node):
2465 raise ConfigError('"streams" property (metadata) must be an associative array')
2466
2467 streams_node_keys = list(streams_node.keys())
2468
2469 for key in streams_node_keys:
2470 stream_node = streams_node[key]
2471
2472 try:
2473 streams_node[key] = self._process_stream_include(stream_node)
2474 except Exception as e:
2475 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2476
2477 return self._process_node_include(metadata_node, 'metadata',
2478 self._process_metadata_include,
2479 process_children_include)
2480
2481 def _process_root_includes(self, root):
2482 # The following config objects support includes:
2483 #
2484 # * Metadata object
2485 # * Trace object
2486 # * Stream object
2487 # * Event object
2488 #
2489 # We need to process the event includes first, then the stream
2490 # includes, then the trace includes, and finally the metadata
2491 # includes.
2492 #
2493 # In each object, only one of the $include and $include-replace
2494 # special properties is allowed.
2495 #
2496 # We keep a stack of absolute paths to included files to detect
2497 # recursion.
2498 if 'metadata' in root:
2499 root['metadata'] = self._process_metadata_include(root['metadata'])
2500
2501 return root
2502
2503 def _yaml_ordered_dump(self, node, **kwds):
2504 class ODumper(yaml.Dumper):
2505 pass
2506
2507 def dict_representer(dumper, node):
2508 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2509 node.items())
2510
2511 ODumper.add_representer(collections.OrderedDict, dict_representer)
2512
2513 return yaml.dump(node, Dumper=ODumper, **kwds)
2514
2515 def _yaml_ordered_load(self, yaml_path):
2516 class OLoader(yaml.Loader):
2517 pass
2518
2519 def construct_mapping(loader, node):
2520 loader.flatten_mapping(node)
2521
2522 return collections.OrderedDict(loader.construct_pairs(node))
2523
2524 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2525 construct_mapping)
2526
2527 # YAML -> Python
2528 try:
2529 with open(yaml_path, 'r') as f:
2530 node = yaml.load(f, OLoader)
2531 except (OSError, IOError):
2532 raise ConfigError('cannot open file "{}"'.format(yaml_path))
2533 except Exception as e:
2534 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2535
2536 # the loaded node must be an associative array
2537 if not _is_assoc_array_prop(node):
2538 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2539
2540 return node
2541
2542 def _reset(self):
2543 self._version = None
2544 self._include_stack = []
2545
2546 def parse(self, yaml_path):
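# A sketch of the root configuration object expected here ("version" and
# "metadata" are mandatory; "prefix" defaults to "barectf_"):
#
#   version: '2.1'
#   prefix: barectf_
#   metadata:
#     ... (see _create_metadata() above)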
2547 self._reset()
2548 self._root_yaml_path = yaml_path
2549
2550 try:
2551 root = self._yaml_ordered_load(yaml_path)
2552 except Exception as e:
2553 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2554
2555 if not _is_assoc_array_prop(root):
2556 raise ConfigError('configuration must be an associative array')
2557
2558 unk_prop = _get_first_unknown_prop(root, [
2559 'version',
2560 'prefix',
2561 'metadata',
2562 ])
2563
2564 if unk_prop:
2565 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
2566
2567 # get the config version
2568 self._version = self._get_version(root)
2569
2570 # process includes if supported
2571 if self._version >= 201:
2572 root = self._process_root_includes(root)
2573
2574 # dump config if required
2575 if self._dump_config:
2576 print(self._yaml_ordered_dump(root, indent=2,
2577 default_flow_style=False))
2578
2579 # get prefix and metadata
2580 prefix = self._get_prefix(root)
2581 meta = self._create_metadata(root)
2582
2583 return Config(self._version, prefix, meta)
2584
2585
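# A minimal usage sketch of the public entry point below (the YAML path and
# include directories are hypothetical):
#
#     cfg = from_yaml_file('config.yaml', ['.', '/usr/share/barectf'],
#                          ignore_include_not_found=False, dump_config=False)
#     print(cfg.version, cfg.prefix)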
2586 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2587 try:
2588 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2589 dump_config)
2590 cfg = parser.parse(path)
2591
2592 return cfg
2593 except Exception as e:
2594 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)