Fix: config: check for content_size gt packet_size
barectf/config.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45 class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
69 env = meta.env
70
71 env['domain'] = 'bare'
72 env['tracer_name'] = 'barectf'
73 version_tuple = barectf.get_version_tuple()
74 env['tracer_major'] = version_tuple[0]
75 env['tracer_minor'] = version_tuple[1]
76 env['tracer_patch'] = version_tuple[2]
77 env['barectf_gen_date'] = str(datetime.datetime.now().isoformat())
78
79 @property
80 def version(self):
81 return self._version
82
83 @version.setter
84 def version(self, value):
85 self._version = value
86
87 @property
88 def metadata(self):
89 return self._metadata
90
91 @metadata.setter
92 def metadata(self, value):
93 self._validate_metadata(value)
94 self._augment_metadata_env(value)
95 self._metadata = value
96
97 @property
98 def prefix(self):
99 return self._prefix
100
101 @prefix.setter
102 def prefix(self, value):
103 if not is_valid_identifier(value):
104 raise ConfigError('prefix must be a valid C identifier')
105
106 self._prefix = value
107
108
109 def _is_assoc_array_prop(node):
110 return isinstance(node, dict)
111
112
113 def _is_array_prop(node):
114 return isinstance(node, list)
115
116
117 def _is_int_prop(node):
118 return type(node) is int
119
120
121 def _is_str_prop(node):
122 return type(node) is str
123
124
125 def _is_bool_prop(node):
126 return type(node) is bool
127
128
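# A valid alignment is a strictly positive power of two; for example,
# 8 is accepted (8 & 7 == 0) while 12 is rejected (12 & 11 == 8 != 0).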
129 def _is_valid_alignment(align):
130 return ((align & (align - 1)) == 0) and align > 0
131
132
133 def _byte_order_str_to_bo(bo_str):
134 bo_str = bo_str.lower()
135
136 if bo_str == 'le':
137 return metadata.ByteOrder.LE
138 elif bo_str == 'be':
139 return metadata.ByteOrder.BE
140
141
142 def _encoding_str_to_encoding(encoding_str):
143 encoding_str = encoding_str.lower()
144
145 if encoding_str == 'utf-8' or encoding_str == 'utf8':
146 return metadata.Encoding.UTF8
147 elif encoding_str == 'ascii':
148 return metadata.Encoding.ASCII
149 elif encoding_str == 'none':
150 return metadata.Encoding.NONE
151
152
153 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
154 _ctf_keywords = set([
155 'align',
156 'callsite',
157 'clock',
158 'enum',
159 'env',
160 'event',
161 'floating_point',
162 'integer',
163 'stream',
164 'string',
165 'struct',
166 'trace',
167 'typealias',
168 'typedef',
169 'variant',
170 ])
171
172
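# For example, is_valid_identifier('my_event') is True, while
# is_valid_identifier('enum') (a CTF keyword) and is_valid_identifier('2fast')
# (leading digit) are both False.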
173 def is_valid_identifier(iden):
174 if not _re_iden.match(iden):
175 return False
176
177         if iden in _ctf_keywords:
178 return False
179
180 return True
181
182
183 def _get_first_unknown_prop(node, known_props):
184 for prop_name in node:
185 if prop_name in known_props:
186 continue
187
188 return prop_name
189
190
191 # This validator validates the configured metadata for barectf specific
192 # needs.
193 #
194 # barectf needs:
195 #
196 #   * all headers and contexts to be at least byte-aligned
197 # * all integer and floating point number sizes to be <= 64
198 # * no inner structures, arrays, or variants
199 class _BarectfMetadataValidator:
200 def __init__(self):
201 self._type_to_validate_type_func = {
202 metadata.Integer: self._validate_int_type,
203 metadata.FloatingPoint: self._validate_float_type,
204 metadata.Enum: self._validate_enum_type,
205 metadata.String: self._validate_string_type,
206 metadata.Struct: self._validate_struct_type,
207 metadata.Array: self._validate_array_type,
208 metadata.Variant: self._validate_variant_type,
209 }
210
211 def _validate_int_type(self, t, entity_root):
212 if t.size > 64:
213 raise ConfigError('integer type\'s size must be lesser than or equal to 64 bits')
214
215 def _validate_float_type(self, t, entity_root):
216 if t.size > 64:
217 raise ConfigError('floating point number type\'s size must be lesser than or equal to 64 bits')
218
219 def _validate_enum_type(self, t, entity_root):
220 if t.value_type.size > 64:
221 raise ConfigError('enumeration type\'s integer type\'s size must be lesser than or equal to 64 bits')
222
223 def _validate_string_type(self, t, entity_root):
224 pass
225
226 def _validate_struct_type(self, t, entity_root):
227 if not entity_root:
228 raise ConfigError('inner structure types are not supported as of this version')
229
230 for field_name, field_type in t.fields.items():
231 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
232 if field_name == 'uuid':
233 # allow
234 continue
235
236 try:
237 self._validate_type(field_type, False)
238 except Exception as e:
239 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
240
241 def _validate_array_type(self, t, entity_root):
242 raise ConfigError('array types are not supported as of this version')
243
244 def _validate_variant_type(self, t, entity_root):
245 raise ConfigError('variant types are not supported as of this version')
246
247 def _validate_type(self, t, entity_root):
248 self._type_to_validate_type_func[type(t)](t, entity_root)
249
250 def _validate_entity(self, t):
251 if t is None:
252 return
253
254 # make sure entity is byte-aligned
255 if t.align < 8:
256 raise ConfigError('type\'s alignment must be at least byte-aligned')
257
258 # make sure entity is a structure
259 if type(t) is not metadata.Struct:
260 raise ConfigError('expecting a structure type')
261
262 # validate types
263 self._validate_type(t, True)
264
265 def _validate_entities_and_names(self, meta):
266 self._cur_entity = _Entity.TRACE_PACKET_HEADER
267
268 try:
269 self._validate_entity(meta.trace.packet_header_type)
270 except Exception as e:
271 raise ConfigError('invalid trace packet header type', e)
272
273 for stream_name, stream in meta.streams.items():
274 if not is_valid_identifier(stream_name):
275 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
276
277 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
278
279 try:
280 self._validate_entity(stream.packet_context_type)
281 except Exception as e:
282 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
283
284 self._cur_entity = _Entity.STREAM_EVENT_HEADER
285
286 try:
287 self._validate_entity(stream.event_header_type)
288 except Exception as e:
289 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
290
291 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
292
293 try:
294 self._validate_entity(stream.event_context_type)
295 except Exception as e:
296 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
297
298 try:
299 for ev_name, ev in stream.events.items():
300 if not is_valid_identifier(ev_name):
301 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
302
303 self._cur_entity = _Entity.EVENT_CONTEXT
304
305 try:
306 self._validate_entity(ev.context_type)
307 except Exception as e:
308 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
309
310 self._cur_entity = _Entity.EVENT_PAYLOAD
311
312 if ev.payload_type is None:
313                     raise ConfigError('missing payload type in event "{}"'.format(ev_name))
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if not ev.payload_type.fields:
321                     raise ConfigError('empty payload type in event "{}"'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
329 # This validator validates special fields of trace, stream, and event
330 # types. For example, it checks that the "stream_id" field exists in the
331 # trace packet header if there's more than one stream, and much more.
332 class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
337 if t is None:
338 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is missing')
339
340 if type(t) is not metadata.Struct:
341 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is not a structure type')
342
343 if 'stream_id' not in t.fields:
344 raise ConfigError('need "stream_id" field in trace packet header type')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
366 elif field_name == 'uuid':
367 if self._meta.trace.uuid is None:
368 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
369
370 if type(field_type) is not metadata.Array:
371 raise ConfigError('"uuid" field in trace packet header type must be an array')
372
373 if field_type.length != 16:
374 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
375
376 element_type = field_type.element_type
377
378 if type(element_type) is not metadata.Integer:
379 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
380
381 if element_type.size != 8:
382 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
383
384 if element_type.signed:
385 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
386
387 if element_type.align != 8:
388 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
389
390 def _validate_trace(self, meta):
391 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
392
393 def _validate_stream_packet_context(self, stream):
394 t = stream.packet_context_type
395
396         if t is None:
397 raise ConfigError('missing "packet-context-type" property in stream object')
398
399 if type(t) is not metadata.Struct:
400 raise ConfigError('"packet-context-type": expecting a structure type')
401
402 # "timestamp_begin", if exists, is an unsigned integer type,
403 # mapped to a clock
404 if 'timestamp_begin' in t.fields:
405 ts_begin = t.fields['timestamp_begin']
406
407 if type(ts_begin) is not metadata.Integer:
408 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
409
410 if ts_begin.signed:
411 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
412
413 if not ts_begin.property_mappings:
414 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
415
416 # "timestamp_end", if exists, is an unsigned integer type,
417 # mapped to a clock
418 if 'timestamp_end' in t.fields:
419 ts_end = t.fields['timestamp_end']
420
421 if type(ts_end) is not metadata.Integer:
422 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
423
424 if ts_end.signed:
425 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
426
427 if not ts_end.property_mappings:
428 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
429
430 # "timestamp_begin" and "timestamp_end" exist together
431 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
432 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
433
434 # "events_discarded", if exists, is an unsigned integer type
435 if 'events_discarded' in t.fields:
436 events_discarded = t.fields['events_discarded']
437
438 if type(events_discarded) is not metadata.Integer:
439 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
440
441 if events_discarded.signed:
442 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
443
444 # "packet_size" and "content_size" must exist
445 if 'packet_size' not in t.fields:
446 raise ConfigError('missing "packet_size" field in stream packet context type')
447
448 packet_size = t.fields['packet_size']
449
450 # "content_size" and "content_size" must exist
451 if 'content_size' not in t.fields:
452 raise ConfigError('missing "content_size" field in stream packet context type')
453
454 content_size = t.fields['content_size']
455
456 # "packet_size" is an unsigned integer type
457 if type(packet_size) is not metadata.Integer:
458 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
459
460 if packet_size.signed:
461 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
462
463 # "content_size" is an unsigned integer type
464 if type(content_size) is not metadata.Integer:
465 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
466
467 if content_size.signed:
468 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
469
470 # "packet_size" size should be greater than or equal to "content_size" size
471 if content_size.size > packet_size.size:
472 raise ConfigError('"content_size" field size must be lesser than or equal to "packet_size" field size')
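# For example, a packet context declaring "content_size" as a 64-bit
# integer and "packet_size" as a 32-bit integer is rejected by the check
# above; equal sizes (e.g. both 32-bit) are accepted.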
473
474 def _validate_stream_event_header(self, stream):
475 t = stream.event_header_type
476
477 # needs "id" field?
478 if len(stream.events) > 1:
479 # yes
480 if t is None:
481 raise ConfigError('need "id" field in stream event header type, but stream event header type is missing')
482
483 if type(t) is not metadata.Struct:
484 raise ConfigError('need "id" field in stream event header type, but stream event header type is not a structure type')
485
486 if 'id' not in t.fields:
487 raise ConfigError('need "id" field in stream event header type')
488
489 # validate "id" and "timestamp" types
490 if type(t) is not metadata.Struct:
491 return
492
493 # "timestamp", if exists, is an unsigned integer type,
494 # mapped to a clock
495 if 'timestamp' in t.fields:
496 ts = t.fields['timestamp']
497
498 if type(ts) is not metadata.Integer:
499 raise ConfigError('"ts" field in stream event header type must be an integer type')
500
501 if ts.signed:
502 raise ConfigError('"ts" field in stream event header type must be an unsigned integer type')
503
504 if not ts.property_mappings:
505 raise ConfigError('"ts" field in stream event header type must be mapped to a clock')
506
507 # "id" is an unsigned integer type
508 if 'id' in t.fields:
509 eid = t.fields['id']
510
511 if type(eid) is not metadata.Integer:
512 raise ConfigError('"id" field in stream event header type must be an integer type')
513
514 if eid.signed:
515 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
516
517 def _validate_stream(self, stream):
518 self._validate_stream_packet_context(stream)
519 self._validate_stream_event_header(stream)
520
521 def validate(self, meta):
522 self._meta = meta
523 self._validate_trace(meta)
524
525 for stream in meta.streams.values():
526 try:
527 self._validate_stream(stream)
528 except Exception as e:
529 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
530
531
532 class _MetadataDynamicTypesValidatorStackEntry:
533 def __init__(self, base_t):
534 self._base_t = base_t
535 self._index = 0
536
537 @property
538 def index(self):
539 return self._index
540
541 @index.setter
542 def index(self, value):
543 self._index = value
544
545 @property
546 def base_t(self):
547 return self._base_t
548
549 @base_t.setter
550 def base_t(self, value):
551 self._base_t = value
552
553
554 # Entities. Order of values is important here.
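# The dynamic types validator below compares these values to know which
# entities a given entity may refer to: an entity may only point into
# itself or into an entity with a lower value (e.g. an event payload may
# refer to the stream event context, but not the other way around).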
555 @enum.unique
556 class _Entity(enum.IntEnum):
557 TRACE_PACKET_HEADER = 0
558 STREAM_PACKET_CONTEXT = 1
559 STREAM_EVENT_HEADER = 2
560 STREAM_EVENT_CONTEXT = 3
561 EVENT_CONTEXT = 4
562 EVENT_PAYLOAD = 5
563
564
565 # This validator validates dynamic metadata types, that is, it ensures
566 # variable-length array lengths and variant tags actually point to
567 # something that exists. It also checks that variable-length array
568 # lengths point to integer types and variant tags to enumeration types.
569 class _MetadataDynamicTypesValidator:
570 def __init__(self):
571 self._type_to_visit_type_func = {
572 metadata.Integer: None,
573 metadata.FloatingPoint: None,
574 metadata.Enum: None,
575 metadata.String: None,
576 metadata.Struct: self._visit_struct_type,
577 metadata.Array: self._visit_array_type,
578 metadata.Variant: self._visit_variant_type,
579 }
580
581 self._cur_trace = None
582 self._cur_stream = None
583 self._cur_event = None
584
585 def _lookup_path_from_base(self, path, parts, base, start_index,
586 base_is_current, from_t):
587 index = start_index
588 cur_t = base
589 found_path = []
590
591 while index < len(parts):
592 part = parts[index]
593 next_t = None
594
595 if type(cur_t) is metadata.Struct:
596 enumerated_items = enumerate(cur_t.fields.items())
597
598 # lookup each field
599 for i, (field_name, field_type) in enumerated_items:
600 if field_name == part:
601 next_t = field_type
602 found_path.append((i, field_type))
603
604 if next_t is None:
605 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
606 elif type(cur_t) is metadata.Variant:
607 enumerated_items = enumerate(cur_t.types.items())
608
609 # lookup each type
610 for i, (type_name, type_type) in enumerated_items:
611 if type_name == part:
612 next_t = type_type
613 found_path.append((i, type_type))
614
615 if next_t is None:
616 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
617 else:
618 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
619
620 cur_t = next_t
621 index += 1
622
623 # make sure that the pointed type is not the pointing type
624 if from_t is cur_t:
625 raise ConfigError('invalid path "{}": pointing to self'.format(path))
626
627 # if we're here, we found the type; however, it could be located
628 # _after_ the variant/VLA looking for it, if the pointing
629 # and pointed types are in the same entity, so compare the
630 # current stack entries indexes to our index path in that case
631 if not base_is_current:
632 return cur_t
633
634 for index, entry in enumerate(self._stack):
635 if index == len(found_path):
636 # end of index path; valid so far
637 break
638
639 if found_path[index][0] > entry.index:
640 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
641
642 # also make sure that both pointed and pointing types share
643 # a common structure ancestor
644 for index, entry in enumerate(self._stack):
645 if index == len(found_path):
646 break
647
648 if entry.base_t is not found_path[index][1]:
649 # found common ancestor
650 if type(entry.base_t) is metadata.Variant:
651 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
652
653 return cur_t
654
655 def _lookup_path_from_top(self, path, parts):
656 if len(parts) != 1:
657 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
658
659 find_name = parts[0]
660 index = len(self._stack) - 1
661 got_struct = False
662
663 # check stack entries in reversed order
664 for entry in reversed(self._stack):
665 # structure base type
666 if type(entry.base_t) is metadata.Struct:
667 got_struct = True
668 enumerated_items = enumerate(entry.base_t.fields.items())
669
670 # lookup each field, until the current visiting index is met
671 for i, (field_name, field_type) in enumerated_items:
672 if i == entry.index:
673 break
674
675 if field_name == find_name:
676 return field_type
677
678 # variant base type
679 elif type(entry.base_t) is metadata.Variant:
680 enumerated_items = enumerate(entry.base_t.types.items())
681
682 # lookup each type, until the current visiting index is met
683 for i, (type_name, type_type) in enumerated_items:
684 if i == entry.index:
685 break
686
687 if type_name == find_name:
688 if not got_struct:
689 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
690
691 return type_type
692
693 # nothing returned here: cannot find type
694 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
695
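# Resolves a type path. Absolute paths start with 'trace.packet.header',
# 'stream.packet.context', 'stream.event.header', 'stream.event.context',
# 'event.context', or 'event.payload' (alias: 'event.fields'), followed by
# field names; anything else is treated as a relative path, which must be
# a single field name resolved in the current scope.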
696 def _lookup_path(self, path, from_t):
697 parts = path.lower().split('.')
698 base = None
699 base_is_current = False
700
701 if len(parts) >= 3:
702 if parts[0] == 'trace':
703 if parts[1] == 'packet' and parts[2] == 'header':
704 # make sure packet header exists
705 if self._cur_trace.packet_header_type is None:
706 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
707
708 base = self._cur_trace.packet_header_type
709
710 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
711 base_is_current = True
712 else:
713 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
714 elif parts[0] == 'stream':
715 if parts[1] == 'packet' and parts[2] == 'context':
716 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
717 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
718
719 if self._cur_stream.packet_context_type is None:
720 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
721
722 base = self._cur_stream.packet_context_type
723
724 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
725 base_is_current = True
726 elif parts[1] == 'event':
727 if parts[2] == 'header':
728 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
729 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
730
731 if self._cur_stream.event_header_type is None:
732 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
733
734 base = self._cur_stream.event_header_type
735
736 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
737 base_is_current = True
738 elif parts[2] == 'context':
739 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
740 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
741
742 if self._cur_stream.event_context_type is None:
743 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
744
745 base = self._cur_stream.event_context_type
746
747 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
748 base_is_current = True
749 else:
750 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
751 else:
752 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
753
754 if base is not None:
755 start_index = 3
756
757 if len(parts) >= 2 and base is None:
758 if parts[0] == 'event':
759 if parts[1] == 'context':
760 if self._cur_entity < _Entity.EVENT_CONTEXT:
761 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
762
763 if self._cur_event.context_type is None:
764 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
765
766 base = self._cur_event.context_type
767
768 if self._cur_entity == _Entity.EVENT_CONTEXT:
769 base_is_current = True
770 elif parts[1] == 'payload' or parts[1] == 'fields':
771 if self._cur_entity < _Entity.EVENT_PAYLOAD:
772 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
773
774 if self._cur_event.payload_type is None:
775 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
776
777 base = self._cur_event.payload_type
778
779 if self._cur_entity == _Entity.EVENT_PAYLOAD:
780 base_is_current = True
781 else:
782 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
783
784 if base is not None:
785 start_index = 2
786
787 if base is not None:
788 return self._lookup_path_from_base(path, parts, base, start_index,
789 base_is_current, from_t)
790 else:
791 return self._lookup_path_from_top(path, parts)
792
793 def _stack_reset(self):
794 self._stack = []
795
796 def _stack_push(self, base_t):
797 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
798 self._stack.append(entry)
799
800 def _stack_pop(self):
801 self._stack.pop()
802
803 def _stack_incr_index(self):
804 self._stack[-1].index += 1
805
806 def _visit_struct_type(self, t):
807 self._stack_push(t)
808
809 for field_name, field_type in t.fields.items():
810 try:
811 self._visit_type(field_type)
812 except Exception as e:
813 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
814
815 self._stack_incr_index()
816
817 self._stack_pop()
818
819 def _visit_array_type(self, t):
820 if t.is_variable_length:
821 # find length type
822 try:
823 length_type = self._lookup_path(t.length, t)
824 except Exception as e:
825 raise ConfigError('invalid array type\'s length', e)
826
827             # make sure length type is an unsigned integer
828 if type(length_type) is not metadata.Integer:
829 raise ConfigError('array type\'s length does not point to an integer type')
830
831 if length_type.signed:
832 raise ConfigError('array type\'s length does not point to an unsigned integer type')
833
834 self._visit_type(t.element_type)
835
836 def _visit_variant_type(self, t):
837 # find tag type
838 try:
839 tag_type = self._lookup_path(t.tag, t)
840 except Exception as e:
841 raise ConfigError('invalid variant type\'s tag', e)
842
843 # make sure tag type is an enumeration
844 if type(tag_type) is not metadata.Enum:
845 raise ConfigError('variant type\'s tag does not point to an enumeration type')
846
847 # verify that each variant type's type exists as an enumeration member
848 for tag_name in t.types.keys():
849 if tag_name not in tag_type.members:
850 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
851
852 self._stack_push(t)
853
854 for type_name, type_type in t.types.items():
855 try:
856 self._visit_type(type_type)
857 except Exception as e:
858 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
859
860 self._stack_incr_index()
861
862 self._stack_pop()
863
864 def _visit_type(self, t):
865 if t is None:
866 return
867
868 if type(t) in self._type_to_visit_type_func:
869 func = self._type_to_visit_type_func[type(t)]
870
871 if func is not None:
872 func(t)
873
874 def _visit_event(self, ev):
875 ev_name = ev.name
876
877 # set current event
878 self._cur_event = ev
879
880 # visit event context type
881 self._stack_reset()
882 self._cur_entity = _Entity.EVENT_CONTEXT
883
884 try:
885 self._visit_type(ev.context_type)
886 except Exception as e:
887 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
888
889 # visit event payload type
890 self._stack_reset()
891 self._cur_entity = _Entity.EVENT_PAYLOAD
892
893 try:
894 self._visit_type(ev.payload_type)
895 except Exception as e:
896 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
897
898 def _visit_stream(self, stream):
899 stream_name = stream.name
900
901 # set current stream
902 self._cur_stream = stream
903
904 # reset current event
905 self._cur_event = None
906
907 # visit stream packet context type
908 self._stack_reset()
909 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
910
911 try:
912 self._visit_type(stream.packet_context_type)
913 except Exception as e:
914 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
915
916 # visit stream event header type
917 self._stack_reset()
918 self._cur_entity = _Entity.STREAM_EVENT_HEADER
919
920 try:
921 self._visit_type(stream.event_header_type)
922 except Exception as e:
923 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
924
925 # visit stream event context type
926 self._stack_reset()
927 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
928
929 try:
930 self._visit_type(stream.event_context_type)
931 except Exception as e:
932 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
933
934 # visit events
935 for ev in stream.events.values():
936 try:
937 self._visit_event(ev)
938 except Exception as e:
939                 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
940
941 def validate(self, meta):
942 # set current trace
943 self._cur_trace = meta.trace
944
945 # visit trace packet header type
946 self._stack_reset()
947 self._cur_entity = _Entity.TRACE_PACKET_HEADER
948
949 try:
950 self._visit_type(meta.trace.packet_header_type)
951 except Exception as e:
952 raise ConfigError('invalid packet header type in trace', e)
953
954 # visit streams
955 for stream in meta.streams.values():
956 self._visit_stream(stream)
957
958
959 # Since type inheritance allows types to be only partially defined at
960 # any place in the configuration, this validator validates that actual
961 # trace, stream, and event types are all complete and valid. Therefore
962 # an invalid, but unused, type alias is accepted.
963 class _MetadataTypesHistologyValidator:
964 def __init__(self):
965 self._type_to_validate_type_histology_func = {
966 metadata.Integer: self._validate_integer_histology,
967 metadata.FloatingPoint: self._validate_float_histology,
968 metadata.Enum: self._validate_enum_histology,
969 metadata.String: self._validate_string_histology,
970 metadata.Struct: self._validate_struct_histology,
971 metadata.Array: self._validate_array_histology,
972 metadata.Variant: self._validate_variant_histology,
973 }
974
975 def _validate_integer_histology(self, t):
976 # size is set
977 if t.size is None:
978 raise ConfigError('missing integer type\'s size')
979
980 def _validate_float_histology(self, t):
981 # exponent digits is set
982 if t.exp_size is None:
983 raise ConfigError('missing floating point number type\'s exponent size')
984
985 # mantissa digits is set
986 if t.mant_size is None:
987 raise ConfigError('missing floating point number type\'s mantissa size')
988
989 # exponent and mantissa sum is a multiple of 8
990 if (t.exp_size + t.mant_size) % 8 != 0:
991 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
992
993 def _validate_enum_histology(self, t):
994 # integer type is set
995 if t.value_type is None:
996 raise ConfigError('missing enumeration type\'s value type')
997
998 # there's at least one member
999 if not t.members:
1000 raise ConfigError('enumeration type needs at least one member')
1001
1002 # no overlapping values and all values are valid considering
1003 # the value type
1004 ranges = []
1005
1006 if t.value_type.signed:
1007 value_min = -(1 << t.value_type.size - 1)
1008 value_max = (1 << (t.value_type.size - 1)) - 1
1009 else:
1010 value_min = 0
1011 value_max = (1 << t.value_type.size) - 1
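# e.g. an unsigned 8-bit value type gives [0, 255]; a signed 8-bit
# value type gives [-128, 127]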
1012
1013 for label, value in t.members.items():
1014 for rg in ranges:
1015 if value[0] <= rg[1] and rg[0] <= value[1]:
1016 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1017
1018 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
1019
1020 if value[0] < value_min or value[0] > value_max:
1021 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
1022
1023 if value[1] < value_min or value[1] > value_max:
1024 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
1025
1026 ranges.append(value)
1027
1028 def _validate_string_histology(self, t):
1029 # always valid
1030 pass
1031
1032 def _validate_struct_histology(self, t):
1033 # all fields are valid
1034 for field_name, field_type in t.fields.items():
1035 try:
1036 self._validate_type_histology(field_type)
1037 except Exception as e:
1038 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1039
1040 def _validate_array_histology(self, t):
1041 # length is set
1042 if t.length is None:
1043 raise ConfigError('missing array type\'s length')
1044
1045 # element type is set
1046 if t.element_type is None:
1047 raise ConfigError('missing array type\'s element type')
1048
1049 # element type is valid
1050 try:
1051 self._validate_type_histology(t.element_type)
1052 except Exception as e:
1053 raise ConfigError('invalid array type\'s element type', e)
1054
1055 def _validate_variant_histology(self, t):
1056 # tag is set
1057 if t.tag is None:
1058 raise ConfigError('missing variant type\'s tag')
1059
1060 # there's at least one type
1061 if not t.types:
1062 raise ConfigError('variant type needs at least one type')
1063
1064 # all types are valid
1065 for type_name, type_t in t.types.items():
1066 try:
1067 self._validate_type_histology(type_t)
1068 except Exception as e:
1069 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1070
1071 def _validate_type_histology(self, t):
1072 if t is None:
1073 return
1074
1075 self._type_to_validate_type_histology_func[type(t)](t)
1076
1077 def _validate_entity_type_histology(self, t):
1078 if t is None:
1079 return
1080
1081 if type(t) is not metadata.Struct:
1082 raise ConfigError('expecting a structure type')
1083
1084 self._validate_type_histology(t)
1085
1086 def _validate_event_types_histology(self, ev):
1087 ev_name = ev.name
1088
1089 # validate event context type
1090 try:
1091 self._validate_entity_type_histology(ev.context_type)
1092 except Exception as e:
1093 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1094
1095 # validate event payload type
1096 if ev.payload_type is None:
1097 raise ConfigError('event payload type must exist in event "{}"'.format(ev_name))
1098
1099 # TODO: also check arrays, sequences, and variants
1100 if type(ev.payload_type) is metadata.Struct:
1101 if not ev.payload_type.fields:
1102 raise ConfigError('event payload type must have at least one field for event "{}"'.format(ev_name))
1103
1104 try:
1105 self._validate_entity_type_histology(ev.payload_type)
1106 except Exception as e:
1107 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1108
1109 def _validate_stream_types_histology(self, stream):
1110 stream_name = stream.name
1111
1112 # validate stream packet context type
1113 try:
1114 self._validate_entity_type_histology(stream.packet_context_type)
1115 except Exception as e:
1116 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1117
1118 # validate stream event header type
1119 try:
1120 self._validate_entity_type_histology(stream.event_header_type)
1121 except Exception as e:
1122 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1123
1124 # validate stream event context type
1125 try:
1126 self._validate_entity_type_histology(stream.event_context_type)
1127 except Exception as e:
1128 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1129
1130 # validate events
1131 for ev in stream.events.values():
1132 try:
1133 self._validate_event_types_histology(ev)
1134 except Exception as e:
1135 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1136
1137 def validate(self, meta):
1138 # validate trace packet header type
1139 try:
1140 self._validate_entity_type_histology(meta.trace.packet_header_type)
1141 except Exception as e:
1142 raise ConfigError('invalid trace packet header type', e)
1143
1144 # validate streams
1145 for stream in meta.streams.values():
1146 self._validate_stream_types_histology(stream)
1147
1148
1149 class _YamlConfigParser:
1150 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1151 self._class_name_to_create_type_func = {
1152 'int': self._create_integer,
1153 'integer': self._create_integer,
1154 'flt': self._create_float,
1155 'float': self._create_float,
1156 'floating-point': self._create_float,
1157 'enum': self._create_enum,
1158 'enumeration': self._create_enum,
1159 'str': self._create_string,
1160 'string': self._create_string,
1161 'struct': self._create_struct,
1162 'structure': self._create_struct,
1163 'array': self._create_array,
1164 'var': self._create_variant,
1165 'variant': self._create_variant,
1166 }
1167 self._type_to_create_type_func = {
1168 metadata.Integer: self._create_integer,
1169 metadata.FloatingPoint: self._create_float,
1170 metadata.Enum: self._create_enum,
1171 metadata.String: self._create_string,
1172 metadata.Struct: self._create_struct,
1173 metadata.Array: self._create_array,
1174 metadata.Variant: self._create_variant,
1175 }
1176 self._include_dirs = include_dirs
1177 self._ignore_include_not_found = ignore_include_not_found
1178 self._dump_config = dump_config
1179
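# A minimal sketch (YAML) of the metadata node this method expects: the
# trace object must carry the native byte order, either "le" or "be":
#
#     trace:
#       byte-order: le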
1180 def _set_byte_order(self, metadata_node):
1181 if 'trace' not in metadata_node:
1182 raise ConfigError('missing "trace" property (metadata)')
1183
1184 trace_node = metadata_node['trace']
1185
1186 if not _is_assoc_array_prop(trace_node):
1187 raise ConfigError('"trace" property (metadata) must be an associative array')
1188
1189 if 'byte-order' not in trace_node:
1190 raise ConfigError('missing "byte-order" property (trace)')
1191
1192 bo_node = trace_node['byte-order']
1193
1194 if not _is_str_prop(bo_node):
1195 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
1196
1197 self._bo = _byte_order_str_to_bo(bo_node)
1198
1199 if self._bo is None:
1200 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1201
1202 def _lookup_type_alias(self, name):
1203 if name in self._tas:
1204 return copy.deepcopy(self._tas[name])
1205
1206 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1207 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1208
1209 if unk_prop:
1210 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1211
1212 if 'name' not in prop_mapping_node:
1213 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1214
1215 if 'property' not in prop_mapping_node:
1216 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1217
1218 clock_name = prop_mapping_node['name']
1219 prop = prop_mapping_node['property']
1220
1221 if not _is_str_prop(clock_name):
1222 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1223
1224 if not _is_str_prop(prop):
1225 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1226
1227 if clock_name not in self._clocks:
1228 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1229
1230 if prop != 'value':
1231 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1232
1233 mapped_clock = self._clocks[clock_name]
1234 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1235
1236 def _get_first_unknown_type_prop(self, type_node, known_props):
1237 kp = known_props + ['inherit', 'class']
1238
1239 if self._version >= 201:
1240 kp.append('$inherit')
1241
1242 return _get_first_unknown_prop(type_node, kp)
1243
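# Illustrative integer type object (YAML) as parsed by _create_integer();
# "my_clock" is a hypothetical clock name which must have been registered
# beforehand (self._clocks):
#
#     class: int
#     size: 32
#     signed: false
#     align: 32
#     byte-order: le
#     property-mappings:
#       - type: clock
#         name: my_clock
#         property: value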
1244 def _create_integer(self, obj, node):
1245 if obj is None:
1246 # create integer object
1247 obj = metadata.Integer()
1248
1249 unk_prop = self._get_first_unknown_type_prop(node, [
1250 'size',
1251 'align',
1252 'signed',
1253 'byte-order',
1254 'base',
1255 'encoding',
1256 'property-mappings',
1257 ])
1258
1259 if unk_prop:
1260 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1261
1262 # size
1263 if 'size' in node:
1264 size = node['size']
1265
1266 if not _is_int_prop(size):
1267 raise ConfigError('"size" property of integer type object must be an integer')
1268
1269 if size < 1:
1270 raise ConfigError('invalid integer size: {}'.format(size))
1271
1272 obj.size = size
1273
1274 # align
1275 if 'align' in node:
1276 align = node['align']
1277
1278 if not _is_int_prop(align):
1279 raise ConfigError('"align" property of integer type object must be an integer')
1280
1281 if not _is_valid_alignment(align):
1282 raise ConfigError('invalid alignment: {}'.format(align))
1283
1284 obj.align = align
1285
1286 # signed
1287 if 'signed' in node:
1288 signed = node['signed']
1289
1290 if not _is_bool_prop(signed):
1291 raise ConfigError('"signed" property of integer type object must be a boolean')
1292
1293 obj.signed = signed
1294
1295 # byte order
1296 if 'byte-order' in node:
1297 byte_order = node['byte-order']
1298
1299 if not _is_str_prop(byte_order):
1300 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1301
1302 byte_order = _byte_order_str_to_bo(byte_order)
1303
1304 if byte_order is None:
1305 raise ConfigError('invalid "byte-order" property in integer type object')
1306 else:
1307 byte_order = self._bo
1308
1309 obj.byte_order = byte_order
1310
1311 # base
1312 if 'base' in node:
1313 base = node['base']
1314
1315 if not _is_str_prop(base):
1316 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1317
1318 if base == 'bin':
1319 base = 2
1320 elif base == 'oct':
1321 base = 8
1322 elif base == 'dec':
1323 base = 10
1324 elif base == 'hex':
1325 base = 16
1326 else:
1327 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
1328
1329 obj.base = base
1330
1331 # encoding
1332 if 'encoding' in node:
1333 encoding = node['encoding']
1334
1335 if not _is_str_prop(encoding):
1336 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
1337
1338 encoding = _encoding_str_to_encoding(encoding)
1339
1340 if encoding is None:
1341 raise ConfigError('invalid "encoding" property in integer type object')
1342
1343 obj.encoding = encoding
1344
1345 # property mappings
1346 if 'property-mappings' in node:
1347 prop_mappings = node['property-mappings']
1348
1349 if not _is_array_prop(prop_mappings):
1350 raise ConfigError('"property-mappings" property of integer type object must be an array')
1351
1352 if len(prop_mappings) > 1:
1353 raise ConfigError('length of "property-mappings" array in integer type object must be 1')
1354
1355 del obj.property_mappings[:]
1356
1357 for index, prop_mapping in enumerate(prop_mappings):
1358 if not _is_assoc_array_prop(prop_mapping):
1359 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
1360
1361 if 'type' not in prop_mapping:
1362 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
1363
1364 prop_type = prop_mapping['type']
1365
1366 if not _is_str_prop(prop_type):
1367 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
1368
1369 if prop_type == 'clock':
1370 self._set_int_clock_prop_mapping(obj, prop_mapping)
1371 else:
1372 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1373
1374 return obj
1375
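# Illustrative floating point number type object (YAML): an IEEE 754
# binary32 layout (8 exponent bits, 24 mantissa bits including the
# implicit bit, as CTF counts them):
#
#     class: float
#     size:
#       exp: 8
#       mant: 24
#     align: 32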
1376 def _create_float(self, obj, node):
1377 if obj is None:
1378 # create floating point number object
1379 obj = metadata.FloatingPoint()
1380
1381 unk_prop = self._get_first_unknown_type_prop(node, [
1382 'size',
1383 'align',
1384 'byte-order',
1385 ])
1386
1387 if unk_prop:
1388 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1389
1390 # size
1391 if 'size' in node:
1392 size = node['size']
1393
1394 if not _is_assoc_array_prop(size):
1395 raise ConfigError('"size" property of floating point number type object must be an associative array')
1396
1397 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1398
1399 if unk_prop:
1400 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1401
1402 if 'exp' in size:
1403 exp = size['exp']
1404
1405 if not _is_int_prop(exp):
1406 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1407
1408 if exp < 1:
1409                     raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1410
1411 obj.exp_size = exp
1412
1413 if 'mant' in size:
1414 mant = size['mant']
1415
1416 if not _is_int_prop(mant):
1417 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1418
1419 if mant < 1:
1420                     raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1421
1422 obj.mant_size = mant
1423
1424 # align
1425 if 'align' in node:
1426 align = node['align']
1427
1428 if not _is_int_prop(align):
1429 raise ConfigError('"align" property of floating point number type object must be an integer')
1430
1431 if not _is_valid_alignment(align):
1432 raise ConfigError('invalid alignment: {}'.format(align))
1433
1434 obj.align = align
1435
1436 # byte order
1437 if 'byte-order' in node:
1438 byte_order = node['byte-order']
1439
1440 if not _is_str_prop(byte_order):
1441 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1442
1443 byte_order = _byte_order_str_to_bo(byte_order)
1444
1445 if byte_order is None:
1446 raise ConfigError('invalid "byte-order" property in floating point number type object')
1447 else:
1448 byte_order = self._bo
1449
1450 obj.byte_order = byte_order
1451
1452 return obj
1453
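# Illustrative enumeration type object (YAML); plain string members get
# implicit increasing values, associative array members may give an
# explicit integer or a [min, max] range ("uint8" is a hypothetical type
# alias):
#
#     class: enum
#     value-type: uint8
#     members:
#       - RUNNING            # value 0
#       - STOPPED            # value 1
#       - label: ERROR
#         value: [10, 19]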
1454 def _create_enum(self, obj, node):
1455 if obj is None:
1456 # create enumeration object
1457 obj = metadata.Enum()
1458
1459 unk_prop = self._get_first_unknown_type_prop(node, [
1460 'value-type',
1461 'members',
1462 ])
1463
1464 if unk_prop:
1465 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1466
1467 # value type
1468 if 'value-type' in node:
1469 try:
1470 obj.value_type = self._create_type(node['value-type'])
1471 except Exception as e:
1472 raise ConfigError('cannot create enumeration type\'s integer type', e)
1473
1474 # members
1475 if 'members' in node:
1476 members_node = node['members']
1477
1478 if not _is_array_prop(members_node):
1479 raise ConfigError('"members" property of enumeration type object must be an array')
1480
1481 cur = 0
1482
1483 for index, m_node in enumerate(members_node):
1484 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1485 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1486
1487 if _is_str_prop(m_node):
1488 label = m_node
1489 value = (cur, cur)
1490 cur += 1
1491 else:
1492 unk_prop = _get_first_unknown_prop(m_node, [
1493 'label',
1494 'value',
1495 ])
1496
1497 if unk_prop:
1498 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1499
1500 if 'label' not in m_node:
1501 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1502
1503 label = m_node['label']
1504
1505 if not _is_str_prop(label):
1506 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1507
1508 if 'value' not in m_node:
1509 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1510
1511 value = m_node['value']
1512
1513 if not _is_int_prop(value) and not _is_array_prop(value):
1514 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1515
1516 if _is_int_prop(value):
1517 cur = value + 1
1518 value = (value, value)
1519 else:
1520 if len(value) != 2:
1521 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1522
1523 mn = value[0]
1524 mx = value[1]
1525
1526 if mn > mx:
1527 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1528
1529 value = (mn, mx)
1530 cur = mx + 1
1531
1532 obj.members[label] = value
1533
1534 return obj
1535
1536 def _create_string(self, obj, node):
1537 if obj is None:
1538 # create string object
1539 obj = metadata.String()
1540
1541 unk_prop = self._get_first_unknown_type_prop(node, [
1542 'encoding',
1543 ])
1544
1545 if unk_prop:
1546 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1547
1548 # encoding
1549 if 'encoding' in node:
1550 encoding = node['encoding']
1551
1552 if not _is_str_prop(encoding):
1553 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1554
1555 encoding = _encoding_str_to_encoding(encoding)
1556
1557 if encoding is None:
1558 raise ConfigError('invalid "encoding" property in string type object')
1559
1560 obj.encoding = encoding
1561
1562 return obj
1563
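# Illustrative structure type object (YAML); "uint16" and "uint32" are
# hypothetical type aliases:
#
#     class: struct
#     min-align: 8
#     fields:
#       msg_id: uint16
#       size: uint32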
1564 def _create_struct(self, obj, node):
1565 if obj is None:
1566 # create structure object
1567 obj = metadata.Struct()
1568
1569 unk_prop = self._get_first_unknown_type_prop(node, [
1570 'min-align',
1571 'fields',
1572 ])
1573
1574 if unk_prop:
1575             raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1576
1577 # minimum alignment
1578 if 'min-align' in node:
1579 min_align = node['min-align']
1580
1581 if not _is_int_prop(min_align):
1582 raise ConfigError('"min-align" property of structure type object must be an integer')
1583
1584 if not _is_valid_alignment(min_align):
1585 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1586
1587 obj.min_align = min_align
1588
1589 # fields
1590 if 'fields' in node:
1591 fields = node['fields']
1592
1593 if not _is_assoc_array_prop(fields):
1594 raise ConfigError('"fields" property of structure type object must be an associative array')
1595
1596 for field_name, field_node in fields.items():
1597 if not is_valid_identifier(field_name):
1598 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1599
1600 try:
1601 obj.fields[field_name] = self._create_type(field_node)
1602 except Exception as e:
1603 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1604
1605 return obj
1606
1607 def _create_array(self, obj, node):
1608 if obj is None:
1609 # create array object
1610 obj = metadata.Array()
1611
1612 unk_prop = self._get_first_unknown_type_prop(node, [
1613 'length',
1614 'element-type',
1615 ])
1616
1617 if unk_prop:
1618 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1619
1620 # length
1621 if 'length' in node:
1622 length = node['length']
1623
1624 if not _is_int_prop(length) and not _is_str_prop(length):
1625 raise ConfigError('"length" property of array type object must be an integer or a string')
1626
1627 if type(length) is int and length < 0:
1628 raise ConfigError('invalid static array length: {}'.format(length))
1629
1630 obj.length = length
1631
1632 # element type
1633 if 'element-type' in node:
1634 try:
1635 obj.element_type = self._create_type(node['element-type'])
1636 except Exception as e:
1637 raise ConfigError('cannot create array type\'s element type', e)
1638
1639 return obj
1640
1641 def _create_variant(self, obj, node):
1642 if obj is None:
1643 # create variant object
1644 obj = metadata.Variant()
1645
1646 unk_prop = self._get_first_unknown_type_prop(node, [
1647 'tag',
1648 'types',
1649 ])
1650
1651 if unk_prop:
1652 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1653
1654 # tag
1655 if 'tag' in node:
1656 tag = node['tag']
1657
1658 if not _is_str_prop(tag):
1659 raise ConfigError('"tag" property of variant type object must be a string')
1660
1661 # do not validate variant tag for the moment; will be done in a
1662 # second phase
1663 obj.tag = tag
1664
1665         # types
1666 if 'types' in node:
1667 types = node['types']
1668
1669 if not _is_assoc_array_prop(types):
1670 raise ConfigError('"types" property of variant type object must be an associative array')
1671
1672 # do not validate type names for the moment; will be done in a
1673 # second phase
1674 for type_name, type_node in types.items():
1675 if not is_valid_identifier(type_name):
1676 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1677
1678 try:
1679 obj.types[type_name] = self._create_type(type_node)
1680 except Exception as e:
1681 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1682
1683 return obj
1684
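# A type object is either a string naming an existing type alias, or an
# associative array with a "class" property, or one with an "inherit"
# (v2.0) / "$inherit" (v2.1+) property refining an existing alias, e.g.
# (assuming a previously defined "uint32" alias):
#
#     $inherit: uint32
#     signed: true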
1685 def _create_type(self, type_node):
1686 if type(type_node) is str:
1687 t = self._lookup_type_alias(type_node)
1688
1689 if t is None:
1690 raise ConfigError('unknown type alias "{}"'.format(type_node))
1691
1692 return t
1693
1694 if not _is_assoc_array_prop(type_node):
1695 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
1696
1697 # inherit:
1698 # v2.0: "inherit"
1699 # v2.1+: "$inherit"
1700 inherit_node = None
1701
1702 if self._version >= 200:
1703 if 'inherit' in type_node:
1704 inherit_prop = 'inherit'
1705 inherit_node = type_node[inherit_prop]
1706
1707 if self._version >= 201:
1708 if '$inherit' in type_node:
1709 if inherit_node is not None:
1710 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1711
1712 inherit_prop = '$inherit'
1713 inherit_node = type_node[inherit_prop]
1714
1715 if inherit_node is not None and 'class' in type_node:
1716 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1717
1718 if inherit_node is not None:
1719 if not _is_str_prop(inherit_node):
1720 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1721
1722 base = self._lookup_type_alias(inherit_node)
1723
1724 if base is None:
1725 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1726
1727 func = self._type_to_create_type_func[type(base)]
1728 else:
1729 if 'class' not in type_node:
1730 raise ConfigError('type objects which do not inherit must have a "class" property')
1731
1732 class_name = type_node['class']
1733
1734 if type(class_name) is not str:
1735 raise ConfigError('type objects\' "class" property must be a string')
1736
1737 if class_name not in self._class_name_to_create_type_func:
1738 raise ConfigError('unknown type class "{}"'.format(class_name))
1739
1740 base = None
1741 func = self._class_name_to_create_type_func[class_name]
1742
1743 return func(base, type_node)
1744
1745 def _register_type_aliases(self, metadata_node):
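# Registers the entries of the metadata's 'type-aliases' property: an
# associative array mapping alias names to type objects, each created
# with _create_type(). Illustrative fragment only; the alias name and
# the 'int' class/'size' properties are assumptions:
#
#     type-aliases:
#       my-int:
#         class: int
#         size: 32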
1746 self._tas = dict()
1747
1748 if 'type-aliases' not in metadata_node:
1749 return
1750
1751 ta_node = metadata_node['type-aliases']
1752
1753 if not _is_assoc_array_prop(ta_node):
1754 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1755
1756 for ta_name, ta_type in ta_node.items():
1757 if ta_name in self._tas:
1758 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1759
1760 try:
1761 t = self._create_type(ta_type)
1762 except Exception as e:
1763 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1764
1765 self._tas[ta_name] = t
1766
1767 def _create_clock(self, node):
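# Builds a clock object from its YAML node. Accepted properties:
# 'uuid' (string), 'description' (string), 'freq' (integer >= 1),
# 'error-cycles' (integer >= 0), 'offset' ('cycles' and 'seconds',
# both integers >= 0), 'absolute' (boolean), and 'return-ctype'
# (or '$return-ctype' since version 2.1, a string). Illustrative
# fragment only; the values are assumptions:
#
#     freq: 1000000000
#     offset:
#       seconds: 0
#     return-ctype: uint64_t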
1768 # create clock object
1769 clock = metadata.Clock()
1770
1771 if not _is_assoc_array_prop(node):
1772 raise ConfigError('clock objects must be associative arrays')
1773
1774 known_props = [
1775 'uuid',
1776 'description',
1777 'freq',
1778 'error-cycles',
1779 'offset',
1780 'absolute',
1781 'return-ctype',
1782 ]
1783
1784 if self._version >= 201:
1785 known_props.append('$return-ctype')
1786
1787 unk_prop = _get_first_unknown_prop(node, known_props)
1788
1789 if unk_prop:
1790 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1791
1792 # UUID
1793 if 'uuid' in node:
1794 uuidp = node['uuid']
1795
1796 if not _is_str_prop(uuidp):
1797 raise ConfigError('"uuid" property of clock object must be a string')
1798
1799 try:
1800 uuidp = uuid.UUID(uuidp)
1801 except ValueError:
1802 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1803
1804 clock.uuid = uuidp
1805
1806 # description
1807 if 'description' in node:
1808 desc = node['description']
1809
1810 if not _is_str_prop(desc):
1811 raise ConfigError('"description" property of clock object must be a string')
1812
1813 clock.description = desc
1814
1815 # frequency
1816 if 'freq' in node:
1817 freq = node['freq']
1818
1819 if not _is_int_prop(freq):
1820 raise ConfigError('"freq" property of clock object must be an integer')
1821
1822 if freq < 1:
1823 raise ConfigError('invalid clock frequency: {}'.format(freq))
1824
1825 clock.freq = freq
1826
1827 # error cycles
1828 if 'error-cycles' in node:
1829 error_cycles = node['error-cycles']
1830
1831 if not _is_int_prop(error_cycles):
1832 raise ConfigError('"error-cycles" property of clock object must be an integer')
1833
1834 if error_cycles < 0:
1835 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1836
1837 clock.error_cycles = error_cycles
1838
1839 # offset
1840 if 'offset' in node:
1841 offset = node['offset']
1842
1843 if not _is_assoc_array_prop(offset):
1844 raise ConfigError('"offset" property of clock object must be an associative array')
1845
1846 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1847
1848 if unk_prop:
1849 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1850
1851 # cycles
1852 if 'cycles' in offset:
1853 offset_cycles = offset['cycles']
1854
1855 if not _is_int_prop(offset_cycles):
1856 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1857
1858 if offset_cycles < 0:
1859 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1860
1861 clock.offset_cycles = offset_cycles
1862
1863 # seconds
1864 if 'seconds' in offset:
1865 offset_seconds = offset['seconds']
1866
1867 if not _is_int_prop(offset_seconds):
1868 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1869
1870 if offset_seconds < 0:
1871 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1872
1873 clock.offset_seconds = offset_seconds
1874
1875 # absolute
1876 if 'absolute' in node:
1877 absolute = node['absolute']
1878
1879 if not _is_bool_prop(absolute):
1880 raise ConfigError('"absolute" property of clock object must be a boolean')
1881
1882 clock.absolute = absolute
1883
1884 # return C type:
1885 # v2.0: "return-ctype"
1886 # v2.1+: "$return-ctype"
1887 return_ctype_node = None
1888
1889 if self._version >= 200:
1890 if 'return-ctype' in node:
1891 return_ctype_prop = 'return-ctype'
1892 return_ctype_node = node[return_ctype_prop]
1893
1894 if self._version >= 201:
1895 if '$return-ctype' in node:
1896 if return_ctype_node is not None:
1897 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1898
1899 return_ctype_prop = '$return-ctype'
1900 return_ctype_node = node[return_ctype_prop]
1901
1902 if return_ctype_node is not None:
1903 if not _is_str_prop(return_ctype_node):
1904 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1905
1906 clock.return_ctype = return_ctype_node
1907
1908 return clock
1909
1910 def _register_clocks(self, metadata_node):
1911 self._clocks = collections.OrderedDict()
1912
1913 if 'clocks' not in metadata_node:
1914 return
1915
1916 clocks_node = metadata_node['clocks']
1917
1918 if not _is_assoc_array_prop(clocks_node):
1919 raise ConfigError('"clocks" property (metadata) must be an associative array')
1920
1921 for clock_name, clock_node in clocks_node.items():
1922 if not is_valid_identifier(clock_name):
1923 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
1924
1925 if clock_name in self._clocks:
1926 raise ConfigError('duplicate clock "{}"'.format(clock_name))
1927
1928 try:
1929 clock = self._create_clock(clock_node)
1930 except Exception as e:
1931 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
1932
1933 clock.name = clock_name
1934 self._clocks[clock_name] = clock
1935
1936 def _create_env(self, metadata_node):
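# Builds the metadata's 'env' associative array: keys must be valid C
# identifiers and values must be integers or strings. Illustrative
# fragment only; the entries are assumptions:
#
#     env:
#       my_version_major: 1
#       my_system: zynq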
1937 env = collections.OrderedDict()
1938
1939 if 'env' not in metadata_node:
1940 return env
1941
1942 env_node = metadata_node['env']
1943
1944 if not _is_assoc_array_prop(env_node):
1945 raise ConfigError('"env" property (metadata) must be an associative array')
1946
1947 for env_name, env_value in env_node.items():
1948 if env_name in env:
1949 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
1950
1951 if not is_valid_identifier(env_name):
1952 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
1953
1954 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
1955 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
1956
1957 env[env_name] = env_value
1958
1959 return env
1960
1961 def _register_log_levels(self, metadata_node):
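# Registers named log levels from the metadata's 'log-levels' property
# ('$log-levels' since version 2.1): an associative array mapping
# names to integers >= 0. Illustrative fragment only; the names and
# values are assumptions:
#
#     $log-levels:
#       critical: 1
#       warning: 4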
1962 self._log_levels = dict()
1963
1964 # log levels:
1965 # v2.0: "log-levels"
1966 # v2.1+: "$log-levels"
1967 log_levels_node = None
1968
1969 if self._version >= 200:
1970 if 'log-levels' in metadata_node:
1971 log_levels_prop = 'log-levels'
1972 log_levels_node = metadata_node[log_levels_prop]
1973
1974 if self._version >= 201:
1975 if '$log-levels' in metadata_node:
1976 if log_levels_node is not None:
1977 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
1978
1979 log_levels_prop = '$log-levels'
1980 log_levels_node = metadata_node[log_levels_prop]
1981
1982 if log_levels_node is None:
1983 return
1984
1985 if not _is_assoc_array_prop(log_levels_node):
1986 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
1987
1988 for ll_name, ll_value in log_levels_node.items():
1989 if ll_name in self._log_levels:
1990 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
1991
1992 if not _is_int_prop(ll_value):
1993 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
1994
1995 if ll_value < 0:
1996 raise ConfigError('invalid log level entry ("{}"): log level value must be zero or positive'.format(ll_name))
1997
1998 self._log_levels[ll_name] = ll_value
1999
2000 def _create_trace(self, metadata_node):
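# Builds the trace object from the mandatory 'trace' metadata
# property. Accepted properties: 'byte-order' (consumed beforehand by
# _set_byte_order()), 'uuid' ('auto' or a UUID string), and
# 'packet-header-type' (a type object). Illustrative fragment only;
# the byte order value is an assumption:
#
#     trace:
#       byte-order: le
#       uuid: auto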
2001 # create trace object
2002 trace = metadata.Trace()
2003
2004 if 'trace' not in metadata_node:
2005 raise ConfigError('missing "trace" property (metadata)')
2006
2007 trace_node = metadata_node['trace']
2008
2009 if not _is_assoc_array_prop(trace_node):
2010 raise ConfigError('"trace" property (metadata) must be an associative array')
2011
2012 unk_prop = _get_first_unknown_prop(trace_node, [
2013 'byte-order',
2014 'uuid',
2015 'packet-header-type',
2016 ])
2017
2018 if unk_prop:
2019 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
2020
2021 # set byte order (already parsed)
2022 trace.byte_order = self._bo
2023
2024 # UUID
2025 if 'uuid' in trace_node:
2026 uuidp = trace_node['uuid']
2027
2028 if not _is_str_prop(uuidp):
2029 raise ConfigError('"uuid" property of trace object must be a string')
2030
2031 if uuidp == 'auto':
2032 uuidp = uuid.uuid1()
2033 else:
2034 try:
2035 uuidp = uuid.UUID(uuidp)
2036 except ValueError:
2037 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
2038
2039 trace.uuid = uuidp
2040
2041 # packet header type
2042 if 'packet-header-type' in trace_node:
2043 try:
2044 ph_type = self._create_type(trace_node['packet-header-type'])
2045 except Exception as e:
2046 raise ConfigError('cannot create packet header type (trace)', e)
2047
2048 trace.packet_header_type = ph_type
2049
2050 return trace
2051
2052 def _lookup_log_level(self, ll):
2053 if _is_int_prop(ll):
2054 return ll
2055 elif _is_str_prop(ll) and ll in self._log_levels:
2056 return self._log_levels[ll]
2057
2058 def _create_event(self, event_node):
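# Builds an event object. 'payload-type' is mandatory; 'log-level'
# (a registered log level name or an integer >= 0) and 'context-type'
# are optional. Illustrative fragment only; the names are assumptions:
#
#     log-level: warning
#     payload-type: my_payload_struct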
2059 event = metadata.Event()
2060
2061 if not _is_assoc_array_prop(event_node):
2062 raise ConfigError('event objects must be associative arrays')
2063
2064 unk_prop = _get_first_unknown_prop(event_node, [
2065 'log-level',
2066 'context-type',
2067 'payload-type',
2068 ])
2069
2070 if unk_prop:
2071 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2072
2073 if 'log-level' in event_node:
2074 ll_node = event_node['log-level']
2075
2076 if _is_str_prop(ll_node):
2077 ll = self._lookup_log_level(ll_node)
2078
2079 if ll is None:
2080 raise ConfigError('cannot find log level "{}"'.format(ll_node))
2081 elif _is_int_prop(ll_node):
2082 if ll_node < 0:
2083 raise ConfigError('invalid log level value {}: value must be zero or positive'.format(ll_node))
2084
2085 ll = ll_node
2086 else:
2087 raise ConfigError('"log-level" property must be either a string or an integer')
2088
2089 event.log_level = ll
2090
2091 if 'context-type' in event_node:
2092 try:
2093 t = self._create_type(event_node['context-type'])
2094 except Exception as e:
2095 raise ConfigError('cannot create event\'s context type object', e)
2096
2097 event.context_type = t
2098
2099 if 'payload-type' not in event_node:
2100 raise ConfigError('missing "payload-type" property in event object')
2101
2102 try:
2103 t = self._create_type(event_node['payload-type'])
2104 except Exception as e:
2105 raise ConfigError('cannot create event\'s payload type object', e)
2106
2107 event.payload_type = t
2108
2109 return event
2110
2111 def _create_stream(self, stream_node):
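# Builds a stream object. 'events' is mandatory and must contain at
# least one event; 'packet-context-type', 'event-header-type' and
# 'event-context-type' are optional. Event IDs are assigned in
# declaration order, starting at 0. Illustrative fragment only; the
# names are assumptions:
#
#     packet-context-type: my_packet_context
#     events:
#       my_event:
#         payload-type: my_payload_struct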
2112 stream = metadata.Stream()
2113
2114 if not _is_assoc_array_prop(stream_node):
2115 raise ConfigError('stream objects must be associative arrays')
2116
2117 unk_prop = _get_first_unknown_prop(stream_node, [
2118 'packet-context-type',
2119 'event-header-type',
2120 'event-context-type',
2121 'events',
2122 ])
2123
2124 if unk_prop:
2125 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2126
2127 if 'packet-context-type' in stream_node:
2128 try:
2129 t = self._create_type(stream_node['packet-context-type'])
2130 except Exception as e:
2131 raise ConfigError('cannot create stream\'s packet context type object', e)
2132
2133 stream.packet_context_type = t
2134
2135 if 'event-header-type' in stream_node:
2136 try:
2137 t = self._create_type(stream_node['event-header-type'])
2138 except Exception as e:
2139 raise ConfigError('cannot create stream\'s event header type object', e)
2140
2141 stream.event_header_type = t
2142
2143 if 'event-context-type' in stream_node:
2144 try:
2145 t = self._create_type(stream_node['event-context-type'])
2146 except Exception as e:
2147 raise ConfigError('cannot create stream\'s event context type object', e)
2148
2149 stream.event_context_type = t
2150
2151 if 'events' not in stream_node:
2152 raise ConfigError('missing "events" property in stream object')
2153
2154 events = stream_node['events']
2155
2156 if not _is_assoc_array_prop(events):
2157 raise ConfigError('"events" property of stream object must be an associative array')
2158
2159 if not events:
2160 raise ConfigError('at least one event is needed within a stream object')
2161
2162 cur_id = 0
2163
2164 for ev_name, ev_node in events.items():
2165 try:
2166 ev = self._create_event(ev_node)
2167 except Exception as e:
2168 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
2169
2170 ev.id = cur_id
2171 ev.name = ev_name
2172 stream.events[ev_name] = ev
2173 cur_id += 1
2174
2175 return stream
2176
2177 def _create_streams(self, metadata_node):
2178 streams = collections.OrderedDict()
2179
2180 if 'streams' not in metadata_node:
2181 raise ConfigError('missing "streams" property (metadata)')
2182
2183 streams_node = metadata_node['streams']
2184
2185 if not _is_assoc_array_prop(streams_node):
2186 raise ConfigError('"streams" property (metadata) must be an associative array')
2187
2188 if not streams_node:
2189 raise ConfigError('at least one stream is needed (metadata)')
2190
2191 cur_id = 0
2192
2193 for stream_name, stream_node in streams_node.items():
2194 try:
2195 stream = self._create_stream(stream_node)
2196 except Exception as e:
2197 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2198
2199 stream.id = cur_id
2200 stream.name = str(stream_name)
2201 streams[stream_name] = stream
2202 cur_id += 1
2203
2204 return streams
2205
2206 def _create_metadata(self, root):
2207 meta = metadata.Metadata()
2208
2209 if 'metadata' not in root:
2210 raise ConfigError('missing "metadata" property (configuration)')
2211
2212 metadata_node = root['metadata']
2213
2214 if not _is_assoc_array_prop(metadata_node):
2215 raise ConfigError('"metadata" property (configuration) must be an associative array')
2216
2217 known_props = [
2218 'type-aliases',
2219 'log-levels',
2220 'trace',
2221 'env',
2222 'clocks',
2223 'streams',
2224 ]
2225
2226 if self._version >= 201:
2227 known_props.append('$log-levels')
2228
2229 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
2230
2231 if unk_prop:
2232 add = ''
2233
2234 if unk_prop == '$include':
2235 add = ' (use version 2.1 or greater)'
2236
2237 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
2238
2239 self._set_byte_order(metadata_node)
2240 self._register_clocks(metadata_node)
2241 meta.clocks = self._clocks
2242 self._register_type_aliases(metadata_node)
2243 meta.env = self._create_env(metadata_node)
2244 meta.trace = self._create_trace(metadata_node)
2245 self._register_log_levels(metadata_node)
2246 meta.streams = self._create_streams(metadata_node)
2247
2248 return meta
2249
2250 def _get_version(self, root):
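# Converts the configuration's version string to a comparable
# integer: major * 100 + minor, so '2.0' -> 200 and '2.1' -> 201.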
2251 if 'version' not in root:
2252 raise ConfigError('missing "version" property (configuration)')
2253
2254 version_node = root['version']
2255
2256 if not _is_str_prop(version_node):
2257 raise ConfigError('"version" property (configuration) must be a string')
2258
2259 version_node = version_node.strip()
2260
2261 if version_node not in ['2.0', '2.1']:
2262 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2263
2264 # convert version string to comparable version integer
2265 parts = version_node.split('.')
2266 version = int(parts[0]) * 100 + int(parts[1])
2267
2268 return version
2269
2270 def _get_prefix(self, root):
2271 if 'prefix' not in root:
2272 return 'barectf_'
2273
2274 prefix_node = root['prefix']
2275
2276 if not _is_str_prop(prefix_node):
2277 raise ConfigError('"prefix" property (configuration) must be a string')
2278
2279 if not is_valid_identifier(prefix_node):
2280 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
2281
2282 return prefix_node
2283
2284 def _get_last_include_file(self):
2285 if self._include_stack:
2286 return self._include_stack[-1]
2287
2288 return self._root_yaml_path
2289
2290 def _load_include(self, yaml_path):
2291 for inc_dir in self._include_dirs:
2292 # candidate path: current include directory + file name
2293 # note: os.path.join() ignores the preceding arguments when the last one is absolute
2294 inc_path = os.path.join(inc_dir, yaml_path)
2295
2296 # real path (symbolic links resolved)
2297 real_path = os.path.realpath(inc_path)
2298
2299 # normalized path (redundant separators and up-level references removed)
2300 norm_path = os.path.normpath(real_path)
2301
2302 if not os.path.isfile(norm_path):
2303 # file does not exist: skip
2304 continue
2305
2306 if norm_path in self._include_stack:
2307 base_path = self._get_last_include_file()
2308 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2309
2310 self._include_stack.append(norm_path)
2311
2312 # load raw content
2313 return self._yaml_ordered_load(norm_path)
2314
2315 if not self._ignore_include_not_found:
2316 base_path = self._get_last_include_file()
2317 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2318
2319 return None
2320
2321 def _get_include_paths(self, include_node):
2322 if _is_str_prop(include_node):
2323 return [include_node]
2324 elif _is_array_prop(include_node):
2325 for include_path in include_node:
2326 if not _is_str_prop(include_path):
2327 raise ConfigError('invalid include property: expecting array of strings')
2328
2329 return include_node
2330
2331 raise ConfigError('invalid include property: expecting string or array of strings')
2332
2333 def _update_node(self, base_node, overlay_node):
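# Merges overlay_node into base_node in place: associative arrays are
# merged recursively, arrays are concatenated (base items first, then
# overlay items), and any other overlay value replaces the base value.
# Illustrative example; the values are assumptions:
#
#     base:    {a: 1, b: [1], c: {x: 1}}
#     overlay: {a: 2, b: [2], c: {y: 2}}
#     result:  {a: 2, b: [1, 2], c: {x: 1, y: 2}}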
2334 for olay_key, olay_value in overlay_node.items():
2335 if olay_key in base_node:
2336 base_value = base_node[olay_key]
2337
2338 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2339 # merge dictionaries
2340 self._update_node(base_value, olay_value)
2341 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2342 # append extension array items to base items
2343 base_value += olay_value
2344 else:
2345 # fall back to replacing
2346 base_node[olay_key] = olay_value
2347 else:
2348 base_node[olay_key] = olay_value
2349
2350 def _process_node_include(self, last_overlay_node, name,
2351 process_base_include_cb,
2352 process_children_include_cb=None):
2353 if not _is_assoc_array_prop(last_overlay_node):
2354 raise ConfigError('{} objects must be associative arrays'.format(name))
2355
2356 # process children inclusions first
2357 if process_children_include_cb:
2358 process_children_include_cb(last_overlay_node)
2359
2360 if '$include' in last_overlay_node:
2361 include_node = last_overlay_node['$include']
2362 else:
2363 # no includes!
2364 return last_overlay_node
2365
2366 include_paths = self._get_include_paths(include_node)
2367 cur_base_path = self._get_last_include_file()
2368 base_node = None
2369
2370 # keep the include paths and remove the include property
2371 include_paths = copy.deepcopy(include_paths)
2372 del last_overlay_node['$include']
2373
2374 for include_path in include_paths:
2375 # load raw YAML from included file
2376 overlay_node = self._load_include(include_path)
2377
2378 if overlay_node is None:
2379 # cannot find include file, but we're ignoring those
2380 # errors, otherwise _load_include() itself raises
2381 # a config error
2382 continue
2383
2384 # recursively process includes
2385 try:
2386 overlay_node = process_base_include_cb(overlay_node)
2387 except Exception as e:
2388 raise ConfigError('in "{}"'.format(cur_base_path), e)
2389
2390 # pop include stack now that we're done including
2391 del self._include_stack[-1]
2392
2393 # at this point, base_node is fully resolved (does not
2394 # contain any include property)
2395 if base_node is None:
2396 base_node = overlay_node
2397 else:
2398 self._update_node(base_node, overlay_node)
2399
2400 # finally, we update the latest base node with our last overlay
2401 # node
2402 if base_node is None:
2403 # nothing was included, which is possible when we're
2404 # ignoring include errors
2405 return last_overlay_node
2406
2407 self._update_node(base_node, last_overlay_node)
2408
2409 return base_node
2410
2411 def _process_event_include(self, event_node):
2412 return self._process_node_include(event_node, 'event',
2413 self._process_event_include)
2414
2415 def _process_stream_include(self, stream_node):
2416 def process_children_include(stream_node):
2417 if 'events' in stream_node:
2418 events_node = stream_node['events']
2419
2420 if not _is_assoc_array_prop(events_node):
2421 raise ConfigError('"events" property must be an associative array')
2422
2423 events_node_keys = list(events_node.keys())
2424
2425 for key in events_node_keys:
2426 event_node = events_node[key]
2427
2428 try:
2429 events_node[key] = self._process_event_include(event_node)
2430 except Exception as e:
2431 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2432
2433 return self._process_node_include(stream_node, 'stream',
2434 self._process_stream_include,
2435 process_children_include)
2436
2437 def _process_trace_include(self, trace_node):
2438 return self._process_node_include(trace_node, 'trace',
2439 self._process_trace_include)
2440
2441 def _process_clock_include(self, clock_node):
2442 return self._process_node_include(clock_node, 'clock',
2443 self._process_clock_include)
2444
2445 def _process_metadata_include(self, metadata_node):
2446 def process_children_include(metadata_node):
2447 if 'trace' in metadata_node:
2448 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2449
2450 if 'clocks' in metadata_node:
2451 clocks_node = metadata_node['clocks']
2452
2453 if not _is_assoc_array_prop(clocks_node):
2454 raise ConfigError('"clocks" property (metadata) must be an associative array')
2455
2456 clocks_node_keys = list(clocks_node.keys())
2457
2458 for key in clocks_node_keys:
2459 clock_node = clocks_node[key]
2460
2461 try:
2462 clocks_node[key] = self._process_clock_include(clock_node)
2463 except Exception as e:
2464 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2465
2466 if 'streams' in metadata_node:
2467 streams_node = metadata_node['streams']
2468
2469 if not _is_assoc_array_prop(streams_node):
2470 raise ConfigError('"streams" property (metadata) must be an associative array')
2471
2472 streams_node_keys = list(streams_node.keys())
2473
2474 for key in streams_node_keys:
2475 stream_node = streams_node[key]
2476
2477 try:
2478 streams_node[key] = self._process_stream_include(stream_node)
2479 except Exception as e:
2480 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2481
2482 return self._process_node_include(metadata_node, 'metadata',
2483 self._process_metadata_include,
2484 process_children_include)
2485
2486 def _process_root_includes(self, root):
2487 # The following config objects support includes:
2488 #
2489 # * Metadata object
2490 # * Trace object
2491 # * Stream object
2492 # * Event object
2493 #
2494 # We need to process the event includes first, then the stream
2495 # includes, then the trace includes, and finally the metadata
2496 # includes.
2497 #
2498 # In each object, only one of the $include and $include-replace
2499 # special properties is allowed.
2500 #
2501 # We keep a stack of absolute paths to included files to detect
2502 # recursion.
2503 if 'metadata' in root:
2504 root['metadata'] = self._process_metadata_include(root['metadata'])
2505
2506 return root
2507
2508 def _yaml_ordered_dump(self, node, **kwds):
2509 class ODumper(yaml.Dumper):
2510 pass
2511
2512 def dict_representer(dumper, node):
2513 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2514 node.items())
2515
2516 ODumper.add_representer(collections.OrderedDict, dict_representer)
2517
2518 return yaml.dump(node, Dumper=ODumper, **kwds)
2519
2520 def _yaml_ordered_load(self, yaml_path):
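# Loads a YAML file into an OrderedDict-based tree so that the
# declaration order of properties (e.g. streams and events) is
# preserved; raises ConfigError if the file cannot be opened or
# parsed, or if its root is not an associative array.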
2521 class OLoader(yaml.Loader):
2522 pass
2523
2524 def construct_mapping(loader, node):
2525 loader.flatten_mapping(node)
2526
2527 return collections.OrderedDict(loader.construct_pairs(node))
2528
2529 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2530 construct_mapping)
2531
2532 # YAML -> Python
2533 try:
2534 with open(yaml_path, 'r') as f:
2535 node = yaml.load(f, OLoader)
2536 except OSError as e:
2537 raise ConfigError('cannot open file "{}"'.format(yaml_path), e)
2538 except Exception as e:
2539 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2540
2541 # the loaded node must be an associative array
2542 if not _is_assoc_array_prop(node):
2543 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2544
2545 return node
2546
2547 def _reset(self):
2548 self._version = None
2549 self._include_stack = []
2550
2551 def parse(self, yaml_path):
2552 self._reset()
2553 self._root_yaml_path = yaml_path
2554
2555 try:
2556 root = self._yaml_ordered_load(yaml_path)
2557 except Exception as e:
2558 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2559
2560 if not _is_assoc_array_prop(root):
2561 raise ConfigError('configuration must be an associative array')
2562
2563 unk_prop = _get_first_unknown_prop(root, [
2564 'version',
2565 'prefix',
2566 'metadata',
2567 ])
2568
2569 if unk_prop:
2570 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
2571
2572 # get the config version
2573 self._version = self._get_version(root)
2574
2575 # process includes if supported
2576 if self._version >= 201:
2577 root = self._process_root_includes(root)
2578
2579 # dump config if required
2580 if self._dump_config:
2581 print(self._yaml_ordered_dump(root, indent=2,
2582 default_flow_style=False))
2583
2584 # get prefix and metadata
2585 prefix = self._get_prefix(root)
2586 meta = self._create_metadata(root)
2587
2588 return Config(self._version, prefix, meta)
2589
2590
2591 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2592 try:
2593 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2594 dump_config)
2595 cfg = parser.parse(path)
2596
2597 return cfg
2598 except Exception as e:
2599 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)