[barectf.git] / barectf / config.py @ commit 794492efe1c5acd131d56e19b7cad85eb0c351e2
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2016 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45 class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
69 env = meta.env
70
71 env['domain'] = 'bare'
72 env['tracer_name'] = 'barectf'
73 version_tuple = barectf.get_version_tuple()
74 env['tracer_major'] = version_tuple[0]
75 env['tracer_minor'] = version_tuple[1]
76 env['tracer_patch'] = version_tuple[2]
77 env['barectf_gen_date'] = datetime.datetime.now().isoformat()
78
79 @property
80 def version(self):
81 return self._version
82
83 @version.setter
84 def version(self, value):
85 self._version = value
86
87 @property
88 def metadata(self):
89 return self._metadata
90
91 @metadata.setter
92 def metadata(self, value):
93 self._validate_metadata(value)
94 self._augment_metadata_env(value)
95 self._metadata = value
96
97 @property
98 def prefix(self):
99 return self._prefix
100
101 @prefix.setter
102 def prefix(self, value):
103 if not is_valid_identifier(value):
104 raise ConfigError('prefix must be a valid C identifier')
105
106 self._prefix = value
107
108
109 def _is_assoc_array_prop(node):
110 return isinstance(node, dict)
111
112
113 def _is_array_prop(node):
114 return isinstance(node, list)
115
116
117 def _is_int_prop(node):
118 return type(node) is int
119
120
121 def _is_str_prop(node):
122 return type(node) is str
123
124
125 def _is_bool_prop(node):
126 return type(node) is bool
127
128
129 def _is_valid_alignment(align):
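# a valid alignment is a power of two: a power of two has exactly one bit
# set, so `align & (align - 1)` is 0; the `align > 0` test rejects 0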
130 return ((align & (align - 1)) == 0) and align > 0
131
132
133 def _byte_order_str_to_bo(bo_str):
134 bo_str = bo_str.lower()
135
136 if bo_str == 'le':
137 return metadata.ByteOrder.LE
138 elif bo_str == 'be':
139 return metadata.ByteOrder.BE
140
141
142 def _encoding_str_to_encoding(encoding_str):
143 encoding_str = encoding_str.lower()
144
145 if encoding_str == 'utf-8' or encoding_str == 'utf8':
146 return metadata.Encoding.UTF8
147 elif encoding_str == 'ascii':
148 return metadata.Encoding.ASCII
149 elif encoding_str == 'none':
150 return metadata.Encoding.NONE
151
152
153 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
154 _ctf_keywords = set([
155 'align',
156 'callsite',
157 'clock',
158 'enum',
159 'env',
160 'event',
161 'floating_point',
162 'integer',
163 'stream',
164 'string',
165 'struct',
166 'trace',
167 'typealias',
168 'typedef',
169 'variant',
170 ])
171
172
173 def is_valid_identifier(iden):
174 if not _re_iden.match(iden):
175 return False
176
177 if iden in _ctf_keywords:
178 return False
179
180 return True
181
182
183 def _get_first_unknown_prop(node, known_props):
184 for prop_name in node:
185 if prop_name in known_props:
186 continue
187
188 return prop_name
189
190
191 # This validator validates the configured metadata for barectf-specific
192 # needs.
193 #
194 # barectf requires that:
195 #
196 # * all headers/contexts are at least byte-aligned
197 # * all integer and floating point number sizes are <= 64 bits
198 # * there are no inner structures, arrays, or variants
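#
# As an illustrative sketch only (the property and field names below are
# hypothetical), using the YAML type-object notation handled by
# _YamlConfigParser further down, this validator would accept the first
# field and reject the next two:
#
#     payload-type:
#       class: struct
#       fields:
#         msg_id: { class: int, size: 16 }        # OK: 16 <= 64
#         huge:   { class: int, size: 128 }       # rejected: size > 64
#         inner:  { class: struct, fields: {} }   # rejected: inner structure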
199 class _BarectfMetadataValidator:
200 def __init__(self):
201 self._type_to_validate_type_func = {
202 metadata.Integer: self._validate_int_type,
203 metadata.FloatingPoint: self._validate_float_type,
204 metadata.Enum: self._validate_enum_type,
205 metadata.String: self._validate_string_type,
206 metadata.Struct: self._validate_struct_type,
207 metadata.Array: self._validate_array_type,
208 metadata.Variant: self._validate_variant_type,
209 }
210
211 def _validate_int_type(self, t, entity_root):
212 if t.size > 64:
213 raise ConfigError('integer type\'s size must be less than or equal to 64 bits')
214
215 def _validate_float_type(self, t, entity_root):
216 if t.size > 64:
217 raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')
218
219 def _validate_enum_type(self, t, entity_root):
220 if t.value_type.size > 64:
221 raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
222
223 def _validate_string_type(self, t, entity_root):
224 pass
225
226 def _validate_struct_type(self, t, entity_root):
227 if not entity_root:
228 raise ConfigError('inner structure types are not supported as of this version')
229
230 for field_name, field_type in t.fields.items():
231 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
232 if field_name == 'uuid':
233 # allow
234 continue
235
236 try:
237 self._validate_type(field_type, False)
238 except Exception as e:
239 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
240
241 def _validate_array_type(self, t, entity_root):
242 raise ConfigError('array types are not supported as of this version')
243
244 def _validate_variant_type(self, t, entity_root):
245 raise ConfigError('variant types are not supported as of this version')
246
247 def _validate_type(self, t, entity_root):
248 self._type_to_validate_type_func[type(t)](t, entity_root)
249
250 def _validate_entity(self, t):
251 if t is None:
252 return
253
254 # make sure entity is byte-aligned
255 if t.align < 8:
256 raise ConfigError('type must be at least byte-aligned (alignment >= 8)')
257
258 # make sure entity is a structure
259 if type(t) is not metadata.Struct:
260 raise ConfigError('expecting a structure type')
261
262 # validate types
263 self._validate_type(t, True)
264
265 def _validate_entities_and_names(self, meta):
266 self._cur_entity = _Entity.TRACE_PACKET_HEADER
267
268 try:
269 self._validate_entity(meta.trace.packet_header_type)
270 except Exception as e:
271 raise ConfigError('invalid trace packet header type', e)
272
273 for stream_name, stream in meta.streams.items():
274 if not is_valid_identifier(stream_name):
275 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
276
277 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
278
279 try:
280 self._validate_entity(stream.packet_context_type)
281 except Exception as e:
282 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
283
284 self._cur_entity = _Entity.STREAM_EVENT_HEADER
285
286 try:
287 self._validate_entity(stream.event_header_type)
288 except Exception as e:
289 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
290
291 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
292
293 try:
294 self._validate_entity(stream.event_context_type)
295 except Exception as e:
296 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
297
298 try:
299 for ev_name, ev in stream.events.items():
300 if not is_valid_identifier(ev_name):
301 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
302
303 self._cur_entity = _Entity.EVENT_CONTEXT
304
305 try:
306 self._validate_entity(ev.context_type)
307 except Exception as e:
308 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
309
310 self._cur_entity = _Entity.EVENT_PAYLOAD
311
312 if ev.payload_type is None:
313 raise ConfigError('missing payload type in event "{}"'.format(ev_name))
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if not ev.payload_type.fields:
321 raise ConfigError('empty payload type in event "{}"'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
329 # This validator validates the special fields of trace, stream, and event
330 # types. For example, it checks that the "stream_id" field exists in the
331 # trace packet header when there is more than one stream, among other checks.
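#
# For reference, a trace packet header type along these lines satisfies the
# checks below (an illustrative sketch in the same YAML type-object notation;
# the "packet-header-type" property name is assumed):
#
#     packet-header-type:
#       class: struct
#       fields:
#         magic:     { class: int, size: 32 }   # must be first, 32-bit unsigned
#         uuid:
#           class: array
#           length: 16
#           element-type: { class: int, size: 8, align: 8 }
#         stream_id: { class: int, size: 8 }    # required with more than one stream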
332 class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
337 if t is None:
338 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is missing')
339
340 if type(t) is not metadata.Struct:
341 raise ConfigError('need "stream_id" field in trace packet header type, but trace packet header type is not a structure type')
342
343 if 'stream_id' not in t.fields:
344 raise ConfigError('need "stream_id" field in trace packet header type')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
366
367 # "id" size can fit all event IDs
368 if len(self._meta.streams) > (1 << field_type.size):
369 raise ConfigError('"stream_id" field\' size in trace packet header type is too small for the number of trace streams')
370 elif field_name == 'uuid':
371 if self._meta.trace.uuid is None:
372 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
373
374 if type(field_type) is not metadata.Array:
375 raise ConfigError('"uuid" field in trace packet header type must be an array')
376
377 if field_type.length != 16:
378 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
379
380 element_type = field_type.element_type
381
382 if type(element_type) is not metadata.Integer:
383 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
384
385 if element_type.size != 8:
386 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
387
388 if element_type.signed:
389 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
390
391 if element_type.align != 8:
392 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
393
394 def _validate_trace(self, meta):
395 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
396
397 def _validate_stream_packet_context(self, stream):
398 t = stream.packet_context_type
399
400 if t is None:
401 raise ConfigError('missing "packet-context-type" property in stream object')
402
403 if type(t) is not metadata.Struct:
404 raise ConfigError('"packet-context-type": expecting a structure type')
405
406 # "timestamp_begin", if exists, is an unsigned integer type,
407 # mapped to a clock
408 ts_begin = None
409
410 if 'timestamp_begin' in t.fields:
411 ts_begin = t.fields['timestamp_begin']
412
413 if type(ts_begin) is not metadata.Integer:
414 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
415
416 if ts_begin.signed:
417 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
418
419 if not ts_begin.property_mappings:
420 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
421
422 # "timestamp_end", if exists, is an unsigned integer type,
423 # mapped to a clock
424 ts_end = None
425
426 if 'timestamp_end' in t.fields:
427 ts_end = t.fields['timestamp_end']
428
429 if type(ts_end) is not metadata.Integer:
430 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
431
432 if ts_end.signed:
433 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
434
435 if not ts_end.property_mappings:
436 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
437
438 # "timestamp_begin" and "timestamp_end" exist together
439 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
440 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
441
442 # "timestamp_begin" and "timestamp_end" are mapped to the same clock
443 if ts_begin is not None and ts_end is not None:
444 if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
445 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be mapped to the same clock object in stream packet context type')
446
447 # "events_discarded", if exists, is an unsigned integer type
448 if 'events_discarded' in t.fields:
449 events_discarded = t.fields['events_discarded']
450
451 if type(events_discarded) is not metadata.Integer:
452 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
453
454 if events_discarded.signed:
455 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
456
457 # "packet_size" and "content_size" must exist
458 if 'packet_size' not in t.fields:
459 raise ConfigError('missing "packet_size" field in stream packet context type')
460
461 packet_size = t.fields['packet_size']
462
463 # "content_size" and "content_size" must exist
464 if 'content_size' not in t.fields:
465 raise ConfigError('missing "content_size" field in stream packet context type')
466
467 content_size = t.fields['content_size']
468
469 # "packet_size" is an unsigned integer type
470 if type(packet_size) is not metadata.Integer:
471 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
472
473 if packet_size.signed:
474 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
475
476 # "content_size" is an unsigned integer type
477 if type(content_size) is not metadata.Integer:
478 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
479
480 if content_size.signed:
481 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
482
483 # "packet_size" size should be greater than or equal to "content_size" size
484 if content_size.size > packet_size.size:
485 raise ConfigError('"content_size" field size must be lesser than or equal to "packet_size" field size')
486
487 def _validate_stream_event_header(self, stream):
488 t = stream.event_header_type
489
490 # needs "id" field?
491 if len(stream.events) > 1:
492 # yes
493 if t is None:
494 raise ConfigError('need "id" field in stream event header type, but stream event header type is missing')
495
496 if type(t) is not metadata.Struct:
497 raise ConfigError('need "id" field in stream event header type, but stream event header type is not a structure type')
498
499 if 'id' not in t.fields:
500 raise ConfigError('need "id" field in stream event header type')
501
502 # validate "id" and "timestamp" types
503 if type(t) is not metadata.Struct:
504 return
505
506 # "timestamp", if exists, is an unsigned integer type,
507 # mapped to a clock
508 if 'timestamp' in t.fields:
509 ts = t.fields['timestamp']
510
511 if type(ts) is not metadata.Integer:
512 raise ConfigError('"timestamp" field in stream event header type must be an integer type')
513
514 if ts.signed:
515 raise ConfigError('"timestamp" field in stream event header type must be an unsigned integer type')
516
517 if not ts.property_mappings:
518 raise ConfigError('"timestamp" field in stream event header type must be mapped to a clock')
519
520 if 'id' in t.fields:
521 eid = t.fields['id']
522
523 # "id" is an unsigned integer type
524 if type(eid) is not metadata.Integer:
525 raise ConfigError('"id" field in stream event header type must be an integer type')
526
527 if eid.signed:
528 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
529
530 # "id" size can fit all event IDs
531 if len(stream.events) > (1 << eid.size):
532 raise ConfigError('"id" field\' size in stream event header type is too small for the number of stream events')
533
534 def _validate_stream(self, stream):
535 self._validate_stream_packet_context(stream)
536 self._validate_stream_event_header(stream)
537
538 def validate(self, meta):
539 self._meta = meta
540 self._validate_trace(meta)
541
542 for stream in meta.streams.values():
543 try:
544 self._validate_stream(stream)
545 except Exception as e:
546 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
547
548
549 class _MetadataDynamicTypesValidatorStackEntry:
550 def __init__(self, base_t):
551 self._base_t = base_t
552 self._index = 0
553
554 @property
555 def index(self):
556 return self._index
557
558 @index.setter
559 def index(self, value):
560 self._index = value
561
562 @property
563 def base_t(self):
564 return self._base_t
565
566 @base_t.setter
567 def base_t(self, value):
568 self._base_t = value
569
570
571 # Entities. The order of the values is important: the validators compare them to know which entities a lookup path may reference.
572 @enum.unique
573 class _Entity(enum.IntEnum):
574 TRACE_PACKET_HEADER = 0
575 STREAM_PACKET_CONTEXT = 1
576 STREAM_EVENT_HEADER = 2
577 STREAM_EVENT_CONTEXT = 3
578 EVENT_CONTEXT = 4
579 EVENT_PAYLOAD = 5
580
581
582 # This validator validates dynamic metadata types, that is, it ensures
583 # variable-length array lengths and variant tags actually point to
584 # something that exists. It also checks that variable-length array
585 # lengths point to integer types and variant tags to enumeration types.
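#
# Illustrative sketch (field names are hypothetical): a variable-length
# array whose "length" property is a lookup path. The path roots resolved
# below are "trace.packet.header", "stream.packet.context",
# "stream.event.header", "stream.event.context", "event.context" and
# "event.payload"/"event.fields".
#
#     payload-type:
#       class: struct
#       fields:
#         nr_items: { class: int, size: 8 }
#         items:
#           class: array
#           length: event.payload.nr_items    # must point to an unsigned integer
#           element-type: { class: int, size: 32 }
#
# Note that _BarectfMetadataValidator above rejects array and variant types
# in this version, so such a configuration is ultimately refused; the sketch
# only shows what this validator resolves and checks.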
586 class _MetadataDynamicTypesValidator:
587 def __init__(self):
588 self._type_to_visit_type_func = {
589 metadata.Integer: None,
590 metadata.FloatingPoint: None,
591 metadata.Enum: None,
592 metadata.String: None,
593 metadata.Struct: self._visit_struct_type,
594 metadata.Array: self._visit_array_type,
595 metadata.Variant: self._visit_variant_type,
596 }
597
598 self._cur_trace = None
599 self._cur_stream = None
600 self._cur_event = None
601
602 def _lookup_path_from_base(self, path, parts, base, start_index,
603 base_is_current, from_t):
604 index = start_index
605 cur_t = base
606 found_path = []
607
608 while index < len(parts):
609 part = parts[index]
610 next_t = None
611
612 if type(cur_t) is metadata.Struct:
613 enumerated_items = enumerate(cur_t.fields.items())
614
615 # lookup each field
616 for i, (field_name, field_type) in enumerated_items:
617 if field_name == part:
618 next_t = field_type
619 found_path.append((i, field_type))
620
621 if next_t is None:
622 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
623 elif type(cur_t) is metadata.Variant:
624 enumerated_items = enumerate(cur_t.types.items())
625
626 # lookup each type
627 for i, (type_name, type_type) in enumerated_items:
628 if type_name == part:
629 next_t = type_type
630 found_path.append((i, type_type))
631
632 if next_t is None:
633 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
634 else:
635 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
636
637 cur_t = next_t
638 index += 1
639
640 # make sure that the pointed type is not the pointing type
641 if from_t is cur_t:
642 raise ConfigError('invalid path "{}": pointing to self'.format(path))
643
644 # if we're here, we found the type; however, it could be located
645 # _after_ the variant/VLA which refers to it. If the pointing
646 # and pointed types are in the same entity, compare the current
647 # stack entries' indexes to our index path to catch this case
648 if not base_is_current:
649 return cur_t
650
651 for index, entry in enumerate(self._stack):
652 if index == len(found_path):
653 # end of index path; valid so far
654 break
655
656 if found_path[index][0] > entry.index:
657 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
658
659 # also make sure that both pointed and pointing types share
660 # a common structure ancestor
661 for index, entry in enumerate(self._stack):
662 if index == len(found_path):
663 break
664
665 if entry.base_t is not found_path[index][1]:
666 # found common ancestor
667 if type(entry.base_t) is metadata.Variant:
668 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
669
670 return cur_t
671
672 def _lookup_path_from_top(self, path, parts):
673 if len(parts) != 1:
674 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
675
676 find_name = parts[0]
677 index = len(self._stack) - 1
678 got_struct = False
679
680 # check stack entries in reversed order
681 for entry in reversed(self._stack):
682 # structure base type
683 if type(entry.base_t) is metadata.Struct:
684 got_struct = True
685 enumerated_items = enumerate(entry.base_t.fields.items())
686
687 # lookup each field, until the current visiting index is met
688 for i, (field_name, field_type) in enumerated_items:
689 if i == entry.index:
690 break
691
692 if field_name == find_name:
693 return field_type
694
695 # variant base type
696 elif type(entry.base_t) is metadata.Variant:
697 enumerated_items = enumerate(entry.base_t.types.items())
698
699 # lookup each type, until the current visiting index is met
700 for i, (type_name, type_type) in enumerated_items:
701 if i == entry.index:
702 break
703
704 if type_name == find_name:
705 if not got_struct:
706 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
707
708 return type_type
709
710 # nothing returned here: cannot find type
711 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
712
713 def _lookup_path(self, path, from_t):
714 parts = path.lower().split('.')
715 base = None
716 base_is_current = False
717
718 if len(parts) >= 3:
719 if parts[0] == 'trace':
720 if parts[1] == 'packet' and parts[2] == 'header':
721 # make sure packet header exists
722 if self._cur_trace.packet_header_type is None:
723 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
724
725 base = self._cur_trace.packet_header_type
726
727 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
728 base_is_current = True
729 else:
730 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
731 elif parts[0] == 'stream':
732 if parts[1] == 'packet' and parts[2] == 'context':
733 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
734 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
735
736 if self._cur_stream.packet_context_type is None:
737 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
738
739 base = self._cur_stream.packet_context_type
740
741 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
742 base_is_current = True
743 elif parts[1] == 'event':
744 if parts[2] == 'header':
745 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
746 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
747
748 if self._cur_stream.event_header_type is None:
749 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
750
751 base = self._cur_stream.event_header_type
752
753 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
754 base_is_current = True
755 elif parts[2] == 'context':
756 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
757 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
758
759 if self._cur_stream.event_context_type is None:
760 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
761
762 base = self._cur_stream.event_context_type
763
764 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
765 base_is_current = True
766 else:
767 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
768 else:
769 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
770
771 if base is not None:
772 start_index = 3
773
774 if len(parts) >= 2 and base is None:
775 if parts[0] == 'event':
776 if parts[1] == 'context':
777 if self._cur_entity < _Entity.EVENT_CONTEXT:
778 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
779
780 if self._cur_event.context_type is None:
781 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
782
783 base = self._cur_event.context_type
784
785 if self._cur_entity == _Entity.EVENT_CONTEXT:
786 base_is_current = True
787 elif parts[1] == 'payload' or parts[1] == 'fields':
788 if self._cur_entity < _Entity.EVENT_PAYLOAD:
789 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
790
791 if self._cur_event.payload_type is None:
792 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
793
794 base = self._cur_event.payload_type
795
796 if self._cur_entity == _Entity.EVENT_PAYLOAD:
797 base_is_current = True
798 else:
799 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
800
801 if base is not None:
802 start_index = 2
803
804 if base is not None:
805 return self._lookup_path_from_base(path, parts, base, start_index,
806 base_is_current, from_t)
807 else:
808 return self._lookup_path_from_top(path, parts)
809
810 def _stack_reset(self):
811 self._stack = []
812
813 def _stack_push(self, base_t):
814 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
815 self._stack.append(entry)
816
817 def _stack_pop(self):
818 self._stack.pop()
819
820 def _stack_incr_index(self):
821 self._stack[-1].index += 1
822
823 def _visit_struct_type(self, t):
824 self._stack_push(t)
825
826 for field_name, field_type in t.fields.items():
827 try:
828 self._visit_type(field_type)
829 except Exception as e:
830 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
831
832 self._stack_incr_index()
833
834 self._stack_pop()
835
836 def _visit_array_type(self, t):
837 if t.is_variable_length:
838 # find length type
839 try:
840 length_type = self._lookup_path(t.length, t)
841 except Exception as e:
842 raise ConfigError('invalid array type\'s length', e)
843
844 # make sure the length type is an unsigned integer
845 if type(length_type) is not metadata.Integer:
846 raise ConfigError('array type\'s length does not point to an integer type')
847
848 if length_type.signed:
849 raise ConfigError('array type\'s length does not point to an unsigned integer type')
850
851 self._visit_type(t.element_type)
852
853 def _visit_variant_type(self, t):
854 # find tag type
855 try:
856 tag_type = self._lookup_path(t.tag, t)
857 except Exception as e:
858 raise ConfigError('invalid variant type\'s tag', e)
859
860 # make sure tag type is an enumeration
861 if type(tag_type) is not metadata.Enum:
862 raise ConfigError('variant type\'s tag does not point to an enumeration type')
863
864 # verify that each variant type's type exists as an enumeration member
865 for tag_name in t.types.keys():
866 if tag_name not in tag_type.members:
867 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
868
869 self._stack_push(t)
870
871 for type_name, type_type in t.types.items():
872 try:
873 self._visit_type(type_type)
874 except Exception as e:
875 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
876
877 self._stack_incr_index()
878
879 self._stack_pop()
880
881 def _visit_type(self, t):
882 if t is None:
883 return
884
885 if type(t) in self._type_to_visit_type_func:
886 func = self._type_to_visit_type_func[type(t)]
887
888 if func is not None:
889 func(t)
890
891 def _visit_event(self, ev):
892 ev_name = ev.name
893
894 # set current event
895 self._cur_event = ev
896
897 # visit event context type
898 self._stack_reset()
899 self._cur_entity = _Entity.EVENT_CONTEXT
900
901 try:
902 self._visit_type(ev.context_type)
903 except Exception as e:
904 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
905
906 # visit event payload type
907 self._stack_reset()
908 self._cur_entity = _Entity.EVENT_PAYLOAD
909
910 try:
911 self._visit_type(ev.payload_type)
912 except Exception as e:
913 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
914
915 def _visit_stream(self, stream):
916 stream_name = stream.name
917
918 # set current stream
919 self._cur_stream = stream
920
921 # reset current event
922 self._cur_event = None
923
924 # visit stream packet context type
925 self._stack_reset()
926 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
927
928 try:
929 self._visit_type(stream.packet_context_type)
930 except Exception as e:
931 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
932
933 # visit stream event header type
934 self._stack_reset()
935 self._cur_entity = _Entity.STREAM_EVENT_HEADER
936
937 try:
938 self._visit_type(stream.event_header_type)
939 except Exception as e:
940 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
941
942 # visit stream event context type
943 self._stack_reset()
944 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
945
946 try:
947 self._visit_type(stream.event_context_type)
948 except Exception as e:
949 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
950
951 # visit events
952 for ev in stream.events.values():
953 try:
954 self._visit_event(ev)
955 except Exception as e:
956 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
957
958 def validate(self, meta):
959 # set current trace
960 self._cur_trace = meta.trace
961
962 # visit trace packet header type
963 self._stack_reset()
964 self._cur_entity = _Entity.TRACE_PACKET_HEADER
965
966 try:
967 self._visit_type(meta.trace.packet_header_type)
968 except Exception as e:
969 raise ConfigError('invalid packet header type in trace', e)
970
971 # visit streams
972 for stream in meta.streams.values():
973 self._visit_stream(stream)
974
975
976 # Since type inheritance allows types to be only partially defined at
977 # any place in the configuration, this validator validates that the actual
978 # trace, stream, and event types are all complete and valid. Therefore
979 # an invalid, but unused, type alias is accepted.
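#
# Illustrative sketch (alias and field names are hypothetical): "base-int"
# below is incomplete (no "size"), which is accepted as long as no actual
# field uses it as-is; "my_field" completes it through inheritance and
# therefore passes the histology checks:
#
#     type-aliases:
#       base-int:
#         class: int            # no "size": incomplete, accepted if unused
#
#     # ... later, where a field is declared:
#     my_field:
#       $inherit: base-int
#       size: 32                # completes the type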
980 class _MetadataTypesHistologyValidator:
981 def __init__(self):
982 self._type_to_validate_type_histology_func = {
983 metadata.Integer: self._validate_integer_histology,
984 metadata.FloatingPoint: self._validate_float_histology,
985 metadata.Enum: self._validate_enum_histology,
986 metadata.String: self._validate_string_histology,
987 metadata.Struct: self._validate_struct_histology,
988 metadata.Array: self._validate_array_histology,
989 metadata.Variant: self._validate_variant_histology,
990 }
991
992 def _validate_integer_histology(self, t):
993 # size is set
994 if t.size is None:
995 raise ConfigError('missing integer type\'s size')
996
997 def _validate_float_histology(self, t):
998 # exponent digits is set
999 if t.exp_size is None:
1000 raise ConfigError('missing floating point number type\'s exponent size')
1001
1002 # mantissa digits is set
1003 if t.mant_size is None:
1004 raise ConfigError('missing floating point number type\'s mantissa size')
1005
1006 # exponent and mantissa sum is a multiple of 8
1007 if (t.exp_size + t.mant_size) % 8 != 0:
1008 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
1009
1010 def _validate_enum_histology(self, t):
1011 # integer type is set
1012 if t.value_type is None:
1013 raise ConfigError('missing enumeration type\'s value type')
1014
1015 # there's at least one member
1016 if not t.members:
1017 raise ConfigError('enumeration type needs at least one member')
1018
1019 # no overlapping values and all values are valid considering
1020 # the value type
1021 ranges = []
1022
1023 if t.value_type.signed:
1024 value_min = -(1 << t.value_type.size - 1)
1025 value_max = (1 << (t.value_type.size - 1)) - 1
1026 else:
1027 value_min = 0
1028 value_max = (1 << t.value_type.size) - 1
1029
1030 for label, value in t.members.items():
1031 for rg in ranges:
1032 if value[0] <= rg[1] and rg[0] <= value[1]:
1033 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1034
1035 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
1036
1037 if value[0] < value_min or value[0] > value_max:
1038 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
1039
1040 if value[1] < value_min or value[1] > value_max:
1041 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
1042
1043 ranges.append(value)
1044
1045 def _validate_string_histology(self, t):
1046 # always valid
1047 pass
1048
1049 def _validate_struct_histology(self, t):
1050 # all fields are valid
1051 for field_name, field_type in t.fields.items():
1052 try:
1053 self._validate_type_histology(field_type)
1054 except Exception as e:
1055 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1056
1057 def _validate_array_histology(self, t):
1058 # length is set
1059 if t.length is None:
1060 raise ConfigError('missing array type\'s length')
1061
1062 # element type is set
1063 if t.element_type is None:
1064 raise ConfigError('missing array type\'s element type')
1065
1066 # element type is valid
1067 try:
1068 self._validate_type_histology(t.element_type)
1069 except Exception as e:
1070 raise ConfigError('invalid array type\'s element type', e)
1071
1072 def _validate_variant_histology(self, t):
1073 # tag is set
1074 if t.tag is None:
1075 raise ConfigError('missing variant type\'s tag')
1076
1077 # there's at least one type
1078 if not t.types:
1079 raise ConfigError('variant type needs at least one type')
1080
1081 # all types are valid
1082 for type_name, type_t in t.types.items():
1083 try:
1084 self._validate_type_histology(type_t)
1085 except Exception as e:
1086 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1087
1088 def _validate_type_histology(self, t):
1089 if t is None:
1090 return
1091
1092 self._type_to_validate_type_histology_func[type(t)](t)
1093
1094 def _validate_entity_type_histology(self, t):
1095 if t is None:
1096 return
1097
1098 if type(t) is not metadata.Struct:
1099 raise ConfigError('expecting a structure type')
1100
1101 self._validate_type_histology(t)
1102
1103 def _validate_event_types_histology(self, ev):
1104 ev_name = ev.name
1105
1106 # validate event context type
1107 try:
1108 self._validate_entity_type_histology(ev.context_type)
1109 except Exception as e:
1110 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1111
1112 # validate event payload type
1113 if ev.payload_type is None:
1114 raise ConfigError('event payload type must exist in event "{}"'.format(ev_name))
1115
1116 # TODO: also check arrays, sequences, and variants
1117 if type(ev.payload_type) is metadata.Struct:
1118 if not ev.payload_type.fields:
1119 raise ConfigError('event payload type must have at least one field for event "{}"'.format(ev_name))
1120
1121 try:
1122 self._validate_entity_type_histology(ev.payload_type)
1123 except Exception as e:
1124 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1125
1126 def _validate_stream_types_histology(self, stream):
1127 stream_name = stream.name
1128
1129 # validate stream packet context type
1130 try:
1131 self._validate_entity_type_histology(stream.packet_context_type)
1132 except Exception as e:
1133 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1134
1135 # validate stream event header type
1136 try:
1137 self._validate_entity_type_histology(stream.event_header_type)
1138 except Exception as e:
1139 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1140
1141 # validate stream event context type
1142 try:
1143 self._validate_entity_type_histology(stream.event_context_type)
1144 except Exception as e:
1145 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1146
1147 # validate events
1148 for ev in stream.events.values():
1149 try:
1150 self._validate_event_types_histology(ev)
1151 except Exception as e:
1152 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1153
1154 def validate(self, meta):
1155 # validate trace packet header type
1156 try:
1157 self._validate_entity_type_histology(meta.trace.packet_header_type)
1158 except Exception as e:
1159 raise ConfigError('invalid trace packet header type', e)
1160
1161 # validate streams
1162 for stream in meta.streams.values():
1163 self._validate_stream_types_histology(stream)
1164
1165
1166 class _YamlConfigParser:
1167 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1168 self._class_name_to_create_type_func = {
1169 'int': self._create_integer,
1170 'integer': self._create_integer,
1171 'flt': self._create_float,
1172 'float': self._create_float,
1173 'floating-point': self._create_float,
1174 'enum': self._create_enum,
1175 'enumeration': self._create_enum,
1176 'str': self._create_string,
1177 'string': self._create_string,
1178 'struct': self._create_struct,
1179 'structure': self._create_struct,
1180 'array': self._create_array,
1181 'var': self._create_variant,
1182 'variant': self._create_variant,
1183 }
1184 self._type_to_create_type_func = {
1185 metadata.Integer: self._create_integer,
1186 metadata.FloatingPoint: self._create_float,
1187 metadata.Enum: self._create_enum,
1188 metadata.String: self._create_string,
1189 metadata.Struct: self._create_struct,
1190 metadata.Array: self._create_array,
1191 metadata.Variant: self._create_variant,
1192 }
1193 self._include_dirs = include_dirs
1194 self._ignore_include_not_found = ignore_include_not_found
1195 self._dump_config = dump_config
1196
1197 def _set_byte_order(self, metadata_node):
1198 if 'trace' not in metadata_node:
1199 raise ConfigError('missing "trace" property (metadata)')
1200
1201 trace_node = metadata_node['trace']
1202
1203 if not _is_assoc_array_prop(trace_node):
1204 raise ConfigError('"trace" property (metadata) must be an associative array')
1205
1206 if 'byte-order' not in trace_node:
1207 raise ConfigError('missing "byte-order" property (trace)')
1208
1209 bo_node = trace_node['byte-order']
1210
1211 if not _is_str_prop(bo_node):
1212 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
1213
1214 self._bo = _byte_order_str_to_bo(bo_node)
1215
1216 if self._bo is None:
1217 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1218
1219 def _lookup_type_alias(self, name):
1220 if name in self._tas:
1221 return copy.deepcopy(self._tas[name])
1222
1223 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1224 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1225
1226 if unk_prop:
1227 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1228
1229 if 'name' not in prop_mapping_node:
1230 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1231
1232 if 'property' not in prop_mapping_node:
1233 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1234
1235 clock_name = prop_mapping_node['name']
1236 prop = prop_mapping_node['property']
1237
1238 if not _is_str_prop(clock_name):
1239 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1240
1241 if not _is_str_prop(prop):
1242 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1243
1244 if clock_name not in self._clocks:
1245 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1246
1247 if prop != 'value':
1248 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1249
1250 mapped_clock = self._clocks[clock_name]
1251 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1252
1253 def _get_first_unknown_type_prop(self, type_node, known_props):
1254 kp = known_props + ['inherit', 'class']
1255
1256 if self._version >= 201:
1257 kp.append('$inherit')
1258
1259 return _get_first_unknown_prop(type_node, kp)
1260
1261 def _create_integer(self, obj, node):
1262 if obj is None:
1263 # create integer object
1264 obj = metadata.Integer()
1265
1266 unk_prop = self._get_first_unknown_type_prop(node, [
1267 'size',
1268 'align',
1269 'signed',
1270 'byte-order',
1271 'base',
1272 'encoding',
1273 'property-mappings',
1274 ])
1275
1276 if unk_prop:
1277 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1278
1279 # size
1280 if 'size' in node:
1281 size = node['size']
1282
1283 if not _is_int_prop(size):
1284 raise ConfigError('"size" property of integer type object must be an integer')
1285
1286 if size < 1:
1287 raise ConfigError('invalid integer size: {}'.format(size))
1288
1289 obj.size = size
1290
1291 # align
1292 if 'align' in node:
1293 align = node['align']
1294
1295 if align is None:
1296 obj.set_default_align()
1297 else:
1298 if not _is_int_prop(align):
1299 raise ConfigError('"align" property of integer type object must be an integer')
1300
1301 if not _is_valid_alignment(align):
1302 raise ConfigError('invalid alignment: {}'.format(align))
1303
1304 obj.align = align
1305
1306 # signed
1307 if 'signed' in node:
1308 signed = node['signed']
1309
1310 if signed is None:
1311 obj.set_default_signed()
1312 else:
1313 if not _is_bool_prop(signed):
1314 raise ConfigError('"signed" property of integer type object must be a boolean')
1315
1316 obj.signed = signed
1317
1318 # byte order
1319 if 'byte-order' in node:
1320 byte_order = node['byte-order']
1321
1322 if byte_order is None:
1323 obj.byte_order = self._bo
1324 else:
1325 if not _is_str_prop(byte_order):
1326 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1327
1328 byte_order = _byte_order_str_to_bo(byte_order)
1329
1330 if byte_order is None:
1331 raise ConfigError('invalid "byte-order" property in integer type object')
1332
1333 obj.byte_order = byte_order
1334 else:
1335 obj.byte_order = self._bo
1336
1337 # base
1338 if 'base' in node:
1339 base = node['base']
1340
1341 if base is None:
1342 obj.set_default_base()
1343 else:
1344 if not _is_str_prop(base):
1345 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1346
1347 if base == 'bin':
1348 base = 2
1349 elif base == 'oct':
1350 base = 8
1351 elif base == 'dec':
1352 base = 10
1353 elif base == 'hex':
1354 base = 16
1355 else:
1356 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
1357
1358 obj.base = base
1359
1360 # encoding
1361 if 'encoding' in node:
1362 encoding = node['encoding']
1363
1364 if encoding is None:
1365 obj.set_default_encoding()
1366 else:
1367 if not _is_str_prop(encoding):
1368 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
1369
1370 encoding = _encoding_str_to_encoding(encoding)
1371
1372 if encoding is None:
1373 raise ConfigError('invalid "encoding" property in integer type object')
1374
1375 obj.encoding = encoding
1376
1377 # property mappings
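# (illustrative, the clock name is hypothetical) a clock property mapping
# is expressed as:
#
#     property-mappings:
#       - { type: clock, name: my_clock, property: value }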
1378 if 'property-mappings' in node:
1379 prop_mappings = node['property-mappings']
1380
1381 if prop_mappings is None:
1382 obj.set_default_property_mappings()
1383 else:
1384 if not _is_array_prop(prop_mappings):
1385 raise ConfigError('"property-mappings" property of integer type object must be an array')
1386
1387 if len(prop_mappings) > 1:
1388 raise ConfigError('length of "property-mappings" array in integer type object must be 1')
1389
1390 for index, prop_mapping in enumerate(prop_mappings):
1391 if not _is_assoc_array_prop(prop_mapping):
1392 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
1393
1394 if 'type' not in prop_mapping:
1395 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
1396
1397 prop_type = prop_mapping['type']
1398
1399 if not _is_str_prop(prop_type):
1400 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
1401
1402 if prop_type == 'clock':
1403 self._set_int_clock_prop_mapping(obj, prop_mapping)
1404 else:
1405 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1406
1407 return obj
1408
1409 def _create_float(self, obj, node):
1410 if obj is None:
1411 # create floating point number object
1412 obj = metadata.FloatingPoint()
1413
1414 unk_prop = self._get_first_unknown_type_prop(node, [
1415 'size',
1416 'align',
1417 'byte-order',
1418 ])
1419
1420 if unk_prop:
1421 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1422
1423 # size
1424 if 'size' in node:
1425 size = node['size']
1426
1427 if not _is_assoc_array_prop(size):
1428 raise ConfigError('"size" property of floating point number type object must be an associative array')
1429
1430 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1431
1432 if unk_prop:
1433 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1434
1435 if 'exp' in size:
1436 exp = size['exp']
1437
1438 if not _is_int_prop(exp):
1439 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1440
1441 if exp < 1:
1442 raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1443
1444 obj.exp_size = exp
1445
1446 if 'mant' in size:
1447 mant = size['mant']
1448
1449 if not _is_int_prop(mant):
1450 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1451
1452 if mant < 1:
1453 raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1454
1455 obj.mant_size = mant
1456
1457 # align
1458 if 'align' in node:
1459 align = node['align']
1460
1461 if align is None:
1462 obj.set_default_align()
1463 else:
1464 if not _is_int_prop(align):
1465 raise ConfigError('"align" property of floating point number type object must be an integer')
1466
1467 if not _is_valid_alignment(align):
1468 raise ConfigError('invalid alignment: {}'.format(align))
1469
1470 obj.align = align
1471
1472 # byte order
1473 if 'byte-order' in node:
1474 byte_order = node['byte-order']
1475
1476 if byte_order is None:
1477 obj.byte_order = self._bo
1478 else:
1479 if not _is_str_prop(byte_order):
1480 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1481
1482 byte_order = _byte_order_str_to_bo(byte_order)
1483
1484 if byte_order is None:
1485 raise ConfigError('invalid "byte-order" property in floating point number type object')
1486 else:
1487 obj.byte_order = self._bo
1488
1489 return obj
1490
1491 def _create_enum(self, obj, node):
1492 if obj is None:
1493 # create enumeration object
1494 obj = metadata.Enum()
1495
1496 unk_prop = self._get_first_unknown_type_prop(node, [
1497 'value-type',
1498 'members',
1499 ])
1500
1501 if unk_prop:
1502 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1503
1504 # value type
1505 if 'value-type' in node:
1506 value_type_node = node['value-type']
1507
1508 try:
1509 obj.value_type = self._create_type(value_type_node)
1510 except Exception as e:
1511 raise ConfigError('cannot create enumeration type\'s integer type', e)
1512
1513 # members
1514 if 'members' in node:
1515 members_node = node['members']
1516
1517 if not _is_array_prop(members_node):
1518 raise ConfigError('"members" property of enumeration type object must be an array')
1519
1520 cur = 0
1521 last_value = obj.last_value
1522
1523 if last_value is None:
1524 cur = 0
1525 else:
1526 cur = last_value + 1
1527
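# (illustrative, labels are hypothetical) members may be plain labels or
# label/value objects; plain labels continue from the last assigned value:
#
#     members:
#       - RED                          # becomes (0, 0)
#       - { label: GREEN, value: 5 }   # (5, 5)
#       - BLUE                         # (6, 6)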
1528 for index, m_node in enumerate(members_node):
1529 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1530 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1531
1532 if _is_str_prop(m_node):
1533 label = m_node
1534 value = (cur, cur)
1535 cur += 1
1536 else:
1537 unk_prop = _get_first_unknown_prop(m_node, [
1538 'label',
1539 'value',
1540 ])
1541
1542 if unk_prop:
1543 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1544
1545 if 'label' not in m_node:
1546 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1547
1548 label = m_node['label']
1549
1550 if not _is_str_prop(label):
1551 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1552
1553 if 'value' not in m_node:
1554 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1555
1556 value = m_node['value']
1557
1558 if not _is_int_prop(value) and not _is_array_prop(value):
1559 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1560
1561 if _is_int_prop(value):
1562 cur = value + 1
1563 value = (value, value)
1564 else:
1565 if len(value) != 2:
1566 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1567
1568 mn = value[0]
1569 mx = value[1]
1570
1571 if mn > mx:
1572 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1573
1574 value = (mn, mx)
1575 cur = mx + 1
1576
1577 obj.members[label] = value
1578
1579 return obj
1580
1581 def _create_string(self, obj, node):
1582 if obj is None:
1583 # create string object
1584 obj = metadata.String()
1585
1586 unk_prop = self._get_first_unknown_type_prop(node, [
1587 'encoding',
1588 ])
1589
1590 if unk_prop:
1591 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1592
1593 # encoding
1594 if 'encoding' in node:
1595 encoding = node['encoding']
1596
1597 if encoding is None:
1598 obj.set_default_encoding()
1599 else:
1600 if not _is_str_prop(encoding):
1601 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1602
1603 encoding = _encoding_str_to_encoding(encoding)
1604
1605 if encoding is None:
1606 raise ConfigError('invalid "encoding" property in string type object')
1607
1608 obj.encoding = encoding
1609
1610 return obj
1611
1612 def _create_struct(self, obj, node):
1613 if obj is None:
1614 # create structure object
1615 obj = metadata.Struct()
1616
1617 unk_prop = self._get_first_unknown_type_prop(node, [
1618 'min-align',
1619 'fields',
1620 ])
1621
1622 if unk_prop:
1623 raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1624
1625 # minimum alignment
1626 if 'min-align' in node:
1627 min_align = node['min-align']
1628
1629 if min_align is None:
1630 obj.set_default_min_align()
1631 else:
1632 if not _is_int_prop(min_align):
1633 raise ConfigError('"min-align" property of structure type object must be an integer')
1634
1635 if not _is_valid_alignment(min_align):
1636 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1637
1638 obj.min_align = min_align
1639
1640 # fields
1641 if 'fields' in node:
1642 fields = node['fields']
1643
1644 if fields is None:
1645 obj.set_default_fields()
1646 else:
1647 if not _is_assoc_array_prop(fields):
1648 raise ConfigError('"fields" property of structure type object must be an associative array')
1649
1650 for field_name, field_node in fields.items():
1651 if not is_valid_identifier(field_name):
1652 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1653
1654 try:
1655 obj.fields[field_name] = self._create_type(field_node)
1656 except Exception as e:
1657 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1658
1659 return obj
1660
1661 def _create_array(self, obj, node):
1662 if obj is None:
1663 # create array object
1664 obj = metadata.Array()
1665
1666 unk_prop = self._get_first_unknown_type_prop(node, [
1667 'length',
1668 'element-type',
1669 ])
1670
1671 if unk_prop:
1672 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1673
1674 # length
1675 if 'length' in node:
1676 length = node['length']
1677
1678 if not _is_int_prop(length) and not _is_str_prop(length):
1679 raise ConfigError('"length" property of array type object must be an integer or a string')
1680
1681 if type(length) is int and length < 0:
1682 raise ConfigError('invalid static array length: {}'.format(length))
1683
1684 obj.length = length
1685
1686 # element type
1687 if 'element-type' in node:
1688 element_type_node = node['element-type']
1689
1690 try:
1691 obj.element_type = self._create_type(element_type_node)
1692 except Exception as e:
1693 raise ConfigError('cannot create array type\'s element type', e)
1694
1695 return obj
1696
1697 def _create_variant(self, obj, node):
1698 if obj is None:
1699 # create variant object
1700 obj = metadata.Variant()
1701
1702 unk_prop = self._get_first_unknown_type_prop(node, [
1703 'tag',
1704 'types',
1705 ])
1706
1707 if unk_prop:
1708 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1709
1710 # tag
1711 if 'tag' in node:
1712 tag = node['tag']
1713
1714 if not _is_str_prop(tag):
1715 raise ConfigError('"tag" property of variant type object must be a string')
1716
1717 # do not validate variant tag for the moment; will be done in a
1718 # second phase
1719 obj.tag = tag
1720
1721 # types
1722 if 'types' in node:
1723 types = node['types']
1724
1725 if not _is_assoc_array_prop(types):
1726 raise ConfigError('"types" property of variant type object must be an associative array')
1727
1728 # do not validate type names for the moment; will be done in a
1729 # second phase
1730 for type_name, type_node in types.items():
1731 if not is_valid_identifier(type_name):
1732 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1733
1734 try:
1735 obj.types[type_name] = self._create_type(type_node)
1736 except Exception as e:
1737 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1738
1739 return obj
1740
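# Illustrative example (not from the original source): a variant type
# object as _create_variant() above accepts it. The tag reference and
# the type names are hypothetical; the tag is only resolved during the
# second validation phase mentioned above.
#
#     class: variant
#     tag: my_tag_field
#     types:
#       small: uint16
#       large: uint64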
1741 def _create_type(self, type_node):
1742 if type(type_node) is str:
1743 t = self._lookup_type_alias(type_node)
1744
1745 if t is None:
1746 raise ConfigError('unknown type alias "{}"'.format(type_node))
1747
1748 return t
1749
1750 if not _is_assoc_array_prop(type_node):
1751 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
1752
1753 # inherit:
1754 # v2.0: "inherit"
1755 # v2.1+: "$inherit"
1756 inherit_node = None
1757
1758 if self._version >= 200:
1759 if 'inherit' in type_node:
1760 inherit_prop = 'inherit'
1761 inherit_node = type_node[inherit_prop]
1762
1763 if self._version >= 201:
1764 if '$inherit' in type_node:
1765 if inherit_node is not None:
1766 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1767
1768 inherit_prop = '$inherit'
1769 inherit_node = type_node[inherit_prop]
1770
1771 if inherit_node is not None and 'class' in type_node:
1772 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1773
1774 if inherit_node is not None:
1775 if not _is_str_prop(inherit_node):
1776 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1777
1778 base = self._lookup_type_alias(inherit_node)
1779
1780 if base is None:
1781 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1782
1783 func = self._type_to_create_type_func[type(base)]
1784 else:
1785 if 'class' not in type_node:
1786 raise ConfigError('type objects which do not inherit must have a "class" property')
1787
1788 class_name = type_node['class']
1789
1790 if type(class_name) is not str:
1791 raise ConfigError('type objects\' "class" property must be a string')
1792
1793 if class_name not in self._class_name_to_create_type_func:
1794 raise ConfigError('unknown type class "{}"'.format(class_name))
1795
1796 base = None
1797 func = self._class_name_to_create_type_func[class_name]
1798
1799 return func(base, type_node)
1800
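# Illustrative example (not from the original source): type inheritance
# as handled by _create_type() above, assuming "int" is among the class
# names registered in _class_name_to_create_type_func earlier in this
# file. A v2.0 configuration would spell the property "inherit" instead
# of "$inherit"; the alias names are hypothetical.
#
#     type-aliases:
#       uint16:
#         class: int
#         size: 16
#       uint32:
#         $inherit: uint16
#         size: 32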
1801 def _register_type_aliases(self, metadata_node):
1802 self._tas = dict()
1803
1804 if 'type-aliases' not in metadata_node:
1805 return
1806
1807 ta_node = metadata_node['type-aliases']
1808
1809 if ta_node is None:
1810 return
1811
1812 if not _is_assoc_array_prop(ta_node):
1813 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1814
1815 for ta_name, ta_type in ta_node.items():
1816 if ta_name in self._tas:
1817 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1818
1819 try:
1820 t = self._create_type(ta_type)
1821 except Exception as e:
1822 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1823
1824 self._tas[ta_name] = t
1825
1826 def _create_clock(self, node):
1827 # create clock object
1828 clock = metadata.Clock()
1829
1830 if not _is_assoc_array_prop(node):
1831 raise ConfigError('clock objects must be associative arrays')
1832
1833 known_props = [
1834 'uuid',
1835 'description',
1836 'freq',
1837 'error-cycles',
1838 'offset',
1839 'absolute',
1840 'return-ctype',
1841 ]
1842
1843 if self._version >= 201:
1844 known_props.append('$return-ctype')
1845
1846 unk_prop = _get_first_unknown_prop(node, known_props)
1847
1848 if unk_prop:
1849 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1850
1851 # UUID
1852 if 'uuid' in node:
1853 uuidp = node['uuid']
1854
1855 if uuidp is None:
1856 clock.set_default_uuid()
1857 else:
1858 if not _is_str_prop(uuidp):
1859 raise ConfigError('"uuid" property of clock object must be a string')
1860
1861 try:
1862 uuidp = uuid.UUID(uuidp)
1863 except Exception:
1864 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1865
1866 clock.uuid = uuidp
1867
1868 # description
1869 if 'description' in node:
1870 desc = node['description']
1871
1872 if desc is None:
1873 clock.set_default_description()
1874 else:
1875 if not _is_str_prop(desc):
1876 raise ConfigError('"description" property of clock object must be a string')
1877
1878 clock.description = desc
1879
1880 # frequency
1881 if 'freq' in node:
1882 freq = node['freq']
1883
1884 if freq is None:
1885 clock.set_default_freq()
1886 else:
1887 if not _is_int_prop(freq):
1888 raise ConfigError('"freq" property of clock object must be an integer')
1889
1890 if freq < 1:
1891 raise ConfigError('invalid clock frequency: {}'.format(freq))
1892
1893 clock.freq = freq
1894
1895 # error cycles
1896 if 'error-cycles' in node:
1897 error_cycles = node['error-cycles']
1898
1899 if error_cycles is None:
1900 clock.set_default_error_cycles()
1901 else:
1902 if not _is_int_prop(error_cycles):
1903 raise ConfigError('"error-cycles" property of clock object must be an integer')
1904
1905 if error_cycles < 0:
1906 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1907
1908 clock.error_cycles = error_cycles
1909
1910 # offset
1911 if 'offset' in node:
1912 offset = node['offset']
1913
1914 if offset is None:
1915 clock.set_default_offset_seconds()
1916 clock.set_default_offset_cycles()
1917 else:
1918 if not _is_assoc_array_prop(offset):
1919 raise ConfigError('"offset" property of clock object must be an associative array')
1920
1921 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1922
1923 if unk_prop:
1924 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1925
1926 # cycles
1927 if 'cycles' in offset:
1928 offset_cycles = offset['cycles']
1929
1930 if not _is_int_prop(offset_cycles):
1931 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1932
1933 if offset_cycles < 0:
1934 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1935
1936 clock.offset_cycles = offset_cycles
1937
1938 # seconds
1939 if 'seconds' in offset:
1940 offset_seconds = offset['seconds']
1941
1942 if not _is_int_prop(offset_seconds):
1943 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1944
1945 if offset_seconds < 0:
1946 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1947
1948 clock.offset_seconds = offset_seconds
1949
1950 # absolute
1951 if 'absolute' in node:
1952 absolute = node['absolute']
1953
1954 if absolute is None:
1955 clock.set_default_absolute()
1956 else:
1957 if not _is_bool_prop(absolute):
1958 raise ConfigError('"absolute" property of clock object must be a boolean')
1959
1960 clock.absolute = absolute
1961
1962 # return C type:
1963 # v2.0: "return-ctype"
1964 # v2.1+: "$return-ctype"
1965 return_ctype_node = None
1966
1967 if self._version >= 200:
1968 if 'return-ctype' in node:
1969 return_ctype_prop = 'return-ctype'
1970 return_ctype_node = node[return_ctype_prop]
1971
1972 if self._version >= 201:
1973 if '$return-ctype' in node:
1974 if return_ctype_node is not None:
1975 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1976
1977 return_ctype_prop = '$return-ctype'
1978 return_ctype_node = node[return_ctype_prop]
1979
1980 if return_ctype_node is not None:
1984 if not _is_str_prop(return_ctype_node):
1985 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1986 
1987 clock.return_ctype = return_ctype_node
1988
1989 return clock
1990
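# Illustrative example (not from the original source): a clock object as
# _create_clock() above accepts it (the mapping key, here "my_clock", is
# the clock name and is hypothetical). A v2.0 configuration would use
# "return-ctype" instead of "$return-ctype".
#
#     my_clock:
#       freq: 1000000000
#       offset:
#         seconds: 1434072888
#         cycles: 0
#       absolute: false
#       $return-ctype: uint64_t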
1991 def _register_clocks(self, metadata_node):
1992 self._clocks = collections.OrderedDict()
1993
1994 if 'clocks' not in metadata_node:
1995 return
1996
1997 clocks_node = metadata_node['clocks']
1998
1999 if clocks_node is None:
2000 return
2001
2002 if not _is_assoc_array_prop(clocks_node):
2003 raise ConfigError('"clocks" property (metadata) must be an associative array')
2004
2005 for clock_name, clock_node in clocks_node.items():
2006 if not is_valid_identifier(clock_name):
2007 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
2008
2009 if clock_name in self._clocks:
2010 raise ConfigError('duplicate clock "{}"'.format(clock_name))
2011
2012 try:
2013 clock = self._create_clock(clock_node)
2014 except Exception as e:
2015 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
2016
2017 clock.name = clock_name
2018 self._clocks[clock_name] = clock
2019
2020 def _create_env(self, metadata_node):
2021 env = collections.OrderedDict()
2022
2023 if 'env' not in metadata_node:
2024 return env
2025
2026 env_node = metadata_node['env']
2027
2028 if env_node is None:
2029 return env
2030
2031 if not _is_assoc_array_prop(env_node):
2032 raise ConfigError('"env" property (metadata) must be an associative array')
2033
2034 for env_name, env_value in env_node.items():
2035 if env_name in env:
2036 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
2037
2038 if not is_valid_identifier(env_name):
2039 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
2040
2041 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
2042 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
2043
2044 env[env_name] = env_value
2045
2046 return env
2047
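# Illustrative example (not from the original source): an "env" object
# as _create_env() above accepts it. Keys must be valid C identifiers
# and values must be integers or strings; the entries shown are
# hypothetical.
#
#     env:
#       my_app_version_major: 1
#       my_app_name: my-app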
2048 def _register_log_levels(self, metadata_node):
2049 self._log_levels = dict()
2050
2051 # log levels:
2052 # v2.0: "log-levels"
2053 # v2.1+: "$log-levels"
2054 log_levels_node = None
2055
2056 if self._version >= 200:
2057 if 'log-levels' in metadata_node:
2058 log_levels_prop = 'log-levels'
2059 log_levels_node = metadata_node[log_levels_prop]
2060
2061 if self._version >= 201:
2062 if '$log-levels' in metadata_node:
2063 if log_levels_node is not None:
2064 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
2065
2066 log_levels_prop = '$log-levels'
2067 log_levels_node = metadata_node[log_levels_prop]
2068
2069 if log_levels_node is None:
2070 return
2071
2072 if not _is_assoc_array_prop(log_levels_node):
2073 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
2074
2075 for ll_name, ll_value in log_levels_node.items():
2076 if ll_name in self._log_levels:
2077 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
2078
2079 if not _is_int_prop(ll_value):
2080 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
2081
2082 if ll_value < 0:
2083 raise ConfigError('invalid log level entry ("{}"): log level value must not be negative'.format(ll_name))
2084
2085 self._log_levels[ll_name] = ll_value
2086
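# Illustrative example (not from the original source): a "$log-levels"
# object ("log-levels" in v2.0) as _register_log_levels() above accepts
# it. Names and values are hypothetical; events may then refer to these
# names in their "log-level" property.
#
#     $log-levels:
#       CRITICAL: 2
#       WARNING: 4
#       DEBUG: 14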
2087 def _create_trace(self, metadata_node):
2088 # create trace object
2089 trace = metadata.Trace()
2090
2091 if 'trace' not in metadata_node:
2092 raise ConfigError('missing "trace" property (metadata)')
2093
2094 trace_node = metadata_node['trace']
2095
2096 if not _is_assoc_array_prop(trace_node):
2097 raise ConfigError('"trace" property (metadata) must be an associative array')
2098
2099 unk_prop = _get_first_unknown_prop(trace_node, [
2100 'byte-order',
2101 'uuid',
2102 'packet-header-type',
2103 ])
2104
2105 if unk_prop:
2106 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
2107
2108 # set byte order (already parsed)
2109 trace.byte_order = self._bo
2110
2111 # UUID
2112 if 'uuid' in trace_node and trace_node['uuid'] is not None:
2113 uuidp = trace_node['uuid']
2114
2115 if not _is_str_prop(uuidp):
2116 raise ConfigError('"uuid" property of trace object must be a string')
2117
2118 if uuidp == 'auto':
2119 uuidp = uuid.uuid1()
2120 else:
2121 try:
2122 uuidp = uuid.UUID(uuidp)
2123 except Exception:
2124 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
2125
2126 trace.uuid = uuidp
2127
2128 # packet header type
2129 if 'packet-header-type' in trace_node and trace_node['packet-header-type'] is not None:
2130 try:
2131 ph_type = self._create_type(trace_node['packet-header-type'])
2132 except Exception as e:
2133 raise ConfigError('cannot create packet header type (trace)', e)
2134
2135 trace.packet_header_type = ph_type
2136
2137 return trace
2138
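# Illustrative example (not from the original source): a "trace" object
# as _create_trace() above accepts it, assuming "le" is one of the byte
# order strings accepted by _set_byte_order() and that
# "my_packet_header" is a declared type alias. "auto" requests a
# generated UUID.
#
#     trace:
#       byte-order: le
#       uuid: auto
#       packet-header-type: my_packet_header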
2139 def _lookup_log_level(self, ll):
2140 if _is_int_prop(ll):
2141 return ll
2142 elif _is_str_prop(ll) and ll in self._log_levels:
2143 return self._log_levels[ll]
2144
2145 def _create_event(self, event_node):
2146 event = metadata.Event()
2147
2148 if not _is_assoc_array_prop(event_node):
2149 raise ConfigError('event objects must be associative arrays')
2150
2151 unk_prop = _get_first_unknown_prop(event_node, [
2152 'log-level',
2153 'context-type',
2154 'payload-type',
2155 ])
2156
2157 if unk_prop:
2158 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2159
2160 if 'log-level' in event_node and event_node['log-level'] is not None:
2161 ll_node = event_node['log-level']
2162
2163 if _is_str_prop(ll_node):
2164 ll_value = self._lookup_log_level(ll_node)
2165 
2166 if ll_value is None:
2167 raise ConfigError('cannot find log level "{}"'.format(ll_node))
2168 
2169 ll = metadata.LogLevel(ll_node, ll_value)
2170 elif _is_int_prop(ll_node):
2171 if ll_node < 0:
2172 raise ConfigError('invalid log level value {}: value must not be negative'.format(ll_node))
2173
2174 ll = metadata.LogLevel(None, ll_node)
2175 else:
2176 raise ConfigError('"log-level" property must be either a string or an integer')
2177
2178 event.log_level = ll
2179
2180 if 'context-type' in event_node and event_node['context-type'] is not None:
2181 ctx_type_node = event_node['context-type']
2182
2183 try:
2184 t = self._create_type(ctx_type_node)
2185 except Exception as e:
2186 raise ConfigError('cannot create event\'s context type object', e)
2187
2188 event.context_type = t
2189
2190 if 'payload-type' not in event_node:
2191 raise ConfigError('missing "payload-type" property in event object')
2192
2193 if event_node['payload-type'] is not None:
2194 try:
2195 t = self._create_type(event_node['payload-type'])
2196 except Exception as e:
2197 raise ConfigError('cannot create event\'s payload type object', e)
2198
2199 event.payload_type = t
2200
2201 return event
2202
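# Illustrative example (not from the original source): an event object
# as _create_event() above accepts it (the mapping key is the event
# name). "payload-type" is mandatory but may be null; "WARNING" is
# assumed to be registered under "$log-levels" and "my_string" to be a
# declared type alias.
#
#     log-level: WARNING
#     payload-type:
#       class: struct
#       fields:
#         msg: my_string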
2203 def _create_stream(self, stream_node):
2204 stream = metadata.Stream()
2205
2206 if not _is_assoc_array_prop(stream_node):
2207 raise ConfigError('stream objects must be associative arrays')
2208
2209 unk_prop = _get_first_unknown_prop(stream_node, [
2210 'packet-context-type',
2211 'event-header-type',
2212 'event-context-type',
2213 'events',
2214 ])
2215
2216 if unk_prop:
2217 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2218
2219 if 'packet-context-type' in stream_node and stream_node['packet-context-type'] is not None:
2220 try:
2221 t = self._create_type(stream_node['packet-context-type'])
2222 except Exception as e:
2223 raise ConfigError('cannot create stream\'s packet context type object', e)
2224
2225 stream.packet_context_type = t
2226
2227 if 'event-header-type' in stream_node and stream_node['event-header-type'] is not None:
2228 try:
2229 t = self._create_type(stream_node['event-header-type'])
2230 except Exception as e:
2231 raise ConfigError('cannot create stream\'s event header type object', e)
2232
2233 stream.event_header_type = t
2234
2235 if 'event-context-type' in stream_node and stream_node['event-context-type'] is not None:
2236 try:
2237 t = self._create_type(stream_node['event-context-type'])
2238 except Exception as e:
2239 raise ConfigError('cannot create stream\'s event context type object', e)
2240
2241 stream.event_context_type = t
2242
2243 if 'events' not in stream_node:
2244 raise ConfigError('missing "events" property in stream object')
2245
2246 events = stream_node['events']
2247
2248 if events is not None:
2249 if not _is_assoc_array_prop(events):
2250 raise ConfigError('"events" property of stream object must be an associative array')
2251
2252 if not events:
2253 raise ConfigError('at least one event is needed within a stream object')
2254
2255 cur_id = 0
2256
2257 for ev_name, ev_node in events.items():
2258 try:
2259 ev = self._create_event(ev_node)
2260 except Exception as e:
2261 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
2262
2263 ev.id = cur_id
2264 ev.name = ev_name
2265 stream.events[ev_name] = ev
2266 cur_id += 1
2267
2268 return stream
2269
2270 def _create_streams(self, metadata_node):
2271 streams = collections.OrderedDict()
2272
2273 if 'streams' not in metadata_node:
2274 raise ConfigError('missing "streams" property (metadata)')
2275
2276 streams_node = metadata_node['streams']
2277
2278 if not _is_assoc_array_prop(streams_node):
2279 raise ConfigError('"streams" property (metadata) must be an associative array')
2280
2281 if not streams_node:
2282 raise ConfigError('at least one stream is needed (metadata)')
2283
2284 cur_id = 0
2285
2286 for stream_name, stream_node in streams_node.items():
2287 try:
2288 stream = self._create_stream(stream_node)
2289 except Exception as e:
2290 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2291
2292 stream.id = cur_id
2293 stream.name = str(stream_name)
2294 streams[stream_name] = stream
2295 cur_id += 1
2296
2297 return streams
2298
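# Illustrative example (not from the original source): a "streams"
# object as _create_streams() above accepts it; each value is passed to
# _create_stream(). The names and aliases are hypothetical, and the
# validators used elsewhere in this module impose further requirements
# (for example on the packet context fields).
#
#     streams:
#       default:
#         packet-context-type: my_packet_context
#         events:
#           my_event:
#             payload-type: my_payload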
2299 def _create_metadata(self, root):
2300 meta = metadata.Metadata()
2301
2302 if 'metadata' not in root:
2303 raise ConfigError('missing "metadata" property (configuration)')
2304
2305 metadata_node = root['metadata']
2306
2307 if not _is_assoc_array_prop(metadata_node):
2308 raise ConfigError('"metadata" property (configuration) must be an associative array')
2309
2310 known_props = [
2311 'type-aliases',
2312 'log-levels',
2313 'trace',
2314 'env',
2315 'clocks',
2316 'streams',
2317 ]
2318
2319 if self._version >= 201:
2320 known_props.append('$log-levels')
2321
2322 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
2323
2324 if unk_prop:
2325 add = ''
2326
2327 if unk_prop == '$include':
2328 add = ' (use version 2.1 or greater)'
2329
2330 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
2331
2332 self._set_byte_order(metadata_node)
2333 self._register_clocks(metadata_node)
2334 meta.clocks = self._clocks
2335 self._register_type_aliases(metadata_node)
2336 meta.env = self._create_env(metadata_node)
2337 meta.trace = self._create_trace(metadata_node)
2338 self._register_log_levels(metadata_node)
2339 meta.streams = self._create_streams(metadata_node)
2340
2341 return meta
2342
2343 def _get_version(self, root):
2344 if 'version' not in root:
2345 raise ConfigError('missing "version" property (configuration)')
2346
2347 version_node = root['version']
2348
2349 if not _is_str_prop(version_node):
2350 raise ConfigError('"version" property (configuration) must be a string')
2351
2352 version_node = version_node.strip()
2353
2354 if version_node not in ['2.0', '2.1']:
2355 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2356
2357 # convert version string to comparable version integer
2358 parts = version_node.split('.')
2359 version = int(parts[0]) * 100 + int(parts[1])
2360
2361 return version
2362
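# For example, "2.0" becomes 200 and "2.1" becomes 201, so later code
# can compare configuration versions with plain integer comparisons
# such as "self._version >= 201".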
2363 def _get_prefix(self, root):
2364 def_prefix = 'barectf_'
2365
2366 if 'prefix' not in root:
2367 return def_prefix
2368
2369 prefix_node = root['prefix']
2370
2371 if prefix_node is None:
2372 return def_prefix
2373
2374 if not _is_str_prop(prefix_node):
2375 raise ConfigError('"prefix" property (configuration) must be a string')
2376
2377 if not is_valid_identifier(prefix_node):
2378 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
2379
2380 return prefix_node
2381
2382 def _get_last_include_file(self):
2383 if self._include_stack:
2384 return self._include_stack[-1]
2385
2386 return self._root_yaml_path
2387
2388 def _load_include(self, yaml_path):
2389 for inc_dir in self._include_dirs:
2390 # current include dir + file name path
2391 # note: if yaml_path is absolute, os.path.join() ignores inc_dir and returns yaml_path as is
2392 inc_path = os.path.join(inc_dir, yaml_path)
2393
2394 # real path (symbolic links resolved)
2395 real_path = os.path.realpath(inc_path)
2396
2397 # normalized path (weird stuff removed!)
2398 norm_path = os.path.normpath(real_path)
2399
2400 if not os.path.isfile(norm_path):
2401 # file does not exist: skip
2402 continue
2403
2404 if norm_path in self._include_stack:
2405 base_path = self._get_last_include_file()
2406 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2407
2408 self._include_stack.append(norm_path)
2409
2410 # load raw content
2411 return self._yaml_ordered_load(norm_path)
2412
2413 if not self._ignore_include_not_found:
2414 base_path = self._get_last_include_file()
2415 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2416
2417 return None
2418
2419 def _get_include_paths(self, include_node):
2420 if _is_str_prop(include_node):
2421 return [include_node]
2422 elif _is_array_prop(include_node):
2423 for include_path in include_node:
2424 if not _is_str_prop(include_path):
2425 raise ConfigError('invalid include property: expecting array of strings')
2426
2427 return include_node
2428
2429 raise ConfigError('invalid include property: expecting string or array of strings')
2430
2431 def _update_node(self, base_node, overlay_node):
2432 for olay_key, olay_value in overlay_node.items():
2433 if olay_key in base_node:
2434 base_value = base_node[olay_key]
2435
2436 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2437 # merge dictionaries
2438 self._update_node(base_value, olay_value)
2439 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2440 # append extension array items to base items
2441 base_value += olay_value
2442 else:
2443 # fall back to replacing
2444 base_node[olay_key] = olay_value
2445 else:
2446 base_node[olay_key] = olay_value
2447
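# Illustrative example (not from the original source) of the merge
# performed by _update_node() above: with a base node
# {'a': {'x': 1, 'y': 2}, 'l': [1]} and an overlay node
# {'a': {'x': 3}, 'l': [2], 'z': 0}, the base node becomes
# {'a': {'x': 3, 'y': 2}, 'l': [1, 2], 'z': 0}: nested associative
# arrays are merged recursively, arrays are concatenated, and any other
# value is replaced by the overlay value.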
2448 def _process_node_include(self, last_overlay_node, name,
2449 process_base_include_cb,
2450 process_children_include_cb=None):
2451 if not _is_assoc_array_prop(last_overlay_node):
2452 raise ConfigError('{} objects must be associative arrays'.format(name))
2453
2454 # process children inclusions first
2455 if process_children_include_cb:
2456 process_children_include_cb(last_overlay_node)
2457
2458 if '$include' in last_overlay_node:
2459 include_node = last_overlay_node['$include']
2460 else:
2461 # no includes!
2462 return last_overlay_node
2463
2464 include_paths = self._get_include_paths(include_node)
2465 cur_base_path = self._get_last_include_file()
2466 base_node = None
2467
2468 # keep the include paths and remove the include property
2469 include_paths = copy.deepcopy(include_paths)
2470 del last_overlay_node['$include']
2471
2472 for include_path in include_paths:
2473 # load raw YAML from included file
2474 overlay_node = self._load_include(include_path)
2475
2476 if overlay_node is None:
2477 # cannot find include file, but we're ignoring those
2478 # errors, otherwise _load_include() itself raises
2479 # a config error
2480 continue
2481
2482 # recursively process includes
2483 try:
2484 overlay_node = process_base_include_cb(overlay_node)
2485 except Exception as e:
2486 raise ConfigError('in "{}"'.format(cur_base_path), e)
2487
2488 # pop include stack now that we're done including
2489 del self._include_stack[-1]
2490
2491 # at this point, base_node is fully resolved (does not
2492 # contain any include property)
2493 if base_node is None:
2494 base_node = overlay_node
2495 else:
2496 self._update_node(base_node, overlay_node)
2497
2498 # finally, we update the latest base node with our last overlay
2499 # node
2500 if base_node is None:
2501 # nothing was included, which is possible when we're
2502 # ignoring include errors
2503 return last_overlay_node
2504
2505 self._update_node(base_node, last_overlay_node)
2506
2507 return base_node
2508
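# Illustrative example (not from the original source): an object using
# the "$include" property as handled by _process_node_include() above.
# The file names are hypothetical; they are looked up in the configured
# include directories, and the including object's own properties
# overlay the included content.
#
#     $include:
#       - base-event.yaml
#       - project-event.yaml
#     log-level: 5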
2509 def _process_event_include(self, event_node):
2510 return self._process_node_include(event_node, 'event',
2511 self._process_event_include)
2512
2513 def _process_stream_include(self, stream_node):
2514 def process_children_include(stream_node):
2515 if 'events' in stream_node:
2516 events_node = stream_node['events']
2517
2518 if not _is_assoc_array_prop(events_node):
2519 raise ConfigError('"events" property must be an associative array')
2520
2521 events_node_keys = list(events_node.keys())
2522
2523 for key in events_node_keys:
2524 event_node = events_node[key]
2525
2526 try:
2527 events_node[key] = self._process_event_include(event_node)
2528 except Exception as e:
2529 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2530
2531 return self._process_node_include(stream_node, 'stream',
2532 self._process_stream_include,
2533 process_children_include)
2534
2535 def _process_trace_include(self, trace_node):
2536 return self._process_node_include(trace_node, 'trace',
2537 self._process_trace_include)
2538
2539 def _process_clock_include(self, clock_node):
2540 return self._process_node_include(clock_node, 'clock',
2541 self._process_clock_include)
2542
2543 def _process_metadata_include(self, metadata_node):
2544 def process_children_include(metadata_node):
2545 if 'trace' in metadata_node:
2546 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2547
2548 if 'clocks' in metadata_node:
2549 clocks_node = metadata_node['clocks']
2550
2551 if not _is_assoc_array_prop(clocks_node):
2552 raise ConfigError('"clocks" property (metadata) must be an associative array')
2553
2554 clocks_node_keys = list(clocks_node.keys())
2555
2556 for key in clocks_node_keys:
2557 clock_node = clocks_node[key]
2558
2559 try:
2560 clocks_node[key] = self._process_clock_include(clock_node)
2561 except Exception as e:
2562 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2563
2564 if 'streams' in metadata_node:
2565 streams_node = metadata_node['streams']
2566
2567 if not _is_assoc_array_prop(streams_node):
2568 raise ConfigError('"streams" property (metadata) must be an associative array')
2569
2570 streams_node_keys = list(streams_node.keys())
2571
2572 for key in streams_node_keys:
2573 stream_node = streams_node[key]
2574
2575 try:
2576 streams_node[key] = self._process_stream_include(stream_node)
2577 except Exception as e:
2578 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2579
2580 return self._process_node_include(metadata_node, 'metadata',
2581 self._process_metadata_include,
2582 process_children_include)
2583
2584 def _process_root_includes(self, root):
2585 # The following config objects support includes:
2586 #
2587 # * Metadata object
2588 # * Trace object
2589 # * Stream object
2590 # * Event object
2591 #
2592 # We need to process the event includes first, then the stream
2593 # includes, then the trace includes, and finally the metadata
2594 # includes.
2595 #
2596 # In each object, only one of the $include and $include-replace
2597 # special properties is allowed.
2598 #
2599 # We keep a stack of absolute paths to included files to detect
2600 # recursion.
2601 if 'metadata' in root:
2602 root['metadata'] = self._process_metadata_include(root['metadata'])
2603
2604 return root
2605
2606 def _yaml_ordered_dump(self, node, **kwds):
2607 class ODumper(yaml.Dumper):
2608 pass
2609
2610 def dict_representer(dumper, node):
2611 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2612 node.items())
2613
2614 ODumper.add_representer(collections.OrderedDict, dict_representer)
2615
2616 return yaml.dump(node, Dumper=ODumper, **kwds)
2617
2618 def _yaml_ordered_load(self, yaml_path):
2619 class OLoader(yaml.Loader):
2620 pass
2621
2622 def construct_mapping(loader, node):
2623 loader.flatten_mapping(node)
2624
2625 return collections.OrderedDict(loader.construct_pairs(node))
2626
2627 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2628 construct_mapping)
2629
2630 # YAML -> Python
2631 try:
2632 with open(yaml_path, 'r') as f:
2633 node = yaml.load(f, OLoader)
2634 except (OSError, IOError) as e:
2635 raise ConfigError('cannot open file "{}"'.format(yaml_path))
2636 except Exception as e:
2637 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2638
2639 # the loaded node must be an associative array
2640 if not _is_assoc_array_prop(node):
2641 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2642
2643 return node
2644
2645 def _reset(self):
2646 self._version = None
2647 self._include_stack = []
2648
2649 def parse(self, yaml_path):
2650 self._reset()
2651 self._root_yaml_path = yaml_path
2652
2653 try:
2654 root = self._yaml_ordered_load(yaml_path)
2655 except Exception as e:
2656 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2657
2658 if not _is_assoc_array_prop(root):
2659 raise ConfigError('configuration must be an associative array')
2660
2661 unk_prop = _get_first_unknown_prop(root, [
2662 'version',
2663 'prefix',
2664 'metadata',
2665 ])
2666
2667 if unk_prop:
2668 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
2669
2670 # get the config version
2671 self._version = self._get_version(root)
2672
2673 # process includes if supported
2674 if self._version >= 201:
2675 root = self._process_root_includes(root)
2676
2677 # dump config if required
2678 if self._dump_config:
2679 print(self._yaml_ordered_dump(root, indent=2,
2680 default_flow_style=False))
2681
2682 # get prefix and metadata
2683 prefix = self._get_prefix(root)
2684 meta = self._create_metadata(root)
2685
2686 return Config(self._version, prefix, meta)
2687
2688
2689 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2690 try:
2691 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2692 dump_config)
2693 cfg = parser.parse(path)
2694
2695 return cfg
2696 except Exception as e:
2697 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)
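# Minimal usage sketch (kept as a comment so nothing runs at import
# time); the path and include directories are hypothetical:
#
#     from barectf import config
#
#     cfg = config.from_yaml_file('config.yaml',
#                                 include_dirs=['.', 'include'],
#                                 ignore_include_not_found=False,
#                                 dump_config=False)
#     # cfg.version, cfg.prefix and cfg.metadata are then available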