1# The MIT License (MIT)
2#
# Copyright (c) 2015-2016 Philippe Proulx <pproulx@efficios.com>
4#
5# Permission is hereby granted, free of charge, to any person obtaining a copy
6# of this software and associated documentation files (the "Software"), to deal
7# in the Software without restriction, including without limitation the rights
8# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9# copies of the Software, and to permit persons to whom the Software is
10# furnished to do so, subject to the following conditions:
11#
12# The above copyright notice and this permission notice shall be included in
13# all copies or substantial portions of the Software.
14#
15# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21# THE SOFTWARE.
22
23from barectf import metadata
24import collections
25import datetime
26import barectf
27import enum
28import yaml
29import uuid
30import copy
31import re
import os
33
34
35class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45class Config:
46 def __init__(self, version, prefix, metadata):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50
51 def _validate_metadata(self, meta):
52 try:
53 validator = _MetadataTypesHistologyValidator()
54 validator.validate(meta)
55 validator = _MetadataDynamicTypesValidator()
56 validator.validate(meta)
57 validator = _MetadataSpecialFieldsValidator()
58 validator.validate(meta)
59 except Exception as e:
60 raise ConfigError('metadata error', e)
61
62 try:
63 validator = _BarectfMetadataValidator()
64 validator.validate(meta)
65 except Exception as e:
66 raise ConfigError('barectf metadata error', e)
67
68 def _augment_metadata_env(self, meta):
        version_tuple = barectf.get_version_tuple()
70 base_env = {
71 'domain': 'bare',
            'tracer_name': 'barectf',
73 'tracer_major': version_tuple[0],
74 'tracer_minor': version_tuple[1],
75 'tracer_patch': version_tuple[2],
76 'barectf_gen_date': str(datetime.datetime.now().isoformat()),
77 }
78
79 base_env.update(meta.env)
80 meta.env = base_env
81
82 @property
83 def version(self):
84 return self._version
85
86 @version.setter
87 def version(self, value):
88 self._version = value
89
90 @property
91 def metadata(self):
92 return self._metadata
93
94 @metadata.setter
95 def metadata(self, value):
96 self._validate_metadata(value)
97 self._augment_metadata_env(value)
98 self._metadata = value
99
100 @property
101 def prefix(self):
102 return self._prefix
103
104 @prefix.setter
105 def prefix(self, value):
        if not _is_valid_identifier(value):
            raise ConfigError('prefix must be a valid C identifier')
108
109 self._prefix = value
110
111
112def _is_assoc_array_prop(node):
113 return isinstance(node, dict)
114
115
116def _is_array_prop(node):
117 return isinstance(node, list)
118
119
120def _is_int_prop(node):
121 return type(node) is int
122
123
124def _is_str_prop(node):
125 return type(node) is str
126
127
128def _is_bool_prop(node):
129 return type(node) is bool
130
131
132def _is_valid_alignment(align):
133 return ((align & (align - 1)) == 0) and align > 0
134
135
136def _byte_order_str_to_bo(bo_str):
137 bo_str = bo_str.lower()
138
139 if bo_str == 'le':
140 return metadata.ByteOrder.LE
141 elif bo_str == 'be':
142 return metadata.ByteOrder.BE
143
144
145def _encoding_str_to_encoding(encoding_str):
146 encoding_str = encoding_str.lower()
147
148 if encoding_str == 'utf-8' or encoding_str == 'utf8':
149 return metadata.Encoding.UTF8
150 elif encoding_str == 'ascii':
151 return metadata.Encoding.ASCII
152 elif encoding_str == 'none':
153 return metadata.Encoding.NONE
154
155
156_re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
157_ctf_keywords = set([
158 'align',
159 'callsite',
160 'clock',
161 'enum',
162 'env',
163 'event',
164 'floating_point',
165 'integer',
166 'stream',
167 'string',
168 'struct',
169 'trace',
170 'typealias',
171 'typedef',
172 'variant',
173])
174
175
def _is_valid_identifier(iden):
    if not _re_iden.match(iden):
        return False

    if iden in _ctf_keywords:
        return False

    return True
184
185
186def _get_first_unknown_prop(node, known_props):
187 for prop_name in node:
188 if prop_name in known_props:
189 continue
190
191 return prop_name
192
193
# This validator validates the configured metadata for barectf-specific
# needs.
#
# barectf needs:
#
# * all headers/contexts to be at least byte-aligned
# * all integer and floating point number sizes to be <= 64
# * no inner structures, arrays, or variants
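#
# For example, an event payload field declared as a 128-bit integer, or a
# structure type nested inside another structure type, is rejected by this
# validator.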
202class _BarectfMetadataValidator:
203 def __init__(self):
204 self._type_to_validate_type_func = {
205 metadata.Integer: self._validate_int_type,
206 metadata.FloatingPoint: self._validate_float_type,
207 metadata.Enum: self._validate_enum_type,
208 metadata.String: self._validate_string_type,
209 metadata.Struct: self._validate_struct_type,
210 metadata.Array: self._validate_array_type,
211 metadata.Variant: self._validate_variant_type,
212 }
213
    def _validate_int_type(self, t, entity_root):
        if t.size > 64:
            raise ConfigError('integer type\'s size must be less than or equal to 64 bits')

    def _validate_float_type(self, t, entity_root):
        if t.size > 64:
            raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')

    def _validate_enum_type(self, t, entity_root):
        if t.value_type.size > 64:
            raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
225
226 def _validate_string_type(self, t, entity_root):
227 pass
228
229 def _validate_struct_type(self, t, entity_root):
230 if not entity_root:
231 raise ConfigError('inner structure types are not supported as of this version')
232
233 for field_name, field_type in t.fields.items():
234 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
235 if field_name == 'uuid':
236 # allow
237 continue
238
239 try:
240 self._validate_type(field_type, False)
241 except Exception as e:
242 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
243
244 def _validate_array_type(self, t, entity_root):
245 raise ConfigError('array types are not supported as of this version')
246
247 def _validate_variant_type(self, t, entity_root):
248 raise ConfigError('variant types are not supported as of this version')
249
250 def _validate_type(self, t, entity_root):
251 self._type_to_validate_type_func[type(t)](t, entity_root)
252
253 def _validate_entity(self, t):
254 if t is None:
255 return
256
257 # make sure entity is byte-aligned
258 if t.align < 8:
259 raise ConfigError('type\'s alignment must be at least byte-aligned')
260
261 # make sure entity is a structure
262 if type(t) is not metadata.Struct:
263 raise ConfigError('expecting a structure type')
264
265 # validate types
266 self._validate_type(t, True)
267
268 def _validate_entities_and_names(self, meta):
269 self._cur_entity = _Entity.TRACE_PACKET_HEADER
270
271 try:
272 self._validate_entity(meta.trace.packet_header_type)
273 except Exception as e:
274 raise ConfigError('invalid trace packet header type', e)
275
276 for stream_name, stream in meta.streams.items():
            if not _is_valid_identifier(stream_name):
                raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
279
280 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
281
282 try:
283 self._validate_entity(stream.packet_context_type)
284 except Exception as e:
285 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
286
287 self._cur_entity = _Entity.STREAM_EVENT_HEADER
288
289 try:
290 self._validate_entity(stream.event_header_type)
291 except Exception as e:
292 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
293
294 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
295
296 try:
297 self._validate_entity(stream.event_context_type)
298 except Exception as e:
299 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
300
301 try:
302 for ev_name, ev in stream.events.items():
                    if not _is_valid_identifier(ev_name):
                        raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
305
306 self._cur_entity = _Entity.EVENT_CONTEXT
307
308 try:
309 self._validate_entity(ev.context_type)
310 except Exception as e:
311 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
312
313 self._cur_entity = _Entity.EVENT_PAYLOAD
314
315 try:
316 self._validate_entity(ev.payload_type)
317 except Exception as e:
318 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
319
320 if stream.is_event_empty(ev):
321 raise ConfigError('event "{}" is empty'.format(ev_name))
322 except Exception as e:
323 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
324
325 def validate(self, meta):
326 self._validate_entities_and_names(meta)
327
328
# This validator validates special fields of trace, stream, and event
# types. For example, it checks that the "stream_id" field exists in the
# trace packet header if there's more than one stream, and much more.
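#
# For instance, if a stream packet context type declares a
# "timestamp_begin" field, it must also declare "timestamp_end", both must
# be unsigned integer types, and both must be mapped to the same clock.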
332class _MetadataSpecialFieldsValidator:
333 def _validate_trace_packet_header_type(self, t):
334 # needs "stream_id" field?
335 if len(self._meta.streams) > 1:
336 # yes
            if t is None:
                raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is missing')

            if type(t) is not metadata.Struct:
                raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is not a structure type')

            if 'stream_id' not in t.fields:
                raise ConfigError('need "stream_id" field in trace packet header type (more than one stream)')
345
346 # validate "magic" and "stream_id" types
347 if type(t) is not metadata.Struct:
348 return
349
350 for i, (field_name, field_type) in enumerate(t.fields.items()):
351 if field_name == 'magic':
352 if type(field_type) is not metadata.Integer:
353 raise ConfigError('"magic" field in trace packet header type must be an integer type')
354
355 if field_type.signed or field_type.size != 32:
356 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
357
358 if i != 0:
359 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
360 elif field_name == 'stream_id':
361 if type(field_type) is not metadata.Integer:
362 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
363
364 if field_type.signed:
365 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')

                # "stream_id" size can fit all stream IDs
                if len(self._meta.streams) > (1 << field_type.size):
                    raise ConfigError('"stream_id" field\'s size in trace packet header type is too small for the number of trace streams')
370 elif field_name == 'uuid':
371 if self._meta.trace.uuid is None:
372 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
373
374 if type(field_type) is not metadata.Array:
375 raise ConfigError('"uuid" field in trace packet header type must be an array')
376
377 if field_type.length != 16:
378 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
379
380 element_type = field_type.element_type
381
382 if type(element_type) is not metadata.Integer:
                    raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')

                if element_type.size != 8:
                    raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')

                if element_type.signed:
                    raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')

                if element_type.align != 8:
                    raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
393
394 def _validate_trace(self, meta):
395 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
396
397 def _validate_stream_packet_context(self, stream):
398 t = stream.packet_context_type
399
        if t is None:
            raise ConfigError('missing "packet-context-type" property in stream object')

        if type(t) is not metadata.Struct:
            raise ConfigError('"packet-context-type": expecting a structure type')
405
406 # "timestamp_begin", if exists, is an unsigned integer type,
407 # mapped to a clock
e4ac07c8
PP
408 ts_begin = None
409
e5aa0be3
PP
410 if 'timestamp_begin' in t.fields:
411 ts_begin = t.fields['timestamp_begin']
412
413 if type(ts_begin) is not metadata.Integer:
414 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
415
416 if ts_begin.signed:
417 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
418
419 if not ts_begin.property_mappings:
420 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
421
422 # "timestamp_end", if exists, is an unsigned integer type,
423 # mapped to a clock
e4ac07c8
PP
424 ts_end = None
425
e5aa0be3
PP
426 if 'timestamp_end' in t.fields:
427 ts_end = t.fields['timestamp_end']
428
429 if type(ts_end) is not metadata.Integer:
430 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
431
432 if ts_end.signed:
433 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
434
435 if not ts_end.property_mappings:
436 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
437
438 # "timestamp_begin" and "timestamp_end" exist together
439 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
440 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
441
442 # "timestamp_begin" and "timestamp_end" are mapped to the same clock
443 if ts_begin is not None and ts_end is not None:
444 if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
445 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be mapped to the same clock object in stream packet context type')
446
447 # "events_discarded", if exists, is an unsigned integer type
448 if 'events_discarded' in t.fields:
449 events_discarded = t.fields['events_discarded']
450
451 if type(events_discarded) is not metadata.Integer:
452 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
453
454 if events_discarded.signed:
455 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
456
457 # "packet_size" and "content_size" must exist
458 if 'packet_size' not in t.fields:
459 raise ConfigError('missing "packet_size" field in stream packet context type')
460
461 packet_size = t.fields['packet_size']
462
463 # "content_size" and "content_size" must exist
464 if 'content_size' not in t.fields:
465 raise ConfigError('missing "content_size" field in stream packet context type')
466
467 content_size = t.fields['content_size']
468
469 # "packet_size" is an unsigned integer type
470 if type(packet_size) is not metadata.Integer:
471 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
472
473 if packet_size.signed:
474 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
475
476 # "content_size" is an unsigned integer type
477 if type(content_size) is not metadata.Integer:
478 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
479
480 if content_size.signed:
481 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
482
        # "packet_size" size should be greater than or equal to "content_size" size
        if content_size.size > packet_size.size:
            raise ConfigError('"content_size" field size must be less than or equal to "packet_size" field size')

487 def _validate_stream_event_header(self, stream):
488 t = stream.event_header_type
489
490 # needs "id" field?
491 if len(stream.events) > 1:
492 # yes
493 if t is None:
                raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is missing')

            if type(t) is not metadata.Struct:
                raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is not a structure type')

            if 'id' not in t.fields:
                raise ConfigError('need "id" field in stream event header type (more than one event)')
501
502 # validate "id" and "timestamp" types
503 if type(t) is not metadata.Struct:
504 return
505
506 # "timestamp", if exists, is an unsigned integer type,
507 # mapped to a clock
508 if 'timestamp' in t.fields:
509 ts = t.fields['timestamp']
510
511 if type(ts) is not metadata.Integer:
                raise ConfigError('"timestamp" field in stream event header type must be an integer type')

            if ts.signed:
                raise ConfigError('"timestamp" field in stream event header type must be an unsigned integer type')

            if not ts.property_mappings:
                raise ConfigError('"timestamp" field in stream event header type must be mapped to a clock')

520 if 'id' in t.fields:
521 eid = t.fields['id']
522
            # "id" is an unsigned integer type
524 if type(eid) is not metadata.Integer:
525 raise ConfigError('"id" field in stream event header type must be an integer type')
526
527 if eid.signed:
528 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
529
            # "id" size can fit all event IDs
            if len(stream.events) > (1 << eid.size):
                raise ConfigError('"id" field\'s size in stream event header type is too small for the number of stream events')

534 def _validate_stream(self, stream):
535 self._validate_stream_packet_context(stream)
536 self._validate_stream_event_header(stream)
537
538 def validate(self, meta):
539 self._meta = meta
540 self._validate_trace(meta)
541
542 for stream in meta.streams.values():
543 try:
544 self._validate_stream(stream)
545 except Exception as e:
546 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
547
548
549class _MetadataDynamicTypesValidatorStackEntry:
550 def __init__(self, base_t):
551 self._base_t = base_t
552 self._index = 0
553
554 @property
555 def index(self):
556 return self._index
557
558 @index.setter
559 def index(self, value):
560 self._index = value
561
562 @property
563 def base_t(self):
564 return self._base_t
565
566 @base_t.setter
567 def base_t(self, value):
568 self._base_t = value
569
570
# Entities. The order of values is important here: the dynamic type
# validator compares these values (with <) to make sure a path does not
# refer to an entity which is resolved after the current one.
572@enum.unique
573class _Entity(enum.IntEnum):
574 TRACE_PACKET_HEADER = 0
575 STREAM_PACKET_CONTEXT = 1
576 STREAM_EVENT_HEADER = 2
577 STREAM_EVENT_CONTEXT = 3
578 EVENT_CONTEXT = 4
579 EVENT_PAYLOAD = 5
580
581
# This validator validates dynamic metadata types, that is, it ensures
# variable-length array lengths and variant tags actually point to
# something that exists. It also checks that variable-length array
# lengths point to integer types and variant tags to enumeration types.
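#
# For example (illustrative field name), a variable-length array's "length"
# property may be an absolute path such as "stream.packet.context.my_len",
# or a single relative name such as "my_len", which is looked up among the
# fields already declared in the enclosing structure/variant types of the
# current entity.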
586class _MetadataDynamicTypesValidator:
587 def __init__(self):
588 self._type_to_visit_type_func = {
589 metadata.Integer: None,
590 metadata.FloatingPoint: None,
591 metadata.Enum: None,
592 metadata.String: None,
593 metadata.Struct: self._visit_struct_type,
594 metadata.Array: self._visit_array_type,
595 metadata.Variant: self._visit_variant_type,
596 }
597
598 self._cur_trace = None
599 self._cur_stream = None
600 self._cur_event = None
601
602 def _lookup_path_from_base(self, path, parts, base, start_index,
603 base_is_current, from_t):
604 index = start_index
605 cur_t = base
606 found_path = []
607
608 while index < len(parts):
609 part = parts[index]
610 next_t = None
611
612 if type(cur_t) is metadata.Struct:
613 enumerated_items = enumerate(cur_t.fields.items())
614
615 # lookup each field
616 for i, (field_name, field_type) in enumerated_items:
617 if field_name == part:
618 next_t = field_type
619 found_path.append((i, field_type))
620
621 if next_t is None:
622 raise ConfigError('invalid path "{}": cannot find field "{}" in structure type'.format(path, part))
623 elif type(cur_t) is metadata.Variant:
624 enumerated_items = enumerate(cur_t.types.items())
625
626 # lookup each type
627 for i, (type_name, type_type) in enumerated_items:
628 if type_name == part:
629 next_t = type_type
630 found_path.append((i, type_type))
631
632 if next_t is None:
633 raise ConfigError('invalid path "{}": cannot find type "{}" in variant type'.format(path, part))
634 else:
635 raise ConfigError('invalid path "{}": requesting "{}" in a non-variant, non-structure type'.format(path, part))
636
637 cur_t = next_t
638 index += 1
639
640 # make sure that the pointed type is not the pointing type
641 if from_t is cur_t:
642 raise ConfigError('invalid path "{}": pointing to self'.format(path))
643
644 # if we're here, we found the type; however, it could be located
645 # _after_ the variant/VLA looking for it, if the pointing
646 # and pointed types are in the same entity, so compare the
647 # current stack entries indexes to our index path in that case
648 if not base_is_current:
649 return cur_t
650
651 for index, entry in enumerate(self._stack):
652 if index == len(found_path):
653 # end of index path; valid so far
654 break
655
656 if found_path[index][0] > entry.index:
657 raise ConfigError('invalid path "{}": pointed type is after pointing type'.format(path))
658
659 # also make sure that both pointed and pointing types share
660 # a common structure ancestor
661 for index, entry in enumerate(self._stack):
662 if index == len(found_path):
663 break
664
665 if entry.base_t is not found_path[index][1]:
666 # found common ancestor
667 if type(entry.base_t) is metadata.Variant:
668 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
669
670 return cur_t
671
672 def _lookup_path_from_top(self, path, parts):
673 if len(parts) != 1:
674 raise ConfigError('invalid path "{}": multipart relative path not supported'.format(path))
675
676 find_name = parts[0]
677 index = len(self._stack) - 1
678 got_struct = False
679
680 # check stack entries in reversed order
681 for entry in reversed(self._stack):
682 # structure base type
683 if type(entry.base_t) is metadata.Struct:
684 got_struct = True
685 enumerated_items = enumerate(entry.base_t.fields.items())
686
687 # lookup each field, until the current visiting index is met
688 for i, (field_name, field_type) in enumerated_items:
689 if i == entry.index:
690 break
691
692 if field_name == find_name:
693 return field_type
694
695 # variant base type
696 elif type(entry.base_t) is metadata.Variant:
697 enumerated_items = enumerate(entry.base_t.types.items())
698
699 # lookup each type, until the current visiting index is met
700 for i, (type_name, type_type) in enumerated_items:
701 if i == entry.index:
702 break
703
704 if type_name == find_name:
705 if not got_struct:
706 raise ConfigError('invalid path "{}": type cannot be reached because pointed and pointing types are in the same variant type'.format(path))
707
708 return type_type
709
710 # nothing returned here: cannot find type
711 raise ConfigError('invalid path "{}": cannot find type in current context'.format(path))
712
713 def _lookup_path(self, path, from_t):
714 parts = path.lower().split('.')
715 base = None
716 base_is_current = False
717
718 if len(parts) >= 3:
719 if parts[0] == 'trace':
720 if parts[1] == 'packet' and parts[2] == 'header':
721 # make sure packet header exists
722 if self._cur_trace.packet_header_type is None:
723 raise ConfigError('invalid path "{}": no defined trace packet header type'.format(path))
724
725 base = self._cur_trace.packet_header_type
726
727 if self._cur_entity == _Entity.TRACE_PACKET_HEADER:
728 base_is_current = True
729 else:
730 raise ConfigError('invalid path "{}": unknown names after "trace"'.format(path))
731 elif parts[0] == 'stream':
732 if parts[1] == 'packet' and parts[2] == 'context':
733 if self._cur_entity < _Entity.STREAM_PACKET_CONTEXT:
734 raise ConfigError('invalid path "{}": cannot access stream packet context here'.format(path))
735
736 if self._cur_stream.packet_context_type is None:
737 raise ConfigError('invalid path "{}": no defined stream packet context type'.format(path))
738
739 base = self._cur_stream.packet_context_type
740
741 if self._cur_entity == _Entity.STREAM_PACKET_CONTEXT:
742 base_is_current = True
743 elif parts[1] == 'event':
744 if parts[2] == 'header':
745 if self._cur_entity < _Entity.STREAM_EVENT_HEADER:
746 raise ConfigError('invalid path "{}": cannot access stream event header here'.format(path))
747
748 if self._cur_stream.event_header_type is None:
749 raise ConfigError('invalid path "{}": no defined stream event header type'.format(path))
750
751 base = self._cur_stream.event_header_type
752
753 if self._cur_entity == _Entity.STREAM_EVENT_HEADER:
754 base_is_current = True
755 elif parts[2] == 'context':
756 if self._cur_entity < _Entity.STREAM_EVENT_CONTEXT:
757 raise ConfigError('invalid path "{}": cannot access stream event context here'.format(path))
758
759 if self._cur_stream.event_context_type is None:
760 raise ConfigError('invalid path "{}": no defined stream event context type'.format(path))
761
762 base = self._cur_stream.event_context_type
763
764 if self._cur_entity == _Entity.STREAM_EVENT_CONTEXT:
765 base_is_current = True
766 else:
767 raise ConfigError('invalid path "{}": unknown names after "stream.event"'.format(path))
768 else:
769 raise ConfigError('invalid path "{}": unknown names after "stream"'.format(path))
770
771 if base is not None:
772 start_index = 3
773
774 if len(parts) >= 2 and base is None:
775 if parts[0] == 'event':
776 if parts[1] == 'context':
777 if self._cur_entity < _Entity.EVENT_CONTEXT:
778 raise ConfigError('invalid path "{}": cannot access event context here'.format(path))
779
780 if self._cur_event.context_type is None:
781 raise ConfigError('invalid path "{}": no defined event context type'.format(path))
782
783 base = self._cur_event.context_type
784
785 if self._cur_entity == _Entity.EVENT_CONTEXT:
786 base_is_current = True
787 elif parts[1] == 'payload' or parts[1] == 'fields':
788 if self._cur_entity < _Entity.EVENT_PAYLOAD:
789 raise ConfigError('invalid path "{}": cannot access event payload here'.format(path))
790
791 if self._cur_event.payload_type is None:
792 raise ConfigError('invalid path "{}": no defined event payload type'.format(path))
793
794 base = self._cur_event.payload_type
795
796 if self._cur_entity == _Entity.EVENT_PAYLOAD:
797 base_is_current = True
798 else:
799 raise ConfigError('invalid path "{}": unknown names after "event"'.format(path))
800
801 if base is not None:
802 start_index = 2
803
804 if base is not None:
805 return self._lookup_path_from_base(path, parts, base, start_index,
806 base_is_current, from_t)
807 else:
808 return self._lookup_path_from_top(path, parts)
809
810 def _stack_reset(self):
811 self._stack = []
812
813 def _stack_push(self, base_t):
814 entry = _MetadataDynamicTypesValidatorStackEntry(base_t)
815 self._stack.append(entry)
816
817 def _stack_pop(self):
818 self._stack.pop()
819
820 def _stack_incr_index(self):
821 self._stack[-1].index += 1
822
823 def _visit_struct_type(self, t):
824 self._stack_push(t)
825
826 for field_name, field_type in t.fields.items():
827 try:
828 self._visit_type(field_type)
829 except Exception as e:
830 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
831
832 self._stack_incr_index()
833
834 self._stack_pop()
835
836 def _visit_array_type(self, t):
        if t.is_variable_length:
838 # find length type
839 try:
840 length_type = self._lookup_path(t.length, t)
841 except Exception as e:
842 raise ConfigError('invalid array type\'s length', e)
843
844 # make sure length type an unsigned integer
845 if type(length_type) is not metadata.Integer:
846 raise ConfigError('array type\'s length does not point to an integer type')
847
848 if length_type.signed:
849 raise ConfigError('array type\'s length does not point to an unsigned integer type')
850
851 self._visit_type(t.element_type)
852
853 def _visit_variant_type(self, t):
854 # find tag type
855 try:
856 tag_type = self._lookup_path(t.tag, t)
857 except Exception as e:
858 raise ConfigError('invalid variant type\'s tag', e)
859
860 # make sure tag type is an enumeration
861 if type(tag_type) is not metadata.Enum:
862 raise ConfigError('variant type\'s tag does not point to an enumeration type')
863
864 # verify that each variant type's type exists as an enumeration member
865 for tag_name in t.types.keys():
866 if tag_name not in tag_type.members:
867 raise ConfigError('cannot find variant type\'s type "{}" in pointed tag type'.format(tag_name))
868
869 self._stack_push(t)
870
871 for type_name, type_type in t.types.items():
872 try:
873 self._visit_type(type_type)
874 except Exception as e:
875 raise ConfigError('in variant type\'s type "{}"'.format(type_name), e)
876
877 self._stack_incr_index()
878
879 self._stack_pop()
880
881 def _visit_type(self, t):
882 if t is None:
883 return
884
885 if type(t) in self._type_to_visit_type_func:
886 func = self._type_to_visit_type_func[type(t)]
887
888 if func is not None:
889 func(t)
890
891 def _visit_event(self, ev):
892 ev_name = ev.name
893
894 # set current event
895 self._cur_event = ev
896
897 # visit event context type
898 self._stack_reset()
899 self._cur_entity = _Entity.EVENT_CONTEXT
900
901 try:
902 self._visit_type(ev.context_type)
903 except Exception as e:
904 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
905
906 # visit event payload type
907 self._stack_reset()
908 self._cur_entity = _Entity.EVENT_PAYLOAD
909
910 try:
911 self._visit_type(ev.payload_type)
912 except Exception as e:
913 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
914
915 def _visit_stream(self, stream):
916 stream_name = stream.name
917
918 # set current stream
919 self._cur_stream = stream
920
921 # reset current event
922 self._cur_event = None
923
924 # visit stream packet context type
925 self._stack_reset()
926 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
927
928 try:
929 self._visit_type(stream.packet_context_type)
930 except Exception as e:
931 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
932
933 # visit stream event header type
934 self._stack_reset()
935 self._cur_entity = _Entity.STREAM_EVENT_HEADER
936
937 try:
938 self._visit_type(stream.event_header_type)
939 except Exception as e:
940 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
941
942 # visit stream event context type
943 self._stack_reset()
944 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
945
946 try:
947 self._visit_type(stream.event_context_type)
948 except Exception as e:
949 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
950
951 # visit events
952 for ev in stream.events.values():
953 try:
954 self._visit_event(ev)
955 except Exception as e:
                raise ConfigError('invalid stream "{}"'.format(stream_name), e)
957
958 def validate(self, meta):
959 # set current trace
960 self._cur_trace = meta.trace
961
962 # visit trace packet header type
963 self._stack_reset()
964 self._cur_entity = _Entity.TRACE_PACKET_HEADER
965
966 try:
967 self._visit_type(meta.trace.packet_header_type)
968 except Exception as e:
969 raise ConfigError('invalid packet header type in trace', e)
970
971 # visit streams
972 for stream in meta.streams.values():
973 self._visit_stream(stream)
974
975
# Since type inheritance allows types to be only partially defined at
# any place in the configuration, this validator validates that actual
# trace, stream, and event types are all complete and valid. Therefore
# an invalid, but unused, type alias is accepted.
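#
# Illustrative example (hypothetical alias names; "$inherit" is the v2.1+
# spelling handled by _YamlConfigParser._create_type() below): an alias
# such as
#
#     uint:
#       class: int
#
# is incomplete (missing "size"), which is accepted as long as it is only
# used as a base for complete types, for example:
#
#     uint32:
#       $inherit: uint
#       size: 32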
980class _MetadataTypesHistologyValidator:
981 def __init__(self):
982 self._type_to_validate_type_histology_func = {
983 metadata.Integer: self._validate_integer_histology,
984 metadata.FloatingPoint: self._validate_float_histology,
985 metadata.Enum: self._validate_enum_histology,
986 metadata.String: self._validate_string_histology,
987 metadata.Struct: self._validate_struct_histology,
988 metadata.Array: self._validate_array_histology,
989 metadata.Variant: self._validate_variant_histology,
990 }
991
992 def _validate_integer_histology(self, t):
993 # size is set
994 if t.size is None:
995 raise ConfigError('missing integer type\'s size')
996
997 def _validate_float_histology(self, t):
998 # exponent digits is set
999 if t.exp_size is None:
1000 raise ConfigError('missing floating point number type\'s exponent size')
1001
1002 # mantissa digits is set
1003 if t.mant_size is None:
1004 raise ConfigError('missing floating point number type\'s mantissa size')
1005
1006 # exponent and mantissa sum is a multiple of 8
1007 if (t.exp_size + t.mant_size) % 8 != 0:
1008 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
1009
1010 def _validate_enum_histology(self, t):
1011 # integer type is set
1012 if t.value_type is None:
            raise ConfigError('missing enumeration type\'s value type')
1014
1015 # there's at least one member
1016 if not t.members:
1017 raise ConfigError('enumeration type needs at least one member')
1018
        # no overlapping values and all values are valid considering
        # the value type
1021 ranges = []
1022
1023 if t.value_type.signed:
            value_min = -(1 << (t.value_type.size - 1))
1025 value_max = (1 << (t.value_type.size - 1)) - 1
1026 else:
1027 value_min = 0
1028 value_max = (1 << t.value_type.size) - 1
1029
1030 for label, value in t.members.items():
1031 for rg in ranges:
1032 if value[0] <= rg[1] and rg[0] <= value[1]:
1033 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
1034
1035 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
1036
1037 if value[0] < value_min or value[0] > value_max:
1038 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
1039
1040 if value[1] < value_min or value[1] > value_max:
1041 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
1042
1043 ranges.append(value)
1044
1045 def _validate_string_histology(self, t):
1046 # always valid
1047 pass
1048
1049 def _validate_struct_histology(self, t):
1050 # all fields are valid
1051 for field_name, field_type in t.fields.items():
1052 try:
1053 self._validate_type_histology(field_type)
1054 except Exception as e:
1055 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
1056
1057 def _validate_array_histology(self, t):
1058 # length is set
1059 if t.length is None:
1060 raise ConfigError('missing array type\'s length')
1061
1062 # element type is set
1063 if t.element_type is None:
1064 raise ConfigError('missing array type\'s element type')
1065
1066 # element type is valid
1067 try:
1068 self._validate_type_histology(t.element_type)
1069 except Exception as e:
1070 raise ConfigError('invalid array type\'s element type', e)
1071
1072 def _validate_variant_histology(self, t):
1073 # tag is set
1074 if t.tag is None:
1075 raise ConfigError('missing variant type\'s tag')
1076
1077 # there's at least one type
1078 if not t.types:
1079 raise ConfigError('variant type needs at least one type')
1080
1081 # all types are valid
1082 for type_name, type_t in t.types.items():
1083 try:
1084 self._validate_type_histology(type_t)
1085 except Exception as e:
1086 raise ConfigError('invalid variant type\'s type "{}"'.format(type_name), e)
1087
1088 def _validate_type_histology(self, t):
1089 if t is None:
1090 return
1091
1092 self._type_to_validate_type_histology_func[type(t)](t)
1093
1094 def _validate_entity_type_histology(self, t):
1095 if t is None:
1096 return
1097
        if type(t) is not metadata.Struct:
            raise ConfigError('expecting a structure type')
1100
1101 self._validate_type_histology(t)
1102
1103 def _validate_event_types_histology(self, ev):
1104 ev_name = ev.name
1105
1106 # validate event context type
1107 try:
1108 self._validate_entity_type_histology(ev.context_type)
1109 except Exception as e:
1110 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
1111
1112 # validate event payload type
1113 try:
1114 self._validate_entity_type_histology(ev.payload_type)
1115 except Exception as e:
1116 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
1117
1118 def _validate_stream_types_histology(self, stream):
1119 stream_name = stream.name
1120
1121 # validate stream packet context type
1122 try:
1123 self._validate_entity_type_histology(stream.packet_context_type)
1124 except Exception as e:
1125 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
1126
1127 # validate stream event header type
1128 try:
1129 self._validate_entity_type_histology(stream.event_header_type)
1130 except Exception as e:
1131 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
1132
1133 # validate stream event context type
1134 try:
1135 self._validate_entity_type_histology(stream.event_context_type)
1136 except Exception as e:
1137 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
1138
1139 # validate events
1140 for ev in stream.events.values():
1141 try:
1142 self._validate_event_types_histology(ev)
1143 except Exception as e:
1144 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
1145
1146 def validate(self, meta):
1147 # validate trace packet header type
1148 try:
1149 self._validate_entity_type_histology(meta.trace.packet_header_type)
1150 except Exception as e:
1151 raise ConfigError('invalid trace packet header type', e)
1152
1153 # validate streams
1154 for stream in meta.streams.values():
1155 self._validate_stream_types_histology(stream)
1156
1157
1158class _YamlConfigParser:
    def __init__(self, include_dirs, ignore_include_not_found, dump_config):
1160 self._class_name_to_create_type_func = {
1161 'int': self._create_integer,
1162 'integer': self._create_integer,
1163 'flt': self._create_float,
1164 'float': self._create_float,
1165 'floating-point': self._create_float,
1166 'enum': self._create_enum,
1167 'enumeration': self._create_enum,
1168 'str': self._create_string,
1169 'string': self._create_string,
1170 'struct': self._create_struct,
1171 'structure': self._create_struct,
1172 'array': self._create_array,
1173 'var': self._create_variant,
1174 'variant': self._create_variant,
1175 }
1176 self._type_to_create_type_func = {
1177 metadata.Integer: self._create_integer,
1178 metadata.FloatingPoint: self._create_float,
1179 metadata.Enum: self._create_enum,
1180 metadata.String: self._create_string,
1181 metadata.Struct: self._create_struct,
1182 metadata.Array: self._create_array,
1183 metadata.Variant: self._create_variant,
1184 }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
1188
1189 def _set_byte_order(self, metadata_node):
1190 if 'trace' not in metadata_node:
1191 raise ConfigError('missing "trace" property (metadata)')
1192
1193 trace_node = metadata_node['trace']
1194
1195 if not _is_assoc_array_prop(trace_node):
1196 raise ConfigError('"trace" property (metadata) must be an associative array')
1197
1198 if 'byte-order' not in trace_node:
1199 raise ConfigError('missing "byte-order" property (trace)')
1200
1201 bo_node = trace_node['byte-order']
1202
1203 if not _is_str_prop(bo_node):
1204 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
1205
1206 self._bo = _byte_order_str_to_bo(bo_node)
1207
1208 if self._bo is None:
1209 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
1210
1211 def _lookup_type_alias(self, name):
1212 if name in self._tas:
1213 return copy.deepcopy(self._tas[name])
1214
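    # Illustrative example (hypothetical clock name "my_clock") of an
    # integer type object's clock property mapping, as accepted by
    # _set_int_clock_prop_mapping() below:
    #
    #     property-mappings:
    #       - type: clock
    #         name: my_clock
    #         property: value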
1215 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
1216 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
1217
1218 if unk_prop:
1219 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
1220
1221 if 'name' not in prop_mapping_node:
1222 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
1223
1224 if 'property' not in prop_mapping_node:
1225 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
1226
1227 clock_name = prop_mapping_node['name']
1228 prop = prop_mapping_node['property']
1229
1230 if not _is_str_prop(clock_name):
1231 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
1232
1233 if not _is_str_prop(prop):
1234 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
1235
1236 if clock_name not in self._clocks:
1237 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
1238
1239 if prop != 'value':
1240 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
1241
1242 mapped_clock = self._clocks[clock_name]
1243 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
1244
1245 def _get_first_unknown_type_prop(self, type_node, known_props):
1246 kp = known_props + ['inherit', 'class']
1247
1248 if self._version >= 201:
1249 kp.append('$inherit')
1250
1251 return _get_first_unknown_prop(type_node, kp)
1252
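    # Illustrative example of an integer type object as accepted by
    # _create_integer() below:
    #
    #     class: int
    #     size: 32
    #     align: 8
    #     signed: false
    #     byte-order: le
    #     base: hex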
1253 def _create_integer(self, obj, node):
1254 if obj is None:
1255 # create integer object
1256 obj = metadata.Integer()
1257
        unk_prop = self._get_first_unknown_type_prop(node, [
1259 'size',
1260 'align',
1261 'signed',
1262 'byte-order',
1263 'base',
1264 'encoding',
1265 'property-mappings',
1266 ])
1267
1268 if unk_prop:
1269 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
1270
1271 # size
1272 if 'size' in node:
1273 size = node['size']
1274
1275 if not _is_int_prop(size):
1276 raise ConfigError('"size" property of integer type object must be an integer')
1277
1278 if size < 1:
1279 raise ConfigError('invalid integer size: {}'.format(size))
1280
1281 obj.size = size
1282
1283 # align
1284 if 'align' in node:
1285 align = node['align']
1286
1287 if align is None:
1288 obj.set_default_align()
1289 else:
1290 if not _is_int_prop(align):
1291 raise ConfigError('"align" property of integer type object must be an integer')

                if not _is_valid_alignment(align):
1294 raise ConfigError('invalid alignment: {}'.format(align))

                obj.align = align
1297
1298 # signed
1299 if 'signed' in node:
1300 signed = node['signed']
1301
1302 if signed is None:
1303 obj.set_default_signed()
1304 else:
1305 if not _is_bool_prop(signed):
1306 raise ConfigError('"signed" property of integer type object must be a boolean')

                obj.signed = signed
1309
1310 # byte order
1311 if 'byte-order' in node:
1312 byte_order = node['byte-order']
1313
1314 if byte_order is None:
1315 obj.byte_order = self._bo
1316 else:
1317 if not _is_str_prop(byte_order):
1318 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
1319
1320 byte_order = _byte_order_str_to_bo(byte_order)

                if byte_order is None:
                    raise ConfigError('invalid "byte-order" property in integer type object')

                obj.byte_order = byte_order
        else:
            obj.byte_order = self._bo
1328
1329 # base
1330 if 'base' in node:
1331 base = node['base']
1332
1333 if base is None:
1334 obj.set_default_base()
            else:
                if not _is_str_prop(base):
1337 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
1338
1339 if base == 'bin':
1340 base = 2
1341 elif base == 'oct':
1342 base = 8
1343 elif base == 'dec':
1344 base = 10
1345 elif base == 'hex':
1346 base = 16
1347 else:
1348 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))

                obj.base = base
1351
1352 # encoding
1353 if 'encoding' in node:
1354 encoding = node['encoding']
1355
1356 if encoding is None:
1357 obj.set_default_encoding()
1358 else:
1359 if not _is_str_prop(encoding):
1360 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')

                encoding = _encoding_str_to_encoding(encoding)

                if encoding is None:
                    raise ConfigError('invalid "encoding" property in integer type object')

                obj.encoding = encoding
1368
1369 # property mappings
1370 if 'property-mappings' in node:
1371 prop_mappings = node['property-mappings']
1372
1373 if prop_mappings is None:
1374 obj.set_default_property_mappings()
1375 else:
1376 if not _is_array_prop(prop_mappings):
1377 raise ConfigError('"property-mappings" property of integer type object must be an array')

                if len(prop_mappings) > 1:
1380 raise ConfigError('length of "property-mappings" array in integer type object must be 1')

                for index, prop_mapping in enumerate(prop_mappings):
1383 if not _is_assoc_array_prop(prop_mapping):
1384 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')

                    if 'type' not in prop_mapping:
1387 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))

                    prop_type = prop_mapping['type']

1391 if not _is_str_prop(prop_type):
1392 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))

                    if prop_type == 'clock':
1395 self._set_int_clock_prop_mapping(obj, prop_mapping)
1396 else:
1397 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
1398
1399 return obj
1400
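    # Illustrative example of a floating point number type object as
    # accepted by _create_float() below (8 exponent bits + 24 mantissa
    # bits, that is, IEEE 754 single precision):
    #
    #     class: float
    #     size:
    #       exp: 8
    #       mant: 24
    #     align: 32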
1401 def _create_float(self, obj, node):
1402 if obj is None:
1403 # create floating point number object
1404 obj = metadata.FloatingPoint()
1405
        unk_prop = self._get_first_unknown_type_prop(node, [
1407 'size',
1408 'align',
1409 'byte-order',
1410 ])
1411
1412 if unk_prop:
1413 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1414
1415 # size
1416 if 'size' in node:
1417 size = node['size']
1418
1419 if not _is_assoc_array_prop(size):
1420 raise ConfigError('"size" property of floating point number type object must be an associative array')
1421
1422 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1423
1424 if unk_prop:
1425 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1426
1427 if 'exp' in size:
1428 exp = size['exp']
1429
1430 if not _is_int_prop(exp):
1431 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1432
1433 if exp < 1:
                raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1435
1436 obj.exp_size = exp
1437
1438 if 'mant' in size:
1439 mant = size['mant']
1440
1441 if not _is_int_prop(mant):
1442 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1443
1444 if mant < 1:
                raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1446
1447 obj.mant_size = mant
1448
1449 # align
1450 if 'align' in node:
1451 align = node['align']
1452
1453 if align is None:
1454 obj.set_default_align()
1455 else:
1456 if not _is_int_prop(align):
1457 raise ConfigError('"align" property of floating point number type object must be an integer')

                if not _is_valid_alignment(align):
1460 raise ConfigError('invalid alignment: {}'.format(align))

                obj.align = align
1463
1464 # byte order
1465 if 'byte-order' in node:
1466 byte_order = node['byte-order']
1467
1468 if byte_order is None:
1469 obj.byte_order = self._bo
1470 else:
1471 if not _is_str_prop(byte_order):
1472 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')

                byte_order = _byte_order_str_to_bo(byte_order)

                if byte_order is None:
                    raise ConfigError('invalid "byte-order" property in floating point number type object')

                obj.byte_order = byte_order
        else:
            obj.byte_order = self._bo
1480
1481 return obj
1482
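    # Illustrative example (hypothetical labels) of an enumeration type
    # object as accepted by _create_enum() below:
    #
    #     class: enum
    #     value-type:
    #       class: int
    #       size: 8
    #     members:
    #       - RUNNING            # implicit value
    #       - label: STOPPED
    #         value: 5
    #       - label: RESERVED
    #         value: [10, 15]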
1483 def _create_enum(self, obj, node):
1484 if obj is None:
1485 # create enumeration object
1486 obj = metadata.Enum()
1487
        unk_prop = self._get_first_unknown_type_prop(node, [
1489 'value-type',
1490 'members',
1491 ])
1492
1493 if unk_prop:
1494 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1495
1496 # value type
1497 if 'value-type' in node:
            value_type_node = node['value-type']

            try:
                obj.value_type = self._create_type(value_type_node)
1502 except Exception as e:
1503 raise ConfigError('cannot create enumeration type\'s integer type', e)
1504
1505 # members
1506 if 'members' in node:
1507 members_node = node['members']
1508
1509 if not _is_array_prop(members_node):
1510 raise ConfigError('"members" property of enumeration type object must be an array')
1511
1512 cur = 0
            last_value = obj.last_value

            if last_value is None:
                cur = 0
            else:
                cur = last_value + 1
1519
1520 for index, m_node in enumerate(members_node):
1521 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1522 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1523
1524 if _is_str_prop(m_node):
1525 label = m_node
1526 value = (cur, cur)
1527 cur += 1
1528 else:
1529 unk_prop = _get_first_unknown_prop(m_node, [
1530 'label',
1531 'value',
1532 ])
1533
1534 if unk_prop:
1535 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1536
1537 if 'label' not in m_node:
1538 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1539
1540 label = m_node['label']
1541
1542 if not _is_str_prop(label):
1543 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1544
1545 if 'value' not in m_node:
1546 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1547
1548 value = m_node['value']
1549
1550 if not _is_int_prop(value) and not _is_array_prop(value):
1551 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1552
1553 if _is_int_prop(value):
1554 cur = value + 1
1555 value = (value, value)
1556 else:
1557 if len(value) != 2:
1558 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1559
1560 mn = value[0]
1561 mx = value[1]
1562
1563 if mn > mx:
1564 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1565
1566 value = (mn, mx)
1567 cur = mx + 1
1568
1569 obj.members[label] = value
1570
1571 return obj
1572
1573 def _create_string(self, obj, node):
1574 if obj is None:
1575 # create string object
1576 obj = metadata.String()
1577
        unk_prop = self._get_first_unknown_type_prop(node, [
1579 'encoding',
1580 ])
1581
1582 if unk_prop:
1583 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1584
1585 # encoding
1586 if 'encoding' in node:
1587 encoding = node['encoding']
1588
1589 if encoding is None:
1590 obj.set_default_encoding()
1591 else:
1592 if not _is_str_prop(encoding):
1593 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')

                encoding = _encoding_str_to_encoding(encoding)

                if encoding is None:
                    raise ConfigError('invalid "encoding" property in string type object')

                obj.encoding = encoding
1601
1602 return obj
1603
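    # Illustrative example (hypothetical field and alias names) of a
    # structure type object as accepted by _create_struct() below:
    #
    #     class: struct
    #     min-align: 8
    #     fields:
    #       msg_id: uint16      # type alias name
    #       count:
    #         class: int
    #         size: 32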
1604 def _create_struct(self, obj, node):
1605 if obj is None:
1606 # create structure object
1607 obj = metadata.Struct()
1608

        unk_prop = self._get_first_unknown_type_prop(node, [
1610 'min-align',
1611 'fields',
1612 ])
1613
1614 if unk_prop:
            raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1616
1617 # minimum alignment
1618 if 'min-align' in node:
1619 min_align = node['min-align']
1620
1621 if min_align is None:
1622 obj.set_default_min_align()
1623 else:
1624 if not _is_int_prop(min_align):
1625 raise ConfigError('"min-align" property of structure type object must be an integer')

                if not _is_valid_alignment(min_align):
1628 raise ConfigError('invalid minimum alignment: {}'.format(min_align))

                obj.min_align = min_align
1631
1632 # fields
1633 if 'fields' in node:
1634 fields = node['fields']
1635
1636 if fields is None:
1637 obj.set_default_fields()
1638 else:
1639 if not _is_assoc_array_prop(fields):
1640 raise ConfigError('"fields" property of structure type object must be an associative array')

                for field_name, field_node in fields.items():
                    if not _is_valid_identifier(field_name):
                        raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))

1646 try:
1647 obj.fields[field_name] = self._create_type(field_node)
1648 except Exception as e:
1649 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1650
1651 return obj
1652
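    # Illustrative example of an array type object as accepted by
    # _create_array() below; "length" is either a constant (as here) or the
    # path of a previously declared length field:
    #
    #     class: array
    #     length: 4
    #     element-type:
    #       class: int
    #       size: 8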
1653 def _create_array(self, obj, node):
1654 if obj is None:
1655 # create array object
1656 obj = metadata.Array()
1657
        unk_prop = self._get_first_unknown_type_prop(node, [
1659 'length',
1660 'element-type',
1661 ])
1662
1663 if unk_prop:
1664 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1665
1666 # length
1667 if 'length' in node:
1668 length = node['length']
1669
1670 if not _is_int_prop(length) and not _is_str_prop(length):
1671 raise ConfigError('"length" property of array type object must be an integer or a string')
1672
1673 if type(length) is int and length < 0:
1674 raise ConfigError('invalid static array length: {}'.format(length))
1675
1676 obj.length = length
1677
1678 # element type
1679 if 'element-type' in node:
1ab27484
PP
1680 element_type_node = node['element-type']
1681
e5aa0be3
PP
1682 try:
1683 obj.element_type = self._create_type(element_type_node)
1684 except Exception as e:
1685 raise ConfigError('cannot create array type\'s element type', e)
1686
1687 return obj
1688
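    # Illustrative YAML variant type object node; the tag and type
    # aliases below are hypothetical, and the tag itself is only
    # validated in a later phase:
    #
    #     class: variant
    #     tag: t
    #     types:
    #       a: uint8
    #       b: uint16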
1689 def _create_variant(self, obj, node):
1690 if obj is None:
1691 # create variant object
1692 obj = metadata.Variant()
1693
fe6cc755 1694 unk_prop = self._get_first_unknown_type_prop(node, [
e5aa0be3
PP
1695 'tag',
1696 'types',
1697 ])
1698
1699 if unk_prop:
1700 raise ConfigError('unknown variant type object property: "{}"'.format(unk_prop))
1701
1702 # tag
1703 if 'tag' in node:
1704 tag = node['tag']
1705
1706 if not _is_str_prop(tag):
1707 raise ConfigError('"tag" property of variant type object must be a string')
1708
1709 # do not validate variant tag for the moment; will be done in a
1710 # second phase
1711 obj.tag = tag
1712
1713 # types
1714 if 'types' in node:
1715 types = node['types']
1716
1717 if not _is_assoc_array_prop(types):
1718 raise ConfigError('"types" property of variant type object must be an associative array')
1719
1720 # do not validate type names for the moment; will be done in a
1721 # second phase
1722 for type_name, type_node in types.items():
6d4df122 1723 if not _is_valid_identifier(type_name):
e5aa0be3
PP
1724 raise ConfigError('"{}" is not a valid type name for variant type'.format(type_name))
1725
1726 try:
1727 obj.types[type_name] = self._create_type(type_node)
1728 except Exception as e:
1729 raise ConfigError('cannot create variant type\'s type "{}"'.format(type_name), e)
1730
1731 return obj
1732
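    # A type node is either a string naming an existing type alias, or an
    # associative array carrying either a "class" property or an
    # "inherit"/"$inherit" property ("$inherit" is the v2.1+ spelling).
    # Illustrative example (the alias and extra property are hypothetical):
    #
    #     $inherit: uint32
    #     align: 8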
1733 def _create_type(self, type_node):
1734 if type(type_node) is str:
1735 t = self._lookup_type_alias(type_node)
1736
1737 if t is None:
1738 raise ConfigError('unknown type alias "{}"'.format(type_node))
1739
1740 return t
1741
1742 if not _is_assoc_array_prop(type_node):
ae6e8e94 1743 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
e5aa0be3 1744
fe6cc755
PP
1745 # inherit:
1746 # v2.0: "inherit"
1747 # v2.1+: "$inherit"
1748 inherit_node = None
1749
1750 if self._version >= 200:
1751 if 'inherit' in type_node:
1752 inherit_prop = 'inherit'
1753 inherit_node = type_node[inherit_prop]
1754
1755 if self._version >= 201:
1756 if '$inherit' in type_node:
1757 if inherit_node is not None:
1758 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1759
1760 inherit_prop = '$inherit'
1761 inherit_node = type_node[inherit_prop]
e5aa0be3 1762
fe6cc755
PP
1763 if inherit_node is not None and 'class' in type_node:
1764 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
e5aa0be3 1765
fe6cc755
PP
1766 if inherit_node is not None:
1767 if not _is_str_prop(inherit_node):
1768 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
e5aa0be3 1769
fe6cc755 1770 base = self._lookup_type_alias(inherit_node)
e5aa0be3
PP
1771
1772 if base is None:
fe6cc755 1773 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
e5aa0be3
PP
1774
1775 func = self._type_to_create_type_func[type(base)]
1776 else:
1777 if 'class' not in type_node:
1778 raise ConfigError('type objects which do not inherit must have a "class" property')
1779
1780 class_name = type_node['class']
1781
1782 if type(class_name) is not str:
1783 raise ConfigError('type objects\' "class" property must be a string')
1784
1785 if class_name not in self._class_name_to_create_type_func:
1786 raise ConfigError('unknown type class "{}"'.format(class_name))
1787
1788 base = None
1789 func = self._class_name_to_create_type_func[class_name]
1790
1791 return func(base, type_node)
1792
1793 def _register_type_aliases(self, metadata_node):
1794 self._tas = dict()
1795
1796 if 'type-aliases' not in metadata_node:
1797 return
1798
1799 ta_node = metadata_node['type-aliases']
1800
f8029a55
PP
1801 if ta_node is None:
1802 return
1803
e5aa0be3
PP
1804 if not _is_assoc_array_prop(ta_node):
1805 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1806
1807 for ta_name, ta_type in ta_node.items():
1808 if ta_name in self._tas:
1809 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1810
1811 try:
1812 t = self._create_type(ta_type)
1813 except Exception as e:
1814 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1815
1816 self._tas[ta_name] = t
1817
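    # Illustrative YAML clock object node (every property is optional;
    # the values are hypothetical):
    #
    #     freq: 1000000000
    #     description: CPU cycle counter
    #     offset:
    #       seconds: 1434072888
    #       cycles: 0
    #     absolute: false
    #     $return-ctype: uint64_t    # "return-ctype" in v2.0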
1818 def _create_clock(self, node):
1819 # create clock object
1820 clock = metadata.Clock()
82bf7223 1821
f58be68f 1822 if not _is_assoc_array_prop(node):
82bf7223
PP
1823 raise ConfigError('clock objects must be associative arrays')
1824
a3062dde 1825 known_props = [
e5aa0be3
PP
1826 'uuid',
1827 'description',
1828 'freq',
1829 'error-cycles',
1830 'offset',
1831 'absolute',
1832 'return-ctype',
a3062dde
PP
1833 ]
1834
1835 if self._version >= 201:
1836 known_props.append('$return-ctype')
1837
1838 unk_prop = _get_first_unknown_prop(node, known_props)
e5aa0be3
PP
1839
1840 if unk_prop:
1841 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1842
1843 # UUID
1844 if 'uuid' in node:
1845 uuidp = node['uuid']
1846
f7c79788
PP
1847 if uuidp is None:
1848 clock.set_default_uuid()
1849 else:
1850 if not _is_str_prop(uuidp):
1851 raise ConfigError('"uuid" property of clock object must be a string')
e5aa0be3 1852
f7c79788
PP
1853 try:
1854 uuidp = uuid.UUID(uuidp)
1855 except:
1856 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
e5aa0be3 1857
f7c79788 1858 clock.uuid = uuidp
e5aa0be3
PP
1859
1860 # description
1861 if 'description' in node:
1862 desc = node['description']
1863
f7c79788
PP
1864 if desc is None:
1865 clock.set_default_description()
1866 else:
1867 if not _is_str_prop(desc):
1868 raise ConfigError('"description" property of clock object must be a string')
e5aa0be3 1869
f7c79788 1870 clock.description = desc
e5aa0be3
PP
1871
1872 # frequency
1873 if 'freq' in node:
1874 freq = node['freq']
1875
f7c79788
PP
1876 if freq is None:
1877 clock.set_default_freq()
1878 else:
1879 if not _is_int_prop(freq):
1880 raise ConfigError('"freq" property of clock object must be an integer')
e5aa0be3 1881
f7c79788
PP
1882 if freq < 1:
1883 raise ConfigError('invalid clock frequency: {}'.format(freq))
e5aa0be3 1884
f7c79788 1885 clock.freq = freq
e5aa0be3
PP
1886
1887 # error cycles
1888 if 'error-cycles' in node:
1889 error_cycles = node['error-cycles']
1890
f7c79788
PP
1891 if error_cycles is None:
1892 clock.set_default_error_cycles()
1893 else:
1894 if not _is_int_prop(error_cycles):
1895 raise ConfigError('"error-cycles" property of clock object must be an integer')
e5aa0be3 1896
f7c79788
PP
1897 if error_cycles < 0:
1898 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
e5aa0be3 1899
f7c79788 1900 clock.error_cycles = error_cycles
e5aa0be3
PP
1901
1902 # offset
1903 if 'offset' in node:
1904 offset = node['offset']
1905
f7c79788 1906 if offset is None:
c08337d8
PP
1907 clock.set_default_offset_seconds()
1908 clock.set_default_offset_cycles()
f7c79788
PP
1909 else:
1910 if not _is_assoc_array_prop(offset):
1911 raise ConfigError('"offset" property of clock object must be an associative array')
e5aa0be3 1912
f7c79788 1913 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
e5aa0be3 1914
f7c79788
PP
1915 if unk_prop:
1916 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
e5aa0be3 1917
f7c79788
PP
1918 # cycles
1919 if 'cycles' in offset:
1920 offset_cycles = offset['cycles']
e5aa0be3 1921
c08337d8
PP
1922 if offset_cycles is None:
1923 clock.set_default_offset_cycles()
1924 else:
1925 if not _is_int_prop(offset_cycles):
1926 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
e5aa0be3 1927
c08337d8
PP
1928 if offset_cycles < 0:
1929 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
e5aa0be3 1930
c08337d8 1931 clock.offset_cycles = offset_cycles
e5aa0be3 1932
f7c79788
PP
1933 # seconds
1934 if 'seconds' in offset:
1935 offset_seconds = offset['seconds']
e5aa0be3 1936
c08337d8
PP
1937 if offset_seconds is None:
1938 clock.set_default_offset_seconds()
1939 else:
1940 if not _is_int_prop(offset_seconds):
1941 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
e5aa0be3 1942
c08337d8
PP
1943 if offset_seconds < 0:
1944 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
e5aa0be3 1945
c08337d8 1946 clock.offset_seconds = offset_seconds
e5aa0be3
PP
1947
1948 # absolute
1949 if 'absolute' in node:
1950 absolute = node['absolute']
1951
f7c79788
PP
1952 if absolute is None:
1953 clock.set_default_absolute()
1954 else:
1955 if not _is_bool_prop(absolute):
1956 raise ConfigError('"absolute" property of clock object must be a boolean')
e5aa0be3 1957
f7c79788 1958 clock.absolute = absolute
e5aa0be3 1959
a3062dde
PP
1960 # return C type:
1961 # v2.0: "return-ctype"
1962 # v2.1+: "$return-ctype"
1963 return_ctype_node = None
1964
1965 if self._version >= 200:
1966 if 'return-ctype' in node:
1967 return_ctype_prop = 'return-ctype'
1968 return_ctype_node = node[return_ctype_prop]
1969
1970 if self._version >= 201:
1971 if '$return-ctype' in node:
1972 if return_ctype_node is not None:
1973 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1974
1975 return_ctype_prop = '$return-ctype'
1976 return_ctype_node = node[return_ctype_prop]
e5aa0be3 1977
a3062dde 1978 if return_ctype_node is not None:
f7c79788
PP
 1982 if not _is_str_prop(return_ctype_node):
 1983 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
e5aa0be3 1984
f7c79788 1985 clock.return_ctype = return_ctype_node
e5aa0be3
PP
1986
1987 return clock
1988
1989 def _register_clocks(self, metadata_node):
1990 self._clocks = collections.OrderedDict()
1991
1992 if 'clocks' not in metadata_node:
1993 return
1994
1995 clocks_node = metadata_node['clocks']
1996
f8029a55
PP
1997 if clocks_node is None:
1998 return
1999
e5aa0be3
PP
2000 if not _is_assoc_array_prop(clocks_node):
2001 raise ConfigError('"clocks" property (metadata) must be an associative array')
2002
2003 for clock_name, clock_node in clocks_node.items():
6d4df122 2004 if not _is_valid_identifier(clock_name):
e5aa0be3
PP
2005 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
2006
2007 if clock_name in self._clocks:
2008 raise ConfigError('duplicate clock "{}"'.format(clock_name))
2009
2010 try:
2011 clock = self._create_clock(clock_node)
2012 except Exception as e:
2013 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
2014
2015 clock.name = clock_name
2016 self._clocks[clock_name] = clock
2017
2018 def _create_env(self, metadata_node):
2019 env = collections.OrderedDict()
2020
2021 if 'env' not in metadata_node:
2022 return env
2023
2024 env_node = metadata_node['env']
2025
f8029a55
PP
2026 if env_node is None:
2027 return env
2028
e5aa0be3
PP
2029 if not _is_assoc_array_prop(env_node):
2030 raise ConfigError('"env" property (metadata) must be an associative array')
2031
2032 for env_name, env_value in env_node.items():
2033 if env_name in env:
2034 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
2035
6d4df122 2036 if not _is_valid_identifier(env_name):
e5aa0be3
PP
2037 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
2038
2039 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
2040 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
2041
2042 env[env_name] = env_value
2043
2044 return env
2045
2046 def _register_log_levels(self, metadata_node):
2047 self._log_levels = dict()
2048
ab61d11f
PP
2049 # log levels:
2050 # v2.0: "log-levels"
2051 # v2.1+: "$log-levels"
2052 log_levels_node = None
2053
2054 if self._version >= 200:
2055 if 'log-levels' in metadata_node:
2056 log_levels_prop = 'log-levels'
2057 log_levels_node = metadata_node[log_levels_prop]
e5aa0be3 2058
ab61d11f
PP
2059 if self._version >= 201:
2060 if '$log-levels' in metadata_node:
2061 if log_levels_node is not None:
2062 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
2063
2064 log_levels_prop = '$log-levels'
2065 log_levels_node = metadata_node[log_levels_prop]
2066
2067 if log_levels_node is None:
2068 return
e5aa0be3
PP
2069
2070 if not _is_assoc_array_prop(log_levels_node):
ab61d11f 2071 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
e5aa0be3
PP
2072
2073 for ll_name, ll_value in log_levels_node.items():
2074 if ll_name in self._log_levels:
2075 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
2076
2077 if not _is_int_prop(ll_value):
2078 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
2079
67bec15e
PP
2080 if ll_value < 0:
2081 raise ConfigError('invalid log level entry ("{}"): log level value must not be negative'.format(ll_name))
2082
e5aa0be3
PP
2083 self._log_levels[ll_name] = ll_value
2084
2085 def _create_trace(self, metadata_node):
2086 # create trace object
2087 trace = metadata.Trace()
f58be68f
PP
2088
2089 if 'trace' not in metadata_node:
2090 raise ConfigError('missing "trace" property (metadata)')
2091
e5aa0be3 2092 trace_node = metadata_node['trace']
82bf7223
PP
2093
2094 if not _is_assoc_array_prop(trace_node):
2095 raise ConfigError('"trace" property (metadata) must be an associative array')
2096
e5aa0be3
PP
2097 unk_prop = _get_first_unknown_prop(trace_node, [
2098 'byte-order',
2099 'uuid',
2100 'packet-header-type',
2101 ])
2102
2103 if unk_prop:
2104 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
2105
2106 # set byte order (already parsed)
2107 trace.byte_order = self._bo
2108
2109 # UUID
278bd7a3 2110 if 'uuid' in trace_node and trace_node['uuid'] is not None:
e5aa0be3
PP
2111 uuidp = trace_node['uuid']
2112
2113 if not _is_str_prop(uuidp):
2114 raise ConfigError('"uuid" property of trace object must be a string')
2115
2116 if uuidp == 'auto':
2117 uuidp = uuid.uuid1()
2118 else:
2119 try:
2120 uuidp = uuid.UUID(uuidp)
2121 except:
2122 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
2123
2124 trace.uuid = uuidp
2125
2126 # packet header type
278bd7a3 2127 if 'packet-header-type' in trace_node and trace_node['packet-header-type'] is not None:
e5aa0be3
PP
2128 try:
2129 ph_type = self._create_type(trace_node['packet-header-type'])
2130 except Exception as e:
2131 raise ConfigError('cannot create packet header type (trace)', e)
2132
2133 trace.packet_header_type = ph_type
2134
2135 return trace
2136
2137 def _lookup_log_level(self, ll):
2138 if _is_int_prop(ll):
2139 return ll
2140 elif _is_str_prop(ll) and ll in self._log_levels:
2141 return self._log_levels[ll]
2142
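    # Illustrative YAML event object node; "log-level" is a registered
    # log level name or a non-negative integer, and the payload type
    # below is a hypothetical type alias:
    #
    #     log-level: 13
    #     payload-type: my_event_payload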
2143 def _create_event(self, event_node):
2144 event = metadata.Event()
82bf7223
PP
2145
2146 if not _is_assoc_array_prop(event_node):
2147 raise ConfigError('event objects must be associative arrays')
2148
e5aa0be3
PP
2149 unk_prop = _get_first_unknown_prop(event_node, [
2150 'log-level',
2151 'context-type',
2152 'payload-type',
2153 ])
2154
2155 if unk_prop:
2156 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
2157
ea404092 2158 if 'log-level' in event_node and event_node['log-level'] is not None:
5e2e80be 2159 ll_node = event_node['log-level']
e5aa0be3 2160
5e2e80be 2161 if _is_str_prop(ll_node):
721eb4df 2162 ll_value = self._lookup_log_level(event_node['log-level'])
5e2e80be 2163
721eb4df 2164 if ll_value is None:
5e2e80be 2165 raise ConfigError('cannot find log level "{}"'.format(ll_node))
721eb4df
PP
2166
2167 ll = metadata.LogLevel(event_node['log-level'], ll_value)
5e2e80be 2168 elif _is_int_prop(ll_node):
67bec15e
PP
2169 if ll_node < 0:
2170 raise ConfigError('invalid log level value {}: value must not be negative'.format(ll_node))
2171
721eb4df 2172 ll = metadata.LogLevel(None, ll_node)
5e2e80be
PP
2173 else:
2174 raise ConfigError('"log-level" property must be either a string or an integer')
e5aa0be3
PP
2175
2176 event.log_level = ll
2177
ea404092
PP
2178 if 'context-type' in event_node and event_node['context-type'] is not None:
2179 ctx_type_node = event_node['context-type']
2180
e5aa0be3
PP
2181 try:
2182 t = self._create_type(ctx_type_node)
2183 except Exception as e:
2184 raise ConfigError('cannot create event\'s context type object', e)
2185
2186 event.context_type = t
2187
13eaac62 2188 if 'payload-type' in event_node and event_node['payload-type'] is not None:
ea404092
PP
2189 try:
2190 t = self._create_type(event_node['payload-type'])
2191 except Exception as e:
2192 raise ConfigError('cannot create event\'s payload type object', e)
e5aa0be3 2193
ea404092 2194 event.payload_type = t
e5aa0be3
PP
2195
2196 return event
2197
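    # Illustrative YAML stream object node; "events" is mandatory and
    # must contain at least one event (the type aliases are hypothetical):
    #
    #     packet-context-type: my_packet_context
    #     event-header-type: my_event_header
    #     events:
    #       my_event:
    #         payload-type: my_event_payload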
2198 def _create_stream(self, stream_node):
2199 stream = metadata.Stream()
82bf7223
PP
2200
2201 if not _is_assoc_array_prop(stream_node):
2202 raise ConfigError('stream objects must be associative arrays')
2203
e5aa0be3
PP
2204 unk_prop = _get_first_unknown_prop(stream_node, [
2205 'packet-context-type',
2206 'event-header-type',
2207 'event-context-type',
2208 'events',
2209 ])
2210
2211 if unk_prop:
2212 raise ConfigError('unknown stream object property: "{}"'.format(unk_prop))
2213
0eb283bd 2214 if 'packet-context-type' in stream_node and stream_node['packet-context-type'] is not None:
e5aa0be3
PP
2215 try:
2216 t = self._create_type(stream_node['packet-context-type'])
2217 except Exception as e:
2218 raise ConfigError('cannot create stream\'s packet context type object', e)
2219
2220 stream.packet_context_type = t
2221
0eb283bd 2222 if 'event-header-type' in stream_node and stream_node['event-header-type'] is not None:
e5aa0be3
PP
2223 try:
2224 t = self._create_type(stream_node['event-header-type'])
2225 except Exception as e:
2226 raise ConfigError('cannot create stream\'s event header type object', e)
2227
2228 stream.event_header_type = t
2229
0eb283bd 2230 if 'event-context-type' in stream_node and stream_node['event-context-type'] is not None:
e5aa0be3
PP
2231 try:
2232 t = self._create_type(stream_node['event-context-type'])
2233 except Exception as e:
2234 raise ConfigError('cannot create stream\'s event context type object', e)
2235
2236 stream.event_context_type = t
2237
2238 if 'events' not in stream_node:
2239 raise ConfigError('missing "events" property in stream object')
2240
2241 events = stream_node['events']
2242
0eb283bd
PP
2243 if events is not None:
2244 if not _is_assoc_array_prop(events):
2245 raise ConfigError('"events" property of stream object must be an associative array')
e5aa0be3 2246
0eb283bd
PP
2247 if not events:
2248 raise ConfigError('at least one event is needed within a stream object')
e5aa0be3 2249
0eb283bd 2250 cur_id = 0
e5aa0be3 2251
0eb283bd
PP
2252 for ev_name, ev_node in events.items():
2253 try:
2254 ev = self._create_event(ev_node)
2255 except Exception as e:
2256 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
e5aa0be3 2257
0eb283bd
PP
2258 ev.id = cur_id
2259 ev.name = ev_name
2260 stream.events[ev_name] = ev
2261 cur_id += 1
e5aa0be3
PP
2262
2263 return stream
2264
2265 def _create_streams(self, metadata_node):
2266 streams = collections.OrderedDict()
2267
2268 if 'streams' not in metadata_node:
2269 raise ConfigError('missing "streams" property (metadata)')
2270
2271 streams_node = metadata_node['streams']
2272
2273 if not _is_assoc_array_prop(streams_node):
2274 raise ConfigError('"streams" property (metadata) must be an associative array')
2275
2276 if not streams_node:
2277 raise ConfigError('at least one stream is needed (metadata)')
2278
2279 cur_id = 0
2280
2281 for stream_name, stream_node in streams_node.items():
2282 try:
2283 stream = self._create_stream(stream_node)
2284 except Exception as e:
2285 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
2286
2287 stream.id = cur_id
2288 stream.name = str(stream_name)
2289 streams[stream_name] = stream
2290 cur_id += 1
2291
2292 return streams
2293
2294 def _create_metadata(self, root):
2295 meta = metadata.Metadata()
2296
2297 if 'metadata' not in root:
7add139d 2298 raise ConfigError('missing "metadata" property (configuration)')
e5aa0be3 2299
f58be68f
PP
2300 metadata_node = root['metadata']
2301
82bf7223 2302 if not _is_assoc_array_prop(metadata_node):
7add139d 2303 raise ConfigError('"metadata" property (configuration) must be an associative array')
82bf7223 2304
ab61d11f 2305 known_props = [
e5aa0be3
PP
2306 'type-aliases',
2307 'log-levels',
2308 'trace',
2309 'env',
2310 'clocks',
2311 'streams',
ab61d11f
PP
2312 ]
2313
2314 if self._version >= 201:
2315 known_props.append('$log-levels')
2316
2317 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
e5aa0be3
PP
2318
2319 if unk_prop:
f58be68f
PP
2320 add = ''
2321
2322 if unk_prop == '$include':
2323 add = ' (use version 2.1 or greater)'
2324
2325 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
e5aa0be3 2326
e5aa0be3
PP
2327 self._set_byte_order(metadata_node)
2328 self._register_clocks(metadata_node)
2329 meta.clocks = self._clocks
2330 self._register_type_aliases(metadata_node)
2331 meta.env = self._create_env(metadata_node)
2332 meta.trace = self._create_trace(metadata_node)
2333 self._register_log_levels(metadata_node)
2334 meta.streams = self._create_streams(metadata_node)
2335
2336 return meta
2337
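    # The version string is converted to a comparable integer: '2.0'
    # becomes 200 and '2.1' becomes 201, which is what the
    # "self._version >= 201" feature checks in this class rely on.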
2338 def _get_version(self, root):
2339 if 'version' not in root:
7add139d 2340 raise ConfigError('missing "version" property (configuration)')
e5aa0be3
PP
2341
2342 version_node = root['version']
2343
2344 if not _is_str_prop(version_node):
7add139d 2345 raise ConfigError('"version" property (configuration) must be a string')
e5aa0be3 2346
56da32ea 2347 version_node = version_node.strip()
e5aa0be3 2348
56da32ea
PP
2349 if version_node not in ['2.0', '2.1']:
2350 raise ConfigError('unsupported version ({}): versions 2.0 and 2.1 are supported'.format(version_node))
2351
2352 # convert version string to comparable version integer
2353 parts = version_node.split('.')
2354 version = int(parts[0]) * 100 + int(parts[1])
2355
2356 return version
e5aa0be3
PP
2357
2358 def _get_prefix(self, root):
8823eb24
PP
2359 def_prefix = 'barectf_'
2360
e5aa0be3 2361 if 'prefix' not in root:
8823eb24 2362 return def_prefix
e5aa0be3
PP
2363
2364 prefix_node = root['prefix']
2365
8823eb24
PP
2366 if prefix_node is None:
2367 return def_prefix
2368
e5aa0be3 2369 if not _is_str_prop(prefix_node):
7add139d 2370 raise ConfigError('"prefix" property (configuration) must be a string')
e5aa0be3 2371
6d4df122 2372 if not _is_valid_identifier(prefix_node):
7add139d 2373 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
e5aa0be3
PP
2374
2375 return prefix_node
2376
f58be68f
PP
2377 def _get_last_include_file(self):
2378 if self._include_stack:
2379 return self._include_stack[-1]
2380
2381 return self._root_yaml_path
2382
2383 def _load_include(self, yaml_path):
2384 for inc_dir in self._include_dirs:
2385 # current include dir + file name path
2386 # note: os.path.join() returns the last arg as is when it's an absolute path
2387 inc_path = os.path.join(inc_dir, yaml_path)
2388
2389 # real path (symbolic links resolved)
2390 real_path = os.path.realpath(inc_path)
2391
2393 # normalized path (redundant separators and up-level references removed)
2393 norm_path = os.path.normpath(real_path)
2394
2395 if not os.path.isfile(norm_path):
2396 # file does not exist: skip
2397 continue
2398
2399 if norm_path in self._include_stack:
2400 base_path = self._get_last_include_file()
2401 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2402
2403 self._include_stack.append(norm_path)
2404
2405 # load raw content
2406 return self._yaml_ordered_load(norm_path)
2407
2408 if not self._ignore_include_not_found:
2409 base_path = self._get_last_include_file()
2410 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2411
2412 return None
2413
2414 def _get_include_paths(self, include_node):
d21a6060
PP
2415 if include_node is None:
2416 return []
2417
f58be68f
PP
2418 if _is_str_prop(include_node):
2419 return [include_node]
d21a6060
PP
2420
2421 if _is_array_prop(include_node):
f58be68f
PP
2422 for include_path in include_node:
2423 if not _is_str_prop(include_path):
2424 raise ConfigError('invalid include property: expecting array of strings')
2425
2426 return include_node
2427
2428 raise ConfigError('invalid include property: expecting string or array of strings')
2429
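    # Merge semantics, illustrated with hypothetical nodes: given a base
    # of {a: {x: 1}, l: [1]} and an overlay of {a: {y: 2}, l: [2], s: 3},
    # the base becomes {a: {x: 1, y: 2}, l: [1, 2], s: 3}: mappings are
    # merged recursively, arrays are concatenated, and any other value is
    # replaced by the overlay's.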
2430 def _update_node(self, base_node, overlay_node):
2431 for olay_key, olay_value in overlay_node.items():
2432 if olay_key in base_node:
2433 base_value = base_node[olay_key]
2434
2435 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2436 # merge dictionaries
2437 self._update_node(base_value, olay_value)
2438 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2439 # append extension array items to base items
2440 base_value += olay_value
2441 else:
2442 # fall back to replacing
2443 base_node[olay_key] = olay_value
2444 else:
2445 base_node[olay_key] = olay_value
2446
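    # Illustrative use of the "$include" special property (v2.1+); the
    # file names are hypothetical and are resolved against the include
    # directories:
    #
    #     $include:
    #       - base-event.yaml
    #       - project-event.yaml
    #     log-level: 13
    #
    # Included files are merged in order to form the base node, and the
    # including object itself is applied last as the final overlay.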
2447 def _process_node_include(self, last_overlay_node, name,
2448 process_base_include_cb,
2449 process_children_include_cb=None):
2450 if not _is_assoc_array_prop(last_overlay_node):
2451 raise ConfigError('{} objects must be associative arrays'.format(name))
2452
2453 # process children inclusions first
2454 if process_children_include_cb:
2455 process_children_include_cb(last_overlay_node)
2456
2457 if '$include' in last_overlay_node:
2458 include_node = last_overlay_node['$include']
2459 else:
2460 # no includes!
2461 return last_overlay_node
2462
2463 include_paths = self._get_include_paths(include_node)
2464 cur_base_path = self._get_last_include_file()
2465 base_node = None
2466
2467 # keep the include paths and remove the include property
2468 include_paths = copy.deepcopy(include_paths)
2469 del last_overlay_node['$include']
2470
2471 for include_path in include_paths:
2472 # load raw YAML from included file
2473 overlay_node = self._load_include(include_path)
2474
2475 if overlay_node is None:
2476 # cannot find include file, but we're ignoring those
2477 # errors, otherwise _load_include() itself raises
2478 # a config error
2479 continue
2480
2481 # recursively process includes
2482 try:
2483 overlay_node = process_base_include_cb(overlay_node)
2484 except Exception as e:
2485 raise ConfigError('in "{}"'.format(cur_base_path), e)
2486
2487 # pop include stack now that we're done including
2488 del self._include_stack[-1]
2489
2490 # at this point, base_node is fully resolved (does not
2491 # contain any include property)
2492 if base_node is None:
2493 base_node = overlay_node
2494 else:
2495 self._update_node(base_node, overlay_node)
2496
2497 # finally, we update the latest base node with our last overlay
2498 # node
2499 if base_node is None:
2500 # nothing was included, which is possible when we're
2501 # ignoring include errors
2502 return last_overlay_node
2503
2504 self._update_node(base_node, last_overlay_node)
2505
2506 return base_node
2507
2508 def _process_event_include(self, event_node):
2509 return self._process_node_include(event_node, 'event',
2510 self._process_event_include)
2511
2512 def _process_stream_include(self, stream_node):
2513 def process_children_include(stream_node):
2514 if 'events' in stream_node:
2515 events_node = stream_node['events']
2516
2517 if not _is_assoc_array_prop(events_node):
2518 raise ConfigError('"events" property must be an associative array')
2519
2520 events_node_keys = list(events_node.keys())
2521
2522 for key in events_node_keys:
2523 event_node = events_node[key]
2524
2525 try:
2526 events_node[key] = self._process_event_include(event_node)
2527 except Exception as e:
2528 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2529
2530 return self._process_node_include(stream_node, 'stream',
2531 self._process_stream_include,
2532 process_children_include)
2533
2534 def _process_trace_include(self, trace_node):
2535 return self._process_node_include(trace_node, 'trace',
2536 self._process_trace_include)
2537
2538 def _process_clock_include(self, clock_node):
2539 return self._process_node_include(clock_node, 'clock',
2540 self._process_clock_include)
2541
2542 def _process_metadata_include(self, metadata_node):
2543 def process_children_include(metadata_node):
2544 if 'trace' in metadata_node:
2545 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2546
2547 if 'clocks' in metadata_node:
2548 clocks_node = metadata_node['clocks']
2549
2550 if not _is_assoc_array_prop(clocks_node):
2551 raise ConfigError('"clocks" property (metadata) must be an associative array')
2552
2553 clocks_node_keys = list(clocks_node.keys())
2554
2555 for key in clocks_node_keys:
2556 clock_node = clocks_node[key]
2557
2558 try:
2559 clocks_node[key] = self._process_clock_include(clock_node)
2560 except Exception as e:
2561 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2562
2563 if 'streams' in metadata_node:
2564 streams_node = metadata_node['streams']
2565
2566 if not _is_assoc_array_prop(streams_node):
2567 raise ConfigError('"streams" property (metadata) must be an associative array')
2568
2569 streams_node_keys = list(streams_node.keys())
2570
2571 for key in streams_node_keys:
2572 stream_node = streams_node[key]
2573
2574 try:
2575 streams_node[key] = self._process_stream_include(stream_node)
2576 except Exception as e:
2577 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2578
2579 return self._process_node_include(metadata_node, 'metadata',
2580 self._process_metadata_include,
2581 process_children_include)
2582
2583 def _process_root_includes(self, root):
2584 # The following config objects support includes:
2585 #
2586 # * Metadata object
2587 # * Trace object
2588 # * Stream object
2589 # * Event object
2590 #
2591 # We need to process the event includes first, then the stream
2592 # includes, then the trace includes, and finally the metadata
2593 # includes.
2594 #
2595 # In each object, only one of the $include and $include-replace
2596 # special properties is allowed.
2597 #
2598 # We keep a stack of absolute paths to included files to detect
2599 # recursion.
2600 if 'metadata' in root:
2601 root['metadata'] = self._process_metadata_include(root['metadata'])
2602
2603 return root
2604
2605 def _yaml_ordered_dump(self, node, **kwds):
2606 class ODumper(yaml.Dumper):
2607 pass
2608
2609 def dict_representer(dumper, node):
2610 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2611 node.items())
2612
2613 ODumper.add_representer(collections.OrderedDict, dict_representer)
2614
2615 return yaml.dump(node, Dumper=ODumper, **kwds)
2616
2617 def _yaml_ordered_load(self, yaml_path):
e5aa0be3
PP
2618 class OLoader(yaml.Loader):
2619 pass
2620
2621 def construct_mapping(loader, node):
2622 loader.flatten_mapping(node)
2623
2624 return collections.OrderedDict(loader.construct_pairs(node))
2625
2626 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2627 construct_mapping)
2628
f58be68f
PP
2629 # YAML -> Python
2630 try:
2631 with open(yaml_path, 'r') as f:
2632 node = yaml.load(f, OLoader)
2633 except (OSError, IOError) as e:
2634 raise ConfigError('cannot open file "{}"'.format(yaml_path))
2635 except Exception as e:
2636 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2637
2638 # loaded node must be an associative array
2639 if not _is_assoc_array_prop(node):
2640 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2641
2642 return node
2643
2644 def _reset(self):
2645 self._version = None
2646 self._include_stack = []
2647
2648 def parse(self, yaml_path):
2649 self._reset()
2650 self._root_yaml_path = yaml_path
e5aa0be3 2651
e5aa0be3 2652 try:
f58be68f 2653 root = self._yaml_ordered_load(yaml_path)
e5aa0be3 2654 except Exception as e:
f58be68f 2655 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
e5aa0be3
PP
2656
2657 if not _is_assoc_array_prop(root):
7add139d
PP
2658 raise ConfigError('configuration must be an associative array')
2659
2660 unk_prop = _get_first_unknown_prop(root, [
2661 'version',
2662 'prefix',
2663 'metadata',
2664 ])
2665
2666 if unk_prop:
2667 raise ConfigError('unknown configuration property: "{}"'.format(unk_prop))
e5aa0be3 2668
f58be68f 2669 # get the config version
e5aa0be3 2670 self._version = self._get_version(root)
e5aa0be3 2671
f58be68f
PP
2672 # process includes if supported
2673 if self._version >= 201:
2674 root = self._process_root_includes(root)
e5aa0be3 2675
f58be68f
PP
2676 # dump config if required
2677 if self._dump_config:
2678 print(self._yaml_ordered_dump(root, indent=2,
2679 default_flow_style=False))
e5aa0be3 2680
f58be68f
PP
2681 # get prefix and metadata
2682 prefix = self._get_prefix(root)
2683 meta = self._create_metadata(root)
e5aa0be3 2684
f58be68f 2685 return Config(self._version, prefix, meta)
e5aa0be3
PP
2686
2687
f58be68f 2688def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
e5aa0be3 2689 try:
f58be68f
PP
2690 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2691 dump_config)
2692 cfg = parser.parse(path)
2693
2694 return cfg
e5aa0be3 2695 except Exception as e:
f58be68f 2696 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)