1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2016 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining a copy
6 # of this software and associated documentation files (the "Software"), to deal
7 # in the Software without restriction, including without limitation the rights
8 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 # copies of the Software, and to permit persons to whom the Software is
10 # furnished to do so, subject to the following conditions:
11 #
12 # The above copyright notice and this permission notice shall be included in
13 # all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 # THE SOFTWARE.
22
23 from barectf import metadata
24 import collections
25 import datetime
26 import barectf
27 import enum
28 import yaml
29 import uuid
30 import copy
31 import re
32 import os
33
34
35 class ConfigError(RuntimeError):
36 def __init__(self, msg, prev=None):
37 super().__init__(msg)
38 self._prev = prev
39
40 @property
41 def prev(self):
42 return self._prev
43
44
45 class Config:
46 def __init__(self, version, prefix, metadata, options):
47 self.prefix = prefix
48 self.version = version
49 self.metadata = metadata
50 self.options = options
51
52 def _validate_metadata(self, meta):
53 try:
54 validator = _MetadataTypesHistologyValidator()
55 validator.validate(meta)
56 validator = _MetadataSpecialFieldsValidator()
57 validator.validate(meta)
58 except Exception as e:
59 raise ConfigError('metadata error', e)
60
61 try:
62 validator = _BarectfMetadataValidator()
63 validator.validate(meta)
64 except Exception as e:
65 raise ConfigError('barectf metadata error', e)
66
67 def _augment_metadata_env(self, meta):
68 version_tuple = barectf.get_version_tuple()
69 base_env = {
70 'domain': 'bare',
71 'tracer_name': 'barectf',
72 'tracer_major': version_tuple[0],
73 'tracer_minor': version_tuple[1],
74 'tracer_patch': version_tuple[2],
75 'barectf_gen_date': str(datetime.datetime.now().isoformat()),
76 }
77
78 base_env.update(meta.env)
79 meta.env = base_env
80
81 @property
82 def version(self):
83 return self._version
84
85 @version.setter
86 def version(self, value):
87 self._version = value
88
89 @property
90 def metadata(self):
91 return self._metadata
92
93 @metadata.setter
94 def metadata(self, value):
95 self._validate_metadata(value)
96 self._augment_metadata_env(value)
97 self._metadata = value
98
99 @property
100 def prefix(self):
101 return self._prefix
102
103 @prefix.setter
104 def prefix(self, value):
105 if not _is_valid_identifier(value):
106 raise ConfigError('configuration prefix must be a valid C identifier')
107
108 self._prefix = value
109
110 @property
111 def options(self):
112 return self._options
113
114 @options.setter
115 def options(self, options):
116 self._options = options
117
118
119 class ConfigOptions:
120 def __init__(self):
121 self._gen_prefix_def = False
122 self._gen_default_stream_def = False
123
124 @property
125 def gen_prefix_def(self):
126 return self._gen_prefix_def
127
128 @gen_prefix_def.setter
129 def gen_prefix_def(self, value):
130 self._gen_prefix_def = value
131
132 @property
133 def gen_default_stream_def(self):
134 return self._gen_default_stream_def
135
136 @gen_default_stream_def.setter
137 def gen_default_stream_def(self, value):
138 self._gen_default_stream_def = value
139
140
141 def _is_assoc_array_prop(node):
142 return isinstance(node, dict)
143
144
145 def _is_array_prop(node):
146 return isinstance(node, list)
147
148
149 def _is_int_prop(node):
150 return type(node) is int
151
152
153 def _is_str_prop(node):
154 return type(node) is str
155
156
157 def _is_bool_prop(node):
158 return type(node) is bool
159
160
161 def _is_valid_alignment(align):
162 return ((align & (align - 1)) == 0) and align > 0
163
164
165 def _byte_order_str_to_bo(bo_str):
166 bo_str = bo_str.lower()
167
168 if bo_str == 'le':
169 return metadata.ByteOrder.LE
170 elif bo_str == 'be':
171 return metadata.ByteOrder.BE
172
173
174 def _encoding_str_to_encoding(encoding_str):
175 encoding_str = encoding_str.lower()
176
177 if encoding_str == 'utf-8' or encoding_str == 'utf8':
178 return metadata.Encoding.UTF8
179 elif encoding_str == 'ascii':
180 return metadata.Encoding.ASCII
181 elif encoding_str == 'none':
182 return metadata.Encoding.NONE
183
184
185 _re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
186 _ctf_keywords = set([
187 'align',
188 'callsite',
189 'clock',
190 'enum',
191 'env',
192 'event',
193 'floating_point',
194 'integer',
195 'stream',
196 'string',
197 'struct',
198 'trace',
199 'typealias',
200 'typedef',
201 'variant',
202 ])
203
204
205 def _is_valid_identifier(iden):
206 if not _re_iden.match(iden):
207 return False
208
209 if iden in _ctf_keywords:
210 return False
211
212 return True
213
214
215 def _get_first_unknown_prop(node, known_props):
216 for prop_name in node:
217 if prop_name in known_props:
218 continue
219
220 return prop_name
221
222
223 # This validator validates the configured metadata for barectf-specific
224 # needs.
225 #
226 # barectf needs:
227 #
228 # * all headers/contexts to be at least byte-aligned
229 # * all integer and floating point number sizes to be <= 64 bits
230 # * no inner structures or arrays
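#
# For example, a payload field like the following would be rejected, because
# its integer size exceeds 64 bits (hypothetical YAML sketch; property names
# follow the checks implemented below):
#
#     payload-type:
#       class: struct
#       fields:
#         big_counter:
#           class: int
#           size: 128    # rejected: must be <= 64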
231 class _BarectfMetadataValidator:
232 def __init__(self):
233 self._type_to_validate_type_func = {
234 metadata.Integer: self._validate_int_type,
235 metadata.FloatingPoint: self._validate_float_type,
236 metadata.Enum: self._validate_enum_type,
237 metadata.String: self._validate_string_type,
238 metadata.Struct: self._validate_struct_type,
239 metadata.Array: self._validate_array_type,
240 }
241
242 def _validate_int_type(self, t, entity_root):
243 if t.size > 64:
244 raise ConfigError('integer type\'s size must be less than or equal to 64 bits')
245
246 def _validate_float_type(self, t, entity_root):
247 if t.size > 64:
248 raise ConfigError('floating point number type\'s size must be less than or equal to 64 bits')
249
250 def _validate_enum_type(self, t, entity_root):
251 if t.value_type.size > 64:
252 raise ConfigError('enumeration type\'s integer type\'s size must be less than or equal to 64 bits')
253
254 def _validate_string_type(self, t, entity_root):
255 pass
256
257 def _validate_struct_type(self, t, entity_root):
258 if not entity_root:
259 raise ConfigError('inner structure types are not supported as of this version')
260
261 for field_name, field_type in t.fields.items():
262 if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
263 if field_name == 'uuid':
264 # allow
265 continue
266
267 try:
268 self._validate_type(field_type, False)
269 except Exception as e:
270 raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
271
272 def _validate_array_type(self, t, entity_root):
273 raise ConfigError('array types are not supported as of this version')
274
275 def _validate_type(self, t, entity_root):
276 self._type_to_validate_type_func[type(t)](t, entity_root)
277
278 def _validate_entity(self, t):
279 if t is None:
280 return
281
282 # make sure entity is byte-aligned
283 if t.align < 8:
284 raise ConfigError('type\'s alignment must be at least byte-aligned')
285
286 # make sure entity is a structure
287 if type(t) is not metadata.Struct:
288 raise ConfigError('expecting a structure type')
289
290 # validate types
291 self._validate_type(t, True)
292
293 def _validate_entities_and_names(self, meta):
294 self._cur_entity = _Entity.TRACE_PACKET_HEADER
295
296 try:
297 self._validate_entity(meta.trace.packet_header_type)
298 except Exception as e:
299 raise ConfigError('invalid trace packet header type', e)
300
301 for stream_name, stream in meta.streams.items():
302 if not _is_valid_identifier(stream_name):
303 raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
304
305 self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
306
307 try:
308 self._validate_entity(stream.packet_context_type)
309 except Exception as e:
310 raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
311
312 self._cur_entity = _Entity.STREAM_EVENT_HEADER
313
314 try:
315 self._validate_entity(stream.event_header_type)
316 except Exception as e:
317 raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
318
319 self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
320
321 try:
322 self._validate_entity(stream.event_context_type)
323 except Exception as e:
324 raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
325
326 try:
327 for ev_name, ev in stream.events.items():
328 if not _is_valid_identifier(ev_name):
329 raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
330
331 self._cur_entity = _Entity.EVENT_CONTEXT
332
333 try:
334 self._validate_entity(ev.context_type)
335 except Exception as e:
336 raise ConfigError('invalid context type in event "{}"'.format(ev_name), e)
337
338 self._cur_entity = _Entity.EVENT_PAYLOAD
339
340 try:
341 self._validate_entity(ev.payload_type)
342 except Exception as e:
343 raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e)
344
345 if stream.is_event_empty(ev):
346 raise ConfigError('event "{}" is empty'.format(ev_name))
347 except Exception as e:
348 raise ConfigError('invalid stream "{}"'.format(stream_name), e)
349
350 def _validate_default_stream(self, meta):
351 if meta.default_stream_name:
352 if meta.default_stream_name not in meta.streams.keys():
353 raise ConfigError('default stream name ("{}") does not exist'.format(meta.default_stream_name))
354
355 def validate(self, meta):
356 self._validate_entities_and_names(meta)
357 self._validate_default_stream(meta)
358
359
360 # This validator validates special fields of trace, stream, and event
361 # types. For example, it checks that the "stream_id" field exists in the
362 # trace packet header if there's more than one stream, among other checks.
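#
# As a reference, a trace packet header type that satisfies every check in
# _validate_trace_packet_header_type() below could look like this
# (hypothetical YAML sketch; the trace object must also define "uuid",
# e.g. "uuid: auto", for the "uuid" field to be accepted):
#
#     packet-header-type:
#       class: struct
#       fields:
#         magic:            # must be the first field: 32-bit unsigned integer
#           class: int
#           size: 32
#         stream_id:        # unsigned, large enough for all stream IDs
#           class: int
#           size: 8
#         uuid:             # array of 16 byte-aligned, unsigned 8-bit integers
#           class: array
#           length: 16
#           element-type:
#             class: int
#             size: 8
#             align: 8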
363 class _MetadataSpecialFieldsValidator:
364 def _validate_trace_packet_header_type(self, t):
365 # needs "stream_id" field?
366 if len(self._meta.streams) > 1:
367 # yes
368 if t is None:
369 raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is missing')
370
371 if type(t) is not metadata.Struct:
372 raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is not a structure type')
373
374 if 'stream_id' not in t.fields:
375 raise ConfigError('need "stream_id" field in trace packet header type (more than one stream)')
376
377 # validate "magic" and "stream_id" types
378 if type(t) is not metadata.Struct:
379 return
380
381 for i, (field_name, field_type) in enumerate(t.fields.items()):
382 if field_name == 'magic':
383 if type(field_type) is not metadata.Integer:
384 raise ConfigError('"magic" field in trace packet header type must be an integer type')
385
386 if field_type.signed or field_type.size != 32:
387 raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type')
388
389 if i != 0:
390 raise ConfigError('"magic" field must be the first trace packet header type\'s field')
391 elif field_name == 'stream_id':
392 if type(field_type) is not metadata.Integer:
393 raise ConfigError('"stream_id" field in trace packet header type must be an integer type')
394
395 if field_type.signed:
396 raise ConfigError('"stream_id" field in trace packet header type must be an unsigned integer type')
397
398 # "id" size can fit all event IDs
399 if len(self._meta.streams) > (1 << field_type.size):
400 raise ConfigError('"stream_id" field\' size in trace packet header type is too small for the number of trace streams')
401 elif field_name == 'uuid':
402 if self._meta.trace.uuid is None:
403 raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided')
404
405 if type(field_type) is not metadata.Array:
406 raise ConfigError('"uuid" field in trace packet header type must be an array')
407
408 if field_type.length != 16:
409 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes')
410
411 element_type = field_type.element_type
412
413 if type(element_type) is not metadata.Integer:
414 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
415
416 if element_type.size != 8:
417 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
418
419 if element_type.signed:
420 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes')
421
422 if element_type.align != 8:
423 raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes')
424
425 def _validate_trace(self, meta):
426 self._validate_trace_packet_header_type(meta.trace.packet_header_type)
427
428 def _validate_stream_packet_context(self, stream):
429 t = stream.packet_context_type
430
431 if t is None:
432 raise ConfigError('missing "packet-context-type" property in stream object')
433
434 if type(t) is not metadata.Struct:
435 raise ConfigError('"packet-context-type": expecting a structure type')
436
437 # "timestamp_begin", if exists, is an unsigned integer type,
438 # mapped to a clock
439 ts_begin = None
440
441 if 'timestamp_begin' in t.fields:
442 ts_begin = t.fields['timestamp_begin']
443
444 if type(ts_begin) is not metadata.Integer:
445 raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type')
446
447 if ts_begin.signed:
448 raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type')
449
450 if not ts_begin.property_mappings:
451 raise ConfigError('"timestamp_begin" field in stream packet context type must be mapped to a clock')
452
453 # "timestamp_end", if exists, is an unsigned integer type,
454 # mapped to a clock
455 ts_end = None
456
457 if 'timestamp_end' in t.fields:
458 ts_end = t.fields['timestamp_end']
459
460 if type(ts_end) is not metadata.Integer:
461 raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type')
462
463 if ts_end.signed:
464 raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type')
465
466 if not ts_end.property_mappings:
467 raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock')
468
469 # "timestamp_begin" and "timestamp_end" exist together
470 if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
471 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
472
473 # "timestamp_begin" and "timestamp_end" are mapped to the same clock
474 if ts_begin is not None and ts_end is not None:
475 if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
476 raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be mapped to the same clock object in stream packet context type')
477
478 # "events_discarded", if exists, is an unsigned integer type
479 if 'events_discarded' in t.fields:
480 events_discarded = t.fields['events_discarded']
481
482 if type(events_discarded) is not metadata.Integer:
483 raise ConfigError('"events_discarded" field in stream packet context type must be an integer type')
484
485 if events_discarded.signed:
486 raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type')
487
488 # "packet_size" and "content_size" must exist
489 if 'packet_size' not in t.fields:
490 raise ConfigError('missing "packet_size" field in stream packet context type')
491
492 packet_size = t.fields['packet_size']
493
494 # "content_size" and "content_size" must exist
495 if 'content_size' not in t.fields:
496 raise ConfigError('missing "content_size" field in stream packet context type')
497
498 content_size = t.fields['content_size']
499
500 # "packet_size" is an unsigned integer type
501 if type(packet_size) is not metadata.Integer:
502 raise ConfigError('"packet_size" field in stream packet context type must be an integer type')
503
504 if packet_size.signed:
505 raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type')
506
507 # "content_size" is an unsigned integer type
508 if type(content_size) is not metadata.Integer:
509 raise ConfigError('"content_size" field in stream packet context type must be an integer type')
510
511 if content_size.signed:
512 raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type')
513
514 # "packet_size" size should be greater than or equal to "content_size" size
515 if content_size.size > packet_size.size:
516 raise ConfigError('"content_size" field size must be lesser than or equal to "packet_size" field size')
517
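# A stream packet context type passing the checks above could look like
# this (hypothetical YAML sketch; "uint64", "uint32" and "clock_int_t" are
# assumed type aliases, the last one being an unsigned integer type mapped
# to a clock):
#
#     packet-context-type:
#       class: struct
#       fields:
#         packet_size: uint64
#         content_size: uint64         # size must be <= "packet_size" size
#         events_discarded: uint32
#         timestamp_begin: clock_int_t
#         timestamp_end: clock_int_t   # must map to the same clock
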
518 def _validate_stream_event_header(self, stream):
519 t = stream.event_header_type
520
521 # needs "id" field?
522 if len(stream.events) > 1:
523 # yes
524 if t is None:
525 raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is missing')
526
527 if type(t) is not metadata.Struct:
528 raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is not a structure type')
529
530 if 'id' not in t.fields:
531 raise ConfigError('need "id" field in stream event header type (more than one event)')
532
533 # validate "id" and "timestamp" types
534 if type(t) is not metadata.Struct:
535 return
536
537 # "timestamp", if exists, is an unsigned integer type,
538 # mapped to a clock
539 if 'timestamp' in t.fields:
540 ts = t.fields['timestamp']
541
542 if type(ts) is not metadata.Integer:
543 raise ConfigError('"timestamp" field in stream event header type must be an integer type')
544
545 if ts.signed:
546 raise ConfigError('"timestamp" field in stream event header type must be an unsigned integer type')
547
548 if not ts.property_mappings:
549 raise ConfigError('"timestamp" field in stream event header type must be mapped to a clock')
550
551 if 'id' in t.fields:
552 eid = t.fields['id']
553
554 # "id" is an unsigned integer type
555 if type(eid) is not metadata.Integer:
556 raise ConfigError('"id" field in stream event header type must be an integer type')
557
558 if eid.signed:
559 raise ConfigError('"id" field in stream event header type must be an unsigned integer type')
560
561 # "id" size can fit all event IDs
562 if len(stream.events) > (1 << eid.size):
563 raise ConfigError('"id" field\' size in stream event header type is too small for the number of stream events')
564
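# Similarly, a stream event header type passing these checks (hypothetical
# sketch; "uint16" and "clock_int_t" are assumed type aliases):
#
#     event-header-type:
#       class: struct
#       fields:
#         id: uint16              # unsigned, large enough for all event IDs
#         timestamp: clock_int_t
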
565 def _validate_stream(self, stream):
566 self._validate_stream_packet_context(stream)
567 self._validate_stream_event_header(stream)
568
569 def validate(self, meta):
570 self._meta = meta
571 self._validate_trace(meta)
572
573 for stream in meta.streams.values():
574 try:
575 self._validate_stream(stream)
576 except Exception as e:
577 raise ConfigError('invalid stream "{}"'.format(stream.name), e)
578
579
580 # Entities. Order of values is important here.
581 @enum.unique
582 class _Entity(enum.IntEnum):
583 TRACE_PACKET_HEADER = 0
584 STREAM_PACKET_CONTEXT = 1
585 STREAM_EVENT_HEADER = 2
586 STREAM_EVENT_CONTEXT = 3
587 EVENT_CONTEXT = 4
588 EVENT_PAYLOAD = 5
589
590
591 # Since type inheritance allows types to be only partially defined at
592 # any place in the configuration, this validator validates that actual
593 # trace, stream, and event types are all complete and valid. Therefore
594 # an invalid, but unused, type alias is accepted.
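#
# For example (hypothetical YAML sketch, property names following
# _create_type() and _register_type_aliases() below), an alias which is
# incomplete on its own is accepted as long as everything that actually
# uses it completes it:
#
#     type-aliases:
#       partial-int:         # no "size": incomplete, fine if never used as is
#         class: int
#         align: 8
#       uint32:
#         $inherit: partial-int   # "inherit" in version 2.0 configurations
#         size: 32                # completes the inherited definition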
595 class _MetadataTypesHistologyValidator:
596 def __init__(self):
597 self._type_to_validate_type_histology_func = {
598 metadata.Integer: self._validate_integer_histology,
599 metadata.FloatingPoint: self._validate_float_histology,
600 metadata.Enum: self._validate_enum_histology,
601 metadata.String: self._validate_string_histology,
602 metadata.Struct: self._validate_struct_histology,
603 metadata.Array: self._validate_array_histology,
604 }
605
606 def _validate_integer_histology(self, t):
607 # size is set
608 if t.size is None:
609 raise ConfigError('missing integer type\'s size')
610
611 def _validate_float_histology(self, t):
612 # exponent digits is set
613 if t.exp_size is None:
614 raise ConfigError('missing floating point number type\'s exponent size')
615
616 # mantissa digits is set
617 if t.mant_size is None:
618 raise ConfigError('missing floating point number type\'s mantissa size')
619
620 # exponent and mantissa sum is a multiple of 8
621 if (t.exp_size + t.mant_size) % 8 != 0:
622 raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
623
624 def _validate_enum_histology(self, t):
625 # integer type is set
626 if t.value_type is None:
627 raise ConfigError('missing enumeration type\'s value type')
628
629 # there's at least one member
630 if not t.members:
631 raise ConfigError('enumeration type needs at least one member')
632
633 # no overlapping values and all values are valid considering
634 # the value type
635 ranges = []
636
637 if t.value_type.signed:
638 value_min = -(1 << t.value_type.size - 1)
639 value_max = (1 << (t.value_type.size - 1)) - 1
640 else:
641 value_min = 0
642 value_max = (1 << t.value_type.size) - 1
643
644 for label, value in t.members.items():
645 for rg in ranges:
646 if value[0] <= rg[1] and rg[0] <= value[1]:
647 raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
648
649 fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
650
651 if value[0] < value_min or value[0] > value_max:
652 raise ConfigError(fmt.format(label, value[0], value_min, value_max))
653
654 if value[1] < value_min or value[1] > value_max:
655 raise ConfigError(fmt.format(label, value[1], value_min, value_max))
656
657 ranges.append(value)
658
659 def _validate_string_histology(self, t):
660 # always valid
661 pass
662
663 def _validate_struct_histology(self, t):
664 # all fields are valid
665 for field_name, field_type in t.fields.items():
666 try:
667 self._validate_type_histology(field_type)
668 except Exception as e:
669 raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
670
671 def _validate_array_histology(self, t):
672 # length is set
673 if t.length is None:
674 raise ConfigError('missing array type\'s length')
675
676 # element type is set
677 if t.element_type is None:
678 raise ConfigError('missing array type\'s element type')
679
680 # element type is valid
681 try:
682 self._validate_type_histology(t.element_type)
683 except Exception as e:
684 raise ConfigError('invalid array type\'s element type', e)
685
686 def _validate_type_histology(self, t):
687 if t is None:
688 return
689
690 self._type_to_validate_type_histology_func[type(t)](t)
691
692 def _validate_entity_type_histology(self, t):
693 if t is None:
694 return
695
696 if type(t) is not metadata.Struct:
697 raise ConfigError('expecting a structure type')
698
699 self._validate_type_histology(t)
700
701 def _validate_event_types_histology(self, ev):
702 ev_name = ev.name
703
704 # validate event context type
705 try:
706 self._validate_entity_type_histology(ev.context_type)
707 except Exception as e:
708 raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e)
709
710 # validate event payload type
711 try:
712 self._validate_entity_type_histology(ev.payload_type)
713 except Exception as e:
714 raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e)
715
716 def _validate_stream_types_histology(self, stream):
717 stream_name = stream.name
718
719 # validate stream packet context type
720 try:
721 self._validate_entity_type_histology(stream.packet_context_type)
722 except Exception as e:
723 raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e)
724
725 # validate stream event header type
726 try:
727 self._validate_entity_type_histology(stream.event_header_type)
728 except Exception as e:
729 raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e)
730
731 # validate stream event context type
732 try:
733 self._validate_entity_type_histology(stream.event_context_type)
734 except Exception as e:
735 raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e)
736
737 # validate events
738 for ev in stream.events.values():
739 try:
740 self._validate_event_types_histology(ev)
741 except Exception as e:
742 raise ConfigError('invalid event in stream "{}"'.format(stream_name), e)
743
744 def validate(self, meta):
745 # validate trace packet header type
746 try:
747 self._validate_entity_type_histology(meta.trace.packet_header_type)
748 except Exception as e:
749 raise ConfigError('invalid trace packet header type', e)
750
751 # validate streams
752 for stream in meta.streams.values():
753 self._validate_stream_types_histology(stream)
754
755
756 class _YamlConfigParser:
757 def __init__(self, include_dirs, ignore_include_not_found, dump_config):
758 self._class_name_to_create_type_func = {
759 'int': self._create_integer,
760 'integer': self._create_integer,
761 'flt': self._create_float,
762 'float': self._create_float,
763 'floating-point': self._create_float,
764 'enum': self._create_enum,
765 'enumeration': self._create_enum,
766 'str': self._create_string,
767 'string': self._create_string,
768 'struct': self._create_struct,
769 'structure': self._create_struct,
770 'array': self._create_array,
771 }
772 self._type_to_create_type_func = {
773 metadata.Integer: self._create_integer,
774 metadata.FloatingPoint: self._create_float,
775 metadata.Enum: self._create_enum,
776 metadata.String: self._create_string,
777 metadata.Struct: self._create_struct,
778 metadata.Array: self._create_array,
779 }
780 self._include_dirs = include_dirs
781 self._ignore_include_not_found = ignore_include_not_found
782 self._dump_config = dump_config
783
784 def _set_byte_order(self, metadata_node):
785 if 'trace' not in metadata_node:
786 raise ConfigError('missing "trace" property (metadata)')
787
788 trace_node = metadata_node['trace']
789
790 if not _is_assoc_array_prop(trace_node):
791 raise ConfigError('"trace" property (metadata) must be an associative array')
792
793 if 'byte-order' not in trace_node:
794 raise ConfigError('missing "byte-order" property (trace)')
795
796 bo_node = trace_node['byte-order']
797
798 if not _is_str_prop(bo_node):
799 raise ConfigError('"byte-order" property of trace object must be a string ("le" or "be")')
800
801 self._bo = _byte_order_str_to_bo(bo_node)
802
803 if self._bo is None:
804 raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"')
805
806 def _lookup_type_alias(self, name):
807 if name in self._tas:
808 return copy.deepcopy(self._tas[name])
809
810 def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
811 unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
812
813 if unk_prop:
814 raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop))
815
816 if 'name' not in prop_mapping_node:
817 raise ConfigError('missing "name" property in integer type object\'s clock property mapping')
818
819 if 'property' not in prop_mapping_node:
820 raise ConfigError('missing "property" property in integer type object\'s clock property mapping')
821
822 clock_name = prop_mapping_node['name']
823 prop = prop_mapping_node['property']
824
825 if not _is_str_prop(clock_name):
826 raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string')
827
828 if not _is_str_prop(prop):
829 raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string')
830
831 if clock_name not in self._clocks:
832 raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name))
833
834 if prop != 'value':
835 raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop))
836
837 mapped_clock = self._clocks[clock_name]
838 int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
839
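# A single element of an integer type's "property-mappings" array, as
# accepted by _set_int_clock_prop_mapping() above (hypothetical sketch;
# "default" is assumed to be a clock declared under "clocks"):
#
#     - type: clock
#       name: default
#       property: value    # "value" is the only accepted property
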
840 def _get_first_unknown_type_prop(self, type_node, known_props):
841 kp = known_props + ['inherit', 'class']
842
843 if self._version >= 201:
844 kp.append('$inherit')
845
846 return _get_first_unknown_prop(type_node, kp)
847
848 def _create_integer(self, obj, node):
849 if obj is None:
850 # create integer object
851 obj = metadata.Integer()
852
853 unk_prop = self._get_first_unknown_type_prop(node, [
854 'size',
855 'align',
856 'signed',
857 'byte-order',
858 'base',
859 'encoding',
860 'property-mappings',
861 ])
862
863 if unk_prop:
864 raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop))
865
866 # size
867 if 'size' in node:
868 size = node['size']
869
870 if not _is_int_prop(size):
871 raise ConfigError('"size" property of integer type object must be an integer')
872
873 if size < 1:
874 raise ConfigError('invalid integer size: {}'.format(size))
875
876 obj.size = size
877
878 # align
879 if 'align' in node:
880 align = node['align']
881
882 if align is None:
883 obj.set_default_align()
884 else:
885 if not _is_int_prop(align):
886 raise ConfigError('"align" property of integer type object must be an integer')
887
888 if not _is_valid_alignment(align):
889 raise ConfigError('invalid alignment: {}'.format(align))
890
891 obj.align = align
892
893 # signed
894 if 'signed' in node:
895 signed = node['signed']
896
897 if signed is None:
898 obj.set_default_signed()
899 else:
900 if not _is_bool_prop(signed):
901 raise ConfigError('"signed" property of integer type object must be a boolean')
902
903 obj.signed = signed
904
905 # byte order
906 if 'byte-order' in node:
907 byte_order = node['byte-order']
908
909 if byte_order is None:
910 obj.byte_order = self._bo
911 else:
912 if not _is_str_prop(byte_order):
913 raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")')
914
915 byte_order = _byte_order_str_to_bo(byte_order)
916
917 if byte_order is None:
918 raise ConfigError('invalid "byte-order" property in integer type object')
919
920 obj.byte_order = byte_order
921 else:
922 obj.byte_order = self._bo
923
924 # base
925 if 'base' in node:
926 base = node['base']
927
928 if base is None:
929 obj.set_default_base()
930 else:
931 if not _is_str_prop(base):
932 raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
933
934 if base == 'bin':
935 base = 2
936 elif base == 'oct':
937 base = 8
938 elif base == 'dec':
939 base = 10
940 elif base == 'hex':
941 base = 16
942 else:
943 raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
944
945 obj.base = base
946
947 # encoding
948 if 'encoding' in node:
949 encoding = node['encoding']
950
951 if encoding is None:
952 obj.set_default_encoding()
953 else:
954 if not _is_str_prop(encoding):
955 raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
956
957 encoding = _encoding_str_to_encoding(encoding)
958
959 if encoding is None:
960 raise ConfigError('invalid "encoding" property in integer type object')
961
962 obj.encoding = encoding
963
964 # property mappings
965 if 'property-mappings' in node:
966 prop_mappings = node['property-mappings']
967
968 if prop_mappings is None:
969 obj.set_default_property_mappings()
970 else:
971 if not _is_array_prop(prop_mappings):
972 raise ConfigError('"property-mappings" property of integer type object must be an array')
973
974 if len(prop_mappings) > 1:
975 raise ConfigError('length of "property-mappings" array in integer type object must be 1')
976
977 for index, prop_mapping in enumerate(prop_mappings):
978 if not _is_assoc_array_prop(prop_mapping):
979 raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays')
980
981 if 'type' not in prop_mapping:
982 raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
983
984 prop_type = prop_mapping['type']
985
986 if not _is_str_prop(prop_type):
987 raise ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
988
989 if prop_type == 'clock':
990 self._set_int_clock_prop_mapping(obj, prop_mapping)
991 else:
992 raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
993
994 return obj
995
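# A typical integer type object, as accepted by _create_integer() above
# (hypothetical sketch; only "size" is mandatory, "byte-order" falls back
# to the trace byte order, and the other properties have defaults;
# "property-mappings" is shown separately above):
#
#     class: int
#     size: 32
#     align: 8
#     signed: false
#     byte-order: le
#     base: hex
#     encoding: none
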
996 def _create_float(self, obj, node):
997 if obj is None:
998 # create floating point number object
999 obj = metadata.FloatingPoint()
1000
1001 unk_prop = self._get_first_unknown_type_prop(node, [
1002 'size',
1003 'align',
1004 'byte-order',
1005 ])
1006
1007 if unk_prop:
1008 raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop))
1009
1010 # size
1011 if 'size' in node:
1012 size = node['size']
1013
1014 if not _is_assoc_array_prop(size):
1015 raise ConfigError('"size" property of floating point number type object must be an associative array')
1016
1017 unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
1018
1019 if unk_prop:
1020 raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop))
1021
1022 if 'exp' in size:
1023 exp = size['exp']
1024
1025 if not _is_int_prop(exp):
1026 raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer')
1027
1028 if exp < 1:
1029 raise ConfigError('invalid floating point number exponent size: {}'.format(exp))
1030
1031 obj.exp_size = exp
1032
1033 if 'mant' in size:
1034 mant = size['mant']
1035
1036 if not _is_int_prop(mant):
1037 raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer')
1038
1039 if mant < 1:
1040 raise ConfigError('invalid floating point number mantissa size: {}'.format(mant))
1041
1042 obj.mant_size = mant
1043
1044 # align
1045 if 'align' in node:
1046 align = node['align']
1047
1048 if align is None:
1049 obj.set_default_align()
1050 else:
1051 if not _is_int_prop(align):
1052 raise ConfigError('"align" property of floating point number type object must be an integer')
1053
1054 if not _is_valid_alignment(align):
1055 raise ConfigError('invalid alignment: {}'.format(align))
1056
1057 obj.align = align
1058
1059 # byte order
1060 if 'byte-order' in node:
1061 byte_order = node['byte-order']
1062
1063 if byte_order is None:
1064 obj.byte_order = self._bo
1065 else:
1066 if not _is_str_prop(byte_order):
1067 raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")')
1068
1069 byte_order = _byte_order_str_to_bo(byte_order)
1070
1071 if byte_order is None:
1072 raise ConfigError('invalid "byte-order" property in floating point number type object')
1073 else:
1074 obj.byte_order = self._bo
1075
1076 return obj
1077
1078 def _create_enum(self, obj, node):
1079 if obj is None:
1080 # create enumeration object
1081 obj = metadata.Enum()
1082
1083 unk_prop = self._get_first_unknown_type_prop(node, [
1084 'value-type',
1085 'members',
1086 ])
1087
1088 if unk_prop:
1089 raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop))
1090
1091 # value type
1092 if 'value-type' in node:
1093 value_type_node = node['value-type']
1094
1095 try:
1096 obj.value_type = self._create_type(value_type_node)
1097 except Exception as e:
1098 raise ConfigError('cannot create enumeration type\'s integer type', e)
1099
1100 # members
1101 if 'members' in node:
1102 members_node = node['members']
1103
1104 if not _is_array_prop(members_node):
1105 raise ConfigError('"members" property of enumeration type object must be an array')
1106
1107 cur = 0
1108 last_value = obj.last_value
1109
1110 if last_value is None:
1111 cur = 0
1112 else:
1113 cur = last_value + 1
1114
1115 for index, m_node in enumerate(members_node):
1116 if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
1117 raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index))
1118
1119 if _is_str_prop(m_node):
1120 label = m_node
1121 value = (cur, cur)
1122 cur += 1
1123 else:
1124 unk_prop = _get_first_unknown_prop(m_node, [
1125 'label',
1126 'value',
1127 ])
1128
1129 if unk_prop:
1130 raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop))
1131
1132 if 'label' not in m_node:
1133 raise ConfigError('missing "label" property in enumeration member #{}'.format(index))
1134
1135 label = m_node['label']
1136
1137 if not _is_str_prop(label):
1138 raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index))
1139
1140 if 'value' not in m_node:
1141 raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label))
1142
1143 value = m_node['value']
1144
1145 if not _is_int_prop(value) and not _is_array_prop(value):
1146 raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label))
1147
1148 if _is_int_prop(value):
1149 cur = value + 1
1150 value = (value, value)
1151 else:
1152 if len(value) != 2:
1153 raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label))
1154
1155 mn = value[0]
1156 mx = value[1]
1157
1158 if mn > mx:
1159 raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
1160
1161 value = (mn, mx)
1162 cur = mx + 1
1163
1164 obj.members[label] = value
1165
1166 return obj
1167
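# A matching enumeration type object (hypothetical sketch; "uint8" is an
# assumed type alias): members may be plain strings, which get
# auto-incremented values, or label/value pairs where the value is a single
# integer or a [min, max] range:
#
#     class: enum
#     value-type: uint8
#     members:
#       - OFF                # value 0
#       - ON                 # value 1
#       - label: ERROR
#         value: [10, 19]    # range
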
1168 def _create_string(self, obj, node):
1169 if obj is None:
1170 # create string object
1171 obj = metadata.String()
1172
1173 unk_prop = self._get_first_unknown_type_prop(node, [
1174 'encoding',
1175 ])
1176
1177 if unk_prop:
1178 raise ConfigError('unknown string type object property: "{}"'.format(unk_prop))
1179
1180 # encoding
1181 if 'encoding' in node:
1182 encoding = node['encoding']
1183
1184 if encoding is None:
1185 obj.set_default_encoding()
1186 else:
1187 if not _is_str_prop(encoding):
1188 raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")')
1189
1190 encoding = _encoding_str_to_encoding(encoding)
1191
1192 if encoding is None:
1193 raise ConfigError('invalid "encoding" property in string type object')
1194
1195 obj.encoding = encoding
1196
1197 return obj
1198
1199 def _create_struct(self, obj, node):
1200 if obj is None:
1201 # create structure object
1202 obj = metadata.Struct()
1203
1204 unk_prop = self._get_first_unknown_type_prop(node, [
1205 'min-align',
1206 'fields',
1207 ])
1208
1209 if unk_prop:
1210 raise ConfigError('unknown structure type object property: "{}"'.format(unk_prop))
1211
1212 # minimum alignment
1213 if 'min-align' in node:
1214 min_align = node['min-align']
1215
1216 if min_align is None:
1217 obj.set_default_min_align()
1218 else:
1219 if not _is_int_prop(min_align):
1220 raise ConfigError('"min-align" property of structure type object must be an integer')
1221
1222 if not _is_valid_alignment(min_align):
1223 raise ConfigError('invalid minimum alignment: {}'.format(min_align))
1224
1225 obj.min_align = min_align
1226
1227 # fields
1228 if 'fields' in node:
1229 fields = node['fields']
1230
1231 if fields is None:
1232 obj.set_default_fields()
1233 else:
1234 if not _is_assoc_array_prop(fields):
1235 raise ConfigError('"fields" property of structure type object must be an associative array')
1236
1237 for field_name, field_node in fields.items():
1238 if not _is_valid_identifier(field_name):
1239 raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name))
1240
1241 try:
1242 obj.fields[field_name] = self._create_type(field_node)
1243 except Exception as e:
1244 raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e)
1245
1246 return obj
1247
1248 def _create_array(self, obj, node):
1249 if obj is None:
1250 # create array object
1251 obj = metadata.Array()
1252
1253 unk_prop = self._get_first_unknown_type_prop(node, [
1254 'length',
1255 'element-type',
1256 ])
1257
1258 if unk_prop:
1259 raise ConfigError('unknown array type object property: "{}"'.format(unk_prop))
1260
1261 # length
1262 if 'length' in node:
1263 length = node['length']
1264
1265 if not _is_int_prop(length):
1266 raise ConfigError('"length" property of array type object must be an integer')
1267
1268 if type(length) is int and length < 0:
1269 raise ConfigError('invalid static array length: {}'.format(length))
1270
1271 obj.length = length
1272
1273 # element type
1274 if 'element-type' in node:
1275 element_type_node = node['element-type']
1276
1277 try:
1278 obj.element_type = self._create_type(node['element-type'])
1279 except Exception as e:
1280 raise ConfigError('cannot create array type\'s element type', e)
1281
1282 return obj
1283
1284 def _create_type(self, type_node):
1285 if type(type_node) is str:
1286 t = self._lookup_type_alias(type_node)
1287
1288 if t is None:
1289 raise ConfigError('unknown type alias "{}"'.format(type_node))
1290
1291 return t
1292
1293 if not _is_assoc_array_prop(type_node):
1294 raise ConfigError('type objects must be associative arrays or strings (type alias name)')
1295
1296 # inherit:
1297 # v2.0: "inherit"
1298 # v2.1+: "$inherit"
1299 inherit_node = None
1300
1301 if self._version >= 200:
1302 if 'inherit' in type_node:
1303 inherit_prop = 'inherit'
1304 inherit_node = type_node[inherit_prop]
1305
1306 if self._version >= 201:
1307 if '$inherit' in type_node:
1308 if inherit_node is not None:
1309 raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
1310
1311 inherit_prop = '$inherit'
1312 inherit_node = type_node[inherit_prop]
1313
1314 if inherit_node is not None and 'class' in type_node:
1315 raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
1316
1317 if inherit_node is not None:
1318 if not _is_str_prop(inherit_node):
1319 raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop))
1320
1321 base = self._lookup_type_alias(inherit_node)
1322
1323 if base is None:
1324 raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
1325
1326 func = self._type_to_create_type_func[type(base)]
1327 else:
1328 if 'class' not in type_node:
1329 raise ConfigError('type objects which do not inherit must have a "class" property')
1330
1331 class_name = type_node['class']
1332
1333 if type(class_name) is not str:
1334 raise ConfigError('type objects\' "class" property must be a string')
1335
1336 if class_name not in self._class_name_to_create_type_func:
1337 raise ConfigError('unknown type class "{}"'.format(class_name))
1338
1339 base = None
1340 func = self._class_name_to_create_type_func[class_name]
1341
1342 return func(base, type_node)
1343
1344 def _register_type_aliases(self, metadata_node):
1345 self._tas = dict()
1346
1347 if 'type-aliases' not in metadata_node:
1348 return
1349
1350 ta_node = metadata_node['type-aliases']
1351
1352 if ta_node is None:
1353 return
1354
1355 if not _is_assoc_array_prop(ta_node):
1356 raise ConfigError('"type-aliases" property (metadata) must be an associative array')
1357
1358 for ta_name, ta_type in ta_node.items():
1359 if ta_name in self._tas:
1360 raise ConfigError('duplicate type alias "{}"'.format(ta_name))
1361
1362 try:
1363 t = self._create_type(ta_type)
1364 except Exception as e:
1365 raise ConfigError('cannot create type alias "{}"'.format(ta_name), e)
1366
1367 self._tas[ta_name] = t
1368
1369 def _create_clock(self, node):
1370 # create clock object
1371 clock = metadata.Clock()
1372
1373 if not _is_assoc_array_prop(node):
1374 raise ConfigError('clock objects must be associative arrays')
1375
1376 known_props = [
1377 'uuid',
1378 'description',
1379 'freq',
1380 'error-cycles',
1381 'offset',
1382 'absolute',
1383 'return-ctype',
1384 ]
1385
1386 if self._version >= 201:
1387 known_props.append('$return-ctype')
1388
1389 unk_prop = _get_first_unknown_prop(node, known_props)
1390
1391 if unk_prop:
1392 raise ConfigError('unknown clock object property: "{}"'.format(unk_prop))
1393
1394 # UUID
1395 if 'uuid' in node:
1396 uuidp = node['uuid']
1397
1398 if uuidp is None:
1399 clock.set_default_uuid()
1400 else:
1401 if not _is_str_prop(uuidp):
1402 raise ConfigError('"uuid" property of clock object must be a string')
1403
1404 try:
1405 uuidp = uuid.UUID(uuidp)
1406 except:
1407 raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp))
1408
1409 clock.uuid = uuidp
1410
1411 # description
1412 if 'description' in node:
1413 desc = node['description']
1414
1415 if desc is None:
1416 clock.set_default_description()
1417 else:
1418 if not _is_str_prop(desc):
1419 raise ConfigError('"description" property of clock object must be a string')
1420
1421 clock.description = desc
1422
1423 # frequency
1424 if 'freq' in node:
1425 freq = node['freq']
1426
1427 if freq is None:
1428 clock.set_default_freq()
1429 else:
1430 if not _is_int_prop(freq):
1431 raise ConfigError('"freq" property of clock object must be an integer')
1432
1433 if freq < 1:
1434 raise ConfigError('invalid clock frequency: {}'.format(freq))
1435
1436 clock.freq = freq
1437
1438 # error cycles
1439 if 'error-cycles' in node:
1440 error_cycles = node['error-cycles']
1441
1442 if error_cycles is None:
1443 clock.set_default_error_cycles()
1444 else:
1445 if not _is_int_prop(error_cycles):
1446 raise ConfigError('"error-cycles" property of clock object must be an integer')
1447
1448 if error_cycles < 0:
1449 raise ConfigError('invalid clock error cycles: {}'.format(error_cycles))
1450
1451 clock.error_cycles = error_cycles
1452
1453 # offset
1454 if 'offset' in node:
1455 offset = node['offset']
1456
1457 if offset is None:
1458 clock.set_default_offset_seconds()
1459 clock.set_default_offset_cycles()
1460 else:
1461 if not _is_assoc_array_prop(offset):
1462 raise ConfigError('"offset" property of clock object must be an associative array')
1463
1464 unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
1465
1466 if unk_prop:
1467 raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop))
1468
1469 # cycles
1470 if 'cycles' in offset:
1471 offset_cycles = offset['cycles']
1472
1473 if offset_cycles is None:
1474 clock.set_default_offset_cycles()
1475 else:
1476 if not _is_int_prop(offset_cycles):
1477 raise ConfigError('"cycles" property of clock object\'s offset property must be an integer')
1478
1479 if offset_cycles < 0:
1480 raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles))
1481
1482 clock.offset_cycles = offset_cycles
1483
1484 # seconds
1485 if 'seconds' in offset:
1486 offset_seconds = offset['seconds']
1487
1488 if offset_seconds is None:
1489 clock.set_default_offset_seconds()
1490 else:
1491 if not _is_int_prop(offset_seconds):
1492 raise ConfigError('"seconds" property of clock object\'s offset property must be an integer')
1493
1494 if offset_seconds < 0:
1495 raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds))
1496
1497 clock.offset_seconds = offset_seconds
1498
1499 # absolute
1500 if 'absolute' in node:
1501 absolute = node['absolute']
1502
1503 if absolute is None:
1504 clock.set_default_absolute()
1505 else:
1506 if not _is_bool_prop(absolute):
1507 raise ConfigError('"absolute" property of clock object must be a boolean')
1508
1509 clock.absolute = absolute
1510
1511 # return C type:
1512 # v2.0: "return-ctype"
1513 # v2.1+: "$return-ctype"
1514 return_ctype_node = None
1515
1516 if self._version >= 200:
1517 if 'return-ctype' in node:
1518 return_ctype_prop = 'return-ctype'
1519 return_ctype_node = node[return_ctype_prop]
1520
1521 if self._version >= 201:
1522 if '$return-ctype' in node:
1523 if return_ctype_node is not None:
1524 raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"')
1525
1526 return_ctype_prop = '$return-ctype'
1527 return_ctype_node = node[return_ctype_prop]
1528
1529 if return_ctype_node is not None:
1533 if not _is_str_prop(return_ctype_node):
1534 raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop))
1535
1536 clock.return_ctype = return_ctype_node
1537
1538 return clock
1539
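# A "clocks" section exercising the properties parsed by _create_clock()
# above and registered by _register_clocks() below (hypothetical sketch;
# the "$return-ctype" spelling requires configuration version 2.1 or
# greater, version 2.0 uses "return-ctype"):
#
#     clocks:
#       default:
#         freq: 1000000000
#         description: Monotonic system clock
#         offset:
#           seconds: 1434072888
#           cycles: 0
#         absolute: false
#         error-cycles: 0
#         $return-ctype: uint64_t
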
1540 def _register_clocks(self, metadata_node):
1541 self._clocks = collections.OrderedDict()
1542
1543 if 'clocks' not in metadata_node:
1544 return
1545
1546 clocks_node = metadata_node['clocks']
1547
1548 if clocks_node is None:
1549 return
1550
1551 if not _is_assoc_array_prop(clocks_node):
1552 raise ConfigError('"clocks" property (metadata) must be an associative array')
1553
1554 for clock_name, clock_node in clocks_node.items():
1555 if not _is_valid_identifier(clock_name):
1556 raise ConfigError('invalid clock name: "{}"'.format(clock_name))
1557
1558 if clock_name in self._clocks:
1559 raise ConfigError('duplicate clock "{}"'.format(clock_name))
1560
1561 try:
1562 clock = self._create_clock(clock_node)
1563 except Exception as e:
1564 raise ConfigError('cannot create clock "{}"'.format(clock_name), e)
1565
1566 clock.name = clock_name
1567 self._clocks[clock_name] = clock
1568
1569 def _create_env(self, metadata_node):
1570 env = collections.OrderedDict()
1571
1572 if 'env' not in metadata_node:
1573 return env
1574
1575 env_node = metadata_node['env']
1576
1577 if env_node is None:
1578 return env
1579
1580 if not _is_assoc_array_prop(env_node):
1581 raise ConfigError('"env" property (metadata) must be an associative array')
1582
1583 for env_name, env_value in env_node.items():
1584 if env_name in env:
1585 raise ConfigError('duplicate environment variable "{}"'.format(env_name))
1586
1587 if not _is_valid_identifier(env_name):
1588 raise ConfigError('invalid environment variable name: "{}"'.format(env_name))
1589
1590 if not _is_int_prop(env_value) and not _is_str_prop(env_value):
1591 raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
1592
1593 env[env_name] = env_value
1594
1595 return env
1596
1597 def _register_log_levels(self, metadata_node):
1598 self._log_levels = dict()
1599
1600 # log levels:
1601 # v2.0: "log-levels"
1602 # v2.1+: "$log-levels"
1603 log_levels_node = None
1604
1605 if self._version >= 200:
1606 if 'log-levels' in metadata_node:
1607 log_levels_prop = 'log-levels'
1608 log_levels_node = metadata_node[log_levels_prop]
1609
1610 if self._version >= 201:
1611 if '$log-levels' in metadata_node:
1612 if log_levels_node is not None:
1613 raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
1614
1615 log_levels_prop = '$log-levels'
1616 log_levels_node = metadata_node[log_levels_prop]
1617
1618 if log_levels_node is None:
1619 return
1620
1621 if not _is_assoc_array_prop(log_levels_node):
1622 raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop))
1623
1624 for ll_name, ll_value in log_levels_node.items():
1625 if ll_name in self._log_levels:
1626 raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
1627
1628 if not _is_int_prop(ll_value):
1629 raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
1630
1631 if ll_value < 0:
1632 raise ConfigError('invalid log level entry ("{}"): log level value must be zero or positive'.format(ll_name))
1633
1634 self._log_levels[ll_name] = ll_value
1635
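# A log level registry and its later use in an event object (hypothetical
# sketch; the "$log-levels" spelling requires configuration version 2.1 or
# greater, version 2.0 uses "log-levels"):
#
#     $log-levels:
#       WARNING: 4
#       INFO: 6
#
#     # ...later, inside an event object:
#     log-level: WARNING    # or directly an integer: log-level: 6
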
1636 def _create_trace(self, metadata_node):
1637 # create trace object
1638 trace = metadata.Trace()
1639
1640 if 'trace' not in metadata_node:
1641 raise ConfigError('missing "trace" property (metadata)')
1642
1643 trace_node = metadata_node['trace']
1644
1645 if not _is_assoc_array_prop(trace_node):
1646 raise ConfigError('"trace" property (metadata) must be an associative array')
1647
1648 unk_prop = _get_first_unknown_prop(trace_node, [
1649 'byte-order',
1650 'uuid',
1651 'packet-header-type',
1652 ])
1653
1654 if unk_prop:
1655 raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
1656
1657 # set byte order (already parsed)
1658 trace.byte_order = self._bo
1659
1660 # UUID
1661 if 'uuid' in trace_node and trace_node['uuid'] is not None:
1662 uuidp = trace_node['uuid']
1663
1664 if not _is_str_prop(uuidp):
1665 raise ConfigError('"uuid" property of trace object must be a string')
1666
1667 if uuidp == 'auto':
1668 uuidp = uuid.uuid1()
1669 else:
1670 try:
1671 uuidp = uuid.UUID(uuidp)
1672 except:
1673 raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
1674
1675 trace.uuid = uuidp
1676
1677 # packet header type
1678 if 'packet-header-type' in trace_node and trace_node['packet-header-type'] is not None:
1679 try:
1680 ph_type = self._create_type(trace_node['packet-header-type'])
1681 except Exception as e:
1682 raise ConfigError('cannot create packet header type (trace)', e)
1683
1684 trace.packet_header_type = ph_type
1685
1686 return trace
1687
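# A minimal trace object accepted by _create_trace() above (hypothetical
# sketch; "byte-order" is the only mandatory property, "uuid: auto"
# generates a new UUID, and "packet_header_t" is an assumed type alias):
#
#     trace:
#       byte-order: le
#       uuid: auto
#       packet-header-type: packet_header_t
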
1688 def _lookup_log_level(self, ll):
1689 if _is_int_prop(ll):
1690 return ll
1691 elif _is_str_prop(ll) and ll in self._log_levels:
1692 return self._log_levels[ll]
1693
1694 def _create_event(self, event_node):
1695 event = metadata.Event()
1696
1697 if not _is_assoc_array_prop(event_node):
1698 raise ConfigError('event objects must be associative arrays')
1699
1700 unk_prop = _get_first_unknown_prop(event_node, [
1701 'log-level',
1702 'context-type',
1703 'payload-type',
1704 ])
1705
1706 if unk_prop:
1707 raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
1708
1709 if 'log-level' in event_node and event_node['log-level'] is not None:
1710 ll_node = event_node['log-level']
1711
1712 if _is_str_prop(ll_node):
1713 ll_value = self._lookup_log_level(event_node['log-level'])
1714
1715 if ll_value is None:
1716 raise ConfigError('cannot find log level "{}"'.format(ll_node))
1717
1718 ll = metadata.LogLevel(event_node['log-level'], ll_value)
1719 elif _is_int_prop(ll_node):
1720 if ll_node < 0:
1721 raise ConfigError('invalid log level value {}: value must be zero or positive'.format(ll_node))
1722
1723 ll = metadata.LogLevel(None, ll_node)
1724 else:
1725 raise ConfigError('"log-level" property must be either a string or an integer')
1726
1727 event.log_level = ll
1728
1729 if 'context-type' in event_node and event_node['context-type'] is not None:
1730 ctx_type_node = event_node['context-type']
1731
1732 try:
1733 t = self._create_type(event_node['context-type'])
1734 except Exception as e:
1735 raise ConfigError('cannot create event\'s context type object', e)
1736
1737 event.context_type = t
1738
1739 if 'payload-type' in event_node and event_node['payload-type'] is not None:
1740 try:
1741 t = self._create_type(event_node['payload-type'])
1742 except Exception as e:
1743 raise ConfigError('cannot create event\'s payload type object', e)
1744
1745 event.payload_type = t
1746
1747 return event
1748
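# An event object accepted by _create_event() above (hypothetical sketch;
# "string_t" is an assumed type alias; all three properties are optional
# here, but the _BarectfMetadataValidator above rejects events it considers
# empty):
#
#     my_event:
#       log-level: INFO
#       payload-type:
#         class: struct
#         fields:
#           msg: string_t
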
1749 def _create_stream(self, stream_name, stream_node):
1750 stream = metadata.Stream()
1751
1752 if not _is_assoc_array_prop(stream_node):
1753 raise ConfigError('stream objects must be associative arrays')
1754
1755 known_props = [
1756 'packet-context-type',
1757 'event-header-type',
1758 'event-context-type',
1759 'events',
1760 ]
1761
1762 if self._version >= 202:
1763 known_props.append('$default')
1764
1765 unk_prop = _get_first_unknown_prop(stream_node, known_props)
1766
1767 if unk_prop:
1768 add = ''
1769
1770 if unk_prop == '$default':
1771 add = ' (use version 2.2 or greater)'
1772
1773 raise ConfigError('unknown stream object property{}: "{}"'.format(add, unk_prop))
1774
1775 if 'packet-context-type' in stream_node and stream_node['packet-context-type'] is not None:
1776 try:
1777 t = self._create_type(stream_node['packet-context-type'])
1778 except Exception as e:
1779 raise ConfigError('cannot create stream\'s packet context type object', e)
1780
1781 stream.packet_context_type = t
1782
1783 if 'event-header-type' in stream_node and stream_node['event-header-type'] is not None:
1784 try:
1785 t = self._create_type(stream_node['event-header-type'])
1786 except Exception as e:
1787 raise ConfigError('cannot create stream\'s event header type object', e)
1788
1789 stream.event_header_type = t
1790
1791 if 'event-context-type' in stream_node and stream_node['event-context-type'] is not None:
1792 try:
1793 t = self._create_type(stream_node['event-context-type'])
1794 except Exception as e:
1795 raise ConfigError('cannot create stream\'s event context type object', e)
1796
1797 stream.event_context_type = t
1798
1799 if 'events' not in stream_node:
1800 raise ConfigError('missing "events" property in stream object')
1801
1802 events = stream_node['events']
1803
1804 if events is not None:
1805 if not _is_assoc_array_prop(events):
1806 raise ConfigError('"events" property of stream object must be an associative array')
1807
1808 if not events:
1809 raise ConfigError('at least one event is needed within a stream object')
1810
1811 cur_id = 0
1812
1813 for ev_name, ev_node in events.items():
1814 try:
1815 ev = self._create_event(ev_node)
1816 except Exception as e:
1817 raise ConfigError('cannot create event "{}"'.format(ev_name), e)
1818
1819 ev.id = cur_id
1820 ev.name = ev_name
1821 stream.events[ev_name] = ev
1822 cur_id += 1
1823
1824 if '$default' in stream_node and stream_node['$default'] is not None:
1825 default_node = stream_node['$default']
1826
1827 if not _is_bool_prop(default_node):
1828 raise ConfigError('invalid "$default" property in stream object: expecting a boolean')
1829
1830 if default_node:
1831 if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
1832 fmt = 'cannot specify more than one default stream (default stream already set to "{}")'
1833 raise ConfigError(fmt.format(self._meta.default_stream_name))
1834
1835 self._meta.default_stream_name = stream_name
1836
1837 return stream
1838
1839 def _create_streams(self, metadata_node):
1840 streams = collections.OrderedDict()
1841
1842 if 'streams' not in metadata_node:
1843 raise ConfigError('missing "streams" property (metadata)')
1844
1845 streams_node = metadata_node['streams']
1846
1847 if not _is_assoc_array_prop(streams_node):
1848 raise ConfigError('"streams" property (metadata) must be an associative array')
1849
1850 if not streams_node:
1851 raise ConfigError('at least one stream is needed (metadata)')
1852
1853 cur_id = 0
1854
1855 for stream_name, stream_node in streams_node.items():
1856 try:
1857 stream = self._create_stream(stream_name, stream_node)
1858 except Exception as e:
1859 raise ConfigError('cannot create stream "{}"'.format(stream_name), e)
1860
1861 stream.id = cur_id
1862 stream.name = str(stream_name)
1863 streams[stream_name] = stream
1864 cur_id += 1
1865
1866 return streams
1867
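# Illustrative skeleton (not in the original source) of the 'metadata'
# object consumed by _create_metadata() below. Of the properties shown,
# only 'streams' is checked as mandatory in this section; '$log-levels'
# and '$default-stream' additionally require versions 2.1 and 2.2.
#
#     metadata:
#       type-aliases: ...
#       log-levels: ...
#       trace: ...
#       env: ...
#       clocks: ...
#       streams:
#         default: <stream object>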
1868 def _create_metadata(self, root):
1869 self._meta = metadata.Metadata()
1870
1871 if 'metadata' not in root:
1872 raise ConfigError('missing "metadata" property (configuration)')
1873
1874 metadata_node = root['metadata']
1875
1876 if not _is_assoc_array_prop(metadata_node):
1877 raise ConfigError('"metadata" property (configuration) must be an associative array')
1878
1879 known_props = [
1880 'type-aliases',
1881 'log-levels',
1882 'trace',
1883 'env',
1884 'clocks',
1885 'streams',
1886 ]
1887
1888 if self._version >= 201:
1889 known_props.append('$log-levels')
1890
1891 if self._version >= 202:
1892 known_props.append('$default-stream')
1893
1894 unk_prop = _get_first_unknown_prop(metadata_node, known_props)
1895
1896 if unk_prop:
1897 add = ''
1898
1899 if unk_prop == '$include':
1900 add = ' (use version 2.1 or greater)'
1901
1902 if unk_prop == '$default-stream':
1903 add = ' (use version 2.2 or greater)'
1904
1905 raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop))
1906
1907 if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
1908 default_stream_node = metadata_node['$default-stream']
1909
1910 if not _is_str_prop(default_stream_node):
1911 raise ConfigError('invalid "$default-stream" property (metadata): expecting a string')
1912
1913 self._meta.default_stream_name = default_stream_node
1914
1915 self._set_byte_order(metadata_node)
1916 self._register_clocks(metadata_node)
1917 self._meta.clocks = self._clocks
1918 self._register_type_aliases(metadata_node)
1919 self._meta.env = self._create_env(metadata_node)
1920 self._meta.trace = self._create_trace(metadata_node)
1921 self._register_log_levels(metadata_node)
1922 self._meta.streams = self._create_streams(metadata_node)
1923
1924 return self._meta
1925
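# Worked example (not in the original source): _get_version() below turns
# the accepted version strings into comparable integers using
# major * 100 + minor, so '2.0' -> 200, '2.1' -> 201 and '2.2' -> 202.
# These are the integers that the self._version >= 201 / >= 202 checks
# throughout this parser compare against.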
1926 def _get_version(self, root):
1927 if 'version' not in root:
1928 raise ConfigError('missing "version" property (configuration)')
1929
1930 version_node = root['version']
1931
1932 if not _is_str_prop(version_node):
1933 raise ConfigError('"version" property (configuration) must be a string')
1934
1935 version_node = version_node.strip()
1936
1937 if version_node not in ['2.0', '2.1', '2.2']:
1938 raise ConfigError('unsupported version ({}): versions 2.0, 2.1, and 2.2 are supported'.format(version_node))
1939
1940 # convert version string to comparable version integer
1941 parts = version_node.split('.')
1942 version = int(parts[0]) * 100 + int(parts[1])
1943
1944 return version
1945
1946 def _get_prefix(self, root):
1947 def_prefix = 'barectf_'
1948
1949 if 'prefix' not in root:
1950 return def_prefix
1951
1952 prefix_node = root['prefix']
1953
1954 if prefix_node is None:
1955 return def_prefix
1956
1957 if not _is_str_prop(prefix_node):
1958 raise ConfigError('"prefix" property (configuration) must be a string')
1959
1960 if not _is_valid_identifier(prefix_node):
1961 raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
1962
1963 return prefix_node
1964
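# Illustrative example (not in the original source): an 'options' object
# accepted by _get_options() below. It is only available with configuration
# version 2.2 or greater, and both properties must be booleans.
#
#     options:
#       gen-prefix-def: true
#       gen-default-stream-def: true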
1965 def _get_options(self, root):
1966 cfg_options = ConfigOptions()
1967
1968 if 'options' not in root:
1969 return cfg_options
1970
1971 options_node = root['options']
1972
1973 if not _is_assoc_array_prop(options_node):
1974 raise ConfigError('"options" property (configuration) must be an associative array')
1975
1976 known_props = [
1977 'gen-prefix-def',
1978 'gen-default-stream-def',
1979 ]
1980 unk_prop = _get_first_unknown_prop(options_node, known_props)
1981
1982 if unk_prop:
1983 raise ConfigError('unknown configuration option property: "{}"'.format(unk_prop))
1984
1985 if 'gen-prefix-def' in options_node and options_node['gen-prefix-def'] is not None:
1986 gen_prefix_def_node = options_node['gen-prefix-def']
1987
1988 if not _is_bool_prop(gen_prefix_def_node):
1989 raise ConfigError('invalid configuration option "gen-prefix-def": expecting a boolean')
1990
1991 cfg_options.gen_prefix_def = gen_prefix_def_node
1992
1993 if 'gen-default-stream-def' in options_node and options_node['gen-default-stream-def'] is not None:
1994 gen_default_stream_def_node = options_node['gen-default-stream-def']
1995
1996 if not _is_bool_prop(gen_default_stream_def_node):
1997 raise ConfigError('invalid configuration option "gen-default-stream-def": expecting a boolean')
1998
1999 cfg_options.gen_default_stream_def = gen_default_stream_def_node
2000
2001 return cfg_options
2002
2003 def _get_last_include_file(self):
2004 if self._include_stack:
2005 return self._include_stack[-1]
2006
2007 return self._root_yaml_path
2008
2009 def _load_include(self, yaml_path):
2010 for inc_dir in self._include_dirs:
2011 # current include dir + file name path
2012 # note: os.path.join() ignores the preceding args if the last one is an absolute path
2013 inc_path = os.path.join(inc_dir, yaml_path)
2014
2015 # real path (symbolic links resolved)
2016 real_path = os.path.realpath(inc_path)
2017
2018 # normalized path (weird stuff removed!)
2019 norm_path = os.path.normpath(real_path)
2020
2021 if not os.path.isfile(norm_path):
2022 # file does not exist: skip
2023 continue
2024
2025 if norm_path in self._include_stack:
2026 base_path = self._get_last_include_file()
2027 raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
2028
2029 self._include_stack.append(norm_path)
2030
2031 # load raw content
2032 return self._yaml_ordered_load(norm_path)
2033
2034 if not self._ignore_include_not_found:
2035 base_path = self._get_last_include_file()
2036 raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
2037
2038 return None
2039
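# Illustrative sketch (not in the original source): the two accepted forms
# of the '$include' property normalized by _get_include_paths() below. The
# file names are hypothetical.
#
#     $include: base.yaml                     # -> ['base.yaml']
#     $include: [base.yaml, overrides.yaml]   # -> ['base.yaml', 'overrides.yaml']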
2040 def _get_include_paths(self, include_node):
2041 if include_node is None:
2042 return []
2043
2044 if _is_str_prop(include_node):
2045 return [include_node]
2046
2047 if _is_array_prop(include_node):
2048 for include_path in include_node:
2049 if not _is_str_prop(include_path):
2050 raise ConfigError('invalid include property: expecting array of strings')
2051
2052 return include_node
2053
2054 raise ConfigError('invalid include property: expecting string or array of strings')
2055
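# Worked example (not in the original source) of the merge performed by
# _update_node() below, assuming plain mappings and sequences pass the
# _is_assoc_array_prop()/_is_array_prop() checks: associative arrays are
# merged recursively, arrays are appended, and any other value is replaced
# by the overlay's value.
#
#     base    = {'a': 1, 'b': [1, 2], 'c': {'x': 1}}
#     overlay = {'a': 9, 'b': [3], 'c': {'y': 2}, 'd': 4}
#
#     _update_node(base, overlay) leaves base as:
#     {'a': 9, 'b': [1, 2, 3], 'c': {'x': 1, 'y': 2}, 'd': 4}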
2056 def _update_node(self, base_node, overlay_node):
2057 for olay_key, olay_value in overlay_node.items():
2058 if olay_key in base_node:
2059 base_value = base_node[olay_key]
2060
2061 if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
2062 # merge dictionaries
2063 self._update_node(base_value, olay_value)
2064 elif _is_array_prop(olay_value) and _is_array_prop(base_value):
2065 # append extension array items to base items
2066 base_value += olay_value
2067 else:
2068 # fall back to replacing
2069 base_node[olay_key] = olay_value
2070 else:
2071 base_node[olay_key] = olay_value
2072
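# Illustrative example (not in the original source): given a stream object
# such as the one below, _process_node_include() first loads and resolves
# 'base-stream.yaml' (a hypothetical file found in the include directories),
# then merges this node on top of it with _update_node(), so local
# properties win over included ones. The '$include' property itself is
# removed from the result.
#
#     my_stream:
#       $include: base-stream.yaml
#       events:
#         my_event: <event object>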
2073 def _process_node_include(self, last_overlay_node, name,
2074 process_base_include_cb,
2075 process_children_include_cb=None):
2076 if not _is_assoc_array_prop(last_overlay_node):
2077 raise ConfigError('{} objects must be associative arrays'.format(name))
2078
2079 # process children inclusions first
2080 if process_children_include_cb:
2081 process_children_include_cb(last_overlay_node)
2082
2083 if '$include' in last_overlay_node:
2084 include_node = last_overlay_node['$include']
2085 else:
2086 # no includes!
2087 return last_overlay_node
2088
2089 include_paths = self._get_include_paths(include_node)
2090 cur_base_path = self._get_last_include_file()
2091 base_node = None
2092
2093 # keep the include paths and remove the include property
2094 include_paths = copy.deepcopy(include_paths)
2095 del last_overlay_node['$include']
2096
2097 for include_path in include_paths:
2098 # load raw YAML from included file
2099 overlay_node = self._load_include(include_path)
2100
2101 if overlay_node is None:
2102 # cannot find include file, but we're ignoring those
2103 # errors, otherwise _load_include() itself raises
2104 # a config error
2105 continue
2106
2107 # recursively process includes
2108 try:
2109 overlay_node = process_base_include_cb(overlay_node)
2110 except Exception as e:
2111 raise ConfigError('in "{}"'.format(cur_base_path), e)
2112
2113 # pop include stack now that we're done including
2114 del self._include_stack[-1]
2115
2116 # at this point, overlay_node is fully resolved (it does not
2117 # contain any include property)
2118 if base_node is None:
2119 base_node = overlay_node
2120 else:
2121 self._update_node(base_node, overlay_node)
2122
2123 # finally, we update the latest base node with our last overlay
2124 # node
2125 if base_node is None:
2126 # nothing was included, which is possible when we're
2127 # ignoring include errors
2128 return last_overlay_node
2129
2130 self._update_node(base_node, last_overlay_node)
2131
2132 return base_node
2133
2134 def _process_event_include(self, event_node):
2135 return self._process_node_include(event_node, 'event',
2136 self._process_event_include)
2137
2138 def _process_stream_include(self, stream_node):
2139 def process_children_include(stream_node):
2140 if 'events' in stream_node:
2141 events_node = stream_node['events']
2142
2143 if not _is_assoc_array_prop(events_node):
2144 raise ConfigError('"events" property must be an associative array')
2145
2146 events_node_keys = list(events_node.keys())
2147
2148 for key in events_node_keys:
2149 event_node = events_node[key]
2150
2151 try:
2152 events_node[key] = self._process_event_include(event_node)
2153 except Exception as e:
2154 raise ConfigError('cannot process includes of event object "{}"'.format(key), e)
2155
2156 return self._process_node_include(stream_node, 'stream',
2157 self._process_stream_include,
2158 process_children_include)
2159
2160 def _process_trace_include(self, trace_node):
2161 return self._process_node_include(trace_node, 'trace',
2162 self._process_trace_include)
2163
2164 def _process_clock_include(self, clock_node):
2165 return self._process_node_include(clock_node, 'clock',
2166 self._process_clock_include)
2167
2168 def _process_metadata_include(self, metadata_node):
2169 def process_children_include(metadata_node):
2170 if 'trace' in metadata_node:
2171 metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
2172
2173 if 'clocks' in metadata_node:
2174 clocks_node = metadata_node['clocks']
2175
2176 if not _is_assoc_array_prop(clocks_node):
2177 raise ConfigError('"clocks" property (metadata) must be an associative array')
2178
2179 clocks_node_keys = list(clocks_node.keys())
2180
2181 for key in clocks_node_keys:
2182 clock_node = clocks_node[key]
2183
2184 try:
2185 clocks_node[key] = self._process_clock_include(clock_node)
2186 except Exception as e:
2187 raise ConfigError('cannot process includes of clock object "{}"'.format(key), e)
2188
2189 if 'streams' in metadata_node:
2190 streams_node = metadata_node['streams']
2191
2192 if not _is_assoc_array_prop(streams_node):
2193 raise ConfigError('"streams" property (metadata) must be an associative array')
2194
2195 streams_node_keys = list(streams_node.keys())
2196
2197 for key in streams_node_keys:
2198 stream_node = streams_node[key]
2199
2200 try:
2201 streams_node[key] = self._process_stream_include(stream_node)
2202 except Exception as e:
2203 raise ConfigError('cannot process includes of stream object "{}"'.format(key), e)
2204
2205 return self._process_node_include(metadata_node, 'metadata',
2206 self._process_metadata_include,
2207 process_children_include)
2208
2209 def _process_root_includes(self, root):
2210 # The following config objects support includes:
2211 #
2212 # * Metadata object
2213 # * Trace object
2214 # * Stream object
2215 # * Event object
2216 #
2217 # We need to process the event includes first, then the stream
2218 # includes, then the trace includes, and finally the metadata
2219 # includes.
2220 #
2221 # In each object, only one of the $include and $include-replace
2222 # special properties is allowed.
2223 #
2224 # We keep a stack of absolute paths to included files to detect
2225 # recursion.
2226 if 'metadata' in root:
2227 root['metadata'] = self._process_metadata_include(root['metadata'])
2228
2229 return root
2230
2231 def _yaml_ordered_dump(self, node, **kwds):
2232 class ODumper(yaml.Dumper):
2233 pass
2234
2235 def dict_representer(dumper, node):
2236 return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2237 node.items())
2238
2239 ODumper.add_representer(collections.OrderedDict, dict_representer)
2240
2241 return yaml.dump(node, Dumper=ODumper, **kwds)
2242
2243 def _yaml_ordered_load(self, yaml_path):
2244 class OLoader(yaml.Loader):
2245 pass
2246
2247 def construct_mapping(loader, node):
2248 loader.flatten_mapping(node)
2249
2250 return collections.OrderedDict(loader.construct_pairs(node))
2251
2252 OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
2253 construct_mapping)
2254
2255 # YAML -> Python
2256 try:
2257 with open(yaml_path, 'r') as f:
2258 node = yaml.load(f, OLoader)
2259 except (OSError, IOError) as e:
2260 raise ConfigError('cannot open file "{}"'.format(yaml_path), e)
2261 except Exception as e:
2262 raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e)
2263
2264 # the loaded node must be an associative array
2265 if not _is_assoc_array_prop(node):
2266 raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path))
2267
2268 return node
2269
2270 def _reset(self):
2271 self._version = None
2272 self._include_stack = []
2273
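# Illustrative example (not in the original source): a root configuration
# document accepted by parse() below. 'version' and 'metadata' are
# mandatory; 'prefix' defaults to 'barectf_' and 'options' requires
# version 2.2 or greater. The metadata contents are elided here; see the
# sketches above the _create_*() methods.
#
#     version: '2.2'
#     prefix: my_tracer_          # optional; must be a valid C identifier
#     options:
#       gen-default-stream-def: true
#     metadata:
#       ...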
2274 def parse(self, yaml_path):
2275 self._reset()
2276 self._root_yaml_path = yaml_path
2277
2278 try:
2279 root = self._yaml_ordered_load(yaml_path)
2280 except Exception as e:
2281 raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e)
2282
2283 if not _is_assoc_array_prop(root):
2284 raise ConfigError('configuration must be an associative array')
2285
2286 # get the config version
2287 self._version = self._get_version(root)
2288
2289 known_props = [
2290 'version',
2291 'prefix',
2292 'metadata',
2293 ]
2294
2295 if self._version >= 202:
2296 known_props.append('options')
2297
2298 unk_prop = _get_first_unknown_prop(root, known_props)
2299
2300 if unk_prop:
2301 add = ''
2302
2303 if unk_prop == 'options':
2304 add = ' (use version 2.2 or greater)'
2305
2306 raise ConfigError('unknown configuration property{}: "{}"'.format(add, unk_prop))
2307
2308 # process includes if supported
2309 if self._version >= 201:
2310 root = self._process_root_includes(root)
2311
2312 # dump config if required
2313 if self._dump_config:
2314 print(self._yaml_ordered_dump(root, indent=2,
2315 default_flow_style=False))
2316
2317 # get prefix and metadata
2318 prefix = self._get_prefix(root)
2319 meta = self._create_metadata(root)
2320 opts = self._get_options(root)
2321
2322 return Config(self._version, prefix, meta, opts)
2323
2324
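# Minimal usage sketch (not in the original source); the paths are
# hypothetical and error handling is omitted:
#
#     from barectf import config
#
#     cfg = config.from_yaml_file('config.yaml', include_dirs=['include'],
#                                 ignore_include_not_found=False,
#                                 dump_config=False)
#     print(cfg.version, cfg.prefix)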
2325 def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
2326 try:
2327 parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
2328 dump_config)
2329 cfg = parser.parse(path)
2330
2331 return cfg
2332 except Exception as e:
2333 raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e)