config.py: remove bit array field type's byte order property
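Since a bit array field type's byte order is no longer a per-field-type
property, _BitArrayFieldType only carries a size and an alignment; the
ByteOrder enumeration stays, presumably so that the byte order can be
set once for the whole trace type (not visible in this excerpt).

Here's a minimal sketch of how the field type classes introduced by
this patch are meant to be used. The `barectf.config` import path
simply mirrors this file's location; whether those names are also
re-exported from the top-level `barectf` package is an assumption:

    import barectf.config as barectf_config

    # 32-bit unsigned integer field type: no byte order parameter;
    # the alignment defaults to 8 because the size is a multiple of
    # 8 bits.
    count_ft = barectf_config.UnsignedIntegerFieldType(32)

    # Structure field type: its effective alignment is the maximum of
    # its minimum alignment and its members' alignments.
    payload_ft = barectf_config.StructureFieldType(members={
        'count': barectf_config.StructureFieldTypeMember(count_ft),
        'msg': barectf_config.StructureFieldTypeMember(
            barectf_config.StringFieldType()),
    })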
diff --git a/barectf/config.py b/barectf/config.py
index beec4c986773bcc0f34c0d8f3a2ee3a217340043..fbef355747568c3b3dd1d97a82575b7180e70a49 100644
--- a/barectf/config.py
+++ b/barectf/config.py
 # The MIT License (MIT)
 #
-# Copyright (c) 2015-2016 Philippe Proulx <pproulx@efficios.com>
+# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
 #
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
 #
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
 #
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-
-from barectf import metadata
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import barectf.version as barectf_version
+from typing import Optional, Any, FrozenSet, Mapping, Iterator, Set, Union, Callable
+import typing
+from barectf.typing import Count, Alignment, _OptStr, Id
+import collections.abc
 import collections
 import datetime
-import barectf
 import enum
-import yaml
-import uuid
-import copy
-import re
-import os
+import uuid as uuidp
 
 
-class _ConfigErrorCtx:
-    def __init__(self, name, msg=None):
-        self._name = name
-        self._msg = msg
+@enum.unique
+class ByteOrder(enum.Enum):
+    LITTLE_ENDIAN = 'le'
+    BIG_ENDIAN = 'be'
 
+
+class _FieldType:
     @property
-    def name(self):
-        return self._name
+    def alignment(self) -> Alignment:
+        raise NotImplementedError
 
     @property
-    def msg(self):
-        return self._msg
+    def size_is_dynamic(self):
+        return False
 
 
-class ConfigError(RuntimeError):
-    def __init__(self, init_ctx_name, init_ctx_msg=None):
-        self._ctx = []
-        self.append_ctx(init_ctx_name, init_ctx_msg)
+class _BitArrayFieldType(_FieldType):
+    def __init__(self, size: Count, alignment: Alignment = Alignment(1)):
+        self._size = size
+        self._alignment = alignment
 
     @property
-    def ctx(self):
-        return self._ctx
-
-    def append_ctx(self, name, msg=None):
-        self._ctx.append(_ConfigErrorCtx(name, msg))
+    def size(self) -> Count:
+        return self._size
 
+    @property
+    def alignment(self) -> Alignment:
+        return self._alignment
 
-class Config:
-    def __init__(self, version, prefix, metadata, options):
-        self.prefix = prefix
-        self.version = version
-        self.metadata = metadata
-        self.options = options
 
-    def _validate_metadata(self, meta):
-        try:
-            validator = _MetadataTypesHistologyValidator()
-            validator.validate(meta)
-            validator = _MetadataSpecialFieldsValidator()
-            validator.validate(meta)
-        except ConfigError as exc:
-            exc.append_ctx('Metadata')
-            raise
+class DisplayBase(enum.Enum):
+    BINARY = 2
+    OCTAL = 8
+    DECIMAL = 10
+    HEXADECIMAL = 16
 
-        try:
-            validator = _BarectfMetadataValidator()
-            validator.validate(meta)
-        except ConfigError as exc:
-            exc.append_ctx('barectf metadata')
-            raise
 
-    def _augment_metadata_env(self, meta):
-        version_tuple = barectf.get_version_tuple()
-        base_env = {
-            'domain': 'bare',
-            'tracer_name': 'barectf',
-            'tracer_major': version_tuple[0],
-            'tracer_minor': version_tuple[1],
-            'tracer_patch': version_tuple[2],
-            'barectf_gen_date': str(datetime.datetime.now().isoformat()),
-        }
+class _IntegerFieldType(_BitArrayFieldType):
+    def __init__(self, size: Count, alignment: Optional[Alignment] = None,
+                 preferred_display_base: DisplayBase = DisplayBase.DECIMAL):
+        if alignment is None:
+            alignment = Alignment(8 if size % 8 == 0 else 1)
 
-        base_env.update(meta.env)
-        meta.env = base_env
+        super().__init__(size, alignment)
+        self._preferred_display_base = preferred_display_base
 
     @property
-    def version(self):
-        return self._version
+    def preferred_display_base(self) -> DisplayBase:
+        return self._preferred_display_base
 
-    @version.setter
-    def version(self, value):
-        self._version = value
 
-    @property
-    def metadata(self):
-        return self._metadata
+class UnsignedIntegerFieldType(_IntegerFieldType):
+    def __init__(self, *args):
+        super().__init__(*args)
+        self._mapped_clk_type_name = None
 
-    @metadata.setter
-    def metadata(self, value):
-        self._validate_metadata(value)
-        self._augment_metadata_env(value)
-        self._metadata = value
 
-    @property
-    def prefix(self):
-        return self._prefix
+class SignedIntegerFieldType(_IntegerFieldType):
+    pass
 
-    @prefix.setter
-    def prefix(self, value):
-        if not _is_valid_identifier(value):
-            raise ConfigError('Configuration',
-                              'Prefix must be a valid C identifier')
 
-        self._prefix = value
+class EnumerationFieldTypeMappingRange:
+    def __init__(self, lower: int, upper: int):
+        self._lower = lower
+        self._upper = upper
 
     @property
-    def options(self):
-        return self._options
-
-    @options.setter
-    def options(self, options):
-        self._options = options
-
-
-class ConfigOptions:
-    def __init__(self):
-        self._gen_prefix_def = False
-        self._gen_default_stream_def = False
+    def lower(self) -> int:
+        return self._lower
 
     @property
-    def gen_prefix_def(self):
-        return self._gen_prefix_def
-
-    @gen_prefix_def.setter
-    def gen_prefix_def(self, value):
-        self._gen_prefix_def = value
+    def upper(self) -> int:
+        return self._upper
 
-    @property
-    def gen_default_stream_def(self):
-        return self._gen_default_stream_def
+    def __eq__(self, other: Any) -> bool:
+        if type(other) is not type(self):
+            return False
 
-    @gen_default_stream_def.setter
-    def gen_default_stream_def(self, value):
-        self._gen_default_stream_def = value
+        return (self._lower, self._upper) == (other._lower, other._upper)
 
+    def __hash__(self) -> int:
+        return hash((self._lower, self._upper))
 
-def _is_assoc_array_prop(node):
-    return isinstance(node, dict)
+    def contains(self, value: int) -> bool:
+        return self._lower <= value <= self._upper
 
 
-def _is_array_prop(node):
-    return isinstance(node, list)
+class EnumerationFieldTypeMapping:
+    def __init__(self, ranges: Set[EnumerationFieldTypeMappingRange]):
+        self._ranges = frozenset(ranges)
 
+    @property
+    def ranges(self) -> FrozenSet[EnumerationFieldTypeMappingRange]:
+        return self._ranges
 
-def _is_int_prop(node):
-    return type(node) is int
+    def ranges_contain_value(self, value: int) -> bool:
+        return any([rg.contains(value) for rg in self._ranges])
 
 
-def _is_str_prop(node):
-    return type(node) is str
+_EnumFtMappings = Mapping[str, EnumerationFieldTypeMapping]
 
 
-def _is_bool_prop(node):
-    return type(node) is bool
+class EnumerationFieldTypeMappings(collections.abc.Mapping):
+    def __init__(self, mappings: _EnumFtMappings):
+        self._mappings = {label: mapping for label, mapping in mappings.items()}
 
+    def __getitem__(self, key: str) -> EnumerationFieldTypeMapping:
+        return self._mappings[key]
 
-def _is_valid_alignment(align):
-    return ((align & (align - 1)) == 0) and align > 0
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._mappings)
 
+    def __len__(self) -> int:
+        return len(self._mappings)
 
-def _byte_order_str_to_bo(bo_str):
-    bo_str = bo_str.lower()
 
-    if bo_str == 'le':
-        return metadata.ByteOrder.LE
-    elif bo_str == 'be':
-        return metadata.ByteOrder.BE
+class _EnumerationFieldType(_IntegerFieldType):
+    def __init__(self, size: Count, alignment: Optional[Alignment] = None,
+                 preferred_display_base: DisplayBase = DisplayBase.DECIMAL,
+                 mappings: Optional[_EnumFtMappings] = None):
+        super().__init__(size, alignment, preferred_display_base)
+        self._mappings = EnumerationFieldTypeMappings({})
 
+        if mappings is not None:
+            self._mappings = EnumerationFieldTypeMappings(mappings)
 
-def _encoding_str_to_encoding(encoding_str):
-    encoding_str = encoding_str.lower()
+    @property
+    def mappings(self) -> EnumerationFieldTypeMappings:
+        return self._mappings
 
-    if encoding_str == 'utf-8' or encoding_str == 'utf8':
-        return metadata.Encoding.UTF8
-    elif encoding_str == 'ascii':
-        return metadata.Encoding.ASCII
-    elif encoding_str == 'none':
-        return metadata.Encoding.NONE
+    def labels_for_value(self, value: int) -> Set[str]:
+        labels = set()
 
+        for label, mapping in self._mappings.items():
+            if mapping.ranges_contain_value(value):
+                labels.add(label)
 
-_re_iden = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$')
-_ctf_keywords = set([
-    'align',
-    'callsite',
-    'clock',
-    'enum',
-    'env',
-    'event',
-    'floating_point',
-    'integer',
-    'stream',
-    'string',
-    'struct',
-    'trace',
-    'typealias',
-    'typedef',
-    'variant',
-])
+        return labels
 
 
-def _is_valid_identifier(iden):
-    if not _re_iden.match(iden):
-        return False
+class UnsignedEnumerationFieldType(_EnumerationFieldType, UnsignedIntegerFieldType):
+    pass
 
-    if _re_iden in _ctf_keywords:
-        return False
 
-    return True
+class SignedEnumerationFieldType(_EnumerationFieldType, SignedIntegerFieldType):
+    pass
 
 
-def _get_first_unknown_prop(node, known_props):
-    for prop_name in node:
-        if prop_name in known_props:
-            continue
+class RealFieldType(_BitArrayFieldType):
+    pass
 
-        return prop_name
 
+class StringFieldType(_FieldType):
+    @property
+    def alignment(self) -> Alignment:
+        return Alignment(8)
 
-# This validator validates the configured metadata for barectf specific
-# needs.
-#
-# barectf needs:
-#
-#   * all header/contexts are at least byte-aligned
-#   * all integer and floating point number sizes to be <= 64
-#   * no inner structures or arrays
-class _BarectfMetadataValidator:
-    def __init__(self):
-        self._type_to_validate_type_func = {
-            metadata.Integer: self._validate_int_type,
-            metadata.FloatingPoint: self._validate_float_type,
-            metadata.Enum: self._validate_enum_type,
-            metadata.String: self._validate_string_type,
-            metadata.Struct: self._validate_struct_type,
-            metadata.Array: self._validate_array_type,
-        }
-
-    def _validate_int_type(self, t, entity_root):
-        if t.size > 64:
-            raise ConfigError('Integer type', 'Size must be lesser than or equal to 64 bits')
-
-    def _validate_float_type(self, t, entity_root):
-        if t.size > 64:
-            raise ConfigError('Floating point number type', 'Size must be lesser than or equal to 64 bits')
-
-    def _validate_enum_type(self, t, entity_root):
-        if t.value_type.size > 64:
-            raise ConfigError('Enumeration type', 'Integer type\'s size must be lesser than or equal to 64 bits')
-
-    def _validate_string_type(self, t, entity_root):
-        pass
-
-    def _validate_struct_type(self, t, entity_root):
-        if not entity_root:
-            raise ConfigError('Structure type', 'Inner structure types are not supported as of this version')
-
-        for field_name, field_type in t.fields.items():
-            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
-                if field_name == 'uuid':
-                    # allow
-                    continue
-
-            try:
-                self._validate_type(field_type, False)
-            except ConfigError as exc:
-                exc.append_ctx('Structure type\' field "{}"'.format(field_name))
-                raise
-
-    def _validate_array_type(self, t, entity_root):
-        raise ConfigError('Array type', 'Not supported as of this version')
-
-    def _validate_type(self, t, entity_root):
-        self._type_to_validate_type_func[type(t)](t, entity_root)
-
-    def _validate_entity(self, t):
-        if t is None:
-            return
-
-        # make sure entity is byte-aligned
-        if t.align < 8:
-            raise ConfigError('Root type', 'Alignment must be at least byte-aligned')
-
-        # make sure entity is a structure
-        if type(t) is not metadata.Struct:
-            raise ConfigError('Root type', 'Expecting a structure type')
-
-        # validate types
-        self._validate_type(t, True)
-
-    def _validate_entities_and_names(self, meta):
-        self._cur_entity = _Entity.TRACE_PACKET_HEADER
-
-        try:
-            self._validate_entity(meta.trace.packet_header_type)
-        except ConfigError as exc:
-            exc.append_ctx('Trace', 'Invalid packet header type')
-            raise
-
-        for stream_name, stream in meta.streams.items():
-            if not _is_valid_identifier(stream_name):
-                raise ConfigError('Trace', 'Stream name "{}" is not a valid C identifier'.format(stream_name))
-
-            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
-
-            try:
-                self._validate_entity(stream.packet_context_type)
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream_name),
-                               'Invalid packet context type')
-                raise
-
-            self._cur_entity = _Entity.STREAM_EVENT_HEADER
-
-            try:
-                self._validate_entity(stream.event_header_type)
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream_name),
-                               'Invalid event header type')
-                raise
-
-            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
-
-            try:
-                self._validate_entity(stream.event_context_type)
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream_name),
-                               'Invalid event context type'.format(stream_name))
-                raise
-
-            try:
-                for ev_name, ev in stream.events.items():
-                    if not _is_valid_identifier(ev_name):
-                        raise ConfigError('Stream "{}"'.format(stream_name),
-                                          'Event name "{}" is not a valid C identifier'.format(ev_name))
-
-                    self._cur_entity = _Entity.EVENT_CONTEXT
-
-                    try:
-                        self._validate_entity(ev.context_type)
-                    except ConfigError as exc:
-                        exc.append_ctx('Event "{}"'.format(ev_name),
-                                       'Invalid context type')
-                        raise
-
-                    self._cur_entity = _Entity.EVENT_PAYLOAD
-
-                    try:
-                        self._validate_entity(ev.payload_type)
-                    except ConfigError as exc:
-                        exc.append_ctx('Event "{}"'.format(ev_name),
-                                       'Invalid payload type')
-                        raise
-
-                    if stream.is_event_empty(ev):
-                        raise ConfigError('Event "{}"'.format(ev_name), 'Empty')
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream_name))
-                raise
-
-    def _validate_default_stream(self, meta):
-        if meta.default_stream_name:
-            if meta.default_stream_name not in meta.streams.keys():
-                raise ConfigError('barectf metadata', 'Default stream name ("{}") does not exist'.format(meta.default_stream_name))
-
-    def validate(self, meta):
-        self._validate_entities_and_names(meta)
-        self._validate_default_stream(meta)
-
-
-# This validator validates special fields of trace, stream, and event
-# types. For example, if checks that the "stream_id" field exists in the
-# trace packet header if there's more than one stream, and much more.
-class _MetadataSpecialFieldsValidator:
-    def _validate_trace_packet_header_type(self, t):
-        # needs "stream_id" field?
-        if len(self._meta.streams) > 1:
-            # yes
-            if t is None:
-                raise ConfigError('"packet-header-type" property',
-                                  'Need "stream_id" field (more than one stream), but trace packet header type is missing')
-
-            if type(t) is not metadata.Struct:
-                raise ConfigError('"packet-header-type" property',
-                                  'Need "stream_id" field (more than one stream), but trace packet header type is not a structure type')
-
-            if 'stream_id' not in t.fields:
-                raise ConfigError('"packet-header-type" property',
-                                  'Need "stream_id" field (more than one stream)')
-
-        # validate "magic" and "stream_id" types
-        if type(t) is not metadata.Struct:
-            return
+    @property
+    def size_is_dynamic(self):
+        return True
 
-        for i, (field_name, field_type) in enumerate(t.fields.items()):
-            if field_name == 'magic':
-                if type(field_type) is not metadata.Integer:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"magic" field must be an integer type')
 
-                if field_type.signed or field_type.size != 32:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"magic" field must be a 32-bit unsigned integer type')
+class _ArrayFieldType(_FieldType):
+    def __init__(self, element_field_type: _FieldType):
+        self._element_field_type = element_field_type
 
-                if i != 0:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"magic" field must be the first trace packet header type\'s field')
-            elif field_name == 'stream_id':
-                if type(field_type) is not metadata.Integer:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"stream_id" field must be an integer type')
+    @property
+    def element_field_type(self) -> _FieldType:
+        return self._element_field_type
 
-                if field_type.signed:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"stream_id" field must be an unsigned integer type')
+    @property
+    def alignment(self) -> Alignment:
+        return self._element_field_type.alignment
 
-                # "id" size can fit all event IDs
-                if len(self._meta.streams) > (1 << field_type.size):
-                    raise ConfigError('"packet-header-type" property',
-                                      '"stream_id" field\' size is too small for the number of trace streams')
-            elif field_name == 'uuid':
-                if self._meta.trace.uuid is None:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field specified, but no trace UUID provided')
-
-                if type(field_type) is not metadata.Array:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array')
-
-                if field_type.length != 16:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array of 16 bytes')
-
-                element_type = field_type.element_type
-
-                if type(element_type) is not metadata.Integer:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array of 16 unsigned bytes')
 
-                if element_type.size != 8:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array of 16 unsigned bytes')
+class StaticArrayFieldType(_ArrayFieldType):
+    def __init__(self, length: Count, element_field_type: _FieldType):
+        super().__init__(element_field_type)
+        self._length = length
 
-                if element_type.signed:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array of 16 unsigned bytes')
+    @property
+    def length(self) -> Count:
+        return self._length
 
-                if element_type.align != 8:
-                    raise ConfigError('"packet-header-type" property',
-                                      '"uuid" field must be an array of 16 unsigned, byte-aligned bytes')
-
-    def _validate_trace(self, meta):
-        self._validate_trace_packet_header_type(meta.trace.packet_header_type)
-
-    def _validate_stream_packet_context(self, stream):
-        t = stream.packet_context_type
-
-        if type(t) is None:
-            raise ConfigError('Stream',
-                              'Missing "packet-context-type" property')
-
-        if type(t) is not metadata.Struct:
-            raise ConfigError('"packet-context-type" property',
-                              'Expecting a structure type')
-
-        # "timestamp_begin", if exists, is an unsigned integer type,
-        # mapped to a clock
-        ts_begin = None
-
-        if 'timestamp_begin' in t.fields:
-            ts_begin = t.fields['timestamp_begin']
-
-            if type(ts_begin) is not metadata.Integer:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_begin" field must be an integer type')
-
-            if ts_begin.signed:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_begin" field must be an unsigned integer type')
-
-            if not ts_begin.property_mappings:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_begin" field must be mapped to a clock')
-
-        # "timestamp_end", if exists, is an unsigned integer type,
-        # mapped to a clock
-        ts_end = None
-
-        if 'timestamp_end' in t.fields:
-            ts_end = t.fields['timestamp_end']
-
-            if type(ts_end) is not metadata.Integer:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_end" field must be an integer type')
-
-            if ts_end.signed:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_end" field must be an unsigned integer type')
-
-            if not ts_end.property_mappings:
-                raise ConfigError('"packet-context-type" property',
-                                  '"timestamp_end" field must be mapped to a clock')
-
-        # "timestamp_begin" and "timestamp_end" exist together
-        if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
-            raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be defined together in stream packet context type')
-
-        # "timestamp_begin" and "timestamp_end" are mapped to the same clock
-        if ts_begin is not None and ts_end is not None:
-            if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
-                raise ConfigError('"timestamp_begin" and "timestamp_end" fields must be mapped to the same clock object in stream packet context type')
 
-        # "events_discarded", if exists, is an unsigned integer type
-        if 'events_discarded' in t.fields:
-            events_discarded = t.fields['events_discarded']
+class StructureFieldTypeMember:
+    def __init__(self, field_type: _FieldType):
+        self._field_type = field_type
 
-            if type(events_discarded) is not metadata.Integer:
-                raise ConfigError('"packet-context-type" property',
-                                  '"events_discarded" field must be an integer type')
+    @property
+    def field_type(self) -> _FieldType:
+        return self._field_type
 
-            if events_discarded.signed:
-                raise ConfigError('"packet-context-type" property',
-                                  '"events_discarded" field must be an unsigned integer type')
 
-        # "packet_size" and "content_size" must exist
-        if 'packet_size' not in t.fields:
-            raise ConfigError('"packet-context-type" property',
-                              'Missing "packet_size" field in stream packet context type')
+_StructFtMembers = Mapping[str, StructureFieldTypeMember]
 
-        packet_size = t.fields['packet_size']
 
-        # "content_size" and "content_size" must exist
-        if 'content_size' not in t.fields:
-            raise ConfigError('"packet-context-type" property',
-                              'Missing "content_size" field in stream packet context type')
+class StructureFieldTypeMembers(collections.abc.Mapping):
+    def __init__(self, members: _StructFtMembers):
+        self._members = collections.OrderedDict()
 
-        content_size = t.fields['content_size']
+        for name, member in members.items():
+            assert type(member) is StructureFieldTypeMember
+            self._members[name] = member
 
-        # "packet_size" is an unsigned integer type
-        if type(packet_size) is not metadata.Integer:
-            raise ConfigError('"packet-context-type" property',
-                              '"packet_size" field in stream packet context type must be an integer type')
+    def __getitem__(self, key: str) -> StructureFieldTypeMember:
+        return self._members[key]
 
-        if packet_size.signed:
-            raise ConfigError('"packet-context-type" property',
-                              '"packet_size" field in stream packet context type must be an unsigned integer type')
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._members)
 
-        # "content_size" is an unsigned integer type
-        if type(content_size) is not metadata.Integer:
-            raise ConfigError('"packet-context-type" property',
-                              '"content_size" field in stream packet context type must be an integer type')
+    def __len__(self) -> int:
+        return len(self._members)
 
-        if content_size.signed:
-            raise ConfigError('"packet-context-type" property',
-                              '"content_size" field in stream packet context type must be an unsigned integer type')
 
-        # "packet_size" size should be greater than or equal to "content_size" size
-        if content_size.size > packet_size.size:
-            raise ConfigError('"packet-context-type" property',
-                              '"content_size" field size must be lesser than or equal to "packet_size" field size')
+class StructureFieldType(_FieldType):
+    def __init__(self, minimum_alignment: Alignment = Alignment(1),
+                 members: Optional[_StructFtMembers] = None):
+        self._minimum_alignment = minimum_alignment
+        self._members = StructureFieldTypeMembers({})
 
-    def _validate_stream_event_header(self, stream):
-        t = stream.event_header_type
+        if members is not None:
+            self._members = StructureFieldTypeMembers(members)
 
-        # needs "id" field?
-        if len(stream.events) > 1:
-            # yes
-            if t is None:
-                raise ConfigError('"event-header-type" property',
-                                  'Need "id" field (more than one event), but stream event header type is missing')
+        self._set_alignment()
 
-            if type(t) is not metadata.Struct:
-                raise ConfigError('"event-header-type" property',
-                                  'Need "id" field (more than one event), but stream event header type is not a structure type')
+    def _set_alignment(self):
+        self._alignment: Alignment = self._minimum_alignment
 
-            if 'id' not in t.fields:
-                raise ConfigError('"event-header-type" property',
-                                  'Need "id" field (more than one event)')
+        for member in self._members.values():
+            if member.field_type.alignment > self._alignment:
+                self._alignment = member.field_type.alignment
 
-        # validate "id" and "timestamp" types
-        if type(t) is not metadata.Struct:
-            return
+    @property
+    def minimum_alignment(self) -> Alignment:
+        return self._minimum_alignment
 
-        # "timestamp", if exists, is an unsigned integer type,
-        # mapped to a clock
-        if 'timestamp' in t.fields:
-            ts = t.fields['timestamp']
+    @property
+    def alignment(self) -> Alignment:
+        return self._alignment
 
-            if type(ts) is not metadata.Integer:
-                raise ConfigError('"event-header-type" property',
-                                  '"timestamp" field must be an integer type')
+    @property
+    def size_is_dynamic(self):
+        return any([member.field_type.size_is_dynamic for member in self.members.values()])
 
-            if ts.signed:
-                raise ConfigError('"event-header-type" property',
-                                  '"timestamp" field must be an unsigned integer type')
+    @property
+    def members(self) -> StructureFieldTypeMembers:
+        return self._members
 
-            if not ts.property_mappings:
-                raise ConfigError('"event-header-type" property',
-                                  '"timestamp" field must be mapped to a clock')
 
-        if 'id' in t.fields:
-            eid = t.fields['id']
+class _UniqueByName:
+    _name: str
 
-            # "id" is an unsigned integer type
-            if type(eid) is not metadata.Integer:
-                raise ConfigError('"event-header-type" property',
-                                  '"id" field must be an integer type')
+    def __eq__(self, other: Any) -> bool:
+        if type(other) is not type(self):
+            return False
 
-            if eid.signed:
-                raise ConfigError('"event-header-type" property',
-                                  '"id" field must be an unsigned integer type')
+        return self._name == other._name
 
-            # "id" size can fit all event IDs
-            if len(stream.events) > (1 << eid.size):
-                raise ConfigError('"event-header-type" property',
-                                  '"id" field\' size is too small for the number of stream events')
+    def __lt__(self, other: '_UniqueByName'):
+        assert type(self) is type(other)
+        return self._name < other._name
 
-    def _validate_stream(self, stream):
-        self._validate_stream_packet_context(stream)
-        self._validate_stream_event_header(stream)
+    def __hash__(self) -> int:
+        return hash(self._name)
 
-    def validate(self, meta):
-        self._meta = meta
-        self._validate_trace(meta)
 
-        for stream in meta.streams.values():
-            try:
-                self._validate_stream(stream)
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream.name), 'Invalid')
-                raise
+_OptFt = Optional[_FieldType]
+_OptStructFt = Optional[StructureFieldType]
+LogLevel = typing.NewType('LogLevel', int)
 
 
-# Entities. Order of values is important here.
-@enum.unique
-class _Entity(enum.IntEnum):
-    TRACE_PACKET_HEADER = 0
-    STREAM_PACKET_CONTEXT = 1
-    STREAM_EVENT_HEADER = 2
-    STREAM_EVENT_CONTEXT = 3
-    EVENT_CONTEXT = 4
-    EVENT_PAYLOAD = 5
-
-
-# Since type inheritance allows types to be only partially defined at
-# any place in the configuration, this validator validates that actual
-# trace, stream, and event types are all complete and valid. Therefore
-# an invalid, but unusued type alias is accepted.
-class _MetadataTypesHistologyValidator:
-    def __init__(self):
-        self._type_to_validate_type_histology_func = {
-            metadata.Integer: self._validate_integer_histology,
-            metadata.FloatingPoint: self._validate_float_histology,
-            metadata.Enum: self._validate_enum_histology,
-            metadata.String: self._validate_string_histology,
-            metadata.Struct: self._validate_struct_histology,
-            metadata.Array: self._validate_array_histology,
-        }
-
-    def _validate_integer_histology(self, t):
-        # size is set
-        if t.size is None:
-            raise ConfigError('Integer type', 'Missing size')
-
-    def _validate_float_histology(self, t):
-        # exponent digits is set
-        if t.exp_size is None:
-            raise ConfigError('Floating point number type',
-                              'Missing exponent size')
-
-        # mantissa digits is set
-        if t.mant_size is None:
-            raise ConfigError('Floating point number type',
-                              'Missing mantissa size')
-
-        # exponent and mantissa sum is a multiple of 8
-        if (t.exp_size + t.mant_size) % 8 != 0:
-            raise ConfigError('Floating point number type',
-                              'Mantissa and exponent sizes sum must be a multiple of 8')
-
-    def _validate_enum_histology(self, t):
-        # integer type is set
-        if t.value_type is None:
-            raise ConfigError('Enumeration type', 'Missing value type')
-
-        # there's at least one member
-        if not t.members:
-            raise ConfigError('Enumeration type', 'At least one member required')
-
-        # no overlapping values and all values are valid considering
-        # the value type
-        ranges = []
-
-        if t.value_type.signed:
-            value_min = -(1 << t.value_type.size - 1)
-            value_max = (1 << (t.value_type.size - 1)) - 1
-        else:
-            value_min = 0
-            value_max = (1 << t.value_type.size) - 1
-
-        for label, value in t.members.items():
-            for rg in ranges:
-                if value[0] <= rg[1] and rg[0] <= value[1]:
-                    raise ConfigError('Enumeration type\'s member "{}"',
-                                      'Overlaps another member'.format(label))
-
-            name_fmt = 'Enumeration type\'s member "{}"'
-            msg_fmt = 'Value {} is outside the value type range [{}, {}]'
-
-            if value[0] < value_min or value[0] > value_max:
-                raise ConfigError(name_fmt.format(label),
-                                  msg_fmt.format(value[0], value_min, value_max))
-
-            if value[1] < value_min or value[1] > value_max:
-                raise ConfigError(name_fmt.format(label),
-                                  msg_fmt.format(value[0], value_min, value_max))
-
-            ranges.append(value)
-
-    def _validate_string_histology(self, t):
-        # always valid
-        pass
-
-    def _validate_struct_histology(self, t):
-        # all fields are valid
-        for field_name, field_type in t.fields.items():
-            try:
-                self._validate_type_histology(field_type)
-            except ConfigError as exc:
-                exc.append_ctx('Structure type\'s field "{}"'.format(field_name))
-                raise
-
-    def _validate_array_histology(self, t):
-        # length is set
-        if t.length is None:
-            raise ConfigError('Array type', 'Missing length')
-
-        # element type is set
-        if t.element_type is None:
-            raise ConfigError('Array type', 'Missing element type')
-
-        # element type is valid
-        try:
-            self._validate_type_histology(t.element_type)
-        except ConfigError as exc:
-            exc.append_ctx('Array type', 'Invalid element type')
-            raise
-
-    def _validate_type_histology(self, t):
-        if t is None:
-            return
+class EventType(_UniqueByName):
+    def __init__(self, name: str, log_level: Optional[LogLevel] = None,
+                 specific_context_field_type: _OptStructFt = None, payload_field_type: _OptStructFt = None):
+        self._id: Optional[Id] = None
+        self._name = name
+        self._log_level = log_level
+        self._specific_context_field_type = specific_context_field_type
+        self._payload_field_type = payload_field_type
 
-        self._type_to_validate_type_histology_func[type(t)](t)
+    @property
+    def id(self) -> Optional[Id]:
+        return self._id
 
-    def _validate_entity_type_histology(self, t):
-        if t is None:
-            return
+    @property
+    def name(self) -> str:
+        return self._name
 
-        if type(t) is not metadata.Struct:
-            raise ConfigError('Root type', 'Expecting a structure type')
-
-        self._validate_type_histology(t)
-
-    def _validate_event_types_histology(self, ev):
-        ev_name = ev.name
-
-        # validate event context type
-        try:
-            self._validate_entity_type_histology(ev.context_type)
-        except ConfigError as exc:
-            exc.append_ctx('Event "{}"'.format(ev.name),
-                           'Invalid context type')
-            raise
-
-        # validate event payload type
-        try:
-            self._validate_entity_type_histology(ev.payload_type)
-        except ConfigError as exc:
-            exc.append_ctx('Event "{}"'.format(ev.name),
-                           'Invalid payload type')
-            raise
-
-    def _validate_stream_types_histology(self, stream):
-        stream_name = stream.name
-
-        # validate stream packet context type
-        try:
-            self._validate_entity_type_histology(stream.packet_context_type)
-        except ConfigError as exc:
-            exc.append_ctx('Stream "{}"'.format(stream_name),
-                           'Invalid packet context type')
-            raise
-
-        # validate stream event header type
-        try:
-            self._validate_entity_type_histology(stream.event_header_type)
-        except ConfigError as exc:
-            exc.append_ctx('Stream "{}"'.format(stream_name),
-                           'Invalid event header type')
-            raise
-
-        # validate stream event context type
-        try:
-            self._validate_entity_type_histology(stream.event_context_type)
-        except ConfigError as exc:
-            exc.append_ctx('Stream "{}"'.format(stream_name),
-                           'Invalid event context type')
-            raise
-
-        # validate events
-        for ev in stream.events.values():
-            try:
-                self._validate_event_types_histology(ev)
-            except ConfigError as exc:
-                exc.append_ctx('Stream "{}"'.format(stream_name),
-                               'Invalid event')
-                raise
-
-    def validate(self, meta):
-        # validate trace packet header type
-        try:
-            self._validate_entity_type_histology(meta.trace.packet_header_type)
-        except ConfigError as exc:
-            exc.append_ctx('Metadata\'s trace', 'Invalid packet header type')
-            raise
-
-        # validate streams
-        for stream in meta.streams.values():
-            self._validate_stream_types_histology(stream)
-
-
-class _YamlConfigParser:
-    def __init__(self, include_dirs, ignore_include_not_found, dump_config):
-        self._class_name_to_create_type_func = {
-            'int': self._create_integer,
-            'integer': self._create_integer,
-            'flt': self._create_float,
-            'float': self._create_float,
-            'floating-point': self._create_float,
-            'enum': self._create_enum,
-            'enumeration': self._create_enum,
-            'str': self._create_string,
-            'string': self._create_string,
-            'struct': self._create_struct,
-            'structure': self._create_struct,
-            'array': self._create_array,
-        }
-        self._type_to_create_type_func = {
-            metadata.Integer: self._create_integer,
-            metadata.FloatingPoint: self._create_float,
-            metadata.Enum: self._create_enum,
-            metadata.String: self._create_string,
-            metadata.Struct: self._create_struct,
-            metadata.Array: self._create_array,
-        }
-        self._include_dirs = include_dirs
-        self._ignore_include_not_found = ignore_include_not_found
-        self._dump_config = dump_config
-
-    def _set_byte_order(self, metadata_node):
-        if 'trace' not in metadata_node:
-            raise ConfigError('Metadata', 'Missing "trace" property')
-
-        trace_node = metadata_node['trace']
-
-        if not _is_assoc_array_prop(trace_node):
-            raise ConfigError('Metadata\'s "trace" property',
-                              'Must be an associative array')
-
-        if 'byte-order' not in trace_node:
-            raise ConfigError('Metadata\'s "trace" property',
-                              'Missing "byte-order" property')
-
-        bo_node = trace_node['byte-order']
-
-        if not _is_str_prop(bo_node):
-            raise ConfigError('Metadata\'s "trace" property',
-                              '"byte-order" property must be a string ("le" or "be")')
-
-        self._bo = _byte_order_str_to_bo(bo_node)
-
-        if self._bo is None:
-            raise ConfigError('Metadata\'s "trace" property',
-                              'Invalid "byte-order" property: must be "le" or "be"')
-
-    def _lookup_type_alias(self, name):
-        if name in self._tas:
-            return copy.deepcopy(self._tas[name])
-
-    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
-        unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property'])
-
-        if unk_prop:
-            raise ConfigError('Integer type\'s clock property mapping',
-                              'Unknown property: "{}"'.format(unk_prop))
-
-        if 'name' not in prop_mapping_node:
-            raise ConfigError('Integer type\'s clock property mapping',
-                              'Missing "name" property')
-
-        if 'property' not in prop_mapping_node:
-            raise ConfigError('Integer type\'s clock property mapping',
-                              'Missing "property" property')
-
-        clock_name = prop_mapping_node['name']
-        prop = prop_mapping_node['property']
-
-        if not _is_str_prop(clock_name):
-            raise ConfigError('Integer type\'s clock property mapping',
-                              '"name" property must be a string')
-
-        if not _is_str_prop(prop):
-            raise ConfigError('Integer type\'s clock property mapping',
-                              '"property" property must be a string')
-
-        if clock_name not in self._clocks:
-            raise ConfigError('Integer type\'s clock property mapping',
-                              'Invalid clock name "{}"'.format(clock_name))
-
-        if prop != 'value':
-            raise ConfigError('Integer type\'s clock property mapping',
-                              'Invalid "property" property: "{}"'.format(prop))
-
-        mapped_clock = self._clocks[clock_name]
-        int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop))
-
-    def _get_first_unknown_type_prop(self, type_node, known_props):
-        kp = known_props + ['inherit', 'class']
-
-        if self._version >= 201:
-            kp.append('$inherit')
-
-        return _get_first_unknown_prop(type_node, kp)
-
-    def _create_integer(self, obj, node):
-        if obj is None:
-            # create integer object
-            obj = metadata.Integer()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'size',
-            'align',
-            'signed',
-            'byte-order',
-            'base',
-            'encoding',
-            'property-mappings',
-        ])
+    @property
+    def log_level(self) -> Optional[LogLevel]:
+        return self._log_level
 
-        if unk_prop:
-            raise ConfigError('Integer type',
-                              'Unknown property: "{}"'.format(unk_prop))
+    @property
+    def specific_context_field_type(self) -> _OptStructFt:
+        return self._specific_context_field_type
 
-        # size
-        if 'size' in node:
-            size = node['size']
+    @property
+    def payload_field_type(self) -> _OptStructFt:
+        return self._payload_field_type
 
-            if not _is_int_prop(size):
-                raise ConfigError('Integer type',
-                                  '"size" property of integer type object must be an integer')
 
-            if size < 1:
-                raise ConfigError('Integer type',
-                                  'Invalid integer size: {}'.format(size))
+class ClockTypeOffset:
+    def __init__(self, seconds: int = 0, cycles: Count = Count(0)):
+        self._seconds = seconds
+        self._cycles = cycles
 
-            obj.size = size
+    @property
+    def seconds(self) -> int:
+        return self._seconds
 
-        # align
-        if 'align' in node:
-            align = node['align']
+    @property
+    def cycles(self) -> Count:
+        return self._cycles
 
-            if align is None:
-                obj.set_default_align()
-            else:
-                if not _is_int_prop(align):
-                    raise ConfigError('Integer type',
-                                      '"align" property of integer type object must be an integer')
 
-                if not _is_valid_alignment(align):
-                    raise ConfigError('Integer type',
-                                      'Invalid alignment: {}'.format(align))
+_OptUuid = Optional[uuidp.UUID]
 
-                obj.align = align
 
-        # signed
-        if 'signed' in node:
-            signed = node['signed']
+class ClockType(_UniqueByName):
+    def __init__(self, name: str, frequency: Count = Count(int(1e9)), uuid: _OptUuid = None,
+                 description: _OptStr = None, precision: Count = Count(0),
+                 offset: Optional[ClockTypeOffset] = None, origin_is_unix_epoch: bool = False):
+        self._name = name
+        self._frequency = frequency
+        self._uuid = uuid
+        self._description = description
+        self._precision = precision
+        self._offset = ClockTypeOffset()
 
-            if signed is None:
-                obj.set_default_signed()
-            else:
-                if not _is_bool_prop(signed):
-                    raise ConfigError('Integer type',
-                                      '"signed" property of integer type object must be a boolean')
+        if offset is not None:
+            self._offset = offset
 
-                obj.signed = signed
+        self._origin_is_unix_epoch = origin_is_unix_epoch
 
-        # byte order
-        if 'byte-order' in node:
-            byte_order = node['byte-order']
+    @property
+    def name(self) -> str:
+        return self._name
 
-            if byte_order is None:
-                obj.byte_order = self._bo
-            else:
-                if not _is_str_prop(byte_order):
-                    raise ConfigError('Integer type',
-                                      '"byte-order" property of integer type object must be a string ("le" or "be")')
+    @property
+    def frequency(self) -> Count:
+        return self._frequency
 
-                byte_order = _byte_order_str_to_bo(byte_order)
+    @property
+    def uuid(self) -> _OptUuid:
+        return self._uuid
 
-                if byte_order is None:
-                    raise ConfigError('Integer type',
-                                      'Invalid "byte-order" property in integer type object')
+    @property
+    def description(self) -> _OptStr:
+        return self._description
 
-                obj.byte_order = byte_order
-        else:
-            obj.byte_order = self._bo
-
-        # base
-        if 'base' in node:
-            base = node['base']
-
-            if base is None:
-                obj.set_default_base()
-            else:
-                if not _is_str_prop(base):
-                    raise ConfigError('Integer type',
-                                      '"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")')
-
-                if base == 'bin':
-                    base = 2
-                elif base == 'oct':
-                    base = 8
-                elif base == 'dec':
-                    base = 10
-                elif base == 'hex':
-                    base = 16
-                else:
-                    raise ConfigError('Integer type',
-                                      'Unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base))
-
-                obj.base = base
-
-        # encoding
-        if 'encoding' in node:
-            encoding = node['encoding']
-
-            if encoding is None:
-                obj.set_default_encoding()
-            else:
-                if not _is_str_prop(encoding):
-                    raise ConfigError('Integer type',
-                                      '"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")')
-
-                encoding = _encoding_str_to_encoding(encoding)
-
-                if encoding is None:
-                    raise ConfigError('Integer type',
-                                      'Invalid "encoding" property in integer type object')
-
-                obj.encoding = encoding
-
-        # property mappings
-        if 'property-mappings' in node:
-            prop_mappings = node['property-mappings']
-
-            if prop_mappings is None:
-                obj.set_default_property_mappings()
-            else:
-                if not _is_array_prop(prop_mappings):
-                    raise ConfigError('Integer type',
-                                      '"property-mappings" property of integer type object must be an array')
-
-                if len(prop_mappings) > 1:
-                    raise ConfigError('Integer type',
-                                      'Length of "property-mappings" array in integer type object must be 1')
-
-                for index, prop_mapping in enumerate(prop_mappings):
-                    if not _is_assoc_array_prop(prop_mapping):
-                        raise ConfigError('Integer type',
-                                          'Elements of "property-mappings" property of integer type object must be associative arrays')
-
-                    if 'type' not in prop_mapping:
-                        raise ConfigError('Integer type',
-                                          'Missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index))
-
-                    prop_type = prop_mapping['type']
-
-                    if not _is_str_prop(prop_type):
-                        raise ConfigError('Integer type',
-                                          '"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index))
-
-                    if prop_type == 'clock':
-                        self._set_int_clock_prop_mapping(obj, prop_mapping)
-                    else:
-                        raise ConfigError('Integer type',
-                                          'Unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index))
-
-        return obj
-
-    def _create_float(self, obj, node):
-        if obj is None:
-            # create floating point number object
-            obj = metadata.FloatingPoint()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'size',
-            'align',
-            'byte-order',
-        ])
+    @property
+    def precision(self) -> Count:
+        return self._precision
 
-        if unk_prop:
-            raise ConfigError('Floating point number type',
-                              'Unknown property: "{}"'.format(unk_prop))
+    @property
+    def offset(self) -> ClockTypeOffset:
+        return self._offset
 
-        # size
-        if 'size' in node:
-            size = node['size']
+    @property
+    def origin_is_unix_epoch(self) -> bool:
+        return self._origin_is_unix_epoch
 
-            if not _is_assoc_array_prop(size):
-                raise ConfigError('Floating point number type',
-                                  '"size" property must be an associative array')
 
-            unk_prop = _get_first_unknown_prop(size, ['exp', 'mant'])
+DEFAULT_FIELD_TYPE = 'default'
+_DefaultableUIntFt = Union[str, UnsignedIntegerFieldType]
+_OptDefaultableUIntFt = Optional[_DefaultableUIntFt]
+_OptUIntFt = Optional[UnsignedIntegerFieldType]
 
-            if unk_prop:
-                raise ConfigError('Floating point number type\'s "size" property',
-                                  'Unknown property: "{}"'.format(unk_prop))
 
-            if 'exp' in size:
-                exp = size['exp']
+class StreamTypePacketFeatures:
+    def __init__(self, total_size_field_type: _DefaultableUIntFt = DEFAULT_FIELD_TYPE,
+                 content_size_field_type: _DefaultableUIntFt = DEFAULT_FIELD_TYPE,
+                 beginning_time_field_type: _OptDefaultableUIntFt = None,
+                 end_time_field_type: _OptDefaultableUIntFt = None,
+                 discarded_events_counter_field_type: _OptDefaultableUIntFt = None):
+        def get_ft(user_ft: _OptDefaultableUIntFt) -> _OptUIntFt:
+            if user_ft == DEFAULT_FIELD_TYPE:
+                return UnsignedIntegerFieldType(64)
 
-                if not _is_int_prop(exp):
-                    raise ConfigError('Floating point number type\'s "size" property',
-                                      '"exp" property must be an integer')
+            return typing.cast(_OptUIntFt, user_ft)
 
-                if exp < 1:
-                    raise ConfigError('Floating point number type\'s "size" property',
-                                      'Invalid exponent size: {}')
+        self._total_size_field_type = get_ft(total_size_field_type)
+        self._content_size_field_type = get_ft(content_size_field_type)
+        self._beginning_time_field_type = get_ft(beginning_time_field_type)
+        self._end_time_field_type = get_ft(end_time_field_type)
+        self._discarded_events_counter_field_type = get_ft(discarded_events_counter_field_type)
 
-                obj.exp_size = exp
+    @property
+    def total_size_field_type(self) -> _OptUIntFt:
+        return self._total_size_field_type
 
-            if 'mant' in size:
-                mant = size['mant']
+    @property
+    def content_size_field_type(self) -> _OptUIntFt:
+        return self._content_size_field_type
 
-                if not _is_int_prop(mant):
-                    raise ConfigError('Floating point number type\'s "size" property',
-                                      '"mant" property must be an integer')
+    @property
+    def beginning_time_field_type(self) -> _OptUIntFt:
+        return self._beginning_time_field_type
 
-                if mant < 1:
-                    raise ConfigError('Floating point number type\'s "size" property',
-                                      'Invalid mantissa size: {}'.format(mant))
+    @property
+    def end_time_field_type(self) -> _OptUIntFt:
+        return self._end_time_field_type
 
-                obj.mant_size = mant
+    @property
+    def discarded_events_counter_field_type(self) -> _OptUIntFt:
+        return self._discarded_events_counter_field_type
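+
+# Illustrative sketch of the three accepted parameter forms, assuming the
+# field type constructors defined in this module:
+#
+#     pkt_features = StreamTypePacketFeatures(
+#         # keep the default 64-bit size field types
+#         total_size_field_type=DEFAULT_FIELD_TYPE,
+#         content_size_field_type=DEFAULT_FIELD_TYPE,
+#         # no beginning/end timestamp fields
+#         beginning_time_field_type=None,
+#         end_time_field_type=None,
+#         # explicit 32-bit discarded events counter field
+#         discarded_events_counter_field_type=UnsignedIntegerFieldType(32),
+#     )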
 
-        # align
-        if 'align' in node:
-            align = node['align']
 
-            if align is None:
-                obj.set_default_align()
-            else:
-                if not _is_int_prop(align):
-                    raise ConfigError('Floating point number type',
-                                      '"align" property must be an integer')
+class StreamTypeEventFeatures:
+    def __init__(self, type_id_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE,
+                 time_field_type: _OptDefaultableUIntFt = None):
+        def get_ft(user_ft: _OptDefaultableUIntFt) -> _OptUIntFt:
+            if user_ft == DEFAULT_FIELD_TYPE:
+                return UnsignedIntegerFieldType(64)
 
-                if not _is_valid_alignment(align):
-                    raise ConfigError('Floating point number type',
-                                      'Invalid alignment: {}'.format(align))
+            return typing.cast(_OptUIntFt, user_ft)
 
-                obj.align = align
+        self._type_id_field_type = get_ft(type_id_field_type)
+        self._time_field_type = get_ft(time_field_type)
 
-        # byte order
-        if 'byte-order' in node:
-            byte_order = node['byte-order']
+    @property
+    def type_id_field_type(self) -> _OptUIntFt:
+        return self._type_id_field_type
 
-            if byte_order is None:
-                obj.byte_order = self._bo
-            else:
-                if not _is_str_prop(byte_order):
-                    raise ConfigError('Floating point number type',
-                                      '"byte-order" property must be a string ("le" or "be")')
+    @property
+    def time_field_type(self) -> _OptUIntFt:
+        return self._time_field_type
 
-                byte_order = _byte_order_str_to_bo(byte_order)
 
-                if byte_order is None:
-                    raise ConfigError('Floating point number type',
-                                      'Invalid "byte-order" property')
+class StreamTypeFeatures:
+    def __init__(self, packet_features: Optional[StreamTypePacketFeatures] = None,
+                 event_features: Optional[StreamTypeEventFeatures] = None):
+        if packet_features is None:
+            self._packet_features = StreamTypePacketFeatures()
         else:
-            obj.byte_order = self._bo
-
-        return obj
-
-    def _create_enum(self, obj, node):
-        if obj is None:
-            # create enumeration object
-            obj = metadata.Enum()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'value-type',
-            'members',
-        ])
-
-        if unk_prop:
-            raise ConfigError('Enumeration type',
-                              'Unknown property: "{}"'.format(unk_prop))
-
-        # value type
-        if 'value-type' in node:
-            value_type_node = node['value-type']
-
-            try:
-                obj.value_type = self._create_type(value_type_node)
-            except ConfigError as exc:
-                exc.append_ctx('Enumeration type', 'Cannot create integer type')
-                raise
-
-        # members
-        if 'members' in node:
-            members_node = node['members']
-
-            if not _is_array_prop(members_node):
-                raise ConfigError('Enumeration type',
-                                  '"members" property must be an array')
-
-            cur = 0
-            last_value = obj.last_value
-
-            if last_value is None:
-                cur = 0
-            else:
-                cur = last_value + 1
-
-            for index, m_node in enumerate(members_node):
-                if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node):
-                    raise ConfigError('Enumeration type',
-                                      'Invalid member #{}: expecting a string or an associative array'.format(index))
-
-                if _is_str_prop(m_node):
-                    label = m_node
-                    value = (cur, cur)
-                    cur += 1
-                else:
-                    unk_prop = _get_first_unknown_prop(m_node, [
-                        'label',
-                        'value',
-                    ])
-
-                    if unk_prop:
-                        raise ConfigError('Enumeration type',
-                                          'Unknown member object property: "{}"'.format(unk_prop))
-
-                    if 'label' not in m_node:
-                        raise ConfigError('Enumeration type',
-                                          'Missing "label" property in member #{}'.format(index))
-
-                    label = m_node['label']
-
-                    if not _is_str_prop(label):
-                        raise ConfigError('Enumeration type',
-                                          '"label" property of member #{} must be a string'.format(index))
-
-                    if 'value' not in m_node:
-                        raise ConfigError('Enumeration type',
-                                          'Missing "value" property in member ("{}")'.format(label))
-
-                    value = m_node['value']
-
-                    if not _is_int_prop(value) and not _is_array_prop(value):
-                        raise ConfigError('Enumeration type',
-                                          'Invalid member ("{}"): expecting an integer or an array'.format(label))
-
-                    if _is_int_prop(value):
-                        cur = value + 1
-                        value = (value, value)
-                    else:
-                        if len(value) != 2:
-                            raise ConfigError('Enumeration type',
-                                              'Invalid member ("{}"): range must have exactly two items'.format(label))
-
-                        mn = value[0]
-                        mx = value[1]
-
-                        if mn > mx:
-                            raise ConfigError('Enumeration type',
-                                              'Invalid member ("{}"): invalid range ({} > {})'.format(label, mn, mx))
-
-                        value = (mn, mx)
-                        cur = mx + 1
-
-                obj.members[label] = value
-
-        return obj
-
-    def _create_string(self, obj, node):
-        if obj is None:
-            # create string object
-            obj = metadata.String()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'encoding',
-        ])
-
-        if unk_prop:
-            raise ConfigError('String type',
-                              'Unknown object property: "{}"'.format(unk_prop))
-
-        # encoding
-        if 'encoding' in node:
-            encoding = node['encoding']
-
-            if encoding is None:
-                obj.set_default_encoding()
-            else:
-                if not _is_str_prop(encoding):
-                    raise ConfigError('String type',
-                                      '"encoding" property must be a string ("none", "ascii", or "utf-8")')
-
-                encoding = _encoding_str_to_encoding(encoding)
-
-                if encoding is None:
-                    raise ConfigError('String type',
-                                      'Invalid "encoding" property')
-
-                obj.encoding = encoding
-
-        return obj
-
-    def _create_struct(self, obj, node):
-        if obj is None:
-            # create structure object
-            obj = metadata.Struct()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'min-align',
-            'fields',
-        ])
-
-        if unk_prop:
-            raise ConfigError('Structure type',
-                              'Unknown object property: "{}"'.format(unk_prop))
-
-        # minimum alignment
-        if 'min-align' in node:
-            min_align = node['min-align']
-
-            if min_align is None:
-                obj.set_default_min_align()
-            else:
-                if not _is_int_prop(min_align):
-                    raise ConfigError('Structure type',
-                                      '"min-align" property must be an integer')
-
-                if not _is_valid_alignment(min_align):
-                    raise ConfigError('Structure type',
-                                      'Invalid minimum alignment: {}'.format(min_align))
-
-                obj.min_align = min_align
-
-        # fields
-        if 'fields' in node:
-            fields = node['fields']
-
-            if fields is None:
-                obj.set_default_fields()
-            else:
-                if not _is_assoc_array_prop(fields):
-                    raise ConfigError('Structure type',
-                                      '"fields" property must be an associative array')
-
-                for field_name, field_node in fields.items():
-                    if not _is_valid_identifier(field_name):
-                        raise ConfigError('Structure type',
-                                          '"{}" is not a valid field name'.format(field_name))
-
-                    try:
-                        obj.fields[field_name] = self._create_type(field_node)
-                    except ConfigError as exc:
-                        exc.append_ctx('Structure type',
-                                       'Cannot create field "{}"'.format(field_name))
-                        raise
-
-        return obj
-
-    def _create_array(self, obj, node):
-        if obj is None:
-            # create array object
-            obj = metadata.Array()
-
-        unk_prop = self._get_first_unknown_type_prop(node, [
-            'length',
-            'element-type',
-        ])
-
-        if unk_prop:
-            raise ConfigError('Array type',
-                              'Unknown property: "{}"'.format(unk_prop))
-
-        # length
-        if 'length' in node:
-            length = node['length']
-
-            if not _is_int_prop(length):
-                raise ConfigError('Array type',
-                                  '"length" property must be an integer')
-
-            if type(length) is int and length < 0:
-                raise ConfigError('Array type',
-                                  'Invalid length: {}'.format(length))
-
-            obj.length = length
-
-        # element type
-        if 'element-type' in node:
-            element_type_node = node['element-type']
+            self._packet_features = packet_features
 
-            try:
-                obj.element_type = self._create_type(node['element-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Array type', 'Cannot create element type')
-                raise
-
-        return obj
-
-    def _create_type(self, type_node):
-        if type(type_node) is str:
-            t = self._lookup_type_alias(type_node)
-
-            if t is None:
-                raise ConfigError('Type',
-                                  'Unknown type alias "{}"'.format(type_node))
-
-            return t
-
-        if not _is_assoc_array_prop(type_node):
-            raise ConfigError('Type',
-                              'Expecting associative arrays or string (type alias name)')
-
-        # inherit:
-        #   v2.0:  "inherit"
-        #   v2.1+: "$inherit"
-        inherit_node = None
-
-        if self._version >= 200:
-            if 'inherit' in type_node:
-                inherit_prop = 'inherit'
-                inherit_node = type_node[inherit_prop]
-
-        if self._version >= 201:
-            if '$inherit' in type_node:
-                if inherit_node is not None:
-                    raise ConfigError('Type',
-                                      'Cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"')
-
-                inherit_prop = '$inherit'
-                inherit_node = type_node[inherit_prop]
-
-        if inherit_node is not None and 'class' in type_node:
-            raise ConfigError('Type',
-                              'Cannot specify both "{}" and "class" properties in type object'.format(inherit_prop))
-
-        if inherit_node is not None:
-            if not _is_str_prop(inherit_node):
-                raise ConfigError('Type',
-                                  '"{}" property of type object must be a string'.format(inherit_prop))
-
-            base = self._lookup_type_alias(inherit_node)
-
-            if base is None:
-                raise ConfigError('Type',
-                                  'Cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node))
-
-            func = self._type_to_create_type_func[type(base)]
+        if event_features is None:
+            self._event_features = StreamTypeEventFeatures()
         else:
-            if 'class' not in type_node:
-                raise ConfigError('Type',
-                                  'Does not inherit, therefore must have a "class" property')
-
-            class_name = type_node['class']
-
-            if type(class_name) is not str:
-                raise ConfigError('Type', '"class" property must be a string')
-
-            if class_name not in self._class_name_to_create_type_func:
-                raise ConfigError('Type',
-                                  'Unknown class "{}"'.format(class_name))
-
-            base = None
-            func = self._class_name_to_create_type_func[class_name]
-
-        return func(base, type_node)
-
-    def _register_type_aliases(self, metadata_node):
-        self._tas = dict()
-
-        if 'type-aliases' not in metadata_node:
-            return
-
-        ta_node = metadata_node['type-aliases']
-
-        if ta_node is None:
-            return
-
-        if not _is_assoc_array_prop(ta_node):
-            raise ConfigError('Metadata',
-                              '"type-aliases" property must be an associative array')
-
-        for ta_name, ta_type in ta_node.items():
-            if ta_name in self._tas:
-                raise ConfigError('Metadata',
-                                  'Duplicate type alias "{}"'.format(ta_name))
-
-            try:
-                t = self._create_type(ta_type)
-            except ConfigError as exc:
-                exc.append_ctx('Metadata',
-                               'Cannot create type alias "{}"'.format(ta_name))
-                raise
-
-            self._tas[ta_name] = t
-
-    def _create_clock(self, node):
-        # create clock object
-        clock = metadata.Clock()
-
-        if not _is_assoc_array_prop(node):
-            raise ConfigError('Metadata',
-                              'Clock objects must be associative arrays')
-
-        known_props = [
-            'uuid',
-            'description',
-            'freq',
-            'error-cycles',
-            'offset',
-            'absolute',
-            'return-ctype',
-        ]
-
-        if self._version >= 201:
-            known_props.append('$return-ctype')
-
-        unk_prop = _get_first_unknown_prop(node, known_props)
-
-        if unk_prop:
-            raise ConfigError('Clock',
-                              'Unknown property: "{}"'.format(unk_prop))
-
-        # UUID
-        if 'uuid' in node:
-            uuidp = node['uuid']
-
-            if uuidp is None:
-                clock.set_default_uuid()
-            else:
-                if not _is_str_prop(uuidp):
-                    raise ConfigError('Clock',
-                                      '"uuid" property must be a string')
-
-                try:
-                    uuidp = uuid.UUID(uuidp)
-                except:
-                    raise ConfigError('Clock', 'Malformed UUID: "{}"'.format(uuidp))
-
-                clock.uuid = uuidp
-
-        # description
-        if 'description' in node:
-            desc = node['description']
-
-            if desc is None:
-                clock.set_default_description()
-            else:
-                if not _is_str_prop(desc):
-                    raise ConfigError('Clock',
-                                      '"description" property must be a string')
-
-                clock.description = desc
-
-        # frequency
-        if 'freq' in node:
-            freq = node['freq']
-
-            if freq is None:
-                clock.set_default_freq()
-            else:
-                if not _is_int_prop(freq):
-                    raise ConfigError('Clock',
-                                      '"freq" property must be an integer')
-
-                if freq < 1:
-                    raise ConfigError('Clock',
-                                      'Invalid frequency: {}'.format(freq))
-
-                clock.freq = freq
-
-        # error cycles
-        if 'error-cycles' in node:
-            error_cycles = node['error-cycles']
-
-            if error_cycles is None:
-                clock.set_default_error_cycles()
-            else:
-                if not _is_int_prop(error_cycles):
-                    raise ConfigError('Clock',
-                                      '"error-cycles" property must be an integer')
-
-                if error_cycles < 0:
-                    raise ConfigError('Clock',
-                                      'Invalid error cycles: {}'.format(error_cycles))
-
-                clock.error_cycles = error_cycles
-
-        # offset
-        if 'offset' in node:
-            offset = node['offset']
-
-            if offset is None:
-                clock.set_default_offset_seconds()
-                clock.set_default_offset_cycles()
-            else:
-                if not _is_assoc_array_prop(offset):
-                    raise ConfigError('Clock',
-                                      '"offset" property must be an associative array')
-
-                unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds'])
-
-                if unk_prop:
-                    raise ConfigError('Clock',
-                                      'Unknown offset property: "{}"'.format(unk_prop))
-
-                # cycles
-                if 'cycles' in offset:
-                    offset_cycles = offset['cycles']
-
-                    if offset_cycles is None:
-                        clock.set_default_offset_cycles()
-                    else:
-                        if not _is_int_prop(offset_cycles):
-                            raise ConfigError('Clock\'s "offset" property',
-                                              '"cycles" property must be an integer')
-
-                        if offset_cycles < 0:
-                            raise ConfigError('Clock\'s "offset" property',
-                                              'Invalid cycles: {}'.format(offset_cycles))
-
-                        clock.offset_cycles = offset_cycles
-
-                # seconds
-                if 'seconds' in offset:
-                    offset_seconds = offset['seconds']
-
-                    if offset_seconds is None:
-                        clock.set_default_offset_seconds()
-                    else:
-                        if not _is_int_prop(offset_seconds):
-                            raise ConfigError('Clock\'s "offset" property',
-                                              '"seconds" property must be an integer')
-
-                        if offset_seconds < 0:
-                            raise ConfigError('Clock\'s "offset" property',
-                                              'Invalid seconds: {}'.format(offset_seconds))
-
-                        clock.offset_seconds = offset_seconds
-
-        # absolute
-        if 'absolute' in node:
-            absolute = node['absolute']
-
-            if absolute is None:
-                clock.set_default_absolute()
-            else:
-                if not _is_bool_prop(absolute):
-                    raise ConfigError('Clock',
-                                      '"absolute" property must be a boolean')
-
-                clock.absolute = absolute
-
-        # return C type:
-        #   v2.0:  "return-ctype"
-        #   v2.1+: "$return-ctype"
-        return_ctype_node = None
-
-        if self._version >= 200:
-            if 'return-ctype' in node:
-                return_ctype_prop = 'return-ctype'
-                return_ctype_node = node[return_ctype_prop]
-
-        if self._version >= 201:
-            if '$return-ctype' in node:
-                if return_ctype_node is not None:
-                    raise ConfigError('Clock',
-                                      'Cannot specify both "return-ctype" and "$return-ctype" properties: prefer "$return-ctype"')
-
-                return_ctype_prop = '$return-ctype'
-                return_ctype_node = node[return_ctype_prop]
-
-        if return_ctype_node is not None:
-            if return_ctype_node is None:
-                clock.set_default_return_ctype()
-            else:
-                if not _is_str_prop(return_ctype_node):
-                    raise ConfigError('Clock',
-                                      '"{}" property must be a string'.format(return_ctype_prop))
-
-                clock.return_ctype = return_ctype_node
-
-        return clock
-
-    def _register_clocks(self, metadata_node):
-        self._clocks = collections.OrderedDict()
-
-        if 'clocks' not in metadata_node:
-            return
-
-        clocks_node = metadata_node['clocks']
+            self._event_features = event_features
 
-        if clocks_node is None:
-            return
-
-        if not _is_assoc_array_prop(clocks_node):
-            raise ConfigError('Metadata',
-                              '"clocks" property must be an associative array')
-
-        for clock_name, clock_node in clocks_node.items():
-            if not _is_valid_identifier(clock_name):
-                raise ConfigError('Metadata',
-                                  'Invalid clock name: "{}"'.format(clock_name))
-
-            if clock_name in self._clocks:
-                raise ConfigError('Metadata',
-                                  'Duplicate clock "{}"'.format(clock_name))
-
-            try:
-                clock = self._create_clock(clock_node)
-            except ConfigError as exc:
-                exc.append_ctx('Metadata',
-                               'Cannot create clock "{}"'.format(clock_name))
-                raise
-
-            clock.name = clock_name
-            self._clocks[clock_name] = clock
-
-    def _create_env(self, metadata_node):
-        env = collections.OrderedDict()
-
-        if 'env' not in metadata_node:
-            return env
-
-        env_node = metadata_node['env']
-
-        if env_node is None:
-            return env
-
-        if not _is_assoc_array_prop(env_node):
-            raise ConfigError('Metadata',
-                              '"env" property must be an associative array')
-
-        for env_name, env_value in env_node.items():
-            if env_name in env:
-                raise ConfigError('Metadata',
-                                  'Duplicate environment variable "{}"'.format(env_name))
-
-            if not _is_valid_identifier(env_name):
-                raise ConfigError('Metadata',
-                                  'Invalid environment variable name: "{}"'.format(env_name))
-
-            if not _is_int_prop(env_value) and not _is_str_prop(env_value):
-                raise ConfigError('Metadata',
-                                  'Invalid environment variable value ("{}"): expecting integer or string'.format(env_name))
-
-            env[env_name] = env_value
-
-        return env
-
-    def _register_log_levels(self, metadata_node):
-        self._log_levels = dict()
-
-        # log levels:
-        #   v2.0:  "log-levels"
-        #   v2.1+: "$log-levels"
-        log_levels_node = None
-
-        if self._version >= 200:
-            if 'log-levels' in metadata_node:
-                log_levels_prop = 'log-levels'
-                log_levels_node = metadata_node[log_levels_prop]
+    @property
+    def packet_features(self) -> StreamTypePacketFeatures:
+        return self._packet_features
 
-        if self._version >= 201:
-            if '$log-levels' in metadata_node:
-                if log_levels_node is not None:
-                    raise ConfigError('Metadata',
-                                      'Cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"')
+    @property
+    def event_features(self) -> StreamTypeEventFeatures:
+        return self._event_features
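+
+# Illustrative sketch, assuming the feature classes above (pkt_features
+# from the previous sketch); an omitted group falls back to a
+# default-constructed instance.
+#
+#     features = StreamTypeFeatures(
+#         packet_features=pkt_features,
+#         event_features=StreamTypeEventFeatures(
+#             type_id_field_type=UnsignedIntegerFieldType(8)))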
 
-                log_levels_prop = '$log-levels'
-                log_levels_node = metadata_node[log_levels_prop]
 
-        if log_levels_node is None:
+class StreamType(_UniqueByName):
+    def __init__(self, name: str, event_types: Set[EventType],
+                 default_clock_type: Optional[ClockType] = None,
+                 features: Optional[StreamTypeFeatures] = None,
+                 packet_context_field_type_extra_members: Optional[_StructFtMembers] = None,
+                 event_common_context_field_type: _OptStructFt = None):
+        self._id: Optional[Id] = None
+        self._name = name
+        self._default_clock_type = default_clock_type
+        self._event_common_context_field_type = event_common_context_field_type
+        self._event_types = frozenset(event_types)
+
+        # assign unique IDs
+        for index, ev_type in enumerate(sorted(self._event_types, key=lambda evt: evt.name)):
+            assert ev_type._id is None
+            ev_type._id = Id(index)
+
+        self._set_features(features)
+        self._packet_context_field_type_extra_members = StructureFieldTypeMembers({})
+
+        if packet_context_field_type_extra_members is not None:
+            self._packet_context_field_type_extra_members = StructureFieldTypeMembers(packet_context_field_type_extra_members)
+
+        self._set_pkt_ctx_ft()
+        self._set_ev_header_ft()
+
+    def _set_features(self, features: Optional[StreamTypeFeatures]):
+        if features is not None:
+            self._features = features
+            return None
+
+        ev_time_ft = None
+        pkt_beginning_time_ft = None
+        pkt_end_time_ft = None
+
+        if self._default_clock_type is not None:
+            # Automatic time field types because the stream type has a
+            # default clock type.
+            ev_time_ft = DEFAULT_FIELD_TYPE
+            pkt_beginning_time_ft = DEFAULT_FIELD_TYPE
+            pkt_end_time_ft = DEFAULT_FIELD_TYPE
+
+        self._features = StreamTypeFeatures(StreamTypePacketFeatures(beginning_time_field_type=pkt_beginning_time_ft,
+                                                                     end_time_field_type=pkt_end_time_ft),
+                                            StreamTypeEventFeatures(time_field_type=ev_time_ft))
+
+    def _set_ft_mapped_clk_type_name(self, ft: Optional[UnsignedIntegerFieldType]):
+        if ft is None:
             return
 
-        if not _is_assoc_array_prop(log_levels_node):
-            raise ConfigError('Metadata',
-                              '"{}" property (metadata) must be an associative array'.format(log_levels_prop))
-
-        for ll_name, ll_value in log_levels_node.items():
-            if ll_name in self._log_levels:
-                raise ConfigError('"{}" property'.format(log_levels_prop),
-                                  'Duplicate entry "{}"'.format(ll_name))
-
-            if not _is_int_prop(ll_value):
-                raise ConfigError('"{}" property'.format(log_levels_prop),
-                                  'Invalid entry ("{}"): expecting an integer'.format(ll_name))
-
-            if ll_value < 0:
-                raise ConfigError('"{}" property'.format(log_levels_prop),
-                                  'Invalid entry ("{}"): value must be positive'.format(ll_name))
+        if self._default_clock_type is not None:
+            assert isinstance(ft, UnsignedIntegerFieldType)
+            ft._mapped_clk_type_name = self._default_clock_type.name
 
-            self._log_levels[ll_name] = ll_value
+    def _set_pkt_ctx_ft(self):
+        members = None
 
-    def _create_trace(self, metadata_node):
-        # create trace object
-        trace = metadata.Trace()
+        def add_member_if_exists(name: str, ft: Optional[_FieldType], set_mapped_clk_type_name: bool = False):
+            nonlocal members
 
-        if 'trace' not in metadata_node:
-            raise ConfigError('Metadata', 'Missing "trace" property')
+            if ft is not None:
+                if set_mapped_clk_type_name:
+                    self._set_ft_mapped_clk_type_name(typing.cast(UnsignedIntegerFieldType, ft))
 
-        trace_node = metadata_node['trace']
-
-        if not _is_assoc_array_prop(trace_node):
-            raise ConfigError('Metadata',
-                              '"trace" property must be an associative array')
-
-        unk_prop = _get_first_unknown_prop(trace_node, [
-            'byte-order',
-            'uuid',
-            'packet-header-type',
-        ])
+                members[name] = StructureFieldTypeMember(ft)
 
-        if unk_prop:
-            raise ConfigError('Trace',
-                              'Unknown property: "{}"'.format(unk_prop))
-
-        # set byte order (already parsed)
-        trace.byte_order = self._bo
-
-        # UUID
-        if 'uuid' in trace_node and trace_node['uuid'] is not None:
-            uuidp = trace_node['uuid']
-
-            if not _is_str_prop(uuidp):
-                raise ConfigError('Trace',
-                                  '"uuid" property must be a string')
-
-            if uuidp == 'auto':
-                uuidp = uuid.uuid1()
-            else:
-                try:
-                    uuidp = uuid.UUID(uuidp)
-                except:
-                    raise ConfigError('Trace',
-                                      'Malformed UUID: "{}"'.format(uuidp))
-
-            trace.uuid = uuidp
-
-        # packet header type
-        if 'packet-header-type' in trace_node and trace_node['packet-header-type'] is not None:
-            try:
-                ph_type = self._create_type(trace_node['packet-header-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Trace',
-                               'Cannot create packet header type')
-                raise
-
-            trace.packet_header_type = ph_type
-
-        return trace
-
-    def _lookup_log_level(self, ll):
-        if _is_int_prop(ll):
-            return ll
-        elif _is_str_prop(ll) and ll in self._log_levels:
-            return self._log_levels[ll]
-
-    def _create_event(self, event_node):
-        event = metadata.Event()
-
-        if not _is_assoc_array_prop(event_node):
-            raise ConfigError('Event',
-                              'Expecting associative array')
-
-        unk_prop = _get_first_unknown_prop(event_node, [
-            'log-level',
-            'context-type',
-            'payload-type',
+        members = collections.OrderedDict([
+            (
+                'packet_size',
+                StructureFieldTypeMember(self._features.packet_features.total_size_field_type)
+            ),
+            (
+                'content_size',
+                StructureFieldTypeMember(self._features.packet_features.content_size_field_type)
+            )
         ])
 
-        if unk_prop:
-            raise ConfigError('Event',
-                              'Unknown property: "{}"'.format(unk_prop))
+        add_member_if_exists('timestamp_begin',
+                             self._features.packet_features.beginning_time_field_type, True)
+        add_member_if_exists('timestamp_end', self._features.packet_features.end_time_field_type,
+                             True)
+        add_member_if_exists('events_discarded',
+                             self._features.packet_features.discarded_events_counter_field_type)
 
-        if 'log-level' in event_node and event_node['log-level'] is not None:
-            ll_node = event_node['log-level']
+        if self._packet_context_field_type_extra_members is not None:
+            for name, field_type in self._packet_context_field_type_extra_members.items():
+                assert name not in members
+                members[name] = field_type
 
-            if _is_str_prop(ll_node):
-                ll_value = self._lookup_log_level(event_node['log-level'])
+        self._pkt_ctx_ft = StructureFieldType(8, members)
 
-                if ll_value is None:
-                    raise ConfigError('Event\'s "log-level" property',
-                                      'Cannot find log level "{}"'.format(ll_node))
+    def _set_ev_header_ft(self):
+        members = collections.OrderedDict()
 
-                ll = metadata.LogLevel(event_node['log-level'], ll_value)
-            elif _is_int_prop(ll_node):
-                if ll_node < 0:
-                    raise ConfigError('Event\'s "log-level" property',
-                                      'Invalid value {}: value must be positive'.format(ll_node))
+        if self._features.event_features.type_id_field_type is not None:
+            members['id'] = StructureFieldTypeMember(self._features.event_features.type_id_field_type)
 
-                ll = metadata.LogLevel(None, ll_node)
-            else:
-                raise ConfigError('Event\'s "log-level" property',
-                                  'Must be either a string or an integer')
+        if self._features.event_features.time_field_type is not None:
+            ft = self._features.event_features.time_field_type
+            self._set_ft_mapped_clk_type_name(ft)
+            members['timestamp'] = StructureFieldTypeMember(ft)
 
-            event.log_level = ll
+        self._ev_header_ft = StructureFieldType(8, members)
 
-        if 'context-type' in event_node and event_node['context-type'] is not None:
-            ctx_type_node = event_node['context-type']
-
-            try:
-                t = self._create_type(event_node['context-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Event',
-                               'Cannot create context type object')
-                raise
-
-            event.context_type = t
-
-        if 'payload-type' in event_node and event_node['payload-type'] is not None:
-            try:
-                t = self._create_type(event_node['payload-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Event',
-                               'Cannot create payload type object')
-                raise
-
-            event.payload_type = t
-
-        return event
-
-    def _create_stream(self, stream_name, stream_node):
-        stream = metadata.Stream()
-
-        if not _is_assoc_array_prop(stream_node):
-            raise ConfigError('Stream objects must be associative arrays')
-
-        known_props = [
-            'packet-context-type',
-            'event-header-type',
-            'event-context-type',
-            'events',
-        ]
-
-        if self._version >= 202:
-            known_props.append('$default')
-
-        unk_prop = _get_first_unknown_prop(stream_node, known_props)
-
-        if unk_prop:
-            add = ''
-
-            if unk_prop == '$default':
-                add = ' (use version 2.2 or greater)'
-
-            raise ConfigError('Stream',
-                              'Unknown property{}: "{}"'.format(add, unk_prop))
-
-        if 'packet-context-type' in stream_node and stream_node['packet-context-type'] is not None:
-            try:
-                t = self._create_type(stream_node['packet-context-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Stream',
-                               'Cannot create packet context type object')
-                raise
-
-            stream.packet_context_type = t
-
-        if 'event-header-type' in stream_node and stream_node['event-header-type'] is not None:
-            try:
-                t = self._create_type(stream_node['event-header-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Stream',
-                               'Cannot create event header type object')
-                raise
-
-            stream.event_header_type = t
-
-        if 'event-context-type' in stream_node and stream_node['event-context-type'] is not None:
-            try:
-                t = self._create_type(stream_node['event-context-type'])
-            except ConfigError as exc:
-                exc.append_ctx('Stream',
-                               'Cannot create event context type object')
-                raise
+    @property
+    def id(self) -> Optional[Id]:
+        return self._id
 
-            stream.event_context_type = t
+    @property
+    def name(self) -> str:
+        return self._name
 
-        if 'events' not in stream_node:
-            raise ConfigError('Stream',
-                              'Missing "events" property')
+    @property
+    def default_clock_type(self) -> Optional[ClockType]:
+        return self._default_clock_type
 
-        events = stream_node['events']
+    @property
+    def features(self) -> StreamTypeFeatures:
+        return self._features
 
-        if events is not None:
-            if not _is_assoc_array_prop(events):
-                raise ConfigError('Stream',
-                                  '"events" property must be an associative array')
+    @property
+    def packet_context_field_type_extra_members(self) -> StructureFieldTypeMembers:
+        return self._packet_context_field_type_extra_members
 
-            if not events:
-                raise ConfigError('Stream', 'At least one event is needed')
+    @property
+    def event_common_context_field_type(self) -> _OptStructFt:
+        return self._event_common_context_field_type
 
-            cur_id = 0
+    @property
+    def event_types(self) -> FrozenSet[EventType]:
+        return self._event_types
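+
+# Illustrative sketch (my_event_type and my_clock_type are assumed to be
+# an EventType and a ClockType built elsewhere): with a default clock
+# type and no explicit features, _set_features() enables 64-bit event
+# and packet timestamp field types automatically.
+#
+#     stream_type = StreamType('default', {my_event_type},
+#                              default_clock_type=my_clock_type)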
 
-            for ev_name, ev_node in events.items():
-                try:
-                    ev = self._create_event(ev_node)
-                except ConfigError as exc:
-                    exc.append_ctx('Stream',
-                                   'Cannot create event "{}"'.format(ev_name))
-                    raise
 
-                ev.id = cur_id
-                ev.name = ev_name
-                stream.events[ev_name] = ev
-                cur_id += 1
+_OptUuidFt = Optional[Union[str, StaticArrayFieldType]]
 
-        if '$default' in stream_node and stream_node['$default'] is not None:
-            default_node = stream_node['$default']
 
-            if not _is_bool_prop(default_node):
-                raise ConfigError('Stream',
-                                  'Invalid "$default" property: expecting a boolean')
+class TraceTypeFeatures:
+    def __init__(self, magic_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE,
+                 uuid_field_type: _OptUuidFt = None,
+                 stream_type_id_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE):
+        def get_field_type(user_ft: Optional[Union[str, _FieldType]],
+                           create_default_ft: Callable[[], _FieldType]) -> _OptFt:
+            if user_ft == DEFAULT_FIELD_TYPE:
+                return create_default_ft()
 
-            if default_node:
-                if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
-                    fmt = 'Cannot specify more than one default stream (default stream already set to "{}")'
-                    raise ConfigError('Stream',
-                                      fmt.format(self._meta.default_stream_name))
+            return typing.cast(_OptFt, user_ft)
 
-                self._meta.default_stream_name = stream_name
+        def create_default_magic_ft():
+            return UnsignedIntegerFieldType(32)
 
-        return stream
+        def create_default_uuid_ft():
+            return StaticArrayFieldType(Count(16), UnsignedIntegerFieldType(8))
 
-    def _create_streams(self, metadata_node):
-        streams = collections.OrderedDict()
+        def create_default_stream_type_id_ft():
+            return UnsignedIntegerFieldType(64)
 
-        if 'streams' not in metadata_node:
-            raise ConfigError('Metadata',
-                              'Missing "streams" property')
+        self._magic_field_type = typing.cast(_OptUIntFt, get_field_type(magic_field_type, create_default_magic_ft))
+        self._uuid_field_type = typing.cast(Optional[StaticArrayFieldType],
+                                            get_field_type(uuid_field_type, create_default_uuid_ft))
+        self._stream_type_id_field_type = typing.cast(_OptUIntFt,
+                                                      get_field_type(stream_type_id_field_type,
+                                                                     create_default_stream_type_id_ft))
 
-        streams_node = metadata_node['streams']
+    @property
+    def magic_field_type(self) -> _OptUIntFt:
+        return self._magic_field_type
 
-        if not _is_assoc_array_prop(streams_node):
-            raise ConfigError('Metadata',
-                              '"streams" property must be an associative array')
+    @property
+    def uuid_field_type(self) -> Optional[StaticArrayFieldType]:
+        return self._uuid_field_type
 
-        if not streams_node:
-            raise ConfigError('Metadata\'s "streams" property',
-                              'At least one stream is needed')
+    @property
+    def stream_type_id_field_type(self) -> _OptUIntFt:
+        return self._stream_type_id_field_type
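+
+# Illustrative sketch: keep the default magic and stream type ID field
+# types and also request the default UUID field type (a static array of
+# 16 unsigned 8-bit integers).
+#
+#     trace_features = TraceTypeFeatures(uuid_field_type=DEFAULT_FIELD_TYPE)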
 
-        cur_id = 0
 
-        for stream_name, stream_node in streams_node.items():
-            try:
-                stream = self._create_stream(stream_name, stream_node)
-            except ConfigError as exc:
-                exc.append_ctx('Metadata',
-                               'Cannot create stream "{}"'.format(stream_name))
-                raise
+class TraceType:
+    def __init__(self, stream_types: Set[StreamType], uuid: _OptUuid = None,
+                 features: Optional[TraceTypeFeatures] = None):
+        self._stream_types = frozenset(stream_types)
 
-            stream.id = cur_id
-            stream.name = str(stream_name)
-            streams[stream_name] = stream
-            cur_id += 1
+        # assign unique IDs
+        for index, stream_type in enumerate(sorted(self._stream_types, key=lambda st: st.name)):
+            assert stream_type._id is None
+            stream_type._id = Id(index)
 
-        return streams
+        self._uuid = uuid
+        self._set_features(features)
+        self._set_pkt_header_ft()
 
-    def _create_metadata(self, root):
-        self._meta = metadata.Metadata()
+    def _set_features(self, features: Optional[TraceTypeFeatures]):
+        if features is not None:
+            self._features = features
+            return
 
-        if 'metadata' not in root:
-            raise ConfigError('Configuration',
-                              'Missing "metadata" property')
+        # automatic UUID field type because the trace type has a UUID
+        uuid_ft = None if self._uuid is None else DEFAULT_FIELD_TYPE
+        self._features = TraceTypeFeatures(uuid_field_type=uuid_ft)
 
-        metadata_node = root['metadata']
+    def _set_pkt_header_ft(self):
+        members = collections.OrderedDict()
 
-        if not _is_assoc_array_prop(metadata_node):
-            raise ConfigError('Configuration\'s "metadata" property',
-                              'Must be an associative array')
+        def add_member_if_exists(name: str, ft: _OptFt):
+            nonlocal members
 
-        known_props = [
-            'type-aliases',
-            'log-levels',
-            'trace',
-            'env',
-            'clocks',
-            'streams',
-        ]
+            if ft is not None:
+                members[name] = StructureFieldTypeMember(ft)
 
-        if self._version >= 201:
-            known_props.append('$log-levels')
+        add_member_if_exists('magic', self._features.magic_field_type)
+        add_member_if_exists('uuid', self._features.uuid_field_type)
+        add_member_if_exists('stream_id', self._features.stream_type_id_field_type)
+        self._pkt_header_ft = StructureFieldType(8, members)
 
-        if self._version >= 202:
-            known_props.append('$default-stream')
+    @property
+    def uuid(self) -> _OptUuid:
+        return self._uuid
 
-        unk_prop = _get_first_unknown_prop(metadata_node, known_props)
+    @property
+    def stream_types(self) -> FrozenSet[StreamType]:
+        return self._stream_types
 
-        if unk_prop:
-            add = ''
+    def stream_type(self, name: str) -> Optional[StreamType]:
+        for cand_stream_type in self._stream_types:
+            if cand_stream_type.name == name:
+                return cand_stream_type
 
-            if unk_prop == '$include':
-                add = ' (use version 2.1 or greater)'
+        return None
 
-            if unk_prop == '$default-stream':
-                add = ' (use version 2.2 or greater)'
+    @property
+    def features(self) -> TraceTypeFeatures:
+        return self._features
 
-            raise ConfigError('Metadata',
-                              'Unknown property{}: "{}"'.format(add, unk_prop))
+    @property
+    def clock_types(self) -> Set[ClockType]:
+        clk_types = set()
 
-        if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
-            default_stream_node = metadata_node['$default-stream']
+        for stream_type in self._stream_types:
+            if stream_type.default_clock_type is not None:
+                clk_types.add(stream_type.default_clock_type)
 
-            if not _is_str_prop(default_stream_node):
-                raise ConfigError('Metadata\'s "$default-stream" property',
-                                  'Expecting a string')
+        return clk_types
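+
+# Illustrative sketch (stream_type as above, trace_uuid being a
+# uuid.UUID value created elsewhere): when `uuid` is set and `features`
+# is left to None, _set_features() enables the default UUID field type
+# automatically.
+#
+#     trace_type = TraceType({stream_type}, uuid=trace_uuid)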
 
-            self._meta.default_stream_name = default_stream_node
 
-        self._set_byte_order(metadata_node)
-        self._register_clocks(metadata_node)
-        self._meta.clocks = self._clocks
-        self._register_type_aliases(metadata_node)
-        self._meta.env = self._create_env(metadata_node)
-        self._meta.trace = self._create_trace(metadata_node)
-        self._register_log_levels(metadata_node)
-        self._meta.streams = self._create_streams(metadata_node)
+_EnvEntry = Union[str, int]
+_EnvEntries = Mapping[str, _EnvEntry]
 
-        return self._meta
 
-    def _get_version(self, root):
-        if 'version' not in root:
-            raise ConfigError('Configuration',
-                              'Missing "version" property')
+class TraceEnvironment(collections.abc.Mapping):
+    def __init__(self, environment: _EnvEntries):
+        self._env = {name: value for name, value in environment.items()}
 
-        version_node = root['version']
+    def __getitem__(self, key: str) -> _EnvEntry:
+        return self._env[key]
 
-        if not _is_str_prop(version_node):
-            raise ConfigError('Configuration\'s "version" property',
-                              'Must be a string')
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._env)
 
-        version_node = version_node.strip()
+    def __len__(self) -> int:
+        return len(self._env)
 
-        if version_node not in ['2.0', '2.1', '2.2']:
-            raise ConfigError('Configuration',
-                              'Unsupported version ({}): versions 2.0, 2.1, and 2.2 are supported'.format(version_node))
 
-        # convert version string to comparable version integer
-        parts = version_node.split('.')
-        version = int(parts[0]) * 100 + int(parts[1])
+class Trace:
+    def __init__(self, type: TraceType, environment: Optional[_EnvEntries] = None):
+        self._type = type
+        self._set_env(environment)
 
-        return version
+    def _set_env(self, environment: Optional[_EnvEntries]):
+        init_env = collections.OrderedDict([
+            ('domain', 'bare'),
+            ('tracer_name', 'barectf'),
+            ('tracer_major', barectf_version.__major_version__),
+            ('tracer_minor', barectf_version.__minor_version__),
+            ('tracer_patch', barectf_version.__patch_version__),
+            ('barectf_gen_date', str(datetime.datetime.now().isoformat())),
+        ])
 
-    def _get_prefix(self, root):
-        def_prefix = 'barectf_'
+        if environment is None:
+            environment = {}
 
-        if 'prefix' not in root:
-            return def_prefix
+        init_env.update(environment)
+        self._env = TraceEnvironment(typing.cast(_EnvEntries, init_env))
 
-        prefix_node = root['prefix']
+    @property
+    def type(self) -> TraceType:
+        return self._type
 
-        if prefix_node is None:
-            return def_prefix
+    @property
+    def environment(self) -> TraceEnvironment:
+        return self._env
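+
+# Illustrative sketch (hypothetical 'node' entry): user entries are
+# merged over the entries which _set_env() always provides (domain,
+# tracer_name, tracer_major/minor/patch and barectf_gen_date).
+#
+#     trace = Trace(trace_type, environment={'node': 'sensor-7'})
+#     trace.environment['domain']   # 'bare'
+#     trace.environment['node']     # 'sensor-7'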
 
-        if not _is_str_prop(prefix_node):
-            raise ConfigError('Configuration\'s "prefix" property',
-                              'Must be a string')
 
-        if not _is_valid_identifier(prefix_node):
-            raise ConfigError('Configuration\'s "prefix" property',
-                              'Must be a valid C identifier')
+_ClkTypeCTypes = Mapping[ClockType, str]
 
-        return prefix_node
 
-    def _get_options(self, root):
-        cfg_options = ConfigOptions()
+class ClockTypeCTypes(collections.abc.Mapping):
+    def __init__(self, c_types: _ClkTypeCTypes):
+        self._c_types = {clk_type: c_type for clk_type, c_type in c_types.items()}
 
-        if 'options' not in root:
-            return cfg_options
+    def __getitem__(self, key: ClockType) -> str:
+        return self._c_types[key]
 
-        options_node = root['options']
+    def __iter__(self) -> Iterator[ClockType]:
+        return iter(self._c_types)
 
-        if not _is_assoc_array_prop(options_node):
-            raise ConfigError('Configuration\'s "options" property',
-                              'Must be an associative array')
+    def __len__(self) -> int:
+        return len(self._c_types)
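+
+# Note: this mapping likely assigns, to each clock type, the C type
+# returned by the user-provided clock source function, the role played
+# by the removed "return-ctype"/"$return-ctype" clock property above.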
 
-        known_props = [
-            'gen-prefix-def',
-            'gen-default-stream-def',
-        ]
-        unk_prop = _get_first_unknown_prop(options_node, known_props)
 
-        if unk_prop:
-            raise ConfigError('Configuration\'s "options" property',
-                              'Unknown property: "{}"'.format(unk_prop))
+class ConfigurationCodeGenerationHeaderOptions:
+    def __init__(self, identifier_prefix_definition: bool = False,
+                 default_stream_type_name_definition: bool = False):
+        self._identifier_prefix_definition = identifier_prefix_definition
+        self._default_stream_type_name_definition = default_stream_type_name_definition
 
-        if 'gen-prefix-def' in options_node and options_node['gen-prefix-def'] is not None:
-            gen_prefix_def_node = options_node['gen-prefix-def']
+    @property
+    def identifier_prefix_definition(self) -> bool:
+        return self._identifier_prefix_definition
 
-            if not _is_bool_prop(gen_prefix_def_node):
-                raise ConfigError('Configuration\'s "options" property',
-                                  'Invalid option "gen-prefix-def": expecting a boolean')
+    @property
+    def default_stream_type_name_definition(self) -> bool:
+        return self._default_stream_type_name_definition
 
-            cfg_options.gen_prefix_def = gen_prefix_def_node
 
-        if 'gen-default-stream-def' in options_node and options_node['gen-default-stream-def'] is not None:
-            gen_default_stream_def_node = options_node['gen-default-stream-def']
+class ConfigurationCodeGenerationOptions:
+    def __init__(self, identifier_prefix: str = 'barectf_', file_name_prefix: str = 'barectf',
+                 default_stream_type: Optional[StreamType] = None,
+                 header_options: Optional[ConfigurationCodeGenerationHeaderOptions] = None,
+                 clock_type_c_types: Optional[_ClkTypeCTypes] = None):
+        self._identifier_prefix = identifier_prefix
+        self._file_name_prefix = file_name_prefix
+        self._default_stream_type = default_stream_type
 
-            if not _is_bool_prop(gen_default_stream_def_node):
-                raise ConfigError('Configuration\'s "options" property',
-                                  'Invalid option "gen-default-stream-def": expecting a boolean')
+        self._header_options = ConfigurationCodeGenerationHeaderOptions()
 
-            cfg_options.gen_default_stream_def = gen_default_stream_def_node
+        if header_options is not None:
+            self._header_options = header_options
 
-        return cfg_options
+        self._clock_type_c_types = ClockTypeCTypes({})
 
-    def _get_last_include_file(self):
-        if self._include_stack:
-            return self._include_stack[-1]
+        if clock_type_c_types is not None:
+            self._clock_type_c_types = ClockTypeCTypes(clock_type_c_types)
 
-        return self._root_yaml_path
+    @property
+    def identifier_prefix(self) -> str:
+        return self._identifier_prefix
 
-    def _load_include(self, yaml_path):
-        for inc_dir in self._include_dirs:
-            # current include dir + file name path
-            # note: os.path.join() only takes the last arg if it's absolute
-            inc_path = os.path.join(inc_dir, yaml_path)
+    @property
+    def file_name_prefix(self) -> str:
+        return self._file_name_prefix
 
-            # real path (symbolic links resolved)
-            real_path = os.path.realpath(inc_path)
+    @property
+    def default_stream_type(self) -> Optional[StreamType]:
+        return self._default_stream_type
 
-            # normalized path (weird stuff removed!)
-            norm_path = os.path.normpath(real_path)
+    @property
+    def header_options(self) -> ConfigurationCodeGenerationHeaderOptions:
+        return self._header_options
 
-            if not os.path.isfile(norm_path):
-                # file does not exist: skip
-                continue
+    @property
+    def clock_type_c_types(self) -> ClockTypeCTypes:
+        return self._clock_type_c_types
 
-            if norm_path in self._include_stack:
-                base_path = self._get_last_include_file()
-                raise ConfigError('In "{}"'.format(base_path),
-                                  'Cannot recursively include file "{}"'.format(norm_path))
 
-            self._include_stack.append(norm_path)
+class ConfigurationOptions:
+    def __init__(self,
+                 code_generation_options: Optional[ConfigurationCodeGenerationOptions] = None):
+        self._code_generation_options = ConfigurationCodeGenerationOptions()
 
-            # load raw content
-            return self._yaml_ordered_load(norm_path)
+        if code_generation_options is not None:
+            self._code_generation_options = code_generation_options
 
-        if not self._ignore_include_not_found:
-            base_path = self._get_last_include_file()
-            raise ConfigError('In "{}"'.format(base_path),
-                              'Cannot include file "{}": file not found in include directories'.format(yaml_path))
+    @property
+    def code_generation_options(self) -> ConfigurationCodeGenerationOptions:
+        return self._code_generation_options
 
-        return None
 
-    def _get_include_paths(self, include_node):
-        if include_node is None:
-            return []
-
-        if _is_str_prop(include_node):
-            return [include_node]
-
-        if _is_array_prop(include_node):
-            for include_path in include_node:
-                if not _is_str_prop(include_path):
-                    raise ConfigError('"$include" property',
-                                      'Expecting array of strings')
-
-            return include_node
-
-        raise ConfigError('"$include" property',
-                          'Expecting string or array of strings')
-
-    def _update_node(self, base_node, overlay_node):
-        for olay_key, olay_value in overlay_node.items():
-            if olay_key in base_node:
-                base_value = base_node[olay_key]
-
-                if _is_assoc_array_prop(olay_value) and _is_assoc_array_prop(base_value):
-                    # merge dictionaries
-                    self._update_node(base_value, olay_value)
-                elif _is_array_prop(olay_value) and _is_array_prop(base_value):
-                    # append extension array items to base items
-                    base_value += olay_value
-                else:
-                    # fall back to replacing
-                    base_node[olay_key] = olay_value
-            else:
-                base_node[olay_key] = olay_value
-
-    def _process_node_include(self, last_overlay_node, name,
-                              process_base_include_cb,
-                              process_children_include_cb=None):
-        if not _is_assoc_array_prop(last_overlay_node):
-            raise ConfigError('"$include" property',
-                              '{} objects must be associative arrays'.format(name))
-
-        # process children inclusions first
-        if process_children_include_cb:
-            process_children_include_cb(last_overlay_node)
-
-        if '$include' in last_overlay_node:
-            include_node = last_overlay_node['$include']
-        else:
-            # no includes!
-            return last_overlay_node
+class Configuration:
+    def __init__(self, trace: Trace, target_byte_order: ByteOrder,
+                 options: Optional[ConfigurationOptions] = None):
+        self._trace = trace
+        self._options = ConfigurationOptions()
+        self._target_byte_order = target_byte_order
 
-        include_paths = self._get_include_paths(include_node)
-        cur_base_path = self._get_last_include_file()
-        base_node = None
+        if options is not None:
+            self._options = options
 
-        # keep the include paths and remove the include property
-        include_paths = copy.deepcopy(include_paths)
-        del last_overlay_node['$include']
+        clk_type_c_types = self._options.code_generation_options.clock_type_c_types
 
-        for include_path in include_paths:
-            # load raw YAML from included file
-            overlay_node = self._load_include(include_path)
+        for stream_type in trace.type.stream_types:
+            def_clk_type = stream_type.default_clock_type
 
-            if overlay_node is None:
-                # cannot find include file, but we're ignoring those
-                # errors, otherwise _load_include() itself raises
-                # a config error
+            if def_clk_type is None:
                 continue
 
-            # recursively process includes
-            try:
-                overlay_node = process_base_include_cb(overlay_node)
-            except ConfigError as exc:
-                exc.append_ctx('In "{}"'.format(cur_base_path))
-                raise
-
-            # pop include stack now that we're done including
-            del self._include_stack[-1]
-
-            # at this point, base_node is fully resolved (does not
-            # contain any include property)
-            if base_node is None:
-                base_node = overlay_node
-            else:
-                self._update_node(base_node, overlay_node)
-
-        # finally, we update the latest base node with our last overlay
-        # node
-        if base_node is None:
-            # nothing was included, which is possible when we're
-            # ignoring include errors
-            return last_overlay_node
-
-        self._update_node(base_node, last_overlay_node)
-
-        return base_node
-
-    def _process_event_include(self, event_node):
-        return self._process_node_include(event_node, 'event',
-                                          self._process_event_include)
-
-    def _process_stream_include(self, stream_node):
-        def process_children_include(stream_node):
-            if 'events' in stream_node:
-                events_node = stream_node['events']
-
-                if not _is_assoc_array_prop(events_node):
-                    raise ConfigError('"$include" property',
-                                      '"events" property must be an associative array')
-
-                events_node_keys = list(events_node.keys())
-
-                for key in events_node_keys:
-                    event_node = events_node[key]
-
-                    try:
-                        events_node[key] = self._process_event_include(event_node)
-                    except ConfigError as exc:
-                        exc.append_ctx('"$include" property',
-                                       'Cannot process includes of event object "{}"'.format(key))
-                        raise
-
-        return self._process_node_include(stream_node, 'stream',
-                                          self._process_stream_include,
-                                          process_children_include)
-
-    def _process_trace_include(self, trace_node):
-        return self._process_node_include(trace_node, 'trace',
-                                          self._process_trace_include)
-
-    def _process_clock_include(self, clock_node):
-        return self._process_node_include(clock_node, 'clock',
-                                          self._process_clock_include)
-
-    def _process_metadata_include(self, metadata_node):
-        def process_children_include(metadata_node):
-            if 'trace' in metadata_node:
-                metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])
-
-            if 'clocks' in metadata_node:
-                clocks_node = metadata_node['clocks']
-
-                if not _is_assoc_array_prop(clocks_node):
-                    raise ConfigError('"$include" property',
-                                      '"clocks" property must be an associative array')
-
-                clocks_node_keys = list(clocks_node.keys())
-
-                for key in clocks_node_keys:
-                    clock_node = clocks_node[key]
-
-                    try:
-                        clocks_node[key] = self._process_clock_include(clock_node)
-                    except ConfigError as exc:
-                        exc.append_ctx('"$include" property',
-                                       'Cannot process includes of clock object "{}"'.format(key))
-                        raise
-
-            if 'streams' in metadata_node:
-                streams_node = metadata_node['streams']
-
-                if not _is_assoc_array_prop(streams_node):
-                    raise ConfigError('"$include" property',
-                                      '"streams" property must be an associative array')
-
-                streams_node_keys = list(streams_node.keys())
-
-                for key in streams_node_keys:
-                    stream_node = streams_node[key]
-
-                    try:
-                        streams_node[key] = self._process_stream_include(stream_node)
-                    except ConfigError as exc:
-                        exc.append_ctx('"$include" property',
-                                       'Cannot process includes of stream object "{}"'.format(key))
-                        raise
-
-        return self._process_node_include(metadata_node, 'metadata',
-                                          self._process_metadata_include,
-                                          process_children_include)
-
-    def _process_root_includes(self, root):
-        # The following config objects support includes:
-        #
-        #   * Metadata object
-        #   * Trace object
-        #   * Stream object
-        #   * Event object
-        #
-        # We need to process the event includes first, then the stream
-        # includes, then the trace includes, and finally the metadata
-        # includes.
-        #
-        # In each object, only one of the $include and $include-replace
-        # special properties is allowed.
-        #
-        # We keep a stack of absolute paths to included files to detect
-        # recursion.
-        if 'metadata' in root:
-            root['metadata'] = self._process_metadata_include(root['metadata'])
-
-        return root
-
-    def _yaml_ordered_dump(self, node, **kwds):
-        class ODumper(yaml.Dumper):
-            pass
-
-        def dict_representer(dumper, node):
-            return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
-                                            node.items())
-
-        ODumper.add_representer(collections.OrderedDict, dict_representer)
-
-        return yaml.dump(node, Dumper=ODumper, **kwds)
-
-    def _yaml_ordered_load(self, yaml_path):
-        class OLoader(yaml.Loader):
-            pass
-
-        def construct_mapping(loader, node):
-            loader.flatten_mapping(node)
-
-            return collections.OrderedDict(loader.construct_pairs(node))
-
-        OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
-                                construct_mapping)
-
-        # YAML -> Python
-        try:
-            with open(yaml_path, 'r') as f:
-                node = yaml.load(f, OLoader)
-        except (OSError, IOError) as e:
-            raise ConfigError('Configuration',
-                              'Cannot open file "{}"'.format(yaml_path))
-        except ConfigError as exc:
-            exc.append_ctx('Configuration',
-                           'Unknown error while trying to load file "{}"'.format(yaml_path))
-            raise
-
-        # loaded node must be an associative array
-        if not _is_assoc_array_prop(node):
-            raise ConfigError('Configuration',
-                              'Root of YAML file "{}" must be an associative array'.format(yaml_path))
-
-        return node
-
-    def _reset(self):
-        self._version = None
-        self._include_stack = []
-
-    def parse(self, yaml_path):
-        self._reset()
-        self._root_yaml_path = yaml_path
-
-        try:
-            root = self._yaml_ordered_load(yaml_path)
-        except ConfigError as exc:
-            exc.append_ctx('Configuration',
-                           'Cannot parse YAML file "{}"'.format(yaml_path))
-            raise
-
-        if not _is_assoc_array_prop(root):
-            raise ConfigError('Configuration',
-                              'Must be an associative array')
-
-        # get the config version
-        self._version = self._get_version(root)
-
-        known_props = [
-            'version',
-            'prefix',
-            'metadata',
-        ]
-
-        if self._version >= 202:
-            known_props.append('options')
-
-        unk_prop = _get_first_unknown_prop(root, known_props)
-
-        if unk_prop:
-            add = ''
-
-            if unk_prop == 'options':
-                add = ' (use version 2.2 or greater)'
-
-            raise ConfigError('Configuration',
-                              'Unknown property{}: "{}"'.format(add, unk_prop))
-
-        # process includes if supported
-        if self._version >= 201:
-            root = self._process_root_includes(root)
-
-        # dump config if required
-        if self._dump_config:
-            print(self._yaml_ordered_dump(root, indent=2,
-                                          default_flow_style=False))
-
-        # get prefix and metadata
-        prefix = self._get_prefix(root)
-        meta = self._create_metadata(root)
-        opts = self._get_options(root)
-
-        return Config(self._version, prefix, meta, opts)
+            if def_clk_type not in clk_type_c_types:
+                clk_type_c_types._c_types[def_clk_type] = 'uint32_t'
 
+    @property
+    def trace(self) -> Trace:
+        return self._trace
 
-def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config):
-    try:
-        parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
-                                   dump_config)
-        cfg = parser.parse(path)
+    @property
+    def target_byte_order(self) -> ByteOrder:
+        return self._target_byte_order
 
-        return cfg
-    except ConfigError as exc:
-        exc.append_ctx('Configuration',
-                       'Cannot create configuration from YAML file "{}"'.format(path))
-        raise
+    @property
+    def options(self) -> ConfigurationOptions:
+        return self._options
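
Below the diff, for reference only: a minimal usage sketch (not part of this commit) showing how the
new option classes added above compose. It assumes a Trace instance named `trace` built elsewhere
with the rest of this module's API; the prefixes passed here are arbitrary illustration values, and
everything else uses only the constructors introduced by this change.

    # Hypothetical usage of the classes added in this commit.
    import barectf.config as barectf_config

    code_gen_options = barectf_config.ConfigurationCodeGenerationOptions(
        identifier_prefix='my_app_',   # default is 'barectf_'
        file_name_prefix='my_app',     # default is 'barectf'
    )

    configuration = barectf_config.Configuration(
        trace=trace,  # assumption: an existing barectf.config.Trace object
        target_byte_order=barectf_config.ByteOrder.LITTLE_ENDIAN,
        options=barectf_config.ConfigurationOptions(code_gen_options),
    )

    # Per Configuration.__init__() above, any stream type with a default
    # clock type but no explicit C type mapping is given 'uint32_t'.
    assert configuration.options.code_generation_options.identifier_prefix == 'my_app_'

Wrapping ConfigurationCodeGenerationOptions inside ConfigurationOptions rather than passing it to
Configuration directly presumably leaves room for future, non-code-generation option groups without
changing Configuration's signature.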