Rename "event type" -> "event record type", "stream type" -> "data stream type"
authorPhilippe Proulx <eeppeliteloop@gmail.com>
Wed, 16 Sep 2020 23:36:59 +0000 (19:36 -0400)
committerPhilippe Proulx <eeppeliteloop@gmail.com>
Thu, 17 Sep 2020 00:01:43 +0000 (20:01 -0400)
This matches the eventual CTF 2 terminology and makes writing the barectf
documentation easier, as a data stream type describes data streams and
an event record type describes event records. An event record is what
the tracer records for an occurring event.

Throughout the code, for names, variables, and file names:

* `dst` means "data stream type".
* `ds` means "data stream".
* `ert` means "event record type".
* `er` means "event record".

The v3 YAML schemas are modified accordingly.

This patch also renames "discarded events counter" to "discarded event
records snapshot counter", as this field is a snapshot of the per-data
stream counter.

Signed-off-by: Philippe Proulx <eeppeliteloop@gmail.com>
78 files changed:
barectf/__init__.py
barectf/cgen.py
barectf/cli.py
barectf/config.py
barectf/config_parse_v2.py
barectf/config_parse_v3.py
barectf/include/2/trace-basic.yaml
barectf/schemas/config/2/config-pre-field-type-expansion.yaml
barectf/schemas/config/2/config.yaml
barectf/schemas/config/2/dst-pre-include.yaml [new file with mode: 0644]
barectf/schemas/config/2/ert-pre-include.yaml [new file with mode: 0644]
barectf/schemas/config/2/event-type-pre-include.yaml [deleted file]
barectf/schemas/config/2/metadata-pre-include.yaml
barectf/schemas/config/2/stream-type-pre-include.yaml [deleted file]
barectf/schemas/config/3/config-pre-field-type-expansion.yaml
barectf/schemas/config/3/config-pre-log-level-alias-sub.yaml
barectf/schemas/config/3/config.yaml
barectf/schemas/config/3/dst-pre-include.yaml [new file with mode: 0644]
barectf/schemas/config/3/ert-pre-include.yaml [new file with mode: 0644]
barectf/schemas/config/3/event-type-pre-include.yaml [deleted file]
barectf/schemas/config/3/stream-type-pre-include.yaml [deleted file]
barectf/schemas/config/3/trace-type-pre-include.yaml
barectf/templates/c/barectf.c-macros.j2
barectf/templates/c/barectf.c.j2
barectf/templates/c/barectf.h.j2
barectf/templates/c/close-func-proto.j2
barectf/templates/c/common.j2
barectf/templates/c/open-func-proto.j2
barectf/templates/c/serialize-write-array-statements.j2
barectf/templates/c/serialize-write-dst-id-statements.j2 [new file with mode: 0644]
barectf/templates/c/serialize-write-ert-id-statements.j2 [new file with mode: 0644]
barectf/templates/c/serialize-write-ev-type-id-statements.j2 [deleted file]
barectf/templates/c/serialize-write-stream-type-id-statements.j2 [deleted file]
barectf/templates/c/serialize-write-struct-statements.j2
barectf/templates/c/size-write-array-statements.j2
barectf/templates/c/size-write-struct-statements.j2
barectf/templates/c/trace-func-proto.j2
barectf/templates/metadata/metadata.j2
tests/tracing/configs/succeed/dynamic-array/nested-5-uint8.yaml
tests/tracing/configs/succeed/dynamic-array/of-double.yaml
tests/tracing/configs/succeed/dynamic-array/of-static-array-of-double.yaml
tests/tracing/configs/succeed/dynamic-array/of-static-array-of-str.yaml
tests/tracing/configs/succeed/dynamic-array/of-static-array-of-uint8.yaml
tests/tracing/configs/succeed/dynamic-array/of-str.yaml
tests/tracing/configs/succeed/dynamic-array/of-uint3-middle.yaml
tests/tracing/configs/succeed/dynamic-array/of-uint3.yaml
tests/tracing/configs/succeed/dynamic-array/of-uint8.yaml
tests/tracing/configs/succeed/dynamic-array/zero-len.yaml
tests/tracing/configs/succeed/static-array/nested-5-uint8.yaml
tests/tracing/configs/succeed/static-array/of-double.yaml
tests/tracing/configs/succeed/static-array/of-static-array-of-double.yaml
tests/tracing/configs/succeed/static-array/of-static-array-of-str.yaml
tests/tracing/configs/succeed/static-array/of-static-array-of-uint8.yaml
tests/tracing/configs/succeed/static-array/of-str.yaml
tests/tracing/configs/succeed/static-array/of-uint3-middle.yaml
tests/tracing/configs/succeed/static-array/of-uint3.yaml
tests/tracing/configs/succeed/static-array/of-uint8.yaml
tests/tracing/configs/succeed/static-array/zero-len.yaml
tests/tracing/expect/succeed/dynamic-array/nested-5-uint8.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-double.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-static-array-of-double.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-static-array-of-str.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-static-array-of-uint8.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-str.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-uint3-middle.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-uint3.metadata.expect
tests/tracing/expect/succeed/dynamic-array/of-uint8.metadata.expect
tests/tracing/expect/succeed/dynamic-array/zero-len.metadata.expect
tests/tracing/expect/succeed/static-array/nested-5-uint8.metadata.expect
tests/tracing/expect/succeed/static-array/of-double.metadata.expect
tests/tracing/expect/succeed/static-array/of-static-array-of-double.metadata.expect
tests/tracing/expect/succeed/static-array/of-static-array-of-str.metadata.expect
tests/tracing/expect/succeed/static-array/of-static-array-of-uint8.metadata.expect
tests/tracing/expect/succeed/static-array/of-str.metadata.expect
tests/tracing/expect/succeed/static-array/of-uint3-middle.metadata.expect
tests/tracing/expect/succeed/static-array/of-uint3.metadata.expect
tests/tracing/expect/succeed/static-array/of-uint8.metadata.expect
tests/tracing/expect/succeed/static-array/zero-len.metadata.expect

index b47d71c779a71a3c341c4531fb25c6f841b286c1..c8a8766b12ab2ccc74c0f4d7edc22701a68c3103 100644 (file)
@@ -65,16 +65,16 @@ DynamicArrayFieldType = barectf_config.DynamicArrayFieldType
 EnumerationFieldTypeMapping = barectf_config.EnumerationFieldTypeMapping
 EnumerationFieldTypeMappingRange = barectf_config.EnumerationFieldTypeMappingRange
 EnumerationFieldTypeMappings = barectf_config.EnumerationFieldTypeMappings
-EventType = barectf_config.EventType
+EventRecordType = barectf_config.EventRecordType
 LogLevel = barectf_config.LogLevel
 RealFieldType = barectf_config.RealFieldType
 SignedEnumerationFieldType = barectf_config.SignedEnumerationFieldType
 SignedIntegerFieldType = barectf_config.SignedIntegerFieldType
 StaticArrayFieldType = barectf_config.StaticArrayFieldType
-StreamType = barectf_config.StreamType
-StreamTypeEventFeatures = barectf_config.StreamTypeEventFeatures
-StreamTypeFeatures = barectf_config.StreamTypeFeatures
-StreamTypePacketFeatures = barectf_config.StreamTypePacketFeatures
+DataStreamType = barectf_config.DataStreamType
+DataStreamTypeEventRecordFeatures = barectf_config.DataStreamTypeEventRecordFeatures
+DataStreamTypeFeatures = barectf_config.DataStreamTypeFeatures
+DataStreamTypePacketFeatures = barectf_config.DataStreamTypePacketFeatures
 StringFieldType = barectf_config.StringFieldType
 StructureFieldType = barectf_config.StructureFieldType
 StructureFieldTypeMember = barectf_config.StructureFieldTypeMember
index 1b0b124ab83d81d09dc5d8cd401b320daad1e6cf..2319b9564c62c0fd47e811929d34a3d8ece293ab 100644 (file)
@@ -279,8 +279,8 @@ class _OpBuilder:
                     # suboperation.
                     #
                     # This is not strictly needed (could be appended to
-                    # `ops`), but the properties of `_StreamOps` and
-                    # `_EvOps` offer a single (structure field type)
+                    # `ops`), but the properties of `_DsOps` and
+                    # `_ErOps` offer a single (structure field type)
                     # operation.
                     subops.append(init_align_op)
 
@@ -335,13 +335,13 @@ class _OpBuilder:
 _OptCompoundOp = Optional[_CompoundOp]
 
 
-# The operations for an event.
+# The operations for an event record.
 #
 # The available operations are:
 #
 # * Specific context operation.
 # * Payload operation.
-class _EvOps:
+class _ErOps:
     def __init__(self, spec_ctx_op: _OptCompoundOp, payload_op: _OptCompoundOp):
         self._spec_ctx_op = spec_ctx_op
         self._payload_op = payload_op
@@ -355,26 +355,26 @@ class _EvOps:
         return self._payload_op
 
 
-_EvOpsMap = Mapping[barectf_config.EventType, _EvOps]
+_ErOpsMap = Mapping[barectf_config.EventRecordType, _ErOps]
 
 
-# The operations for a stream.
+# The operations for a data stream.
 #
 # The available operations are:
 #
 # * Packet header operation.
 # * Packet context operation.
-# * Event header operation.
-# * Event common context operation.
-# * Event operations (`_EvOps`).
-class _StreamOps:
+# * Event record header operation.
+# * Event record common context operation.
+# * Event record operations (`_ErOps`).
+class _DsOps:
     def __init__(self, pkt_header_op: _OptCompoundOp, pkt_ctx_op: _CompoundOp,
-                 ev_header_op: _OptCompoundOp, ev_common_ctx_op: _OptCompoundOp, ev_ops: _EvOpsMap):
+                 er_header_op: _OptCompoundOp, er_common_ctx_op: _OptCompoundOp, er_ops: _ErOpsMap):
         self._pkt_header_op = pkt_header_op
         self._pkt_ctx_op = pkt_ctx_op
-        self._ev_header_op = ev_header_op
-        self._ev_common_ctx_op = ev_common_ctx_op
-        self._ev_ops = ev_ops
+        self._er_header_op = er_header_op
+        self._er_common_ctx_op = er_common_ctx_op
+        self._er_ops = er_ops
 
     @property
     def pkt_header_op(self) -> _OptCompoundOp:
@@ -385,24 +385,24 @@ class _StreamOps:
         return self._pkt_ctx_op
 
     @property
-    def ev_header_op(self) -> _OptCompoundOp:
-        return self._ev_header_op
+    def er_header_op(self) -> _OptCompoundOp:
+        return self._er_header_op
 
     @property
-    def ev_common_ctx_op(self) -> _OptCompoundOp:
-        return self._ev_common_ctx_op
+    def er_common_ctx_op(self) -> _OptCompoundOp:
+        return self._er_common_ctx_op
 
     @property
-    def ev_ops(self) -> _EvOpsMap:
-        return self._ev_ops
+    def er_ops(self) -> _ErOpsMap:
+        return self._er_ops
 
 
 # The C variable name prefixes for the six kinds of root field types.
 class _RootFtPrefixes:
     PH = 'ph'
     PC = 'pc'
-    EH = 'eh'
-    ECC = 'ecc'
+    ERH = 'erh'
+    ERCC = 'ercc'
     SC = 'sc'
     P = 'p'
 
@@ -411,8 +411,8 @@ class _RootFtPrefixes:
 _ROOT_FT_PREFIX_NAMES = {
     _RootFtPrefixes.PH: 'packet header',
     _RootFtPrefixes.PC: 'packet context',
-    _RootFtPrefixes.EH: 'event header',
-    _RootFtPrefixes.ECC: 'event common context',
+    _RootFtPrefixes.ERH: 'event record header',
+    _RootFtPrefixes.ERCC: 'event record common context',
     _RootFtPrefixes.SC: 'specific context',
     _RootFtPrefixes.P: 'payload',
 }
@@ -493,7 +493,7 @@ class _CodeGen:
             'ft_c_type': self._ft_c_type,
             'open_func_params_str': self._open_func_params_str,
             'trace_func_params_str': self._trace_func_params_str,
-            'serialize_ev_common_ctx_func_params_str': self._serialize_ev_common_ctx_func_params_str,
+            'serialize_er_common_ctx_func_params_str': self._serialize_er_common_ctx_func_params_str,
             'loop_var_name': _loop_var_name,
             'op_src_var_name': self._op_src_var_name,
         }
@@ -507,11 +507,11 @@ class _CodeGen:
         self._serialize_write_dynamic_array_statements_templ = self._create_template('serialize-write-dynamic-array-statements.j2')
         self._serialize_write_magic_statements_templ = self._create_template('serialize-write-magic-statements.j2')
         self._serialize_write_uuid_statements_templ = self._create_template('serialize-write-uuid-statements.j2')
-        self._serialize_write_stream_type_id_statements_templ = self._create_template('serialize-write-stream-type-id-statements.j2')
+        self._serialize_write_dst_id_statements_templ = self._create_template('serialize-write-dst-id-statements.j2')
         self._serialize_write_time_statements_templ = self._create_template('serialize-write-time-statements.j2')
         self._serialize_write_packet_size_statements_templ = self._create_template('serialize-write-packet-size-statements.j2')
         self._serialize_write_skip_save_statements_templ = self._create_template('serialize-write-skip-save-statements.j2')
-        self._serialize_write_ev_type_id_statements_templ = self._create_template('serialize-write-ev-type-id-statements.j2')
+        self._serialize_write_ert_id_statements_templ = self._create_template('serialize-write-ert-id-statements.j2')
         self._size_align_statements_templ = self._create_template('size-align-statements.j2')
         self._size_write_bit_array_statements_templ = self._create_template('size-write-bit-array-statements.j2')
         self._size_write_string_statements_templ = self._create_template('size-write-string-statements.j2')
@@ -633,9 +633,8 @@ class _CodeGen:
                                                     const_params=const_params)
 
     # Returns the packet opening function prototype parameters for the
-    # stream type `stream_type`.
-    def _open_func_params_str(self, stream_type: barectf_config.StreamType,
-                              const_params: bool) -> str:
+    # data stream type `dst`.
+    def _open_func_params_str(self, dst: barectf_config.DataStreamType, const_params: bool) -> str:
         parts = []
         parts.append(self._proto_params_str(self._trace_type._pkt_header_ft, _RootFtPrefixes.PH,
                                             const_params, {'magic', 'stream_id', 'uuid'}))
@@ -647,46 +646,46 @@ class _CodeGen:
             'content_size',
             'events_discarded',
         }
-        parts.append(self._proto_params_str(stream_type._pkt_ctx_ft, _RootFtPrefixes.PC,
-                                            const_params, exclude_set))
+        parts.append(self._proto_params_str(dst._pkt_ctx_ft, _RootFtPrefixes.PC, const_params,
+                                            exclude_set))
         return ''.join(parts)
 
-    # Returns the tracing function prototype parameters for the stream
-    # and event types `stream_ev_types`.
-    def _trace_func_params_str(self, stream_ev_types: Tuple[barectf_config.StreamType,
-                                                            barectf_config.EventType],
+    # Returns the tracing function prototype parameters for the data
+    # stream and event record types `ds_er_types`.
+    def _trace_func_params_str(self, ds_er_types: Tuple[barectf_config.DataStreamType,
+                                                        barectf_config.EventRecordType],
                                const_params: bool, only_dyn: bool = False):
-        stream_type = stream_ev_types[0]
-        ev_type = stream_ev_types[1]
+        dst = ds_er_types[0]
+        ert = ds_er_types[1]
         parts = []
 
-        if stream_type._ev_header_ft is not None:
-            parts.append(self._proto_params_str(stream_type._ev_header_ft, _RootFtPrefixes.EH,
+        if dst._er_header_ft is not None:
+            parts.append(self._proto_params_str(dst._er_header_ft, _RootFtPrefixes.ERH,
                                                 const_params, {'id', 'timestamp'},
                                                 only_dyn=only_dyn))
 
-        if stream_type.event_common_context_field_type is not None:
-            parts.append(self._proto_params_str(stream_type.event_common_context_field_type,
-                                                _RootFtPrefixes.ECC, const_params,
+        if dst.event_record_common_context_field_type is not None:
+            parts.append(self._proto_params_str(dst.event_record_common_context_field_type,
+                                                _RootFtPrefixes.ERCC, const_params,
                                                 only_dyn=only_dyn))
 
-        if ev_type.specific_context_field_type is not None:
-            parts.append(self._proto_params_str(ev_type.specific_context_field_type,
+        if ert.specific_context_field_type is not None:
+            parts.append(self._proto_params_str(ert.specific_context_field_type,
                                                 _RootFtPrefixes.SC, const_params,
                                                 only_dyn=only_dyn))
 
-        if ev_type.payload_field_type is not None:
-            parts.append(self._proto_params_str(ev_type.payload_field_type, _RootFtPrefixes.P,
+        if ert.payload_field_type is not None:
+            parts.append(self._proto_params_str(ert.payload_field_type, _RootFtPrefixes.P,
                                                 const_params, only_dyn=only_dyn))
 
         return ''.join(parts)
 
-    # Returns the event header serialization function prototype
-    # parameters for the stream type `stream_type`.
-    def _serialize_ev_common_ctx_func_params_str(self, stream_type: barectf_config.StreamType,
+    # Returns the event record common context serialization function
+    # prototype parameters for the data stream type `dst`.
+    def _serialize_er_common_ctx_func_params_str(self, dst: barectf_config.DataStreamType,
                                                  const_params: bool) -> str:
-        return self._proto_params_str(stream_type.event_common_context_field_type,
-                                      _RootFtPrefixes.ECC, const_params)
+        return self._proto_params_str(dst.event_record_common_context_field_type,
+                                      _RootFtPrefixes.ERCC, const_params)
 
     # Generates the bitfield header file contents.
     def gen_bitfield_header(self) -> str:
@@ -698,13 +697,13 @@ class _CodeGen:
 
     # Generates the source code file contents.
     def gen_src(self, header_file_name: str, bitfield_header_file_name: str) -> str:
-        # Creates and returns the operations for all the stream and for
-        # all their events.
-        def create_stream_ops() -> Mapping[barectf_config.StreamType, _StreamOps]:
-            stream_ser_ops = {}
+        # Creates and returns the operations for all the data streams
+        # and for all their event records.
+        def create_ds_ops() -> Mapping[barectf_config.DataStreamType, _DsOps]:
+            ds_ops = {}
 
-            for stream_type in self._trace_type.stream_types:
-                pkt_header_ser_op = None
+            for dst in self._trace_type.data_stream_types:
+                pkt_header_op = None
                 builder = _OpBuilder(self)
                 pkt_header_ft = self._trace_type._pkt_header_ft
 
@@ -713,13 +712,13 @@ class _CodeGen:
                     spec_serialize_write_templates = {
                         'magic': self._serialize_write_magic_statements_templ,
                         'uuid': self._serialize_write_uuid_statements_templ,
-                        'stream_id': self._serialize_write_stream_type_id_statements_templ,
+                        'stream_id': self._serialize_write_dst_id_statements_templ,
                     }
-                    pkt_header_ser_op = builder.build_for_root_ft(pkt_header_ft,
+                    pkt_header_op = builder.build_for_root_ft(pkt_header_ft,
                                                                   _RootFtPrefixes.PH,
                                                                   spec_serialize_write_templates)
 
-                # packet context operations
+                # packet context operation
                 spec_serialize_write_templates = {
                     'timestamp_begin': self._serialize_write_time_statements_templ,
                     'packet_size': self._serialize_write_packet_size_statements_templ,
@@ -727,64 +726,61 @@ class _CodeGen:
                     'events_discarded': self._serialize_write_skip_save_statements_templ,
                     'content_size': self._serialize_write_skip_save_statements_templ,
                 }
-                pkt_ctx_ser_op = builder.build_for_root_ft(stream_type._pkt_ctx_ft,
-                                                           _RootFtPrefixes.PC,
-                                                           spec_serialize_write_templates)
+                pkt_ctx_op = builder.build_for_root_ft(dst._pkt_ctx_ft, _RootFtPrefixes.PC,
+                                                       spec_serialize_write_templates)
 
-                # event header operationss
+                # event record header operation
                 builder = _OpBuilder(self)
-                ev_header_ser_op = None
+                er_header_op = None
 
-                if stream_type._ev_header_ft is not None:
+                if dst._er_header_ft is not None:
                     spec_serialize_write_templates = {
                         'timestamp': self._serialize_write_time_statements_templ,
-                        'id': self._serialize_write_ev_type_id_statements_templ,
+                        'id': self._serialize_write_ert_id_statements_templ,
                     }
-                    ev_header_ser_op = builder.build_for_root_ft(stream_type._ev_header_ft,
-                                                                 _RootFtPrefixes.EH,
-                                                                 spec_serialize_write_templates)
+                    er_header_op = builder.build_for_root_ft(dst._er_header_ft, _RootFtPrefixes.ERH,
+                                                             spec_serialize_write_templates)
 
-                # event common context operations
-                ev_common_ctx_ser_op = None
+                # event record common context operation
+                er_common_ctx_op = None
 
-                if stream_type.event_common_context_field_type is not None:
-                    ev_common_ctx_ser_op = builder.build_for_root_ft(stream_type.event_common_context_field_type,
-                                                                     _RootFtPrefixes.ECC)
+                if dst.event_record_common_context_field_type is not None:
+                    er_common_ctx_op = builder.build_for_root_ft(dst.event_record_common_context_field_type,
+                                                                 _RootFtPrefixes.ERCC)
 
-                # operations specific to each event type
-                ev_ser_ops = {}
+                # operations specific to each event record type
+                er_ops = {}
 
-                for ev_type in stream_type.event_types:
+                for ert in dst.event_record_types:
                     ev_builder = copy.copy(builder)
 
-                    # specific context operations
-                    spec_ctx_ser_op = None
+                    # specific context operation
+                    spec_ctx_op = None
 
-                    if ev_type.specific_context_field_type is not None:
-                        spec_ctx_ser_op = ev_builder.build_for_root_ft(ev_type.specific_context_field_type,
-                                                                       _RootFtPrefixes.SC)
+                    if ert.specific_context_field_type is not None:
+                        spec_ctx_op = ev_builder.build_for_root_ft(ert.specific_context_field_type,
+                                                                   _RootFtPrefixes.SC)
 
-                    # payload operations
-                    payload_ser_op = None
+                    # payload operation
+                    payload_op = None
 
-                    if ev_type.payload_field_type is not None:
-                        payload_ser_op = ev_builder.build_for_root_ft(ev_type.payload_field_type,
-                                                                      _RootFtPrefixes.P)
+                    if ert.payload_field_type is not None:
+                        payload_op = ev_builder.build_for_root_ft(ert.payload_field_type,
+                                                                  _RootFtPrefixes.P)
 
-                    ev_ser_ops[ev_type] = _EvOps(spec_ctx_ser_op, payload_ser_op)
+                    er_ops[ert] = _ErOps(spec_ctx_op, payload_op)
 
-                stream_ser_ops[stream_type] = _StreamOps(pkt_header_ser_op, pkt_ctx_ser_op,
-                                                         ev_header_ser_op, ev_common_ctx_ser_op,
-                                                         ev_ser_ops)
+                ds_ops[dst] = _DsOps(pkt_header_op, pkt_ctx_op, er_header_op, er_common_ctx_op,
+                                     er_ops)
 
-            return stream_ser_ops
+            return ds_ops
 
         # Returns the "write" operation for the packet context member
-        # named `member_name` within the stream type `stream_type`.
-        def stream_op_pkt_ctx_op(stream_type: barectf_config.StreamType, member_name: str) -> _Op:
+        # named `member_name` within the data stream type `dst`.
+        def ds_op_pkt_ctx_op(dst: barectf_config.DataStreamType, member_name: str) -> _Op:
             ret_op = None
 
-            for op in stream_ops[stream_type].pkt_ctx_op.subops:
+            for op in ds_ops[dst].pkt_ctx_op.subops:
                 if op.top_name == member_name and type(op) is _WriteOp:
                     ret_op = op
                     break
@@ -792,13 +788,13 @@ class _CodeGen:
             assert ret_op is not None
             return typing.cast(_Op, ret_op)
 
-        stream_ops = create_stream_ops()
+        ds_ops = create_ds_ops()
         c_src = self._create_file_template('barectf.c.j2').render(header_file_name=header_file_name,
                                                                   bitfield_header_file_name=bitfield_header_file_name,
                                                                   root_ft_prefixes=_RootFtPrefixes,
                                                                   root_ft_prefix_names=_ROOT_FT_PREFIX_NAMES,
-                                                                  stream_ops=stream_ops,
-                                                                  stream_op_pkt_ctx_op=stream_op_pkt_ctx_op)
+                                                                  ds_ops=ds_ops,
+                                                                  ds_op_pkt_ctx_op=ds_op_pkt_ctx_op)
 
         # Jinja 2 makes it hard to have multiple contiguous blocks
         # delimited with empty lines when using a for loop, while not
index 9f7a2432d42b12867192c5be72e925d171311df1..1c20029d482e1b52a88d5df813b77725fcf50bee 100644 (file)
@@ -508,7 +508,7 @@ class _GenCmd(_Cmd):
             cg_opts = config.options.code_generation_options
             cg_opts = barectf.ConfigurationCodeGenerationOptions(v3_prefixes.identifier,
                                                                  v3_prefixes.file_name,
-                                                                 cg_opts.default_stream_type,
+                                                                 cg_opts.default_data_stream_type,
                                                                  cg_opts.header_options,
                                                                  cg_opts.clock_type_c_types)
             config = barectf.Configuration(config.trace, barectf.ConfigurationOptions(cg_opts))
index 552b668b4d7635b6dd15b65a113a00d96265e817..ca9dd4ffc4f9ffb3408f53bb382e2c73dd90797e 100644 (file)
@@ -339,7 +339,7 @@ _OptStructFt = Optional[StructureFieldType]
 LogLevel = typing.NewType('LogLevel', int)
 
 
-class EventType(_UniqueByName):
+class EventRecordType(_UniqueByName):
     def __init__(self, name: str, log_level: Optional[LogLevel] = None,
                  specific_context_field_type: _OptStructFt = None, payload_field_type: _OptStructFt = None):
         self._id: Optional[Id] = None
@@ -437,12 +437,12 @@ _OptDefaultableUIntFt = Optional[_DefaultableUIntFt]
 _OptUIntFt = Optional[UnsignedIntegerFieldType]
 
 
-class StreamTypePacketFeatures:
+class DataStreamTypePacketFeatures:
     def __init__(self, total_size_field_type: _DefaultableUIntFt = DEFAULT_FIELD_TYPE,
                  content_size_field_type: _DefaultableUIntFt = DEFAULT_FIELD_TYPE,
                  beginning_time_field_type: _OptDefaultableUIntFt = None,
                  end_time_field_type: _OptDefaultableUIntFt = None,
-                 discarded_events_counter_field_type: _OptDefaultableUIntFt = None):
+                 discarded_event_records_snapshot_counter_field_type: _OptDefaultableUIntFt = None):
         def get_ft(user_ft: _OptDefaultableUIntFt) -> _OptUIntFt:
             if user_ft == DEFAULT_FIELD_TYPE:
                 return UnsignedIntegerFieldType(64)
@@ -453,7 +453,7 @@ class StreamTypePacketFeatures:
         self._content_size_field_type = get_ft(content_size_field_type)
         self._beginning_time_field_type = get_ft(beginning_time_field_type)
         self._end_time_field_type = get_ft(end_time_field_type)
-        self._discarded_events_counter_field_type = get_ft(discarded_events_counter_field_type)
+        self._discarded_event_records_snapshot_counter_field_type = get_ft(discarded_event_records_snapshot_counter_field_type)
 
     @property
     def total_size_field_type(self) -> _OptUIntFt:
@@ -472,11 +472,11 @@ class StreamTypePacketFeatures:
         return self._end_time_field_type
 
     @property
-    def discarded_events_counter_field_type(self) -> _OptUIntFt:
-        return self._discarded_events_counter_field_type
+    def discarded_event_records_snapshot_counter_field_type(self) -> _OptUIntFt:
+        return self._discarded_event_records_snapshot_counter_field_type
 
 
-class StreamTypeEventFeatures:
+class DataStreamTypeEventRecordFeatures:
     def __init__(self, type_id_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE,
                  time_field_type: _OptDefaultableUIntFt = None):
         def get_ft(user_ft: _OptDefaultableUIntFt) -> _OptUIntFt:
@@ -497,44 +497,44 @@ class StreamTypeEventFeatures:
         return self._time_field_type
 
 
-class StreamTypeFeatures:
-    def __init__(self, packet_features: Optional[StreamTypePacketFeatures] = None,
-                 event_features: Optional[StreamTypeEventFeatures] = None):
+class DataStreamTypeFeatures:
+    def __init__(self, packet_features: Optional[DataStreamTypePacketFeatures] = None,
+                 event_record_features: Optional[DataStreamTypeEventRecordFeatures] = None):
         if packet_features is None:
-            self._packet_features = StreamTypePacketFeatures()
+            self._packet_features = DataStreamTypePacketFeatures()
         else:
             self._packet_features = packet_features
 
-        if event_features is None:
-            self._event_features = StreamTypeEventFeatures()
+        if event_record_features is None:
+            self._event_record_features = DataStreamTypeEventRecordFeatures()
         else:
-            self._event_features = event_features
+            self._event_record_features = event_record_features
 
     @property
-    def packet_features(self) -> StreamTypePacketFeatures:
+    def packet_features(self) -> DataStreamTypePacketFeatures:
         return self._packet_features
 
     @property
-    def event_features(self) -> StreamTypeEventFeatures:
-        return self._event_features
+    def event_record_features(self) -> DataStreamTypeEventRecordFeatures:
+        return self._event_record_features
 
 
-class StreamType(_UniqueByName):
-    def __init__(self, name: str, event_types: Set[EventType],
+class DataStreamType(_UniqueByName):
+    def __init__(self, name: str, event_record_types: Set[EventRecordType],
                  default_clock_type: Optional[ClockType] = None,
-                 features: Optional[StreamTypeFeatures] = None,
+                 features: Optional[DataStreamTypeFeatures] = None,
                  packet_context_field_type_extra_members: Optional[_StructFtMembers] = None,
-                 event_common_context_field_type: _OptStructFt = None):
+                 event_record_common_context_field_type: _OptStructFt = None):
         self._id: Optional[Id] = None
         self._name = name
         self._default_clock_type = default_clock_type
-        self._event_common_context_field_type = event_common_context_field_type
-        self._event_types = frozenset(event_types)
+        self._event_record_common_context_field_type = event_record_common_context_field_type
+        self._event_record_types = frozenset(event_record_types)
 
         # assign unique IDs
-        for index, ev_type in enumerate(sorted(self._event_types, key=lambda evt: evt.name)):
-            assert ev_type._id is None
-            ev_type._id = Id(index)
+        for index, ert in enumerate(sorted(self._event_record_types, key=lambda evt: evt.name)):
+            assert ert._id is None
+            ert._id = Id(index)
 
         self._set_features(features)
         self._packet_context_field_type_extra_members = StructureFieldTypeMembers({})
@@ -543,27 +543,27 @@ class StreamType(_UniqueByName):
             self._packet_context_field_type_extra_members = StructureFieldTypeMembers(packet_context_field_type_extra_members)
 
         self._set_pkt_ctx_ft()
-        self._set_ev_header_ft()
+        self._set_er_header_ft()
 
-    def _set_features(self, features: Optional[StreamTypeFeatures]):
+    def _set_features(self, features: Optional[DataStreamTypeFeatures]):
         if features is not None:
             self._features = features
             return None
 
-        ev_time_ft = None
+        er_time_ft = None
         pkt_beginning_time_ft = None
         pkt_end_time_ft = None
 
         if self._default_clock_type is not None:
-            # Automatic time field types because the stream type has a
-            # default clock type.
-            ev_time_ft = DEFAULT_FIELD_TYPE
+            # Automatic time field types because the data stream type
+            # has a default clock type.
+            er_time_ft = DEFAULT_FIELD_TYPE
             pkt_beginning_time_ft = DEFAULT_FIELD_TYPE
             pkt_end_time_ft = DEFAULT_FIELD_TYPE
 
-        self._features = StreamTypeFeatures(StreamTypePacketFeatures(beginning_time_field_type=pkt_beginning_time_ft,
-                                                                     end_time_field_type=pkt_end_time_ft),
-                                            StreamTypeEventFeatures(time_field_type=ev_time_ft))
+        self._features = DataStreamTypeFeatures(DataStreamTypePacketFeatures(beginning_time_field_type=pkt_beginning_time_ft,
+                                                                             end_time_field_type=pkt_end_time_ft),
+                                                DataStreamTypeEventRecordFeatures(time_field_type=er_time_ft))
 
     def _set_ft_mapped_clk_type_name(self, ft: Optional[UnsignedIntegerFieldType]):
         if ft is None:
@@ -601,7 +601,7 @@ class StreamType(_UniqueByName):
         add_member_if_exists('timestamp_end', self._features.packet_features.end_time_field_type,
                              True)
         add_member_if_exists('events_discarded',
-                             self._features.packet_features.discarded_events_counter_field_type)
+                             self._features.packet_features.discarded_event_records_snapshot_counter_field_type)
 
         if self._packet_context_field_type_extra_members is not None:
             for name, field_type in self._packet_context_field_type_extra_members.items():
@@ -610,18 +610,18 @@ class StreamType(_UniqueByName):
 
         self._pkt_ctx_ft = StructureFieldType(8, members)
 
-    def _set_ev_header_ft(self):
+    def _set_er_header_ft(self):
         members = collections.OrderedDict()
 
-        if self._features.event_features.type_id_field_type is not None:
-            members['id'] = StructureFieldTypeMember(self._features.event_features.type_id_field_type)
+        if self._features.event_record_features.type_id_field_type is not None:
+            members['id'] = StructureFieldTypeMember(self._features.event_record_features.type_id_field_type)
 
-        if self._features.event_features.time_field_type is not None:
-            ft = self._features.event_features.time_field_type
+        if self._features.event_record_features.time_field_type is not None:
+            ft = self._features.event_record_features.time_field_type
             self._set_ft_mapped_clk_type_name(ft)
             members['timestamp'] = StructureFieldTypeMember(ft)
 
-        self._ev_header_ft = StructureFieldType(8, members)
+        self._er_header_ft = StructureFieldType(8, members)
 
     @property
     def id(self) -> Optional[Id]:
@@ -636,7 +636,7 @@ class StreamType(_UniqueByName):
         return self._default_clock_type
 
     @property
-    def features(self) -> StreamTypeFeatures:
+    def features(self) -> DataStreamTypeFeatures:
         return self._features
 
     @property
@@ -644,12 +644,12 @@ class StreamType(_UniqueByName):
         return self._packet_context_field_type_extra_members
 
     @property
-    def event_common_context_field_type(self) -> _OptStructFt:
-        return self._event_common_context_field_type
+    def event_record_common_context_field_type(self) -> _OptStructFt:
+        return self._event_record_common_context_field_type
 
     @property
-    def event_types(self) -> FrozenSet[EventType]:
-        return self._event_types
+    def event_record_types(self) -> FrozenSet[EventRecordType]:
+        return self._event_record_types
 
 
 _OptUuidFt = Optional[Union[str, StaticArrayFieldType]]
@@ -658,7 +658,7 @@ _OptUuidFt = Optional[Union[str, StaticArrayFieldType]]
 class TraceTypeFeatures:
     def __init__(self, magic_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE,
                  uuid_field_type: _OptUuidFt = None,
-                 stream_type_id_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE):
+                 data_stream_type_id_field_type: _OptDefaultableUIntFt = DEFAULT_FIELD_TYPE):
         def get_field_type(user_ft: Optional[Union[str, _FieldType]],
                            create_default_ft: Callable[[], _FieldType]) -> _OptFt:
             if user_ft == DEFAULT_FIELD_TYPE:
@@ -672,15 +672,15 @@ class TraceTypeFeatures:
         def create_default_uuid_ft():
             return StaticArrayFieldType(Count(16), UnsignedIntegerFieldType(8))
 
-        def create_default_stream_type_id_ft():
+        def create_default_dst_id_ft():
             return UnsignedIntegerFieldType(64)
 
         self._magic_field_type = typing.cast(_OptUIntFt, get_field_type(magic_field_type, create_default_magic_ft))
         self._uuid_field_type = typing.cast(Optional[StaticArrayFieldType],
                                             get_field_type(uuid_field_type, create_default_uuid_ft))
-        self._stream_type_id_field_type = typing.cast(_OptUIntFt,
-                                                      get_field_type(stream_type_id_field_type,
-                                                                     create_default_stream_type_id_ft))
+        self._data_stream_type_id_field_type = typing.cast(_OptUIntFt,
+                                                           get_field_type(data_stream_type_id_field_type,
+                                                                          create_default_dst_id_ft))
 
     @property
     def magic_field_type(self) -> _OptUIntFt:
@@ -691,19 +691,19 @@ class TraceTypeFeatures:
         return self._uuid_field_type
 
     @property
-    def stream_type_id_field_type(self) -> _OptUIntFt:
-        return self._stream_type_id_field_type
+    def data_stream_type_id_field_type(self) -> _OptUIntFt:
+        return self._data_stream_type_id_field_type
 
 
 class TraceType:
-    def __init__(self, stream_types: Set[StreamType], uuid: _OptUuid = None,
+    def __init__(self, data_stream_types: Set[DataStreamType], uuid: _OptUuid = None,
                  features: Optional[TraceTypeFeatures] = None):
-        self._stream_types = frozenset(stream_types)
+        self._data_stream_types = frozenset(data_stream_types)
 
         # assign unique IDs
-        for index, stream_type in enumerate(sorted(self._stream_types, key=lambda st: st.name)):
-            assert stream_type._id is None
-            stream_type._id = Id(index)
+        for index, dst in enumerate(sorted(self._data_stream_types, key=lambda dst: dst.name)):
+            assert dst._id is None
+            dst._id = Id(index)
 
         self._uuid = uuid
         self._set_features(features)
@@ -729,7 +729,7 @@ class TraceType:
 
         add_member_if_exists('magic', self._features.magic_field_type)
         add_member_if_exists('uuid', self._features.uuid_field_type)
-        add_member_if_exists('stream_id', self._features.stream_type_id_field_type)
+        add_member_if_exists('stream_id', self._features.data_stream_type_id_field_type)
         self._pkt_header_ft = StructureFieldType(8, members)
 
     @property
@@ -737,13 +737,13 @@ class TraceType:
         return self._uuid
 
     @property
-    def stream_types(self) -> FrozenSet[StreamType]:
-        return self._stream_types
+    def data_stream_types(self) -> FrozenSet[DataStreamType]:
+        return self._data_stream_types
 
-    def stream_type(self, name: str) -> Optional[StreamType]:
-        for cand_stream_type in self._stream_types:
-            if cand_stream_type.name == name:
-                return cand_stream_type
+    def data_stream_type(self, name: str) -> Optional[DataStreamType]:
+        for cand_dst in self._data_stream_types:
+            if cand_dst.name == name:
+                return cand_dst
 
         return None
 
@@ -755,9 +755,9 @@ class TraceType:
     def clock_types(self) -> Set[ClockType]:
         clk_types = set()
 
-        for stream_type in self._stream_types:
-            if stream_type.default_clock_type is not None:
-                clk_types.add(stream_type.default_clock_type)
+        for dst in self._data_stream_types:
+            if dst.default_clock_type is not None:
+                clk_types.add(dst.default_clock_type)
 
         return clk_types
 
@@ -829,27 +829,27 @@ class ClockTypeCTypes(collections.abc.Mapping):
 
 class ConfigurationCodeGenerationHeaderOptions:
     def __init__(self, identifier_prefix_definition: bool = False,
-                 default_stream_type_name_definition: bool = False):
+                 default_data_stream_type_name_definition: bool = False):
         self._identifier_prefix_definition = identifier_prefix_definition
-        self._default_stream_type_name_definition = default_stream_type_name_definition
+        self._default_data_stream_type_name_definition = default_data_stream_type_name_definition
 
     @property
     def identifier_prefix_definition(self) -> bool:
         return self._identifier_prefix_definition
 
     @property
-    def default_stream_type_name_definition(self) -> bool:
-        return self._default_stream_type_name_definition
+    def default_data_stream_type_name_definition(self) -> bool:
+        return self._default_data_stream_type_name_definition
 
 
 class ConfigurationCodeGenerationOptions:
     def __init__(self, identifier_prefix: str = 'barectf_', file_name_prefix: str = 'barectf',
-                 default_stream_type: Optional[StreamType] = None,
+                 default_data_stream_type: Optional[DataStreamType] = None,
                  header_options: Optional[ConfigurationCodeGenerationHeaderOptions] = None,
                  clock_type_c_types: Optional[_ClkTypeCTypes] = None):
         self._identifier_prefix = identifier_prefix
         self._file_name_prefix = file_name_prefix
-        self._default_stream_type = default_stream_type
+        self._default_data_stream_type = default_data_stream_type
 
         self._header_options = ConfigurationCodeGenerationHeaderOptions()
 
@@ -870,8 +870,8 @@ class ConfigurationCodeGenerationOptions:
         return self._file_name_prefix
 
     @property
-    def default_stream_type(self) -> Optional[StreamType]:
-        return self._default_stream_type
+    def default_data_stream_type(self) -> Optional[DataStreamType]:
+        return self._default_data_stream_type
 
     @property
     def header_options(self) -> ConfigurationCodeGenerationHeaderOptions:
@@ -907,8 +907,8 @@ class Configuration:
 
         clk_type_c_types = self._options.code_generation_options.clock_type_c_types
 
-        for stream_type in trace.type.stream_types:
-            def_clk_type = stream_type.default_clock_type
+        for dst in trace.type.data_stream_types:
+            def_clk_type = dst.default_clock_type
 
             if def_clk_type is None:
                 continue
index 668f0b9478ee599e662ab01e9d6a7304ce3368f7..e6079626b2de26b27f32ef8da2bd216ec21f3c60 100644 (file)
@@ -286,28 +286,28 @@ class _Parser(config_parse_common._Parser):
 
         return v3_clk_type_node
 
-    # Converts a v2 event type node to a v3 event type node and returns
-    # it.
-    def _conv_ev_type_node(self, v2_ev_type_node: _MapNode) -> _MapNode:
-        # create empty v3 event type node
-        v3_ev_type_node: _MapNode = collections.OrderedDict()
+    # Converts a v2 event record type node to a v3 event record type
+    # node and returns it.
+    def _conv_ert_node(self, v2_ert_node: _MapNode) -> _MapNode:
+        # create empty v3 event record type node
+        v3_ert_node: _MapNode = collections.OrderedDict()
 
         # copy `log-level` property
-        _copy_prop_if_exists(v3_ev_type_node, v2_ev_type_node, 'log-level')
+        _copy_prop_if_exists(v3_ert_node, v2_ert_node, 'log-level')
 
         # convert specific context field type node
-        v2_ft_node = v2_ev_type_node.get('context-type')
+        v2_ft_node = v2_ert_node.get('context-type')
 
         if v2_ft_node is not None:
-            v3_ev_type_node['specific-context-field-type'] = self._conv_ft_node(v2_ft_node)
+            v3_ert_node['specific-context-field-type'] = self._conv_ft_node(v2_ft_node)
 
         # convert payload field type node
-        v2_ft_node = v2_ev_type_node.get('payload-type')
+        v2_ft_node = v2_ert_node.get('payload-type')
 
         if v2_ft_node is not None:
-            v3_ev_type_node['payload-field-type'] = self._conv_ft_node(v2_ft_node)
+            v3_ert_node['payload-field-type'] = self._conv_ft_node(v2_ft_node)
 
-        return v3_ev_type_node
+        return v3_ert_node
 
     @staticmethod
     def _set_v3_feature_ft_if_exists(v3_features_node: _MapNode, key: str,
@@ -319,16 +319,16 @@ class _Parser(config_parse_common._Parser):
 
         v3_features_node[key] = val
 
-    # Converts a v2 stream type node to a v3 stream type node and
-    # returns it.
-    def _conv_stream_type_node(self, v2_stream_type_node: _MapNode) -> _MapNode:
-        # This function creates a v3 stream type features node from the
-        # packet context and event header field type nodes of a
-        # v2 stream type node.
+    # Converts a v2 data stream type node to a v3 data stream type node
+    # and returns it.
+    def _conv_dst_node(self, v2_dst_node: _MapNode) -> _MapNode:
+        # This function creates a v3 data stream type features node from
+        # the packet context and event record header field type nodes of
+        # a v2 data stream type node.
         def v3_features_node_from_v2_ft_nodes(v2_pkt_ctx_ft_fields_node: _MapNode,
-                                              v2_ev_header_ft_fields_node: Optional[_MapNode]) -> _MapNode:
-            if v2_ev_header_ft_fields_node is None:
-                v2_ev_header_ft_fields_node = collections.OrderedDict()
+                                              v2_er_header_ft_fields_node: Optional[_MapNode]) -> _MapNode:
+            if v2_er_header_ft_fields_node is None:
+                v2_er_header_ft_fields_node = collections.OrderedDict()
 
             v3_pkt_total_size_ft_node = self._conv_ft_node(v2_pkt_ctx_ft_fields_node['packet_size'])
             v3_pkt_content_size_ft_node = self._conv_ft_node(v2_pkt_ctx_ft_fields_node['content_size'])
@@ -336,27 +336,27 @@ class _Parser(config_parse_common._Parser):
                                                                    'timestamp_begin')
             v3_pkt_end_time_ft_node = self._conv_ft_node_if_exists(v2_pkt_ctx_ft_fields_node,
                                                                    'timestamp_end')
-            v3_pkt_disc_ev_counter_ft_node = self._conv_ft_node_if_exists(v2_pkt_ctx_ft_fields_node,
-                                                                          'events_discarded')
-            v3_ev_type_id_ft_node = self._conv_ft_node_if_exists(v2_ev_header_ft_fields_node, 'id')
-            v3_ev_time_ft_node = self._conv_ft_node_if_exists(v2_ev_header_ft_fields_node,
+            v3_pkt_disc_er_counter_snap_ft_node = self._conv_ft_node_if_exists(v2_pkt_ctx_ft_fields_node,
+                                                                               'events_discarded')
+            v3_ert_id_ft_node = self._conv_ft_node_if_exists(v2_er_header_ft_fields_node, 'id')
+            v3_er_time_ft_node = self._conv_ft_node_if_exists(v2_er_header_ft_fields_node,
                                                               'timestamp')
             v3_features_node: _MapNode = collections.OrderedDict()
             v3_pkt_node: _MapNode = collections.OrderedDict()
-            v3_ev_node: _MapNode = collections.OrderedDict()
+            v3_er_node: _MapNode = collections.OrderedDict()
             v3_pkt_node['total-size-field-type'] = v3_pkt_total_size_ft_node
             v3_pkt_node['content-size-field-type'] = v3_pkt_content_size_ft_node
             self._set_v3_feature_ft_if_exists(v3_pkt_node, 'beginning-time-field-type',
                                               v3_pkt_beg_time_ft_node)
             self._set_v3_feature_ft_if_exists(v3_pkt_node, 'end-time-field-type',
                                               v3_pkt_end_time_ft_node)
-            self._set_v3_feature_ft_if_exists(v3_pkt_node, 'discarded-events-counter-field-type',
-                                              v3_pkt_disc_ev_counter_ft_node)
-            self._set_v3_feature_ft_if_exists(v3_ev_node, 'type-id-field-type',
-                                              v3_ev_type_id_ft_node)
-            self._set_v3_feature_ft_if_exists(v3_ev_node, 'time-field-type', v3_ev_time_ft_node)
+            self._set_v3_feature_ft_if_exists(v3_pkt_node,
+                                              'discarded-event-records-counter-snapshot-field-type',
+                                              v3_pkt_disc_er_counter_snap_ft_node)
+            self._set_v3_feature_ft_if_exists(v3_er_node, 'type-id-field-type', v3_ert_id_ft_node)
+            self._set_v3_feature_ft_if_exists(v3_er_node, 'time-field-type', v3_er_time_ft_node)
             v3_features_node['packet'] = v3_pkt_node
-            v3_features_node['event'] = v3_ev_node
+            v3_features_node['event-record'] = v3_er_node
             return v3_features_node
 
         def clk_type_name_from_v2_int_ft_node(v2_int_ft_node: Optional[_MapNode]) -> _OptStr:
@@ -371,21 +371,21 @@ class _Parser(config_parse_common._Parser):
 
             return None
 
-        # create empty v3 stream type node
-        v3_stream_type_node: _MapNode = collections.OrderedDict()
+        # create empty v3 data stream type node
+        v3_dst_node: _MapNode = collections.OrderedDict()
 
         # rename `$default` property to `$is-default`
-        _copy_prop_if_exists(v3_stream_type_node, v2_stream_type_node, '$default', '$is-default')
+        _copy_prop_if_exists(v3_dst_node, v2_dst_node, '$default', '$is-default')
 
         # set default clock type node
         pct_prop_name = 'packet-context-type'
-        v2_pkt_ctx_ft_fields_node = v2_stream_type_node[pct_prop_name]['fields']
+        v2_pkt_ctx_ft_fields_node = v2_dst_node[pct_prop_name]['fields']
         eht_prop_name = 'event-header-type'
-        v2_ev_header_ft_fields_node = None
-        v2_ev_header_ft_node = v2_stream_type_node.get(eht_prop_name)
+        v2_er_header_ft_fields_node = None
+        v2_er_header_ft_node = v2_dst_node.get(eht_prop_name)
 
-        if v2_ev_header_ft_node is not None:
-            v2_ev_header_ft_fields_node = v2_ev_header_ft_node['fields']
+        if v2_er_header_ft_node is not None:
+            v2_er_header_ft_fields_node = v2_er_header_ft_node['fields']
 
         def_clk_type_name = None
 
@@ -403,8 +403,8 @@ class _Parser(config_parse_common._Parser):
             _append_error_ctx(exc, f'`{pct_prop_name}` property')
 
         try:
-            if def_clk_type_name is None and v2_ev_header_ft_fields_node is not None:
-                def_clk_type_name = clk_type_name_from_v2_int_ft_node(v2_ev_header_ft_fields_node.get('timestamp'))
+            if def_clk_type_name is None and v2_er_header_ft_fields_node is not None:
+                def_clk_type_name = clk_type_name_from_v2_int_ft_node(v2_er_header_ft_fields_node.get('timestamp'))
 
             if def_clk_type_name is None and ts_begin_clk_type_name is not None:
                 def_clk_type_name = ts_begin_clk_type_name
@@ -415,11 +415,11 @@ class _Parser(config_parse_common._Parser):
             _append_error_ctx(exc, f'`{eht_prop_name}` property')
 
         if def_clk_type_name is not None:
-            v3_stream_type_node['$default-clock-type-name'] = def_clk_type_name
+            v3_dst_node['$default-clock-type-name'] = def_clk_type_name
 
         # set features node
-        v3_stream_type_node['$features'] = v3_features_node_from_v2_ft_nodes(v2_pkt_ctx_ft_fields_node,
-                                                                             v2_ev_header_ft_fields_node)
+        v3_dst_node['$features'] = v3_features_node_from_v2_ft_nodes(v2_pkt_ctx_ft_fields_node,
+                                                                     v2_er_header_ft_fields_node)
 
         # set extra packet context field type members node
         pkt_ctx_ft_extra_members = []
@@ -443,26 +443,26 @@ class _Parser(config_parse_common._Parser):
             }))
 
         if len(pkt_ctx_ft_extra_members) > 0:
-            v3_stream_type_node['packet-context-field-type-extra-members'] = pkt_ctx_ft_extra_members
+            v3_dst_node['packet-context-field-type-extra-members'] = pkt_ctx_ft_extra_members
 
-        # convert event common context field type node
-        v2_ft_node = v2_stream_type_node.get('event-context-type')
+        # convert event record common context field type node
+        v2_ft_node = v2_dst_node.get('event-context-type')
 
         if v2_ft_node is not None:
-            v3_stream_type_node['event-common-context-field-type'] = self._conv_ft_node(v2_ft_node)
+            v3_dst_node['event-record-common-context-field-type'] = self._conv_ft_node(v2_ft_node)
 
-        # convert event type nodes
-        v3_event_types_node = collections.OrderedDict()
+        # convert event record type nodes
+        v3_erts_node = collections.OrderedDict()
 
-        for ev_type_name, v2_ev_type_node in v2_stream_type_node['events'].items():
+        for ert_name, v2_ert_node in v2_dst_node['events'].items():
             try:
-                v3_event_types_node[ev_type_name] = self._conv_ev_type_node(v2_ev_type_node)
+                v3_erts_node[ert_name] = self._conv_ert_node(v2_ert_node)
             except _ConfigurationParseError as exc:
-                _append_error_ctx(exc, f'Event type `{ev_type_name}`')
+                _append_error_ctx(exc, f'Event record type `{ert_name}`')
 
-        v3_stream_type_node['event-types'] = v3_event_types_node
+        v3_dst_node['event-record-types'] = v3_erts_node
 
-        return v3_stream_type_node
+        return v3_dst_node
 
     # Converts a v2 metadata node to a v3 trace node and returns it.
     def _conv_meta_node(self, v2_meta_node: _MapNode) -> _MapNode:
@@ -477,12 +477,12 @@ class _Parser(config_parse_common._Parser):
 
             v3_magic_ft_node = self._conv_ft_node_if_exists(v2_pkt_header_ft_fields_node, 'magic')
             v3_uuid_ft_node = self._conv_ft_node_if_exists(v2_pkt_header_ft_fields_node, 'uuid')
-            v3_stream_type_id_ft_node = self._conv_ft_node_if_exists(v2_pkt_header_ft_fields_node,
-                                                                     'stream_id')
+            v3_dst_id_ft_node = self._conv_ft_node_if_exists(v2_pkt_header_ft_fields_node,
+                                                             'stream_id')
             v3_features_node: _MapNode = collections.OrderedDict()
             set_if_exists('magic-field-type', v3_magic_ft_node)
             set_if_exists('uuid-field-type', v3_uuid_ft_node)
-            set_if_exists('stream-type-id-field-type', v3_stream_type_id_ft_node)
+            set_if_exists('data-stream-type-id-field-type', v3_dst_id_ft_node)
             return v3_features_node
 
         v3_trace_node: _MapNode = collections.OrderedDict()
@@ -516,35 +516,35 @@ class _Parser(config_parse_common._Parser):
         v2_pkt_header_ft_node = v2_trace_node.get('packet-header-type')
         v3_trace_type_node['$features'] = v3_features_node_from_v2_ft_node(v2_pkt_header_ft_node)
 
-        # convert stream type nodes
-        v3_stream_types_node = collections.OrderedDict()
+        # convert data stream type nodes
+        v3_dsts_node = collections.OrderedDict()
 
-        for stream_type_name, v2_stream_type_node in v2_meta_node['streams'].items():
+        for dst_name, v2_dst_node in v2_meta_node['streams'].items():
             try:
-                v3_stream_types_node[stream_type_name] = self._conv_stream_type_node(v2_stream_type_node)
+                v3_dsts_node[dst_name] = self._conv_dst_node(v2_dst_node)
             except _ConfigurationParseError as exc:
-                _append_error_ctx(exc, f'Stream type `{stream_type_name}`')
+                _append_error_ctx(exc, f'Data stream type `{dst_name}`')
 
-        v3_trace_type_node['stream-types'] = v3_stream_types_node
+        v3_trace_type_node['data-stream-types'] = v3_dsts_node
 
         # If `v2_meta_node` has a `$default-stream` property, find the
-        # corresponding v3 stream type node and set its `$is-default`
-        # property to `True`.
+        # corresponding v3 data stream type node and set its
+        # `$is-default` property to `True`.
         prop_name = '$default-stream'
-        v2_def_stream_type_node = v2_meta_node.get(prop_name)
+        v2_def_dst_node = v2_meta_node.get(prop_name)
 
-        if v2_def_stream_type_node is not None:
+        if v2_def_dst_node is not None:
             found = False
 
-            for stream_type_name, v3_stream_type_node in v3_stream_types_node.items():
-                if stream_type_name == v2_def_stream_type_node:
-                    v3_stream_type_node['$is-default'] = True
+            for dst_name, v3_dst_node in v3_dsts_node.items():
+                if dst_name == v2_def_dst_node:
+                    v3_dst_node['$is-default'] = True
                     found = True
                     break
 
             if not found:
                 raise _ConfigurationParseError(f'`{prop_name}` property',
-                                               f'Stream type `{v2_def_stream_type_node}` does not exist')
+                                               f'Data stream type `{v2_def_dst_node}` does not exist')
 
         # set environment node
         v2_env_node = v2_meta_node.get('env')
@@ -583,7 +583,7 @@ class _Parser(config_parse_common._Parser):
             _copy_prop_if_exists(header_node, v2_options_node, 'gen-prefix-def',
                                  'identifier-prefix-definition')
             _copy_prop_if_exists(header_node, v2_options_node, 'gen-default-stream-def',
-                                 'default-stream-type-name-definition')
+                                 'default-data-stream-type-name-definition')
             code_gen_node['header'] = header_node
 
         self._root_node[opt_prop_name] = collections.OrderedDict({
@@ -612,29 +612,27 @@ class _Parser(config_parse_common._Parser):
         meta_node = self._root_node['metadata']
         ft_aliases_node = meta_node['type-aliases']
 
-        # Expand field type aliases within trace, stream, and event
-        # types now.
+        # Expand field type aliases within trace, data stream, and event
+        # record types now.
         try:
             self._resolve_ft_alias_from(ft_aliases_node, meta_node['trace'], 'packet-header-type')
         except _ConfigurationParseError as exc:
             _append_error_ctx(exc, 'Trace type')
 
-        for stream_type_name, stream_type_node in meta_node['streams'].items():
+        for dst_name, dst_node in meta_node['streams'].items():
             try:
-                self._resolve_ft_alias_from(ft_aliases_node, stream_type_node,
-                                            'packet-context-type')
-                self._resolve_ft_alias_from(ft_aliases_node, stream_type_node, 'event-header-type')
-                self._resolve_ft_alias_from(ft_aliases_node, stream_type_node,
-                                            'event-context-type')
+                self._resolve_ft_alias_from(ft_aliases_node, dst_node, 'packet-context-type')
+                self._resolve_ft_alias_from(ft_aliases_node, dst_node, 'event-header-type')
+                self._resolve_ft_alias_from(ft_aliases_node, dst_node, 'event-context-type')
 
-                for ev_type_name, ev_type_node in stream_type_node['events'].items():
+                for ert_name, ert_node in dst_node['events'].items():
                     try:
-                        self._resolve_ft_alias_from(ft_aliases_node, ev_type_node, 'context-type')
-                        self._resolve_ft_alias_from(ft_aliases_node, ev_type_node, 'payload-type')
+                        self._resolve_ft_alias_from(ft_aliases_node, ert_node, 'context-type')
+                        self._resolve_ft_alias_from(ft_aliases_node, ert_node, 'payload-type')
                     except _ConfigurationParseError as exc:
-                        _append_error_ctx(exc, f'Event type `{ev_type_name}`')
+                        _append_error_ctx(exc, f'Event record type `{ert_name}`')
             except _ConfigurationParseError as exc:
-                _append_error_ctx(exc, f'Stream type `{stream_type_name}`')
+                _append_error_ctx(exc, f'Data stream type `{dst_name}`')
 
         # remove the (now unneeded) `type-aliases` node
         del meta_node['type-aliases']
@@ -650,14 +648,14 @@ class _Parser(config_parse_common._Parser):
         meta_node = self._root_node['metadata']
         self._apply_ft_inheritance(meta_node['trace'], 'packet-header-type')
 
-        for stream_type_node in meta_node['streams'].values():
-            self._apply_ft_inheritance(stream_type_node, 'packet-context-type')
-            self._apply_ft_inheritance(stream_type_node, 'event-header-type')
-            self._apply_ft_inheritance(stream_type_node, 'event-context-type')
+        for dst_node in meta_node['streams'].values():
+            self._apply_ft_inheritance(dst_node, 'packet-context-type')
+            self._apply_ft_inheritance(dst_node, 'event-header-type')
+            self._apply_ft_inheritance(dst_node, 'event-context-type')
 
-            for ev_type_node in stream_type_node['events'].values():
-                self._apply_ft_inheritance(ev_type_node, 'context-type')
-                self._apply_ft_inheritance(ev_type_node, 'payload-type')
+            for ert_node in dst_node['events'].values():
+                self._apply_ft_inheritance(ert_node, 'context-type')
+                self._apply_ft_inheritance(ert_node, 'payload-type')
 
     # Calls _expand_ft_aliases() and _apply_fts_inheritance() if the
     # metadata node has a `type-aliases` property.
@@ -681,34 +679,34 @@ class _Parser(config_parse_common._Parser):
         # next, apply inheritance to create effective field types
         self._apply_fts_inheritance()
 
-    # Processes the inclusions of the event type node `ev_type_node`,
+    # Processes the inclusions of the event record type node `ert_node`,
     # returning the effective node.
-    def _process_ev_type_node_include(self, ev_type_node: _MapNode) -> _MapNode:
-        # Make sure the event type node is valid for the inclusion
-        # processing stage.
-        self._schema_validator.validate(ev_type_node, 'config/2/event-type-pre-include')
+    def _process_ert_node_include(self, ert_node: _MapNode) -> _MapNode:
+        # Make sure the event record type node is valid for the
+        # inclusion processing stage.
+        self._schema_validator.validate(ert_node, 'config/2/ert-pre-include')
 
         # process inclusions
-        return self._process_node_include(ev_type_node, self._process_ev_type_node_include)
+        return self._process_node_include(ert_node, self._process_ert_node_include)
 
-    # Processes the inclusions of the stream type node
-    # `stream_type_node`, returning the effective node.
-    def _process_stream_type_node_include(self, stream_type_node: _MapNode) -> _MapNode:
-        def process_children_include(stream_type_node):
+    # Processes the inclusions of the data stream type node `dst_node`,
+    # returning the effective node.
+    def _process_dst_node_include(self, dst_node: _MapNode) -> _MapNode:
+        def process_children_include(dst_node):
             prop_name = 'events'
 
-            if prop_name in stream_type_node:
-                ev_types_node = stream_type_node[prop_name]
+            if prop_name in dst_node:
+                erts_node = dst_node[prop_name]
 
-                for key in list(ev_types_node):
-                    ev_types_node[key] = self._process_ev_type_node_include(ev_types_node[key])
+                for key in list(erts_node):
+                    erts_node[key] = self._process_ert_node_include(erts_node[key])
 
-        # Make sure the stream type node is valid for the inclusion
+        # Make sure the data stream type node is valid for the inclusion
         # processing stage.
-        self._schema_validator.validate(stream_type_node, 'config/2/stream-type-pre-include')
+        self._schema_validator.validate(dst_node, 'config/2/dst-pre-include')
 
         # process inclusions
-        return self._process_node_include(stream_type_node, self._process_stream_type_node_include,
+        return self._process_node_include(dst_node, self._process_dst_node_include,
                                           process_children_include)
 
     # Processes the inclusions of the trace type node `trace_type_node`,
@@ -751,10 +749,10 @@ class _Parser(config_parse_common._Parser):
             prop_name = 'streams'
 
             if prop_name in meta_node:
-                stream_types_node = meta_node[prop_name]
+                dsts_node = meta_node[prop_name]
 
-                for key in list(stream_types_node):
-                    stream_types_node[key] = self._process_stream_type_node_include(stream_types_node[key])
+                for key in list(dsts_node):
+                    dsts_node[key] = self._process_dst_node_include(dsts_node[key])
 
         # Make sure the metadata node is valid for the inclusion
         # processing stage.
@@ -769,19 +767,20 @@ class _Parser(config_parse_common._Parser):
     def _process_config_includes(self):
         # Process inclusions in this order:
         #
-        # 1. Clock type node, event type nodes, and trace type nodes
-        #    (the order between those is not important).
+        # 1. Clock type node, event record type nodes, and trace type
+        #    nodes (the order between those is not important).
         #
-        # 2. Stream type nodes.
+        # 2. Data stream type nodes.
         #
         # 3. Metadata node.
         #
         # This is because:
         #
         # * A metadata node can include clock type nodes, a trace type
-        #   node, stream type nodes, and event type nodes (indirectly).
+        #   node, data stream type nodes, and event record type nodes
+        #   (indirectly).
         #
-        # * A stream type node can include event type nodes.
+        # * A data stream type node can include event record type nodes.
         #
         # First, make sure the configuration node itself is valid for
         # the inclusion processing stage.
@@ -836,15 +835,15 @@ class _Parser(config_parse_common._Parser):
         # packet header and packet context field type member nodes (for
         # example, `stream_id`, `packet_size`, or `timestamp_end`) to
         # set the `$features` properties of barectf 3 trace type and
-        # stream type nodes. Those field type nodes can be aliases,
+        # data stream type nodes. Those field type nodes can be aliases,
         # contain aliases, or inherit from other nodes.
         self._expand_fts()
 
         # Validate the whole, (almost) effective configuration node.
         #
         # It's almost effective because the `log-level` property of
-        # event type nodes can be log level aliases. Log level aliases
-        # are also a feature of a barectf 3 configuration node,
+        # event record type nodes can be log level aliases. Log level
+        # aliases are also a feature of a barectf 3 configuration node,
         # therefore this is compatible.
         self._schema_validator.validate(self._root_node, 'config/2/config')
 
index 99204448fa526e061d6cb8af11c0658d34a6b171..771bf03a606dbfcb3ad6564acad4bcfed90fe144 100644 (file)
@@ -344,35 +344,37 @@ class _Parser(barectf_config_parse_common._Parser):
 
         return Count(len(members_node))
 
-    # Creates an event type from the event type node `ev_type_node`
-    # named `name`.
+    # Creates an event record type from the event record type node
+    # `ert_node` named `name`.
     #
-    # `ev_member_count` is the total number of structure field type
-    # members within the event type so far (from the common part in its
-    # stream type). For example, if the stream type has a event header
-    # field type with `id` and `timestamp` members, then
-    # `ev_member_count` is 2.
-    def _create_ev_type(self, name: str, ev_type_node: _MapNode, ev_member_count: Count) -> barectf_config.EventType:
+    # `ert_member_count` is the total number of structure field type
+    # members within the event record type so far (from the common part
+    # in its data stream type). For example, if the data stream type has
+    # an event record header field type with `id` and `timestamp`
+    # members, then `ert_member_count` is 2.
+    def _create_ert(self, name: str, ert_node: _MapNode,
+                        ert_member_count: Count) -> barectf_config.EventRecordType:
         try:
-            self._validate_iden(name, '`name` property', 'event type name')
+            self._validate_iden(name, '`name` property', 'event record type name')
 
-            # make sure the event type is not empty
+            # make sure the event record type is not empty
             spec_ctx_ft_prop_name = 'specific-context-field-type'
             payload_ft_prop_name = 'payload-field-type'
-            ev_member_count = Count(ev_member_count + self._total_struct_ft_node_members(ev_type_node.get(spec_ctx_ft_prop_name)))
-            ev_member_count = Count(ev_member_count + self._total_struct_ft_node_members(ev_type_node.get(payload_ft_prop_name)))
+            ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(spec_ctx_ft_prop_name)))
+            ert_member_count = Count(ert_member_count + self._total_struct_ft_node_members(ert_node.get(payload_ft_prop_name)))
 
-            if ev_member_count == 0:
-                raise _ConfigurationParseError('Event type', 'Event type is empty (no members).')
+            if ert_member_count == 0:
+                raise _ConfigurationParseError('Event record type',
+                                               'Event record type is empty (no members).')
 
-            # create event type
-            return barectf_config.EventType(name, ev_type_node.get('log-level'),
-                                            self._try_create_struct_ft(ev_type_node,
+            # create event record type
+            return barectf_config.EventRecordType(name, ert_node.get('log-level'),
+                                                  self._try_create_struct_ft(ert_node,
                                                                        spec_ctx_ft_prop_name),
-                                            self._try_create_struct_ft(ev_type_node,
+                                                  self._try_create_struct_ft(ert_node,
                                                                        payload_ft_prop_name))
         except _ConfigurationParseError as exc:
-            _append_error_ctx(exc, f'Event type `{name}`')
+            _append_error_ctx(exc, f'Event record type `{name}`')
 
             # satisfy static type checker (never reached)
             raise
@@ -412,15 +414,15 @@ class _Parser(barectf_config_parse_common._Parser):
         assert type(ft_node) is collections.OrderedDict
         return self._create_fts(ft_node)[0]
 
-    def _create_stream_type(self, name: str, stream_type_node: _MapNode) -> barectf_config.StreamType:
+    def _create_dst(self, name: str, dst_node: _MapNode) -> barectf_config.DataStreamType:
         try:
-            # validate stream type's name
-            self._validate_iden(name, '`name` property', 'stream type name')
+            # validate data stream type's name
+            self._validate_iden(name, '`name` property', 'data stream type name')
 
             # get default clock type, if any
             def_clk_type = None
             prop_name = '$default-clock-type-name'
-            def_clk_type_name = stream_type_node.get(prop_name)
+            def_clk_type_name = dst_node.get(prop_name)
 
             if def_clk_type_name is not None:
                 try:
@@ -433,24 +435,25 @@ class _Parser(barectf_config_parse_common._Parser):
             pkt_content_size_ft = barectf_config.DEFAULT_FIELD_TYPE
             pkt_beginning_time_ft = None
             pkt_end_time_ft = None
-            pkt_discarded_events_counter_ft = None
-            ev_type_id_ft = barectf_config.DEFAULT_FIELD_TYPE
-            ev_time_ft = None
+            pkt_discarded_er_counter_snap_ft = None
+            ert_id_ft = barectf_config.DEFAULT_FIELD_TYPE
+            ert_time_ft = None
 
             if def_clk_type is not None:
-                # The stream type has a default clock type. Initialize
-                # the packet beginning time, packet end time, and event
-                # time field types to default field types.
+                # The data stream type has a default clock type.
+                # Initialize the packet beginning time, packet end time,
+                # and event record time field types to default field
+                # types.
                 #
-                # This means your stream type node only needs a default
-                # clock type name to enable those features
+                # This means your data stream type node only needs a
+                # default clock type name to enable those features
                 # automatically. Those features do not add any parameter
-                # to the tracing event functions.
+                # to the event tracing functions.
                 pkt_beginning_time_ft = barectf_config.DEFAULT_FIELD_TYPE
                 pkt_end_time_ft = barectf_config.DEFAULT_FIELD_TYPE
-                ev_time_ft = barectf_config.DEFAULT_FIELD_TYPE
+                ert_time_ft = barectf_config.DEFAULT_FIELD_TYPE
 
-            features_node = stream_type_node.get('$features')
+            features_node = dst_node.get('$features')
 
             if features_node is not None:
                 # create packet feature field types
@@ -465,48 +468,48 @@ class _Parser(barectf_config_parse_common._Parser):
                                                              pkt_beginning_time_ft)
                     pkt_end_time_ft = self._feature_ft(pkt_node, 'end-time-field-type',
                                                        pkt_end_time_ft)
-                    pkt_discarded_events_counter_ft = self._feature_ft(pkt_node,
-                                                                       'discarded-events-counter-field-type',
-                                                                       pkt_discarded_events_counter_ft)
+                    pkt_discarded_er_counter_snap_ft = self._feature_ft(pkt_node,
+                                                                   'discarded-event-records-counter-snapshot-field-type',
+                                                                   pkt_discarded_er_counter_snap_ft)
 
-                # create event feature field types
-                ev_node = features_node.get('event')
+                # create event record feature field types
+                er_node = features_node.get('event-record')
                 type_id_ft_prop_name = 'type-id-field-type'
 
-                if ev_node is not None:
-                    ev_type_id_ft = self._feature_ft(ev_node, type_id_ft_prop_name, ev_type_id_ft)
-                    ev_time_ft = self._feature_ft(ev_node, 'time-field-type', ev_time_ft)
+                if er_node is not None:
+                    ert_id_ft = self._feature_ft(er_node, type_id_ft_prop_name, ert_id_ft)
+                    ert_time_ft = self._feature_ft(er_node, 'time-field-type', ert_time_ft)
 
-            ev_types_prop_name = 'event-types'
-            ev_type_count = len(stream_type_node[ev_types_prop_name])
+            erts_prop_name = 'event-record-types'
+            ert_count = len(dst_node[erts_prop_name])
 
             try:
-                if ev_type_id_ft is None and ev_type_count > 1:
+                if ert_id_ft is None and ert_count > 1:
                     raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
-                                                   'Event type ID field type feature is required because stream type has more than one event type')
+                                                   'Event record type ID field type feature is required because data stream type has more than one event record type')
 
-                if isinstance(ev_type_id_ft, barectf_config._IntegerFieldType):
-                    ev_type_id_int_ft = typing.cast(barectf_config._IntegerFieldType, ev_type_id_ft)
+                if isinstance(ert_id_ft, barectf_config._IntegerFieldType):
+                    ert_id_int_ft = typing.cast(barectf_config._IntegerFieldType, ert_id_ft)
 
-                    if ev_type_count > (1 << ev_type_id_int_ft.size):
+                    if ert_count > (1 << ert_id_int_ft.size):
                         raise _ConfigurationParseError(f'`{type_id_ft_prop_name}` property',
-                                                       f'Field type\'s size ({ev_type_id_int_ft.size} bits) is too small to accomodate {ev_type_count} event types')
+                                                       f'Field type\'s size ({ert_id_int_ft.size} bits) is too small to accommodate {ert_count} event record types')
             except _ConfigurationParseError as exc:
-                exc._append_ctx('`event` property')
+                exc._append_ctx('`event-record` property')
                 _append_error_ctx(exc, '`$features` property')
 
-            pkt_features = barectf_config.StreamTypePacketFeatures(pkt_total_size_ft,
-                                                                   pkt_content_size_ft,
-                                                                   pkt_beginning_time_ft,
-                                                                   pkt_end_time_ft,
-                                                                   pkt_discarded_events_counter_ft)
-            ev_features = barectf_config.StreamTypeEventFeatures(ev_type_id_ft, ev_time_ft)
-            features = barectf_config.StreamTypeFeatures(pkt_features, ev_features)
+            pkt_features = barectf_config.DataStreamTypePacketFeatures(pkt_total_size_ft,
+                                                                       pkt_content_size_ft,
+                                                                       pkt_beginning_time_ft,
+                                                                       pkt_end_time_ft,
+                                                                       pkt_discarded_er_counter_snap_ft)
+            er_features = barectf_config.DataStreamTypeEventRecordFeatures(ert_id_ft, ert_time_ft)
+            features = barectf_config.DataStreamTypeFeatures(pkt_features, er_features)
 
             # create packet context (structure) field type extra members
             pkt_ctx_ft_extra_members = None
             prop_name = 'packet-context-field-type-extra-members'
-            pkt_ctx_ft_extra_members_node = stream_type_node.get(prop_name)
+            pkt_ctx_ft_extra_members_node = dst_node.get(prop_name)
 
             if pkt_ctx_ft_extra_members_node is not None:
                 pkt_ctx_ft_extra_members = self._create_struct_ft_members(pkt_ctx_ft_extra_members_node,
@@ -527,30 +530,30 @@ class _Parser(barectf_config_parse_common._Parser):
                         raise _ConfigurationParseError(f'`{prop_name}` property',
                                                        f'Packet context field type member name `{member_name}` is reserved.')
 
-            # create event types
-            ev_header_common_ctx_member_count = Count(0)
+            # create event record types
+            er_header_common_ctx_member_count = Count(0)
 
-            if ev_features.type_id_field_type is not None:
-                ev_header_common_ctx_member_count = Count(ev_header_common_ctx_member_count + 1)
+            if er_features.type_id_field_type is not None:
+                er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
 
-            if ev_features.time_field_type is not None:
-                ev_header_common_ctx_member_count = Count(ev_header_common_ctx_member_count + 1)
+            if er_features.time_field_type is not None:
+                er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + 1)
 
-            ev_common_ctx_ft_prop_name = 'event-common-context-field-type'
-            ev_common_ctx_ft_node = stream_type_node.get(ev_common_ctx_ft_prop_name)
-            ev_header_common_ctx_member_count = Count(ev_header_common_ctx_member_count + self._total_struct_ft_node_members(ev_common_ctx_ft_node))
-            ev_types = set()
+            er_common_ctx_ft_prop_name = 'event-record-common-context-field-type'
+            er_common_ctx_ft_node = dst_node.get(er_common_ctx_ft_prop_name)
+            er_header_common_ctx_member_count = Count(er_header_common_ctx_member_count + self._total_struct_ft_node_members(er_common_ctx_ft_node))
+            erts = set()
 
-            for ev_name, ev_type_node in stream_type_node[ev_types_prop_name].items():
-                ev_types.add(self._create_ev_type(ev_name, ev_type_node, ev_header_common_ctx_member_count))
+            for ert_name, ert_node in dst_node[erts_prop_name].items():
+                erts.add(self._create_ert(ert_name, ert_node, er_header_common_ctx_member_count))
 
-            # create stream type
-            return barectf_config.StreamType(name, ev_types, def_clk_type, features,
-                                             pkt_ctx_ft_extra_members,
-                                             self._try_create_struct_ft(stream_type_node,
-                                                                        ev_common_ctx_ft_prop_name))
+            # create data stream type
+            return barectf_config.DataStreamType(name, erts, def_clk_type, features,
+                                                 pkt_ctx_ft_extra_members,
+                                                 self._try_create_struct_ft(dst_node,
+                                                                            er_common_ctx_ft_prop_name))
         except _ConfigurationParseError as exc:
-            _append_error_ctx(exc, f'Stream type `{name}`')
+            _append_error_ctx(exc, f'Data stream type `{name}`')
 
             # satisfy static type checker (never reached)
             raise
@@ -594,7 +597,7 @@ class _Parser(barectf_config_parse_common._Parser):
 
     def _create_trace_type(self):
         try:
-            # create clock types (_create_stream_type() needs them)
+            # create clock types (_create_dst() needs them)
             self._create_clk_types()
 
             # get UUID
@@ -610,7 +613,7 @@ class _Parser(barectf_config_parse_common._Parser):
             # create feature field types
             magic_ft = barectf_config.DEFAULT_FIELD_TYPE
             uuid_ft = None
-            stream_type_id_ft = barectf_config.DEFAULT_FIELD_TYPE
+            dst_id_ft = barectf_config.DEFAULT_FIELD_TYPE
 
             if trace_type_uuid is not None:
                 # Trace type has a UUID: initialize UUID field type to
@@ -618,39 +621,38 @@ class _Parser(barectf_config_parse_common._Parser):
                 uuid_ft = barectf_config.DEFAULT_FIELD_TYPE
 
             features_node = self._trace_type_node.get('$features')
-            stream_type_id_ft_prop_name = 'stream-type-id-field-type'
+            dst_id_ft_prop_name = 'data-stream-type-id-field-type'
 
             if features_node is not None:
                 magic_ft = self._feature_ft(features_node, 'magic-field-type',
                                             magic_ft)
                 uuid_ft = self._feature_ft(features_node, 'uuid-field-type', uuid_ft)
-                stream_type_id_ft = self._feature_ft(features_node, stream_type_id_ft_prop_name,
-                                                     stream_type_id_ft)
+                dst_id_ft = self._feature_ft(features_node, dst_id_ft_prop_name, dst_id_ft)
 
-            stream_types_prop_name = 'stream-types'
-            stream_type_count = len(self._trace_type_node[stream_types_prop_name])
+            dsts_prop_name = 'data-stream-types'
+            dst_count = len(self._trace_type_node[dsts_prop_name])
 
             try:
-                if stream_type_id_ft is None and stream_type_count > 1:
-                    raise _ConfigurationParseError(f'`{stream_type_id_ft_prop_name}` property',
-                                                   'Stream type ID field type feature is required because trace type has more than one stream type')
+                if dst_id_ft is None and dst_count > 1:
+                    raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
+                                                   'Data stream type ID field type feature is required because trace type has more than one data stream type')
 
-                if isinstance(stream_type_id_ft, barectf_config._FieldType) and stream_type_count > (1 << stream_type_id_ft.size):
-                    raise _ConfigurationParseError(f'`{stream_type_id_ft_prop_name}` property',
-                                                   f'Field type\'s size ({stream_type_id_ft.size} bits) is too small to accomodate {stream_type_count} stream types')
+                if isinstance(dst_id_ft, barectf_config._FieldType) and dst_count > (1 << dst_id_ft.size):
+                    raise _ConfigurationParseError(f'`{dst_id_ft_prop_name}` property',
+                                                   f'Field type\'s size ({dst_id_ft.size} bits) is too small to accommodate {dst_count} data stream types')
             except _ConfigurationParseError as exc:
                 _append_error_ctx(exc, '`$features` property')
 
-            features = barectf_config.TraceTypeFeatures(magic_ft, uuid_ft, stream_type_id_ft)
+            features = barectf_config.TraceTypeFeatures(magic_ft, uuid_ft, dst_id_ft)
 
-            # create stream types
-            stream_types = set()
+            # create data stream types
+            dsts = set()
 
-            for stream_name, stream_type_node in self._trace_type_node[stream_types_prop_name].items():
-                stream_types.add(self._create_stream_type(stream_name, stream_type_node))
+            for dst_name, dst_node in self._trace_type_node[dsts_prop_name].items():
+                dsts.add(self._create_dst(dst_name, dst_node))
 
             # create trace type
-            return barectf_config.TraceType(stream_types, trace_type_uuid, features)
+            return barectf_config.TraceType(dsts, trace_type_uuid, features)
         except _ConfigurationParseError as exc:
             _append_error_ctx(exc, 'Trace type')
 
@@ -679,21 +681,21 @@ class _Parser(barectf_config_parse_common._Parser):
         # create trace first
         trace = self._create_trace()
 
-        # find default stream type, if any
-        def_stream_type = None
+        # find default data stream type, if any
+        def_dst = None
 
-        for stream_type_name, stream_type_node in self._trace_type_node['stream-types'].items():
+        for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
             prop_name = '$is-default'
-            is_default = stream_type_node.get(prop_name)
+            is_default = dst_node.get(prop_name)
 
             if is_default is True:
-                if def_stream_type is not None:
+                if def_dst is not None:
                     exc = _ConfigurationParseError(f'`{prop_name}` property',
-                                                   f'Duplicate default stream type (`{def_stream_type.name}`)')
-                    exc._append_ctx(f'Stream type `{stream_type_name}`')
+                                                   f'Duplicate default data stream type (`{def_dst.name}`)')
+                    exc._append_ctx(f'Data stream type `{dst_name}`')
                     _append_error_ctx(exc, 'Trace type')
 
-                def_stream_type = trace.type.stream_type(stream_type_name)
+                def_dst = trace.type.data_stream_type(dst_name)
 
         # create clock type C type mapping
         clk_types_node = self._trace_type_node.get('clock-types')
@@ -702,19 +704,19 @@ class _Parser(barectf_config_parse_common._Parser):
         if clk_types_node is not None:
             clk_type_c_types = collections.OrderedDict()
 
-            for stream_type in trace.type.stream_types:
-                if stream_type.default_clock_type is None:
+            for dst in trace.type.data_stream_types:
+                if dst.default_clock_type is None:
                     continue
 
-                clk_type_node = clk_types_node[stream_type.default_clock_type.name]
+                clk_type_node = clk_types_node[dst.default_clock_type.name]
                 c_type = clk_type_node.get('$c-type')
 
                 if c_type is not None:
-                    clk_type_c_types[stream_type.default_clock_type] = c_type
+                    clk_type_c_types[dst.default_clock_type] = c_type
 
         # create options
         iden_prefix_def = False
-        def_stream_type_name_def = False
+        def_dst_name_def = False
         opts_node = self.config_node.get('options')
         iden_prefix = 'barectf_'
         file_name_prefix = 'barectf'
@@ -737,13 +739,13 @@ class _Parser(barectf_config_parse_common._Parser):
 
                 if header_opts is not None:
                     iden_prefix_def = header_opts.get('identifier-prefix-definition', False)
-                    def_stream_type_name_def = header_opts.get('default-stream-type-name-definition',
-                                                               False)
+                    def_dst_name_def = header_opts.get('default-data-stream-type-name-definition',
+                                                       False)
 
         header_opts = barectf_config.ConfigurationCodeGenerationHeaderOptions(iden_prefix_def,
-                                                                              def_stream_type_name_def)
+                                                                              def_dst_name_def)
         cg_opts = barectf_config.ConfigurationCodeGenerationOptions(iden_prefix, file_name_prefix,
-                                                                    def_stream_type, header_opts,
+                                                                    def_dst, header_opts,
                                                                     clk_type_c_types)
         opts = barectf_config.ConfigurationOptions(cg_opts)
 
@@ -773,8 +775,8 @@ class _Parser(barectf_config_parse_common._Parser):
 
         ft_aliases_node = self._trace_type_node['$field-type-aliases']
 
-        # Expand field type aliases within trace, stream, and event type
-        # nodes.
+        # Expand field type aliases within trace, data stream, and event
+        # record type nodes.
         features_prop_name = '$features'
 
         try:
@@ -784,15 +786,15 @@ class _Parser(barectf_config_parse_common._Parser):
                 try:
                     resolve_ft_alias_from(features_node, 'magic-field-type')
                     resolve_ft_alias_from(features_node, 'uuid-field-type')
-                    resolve_ft_alias_from(features_node, 'stream-type-id-field-type')
+                    resolve_ft_alias_from(features_node, 'data-stream-type-id-field-type')
                 except _ConfigurationParseError as exc:
                     _append_error_ctx(exc, f'`{features_prop_name}` property')
         except _ConfigurationParseError as exc:
             _append_error_ctx(exc, 'Trace type')
 
-        for stream_type_name, stream_type_node in self._trace_type_node['stream-types'].items():
+        for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
             try:
-                features_node = stream_type_node.get(features_prop_name)
+                features_node = dst_node.get(features_prop_name)
 
                 if features_node is not None:
                     try:
@@ -806,24 +808,24 @@ class _Parser(barectf_config_parse_common._Parser):
                                 resolve_ft_alias_from(pkt_node, 'beginning-time-field-type')
                                 resolve_ft_alias_from(pkt_node, 'end-time-field-type')
                                 resolve_ft_alias_from(pkt_node,
-                                                      'discarded-events-counter-field-type')
+                                                      'discarded-event-records-counter-snapshot-field-type')
                             except _ConfigurationParseError as exc:
                                 _append_error_ctx(exc, f'`{pkt_prop_name}` property')
 
-                        ev_prop_name = 'event'
-                        ev_node = features_node.get(ev_prop_name)
+                        er_prop_name = 'event-record'
+                        er_node = features_node.get(er_prop_name)
 
-                        if ev_node is not None:
+                        if er_node is not None:
                             try:
-                                resolve_ft_alias_from(ev_node, 'type-id-field-type')
-                                resolve_ft_alias_from(ev_node, 'time-field-type')
+                                resolve_ft_alias_from(er_node, 'type-id-field-type')
+                                resolve_ft_alias_from(er_node, 'time-field-type')
                             except _ConfigurationParseError as exc:
-                                _append_error_ctx(exc, f'`{ev_prop_name}` property')
+                                _append_error_ctx(exc, f'`{er_prop_name}` property')
                     except _ConfigurationParseError as exc:
                         _append_error_ctx(exc, f'`{features_prop_name}` property')
 
                 pkt_ctx_ft_extra_members_prop_name = 'packet-context-field-type-extra-members'
-                pkt_ctx_ft_extra_members_node = stream_type_node.get(pkt_ctx_ft_extra_members_prop_name)
+                pkt_ctx_ft_extra_members_node = dst_node.get(pkt_ctx_ft_extra_members_prop_name)
 
                 if pkt_ctx_ft_extra_members_node is not None:
                     try:
@@ -833,16 +835,16 @@ class _Parser(barectf_config_parse_common._Parser):
                     except _ConfigurationParseError as exc:
                         _append_error_ctx(exc, f'`{pkt_ctx_ft_extra_members_prop_name}` property')
 
-                resolve_ft_alias_from(stream_type_node, 'event-common-context-field-type')
+                resolve_ft_alias_from(dst_node, 'event-record-common-context-field-type')
 
-                for ev_type_name, ev_type_node in stream_type_node['event-types'].items():
+                for ert_name, ert_node in dst_node['event-record-types'].items():
                     try:
-                        resolve_ft_alias_from(ev_type_node, 'specific-context-field-type')
-                        resolve_ft_alias_from(ev_type_node, 'payload-field-type')
+                        resolve_ft_alias_from(ert_node, 'specific-context-field-type')
+                        resolve_ft_alias_from(ert_node, 'payload-field-type')
                     except _ConfigurationParseError as exc:
-                        _append_error_ctx(exc, f'Event type `{ev_type_name}`')
+                        _append_error_ctx(exc, f'Event record type `{ert_name}`')
             except _ConfigurationParseError as exc:
-                _append_error_ctx(exc, f'Stream type `{stream_type_name}`')
+                _append_error_ctx(exc, f'Data stream type `{dst_name}`')
 
         # remove the (now unneeded) `$field-type-aliases` property
         del self._trace_type_node['$field-type-aliases']
@@ -870,10 +872,10 @@ class _Parser(barectf_config_parse_common._Parser):
         if features_node is not None:
             apply_ft_inheritance(features_node, 'magic-field-type')
             apply_ft_inheritance(features_node, 'uuid-field-type')
-            apply_ft_inheritance(features_node, 'stream-type-id-field-type')
+            apply_ft_inheritance(features_node, 'data-stream-type-id-field-type')
 
-        for stream_type_node in self._trace_type_node['stream-types'].values():
-            features_node = stream_type_node.get(features_prop_name)
+        for dst_node in self._trace_type_node['data-stream-types'].values():
+            features_node = dst_node.get(features_prop_name)
 
             if features_node is not None:
                 pkt_node = features_node.get('packet')
@@ -883,26 +885,26 @@ class _Parser(barectf_config_parse_common._Parser):
                     apply_ft_inheritance(pkt_node, 'content-size-field-type')
                     apply_ft_inheritance(pkt_node, 'beginning-time-field-type')
                     apply_ft_inheritance(pkt_node, 'end-time-field-type')
-                    apply_ft_inheritance(pkt_node, 'discarded-events-counter-field-type')
+                    apply_ft_inheritance(pkt_node, 'discarded-event-records-counter-snapshot-field-type')
 
-                ev_node = features_node.get('event')
+                er_node = features_node.get('event-record')
 
-                if ev_node is not None:
-                    apply_ft_inheritance(ev_node, 'type-id-field-type')
-                    apply_ft_inheritance(ev_node, 'time-field-type')
+                if er_node is not None:
+                    apply_ft_inheritance(er_node, 'type-id-field-type')
+                    apply_ft_inheritance(er_node, 'time-field-type')
 
-            pkt_ctx_ft_extra_members_node = stream_type_node.get('packet-context-field-type-extra-members')
+            pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
 
             if pkt_ctx_ft_extra_members_node is not None:
                 for member_node in pkt_ctx_ft_extra_members_node:
                     member_node = list(member_node.values())[0]
                     apply_ft_inheritance(member_node, 'field-type')
 
-            apply_ft_inheritance(stream_type_node, 'event-common-context-field-type')
+            apply_ft_inheritance(dst_node, 'event-record-common-context-field-type')
 
-            for ev_type_node in stream_type_node['event-types'].values():
-                apply_ft_inheritance(ev_type_node, 'specific-context-field-type')
-                apply_ft_inheritance(ev_type_node, 'payload-field-type')
+            for ert_node in dst_node['event-record-types'].values():
+                apply_ft_inheritance(ert_node, 'specific-context-field-type')
+                apply_ft_inheritance(ert_node, 'payload-field-type')
 
     # Normalizes structure field type member nodes.
     #
@@ -958,10 +960,10 @@ class _Parser(barectf_config_parse_common._Parser):
         if features_node is not None:
             normalize_struct_ft_member_nodes(features_node, 'magic-field-type')
             normalize_struct_ft_member_nodes(features_node, 'uuid-field-type')
-            normalize_struct_ft_member_nodes(features_node, 'stream-type-id-field-type')
+            normalize_struct_ft_member_nodes(features_node, 'data-stream-type-id-field-type')
 
-        for stream_type_node in self._trace_type_node['stream-types'].values():
-            features_node = stream_type_node.get(features_prop_name)
+        for dst_node in self._trace_type_node['data-stream-types'].values():
+            features_node = dst_node.get(features_prop_name)
 
             if features_node is not None:
                 pkt_node = features_node.get('packet')
@@ -972,24 +974,24 @@ class _Parser(barectf_config_parse_common._Parser):
                     normalize_struct_ft_member_nodes(pkt_node, 'beginning-time-field-type')
                     normalize_struct_ft_member_nodes(pkt_node, 'end-time-field-type')
                     normalize_struct_ft_member_nodes(pkt_node,
-                                                     'discarded-events-counter-field-type')
+                                                     'discarded-event-records-counter-snapshot-field-type')
 
-                ev_node = features_node.get('event')
+                er_node = features_node.get('event-record')
 
-                if ev_node is not None:
-                    normalize_struct_ft_member_nodes(ev_node, 'type-id-field-type')
-                    normalize_struct_ft_member_nodes(ev_node, 'time-field-type')
+                if er_node is not None:
+                    normalize_struct_ft_member_nodes(er_node, 'type-id-field-type')
+                    normalize_struct_ft_member_nodes(er_node, 'time-field-type')
 
-            pkt_ctx_ft_extra_members_node = stream_type_node.get('packet-context-field-type-extra-members')
+            pkt_ctx_ft_extra_members_node = dst_node.get('packet-context-field-type-extra-members')
 
             if pkt_ctx_ft_extra_members_node is not None:
                 normalize_members_node(pkt_ctx_ft_extra_members_node)
 
-            normalize_struct_ft_member_nodes(stream_type_node, 'event-common-context-field-type')
+            normalize_struct_ft_member_nodes(dst_node, 'event-record-common-context-field-type')
 
-            for ev_type_node in stream_type_node['event-types'].values():
-                normalize_struct_ft_member_nodes(ev_type_node, 'specific-context-field-type')
-                normalize_struct_ft_member_nodes(ev_type_node, 'payload-field-type')
+            for ert_node in dst_node['event-record-types'].values():
+                normalize_struct_ft_member_nodes(ert_node, 'specific-context-field-type')
+                normalize_struct_ft_member_nodes(ert_node, 'payload-field-type')
 
     # Calls _expand_ft_aliases() and _apply_fts_inheritance() if the
     # trace type node has a `$field-type-aliases` property.
@@ -1020,7 +1022,7 @@ class _Parser(barectf_config_parse_common._Parser):
         # next, apply inheritance to create effective field type nodes
         self._apply_fts_inheritance()
 
-    # Substitute the event type node log level aliases with their
+    # Substitute the event record type node log level aliases with their
     # numeric equivalents.
     #
     # Removes the `$log-level-aliases` property of the trace type node.
@@ -1041,12 +1043,12 @@ class _Parser(barectf_config_parse_common._Parser):
             return
 
         # substitute log level aliases
-        for stream_type_name, stream_type_node in self._trace_type_node['stream-types'].items():
+        for dst_name, dst_node in self._trace_type_node['data-stream-types'].items():
             try:
-                for ev_type_name, ev_type_node in stream_type_node['event-types'].items():
+                for ert_name, ert_node in dst_node['event-record-types'].items():
                     try:
                         prop_name = 'log-level'
-                        ll_node = ev_type_node.get(prop_name)
+                        ll_node = ert_node.get(prop_name)
 
                         if ll_node is None:
                             continue
@@ -1056,11 +1058,11 @@ class _Parser(barectf_config_parse_common._Parser):
                                 raise _ConfigurationParseError(f'`{prop_name}` property',
                                                                f'Log level alias `{ll_node}` does not exist')
 
-                            ev_type_node[prop_name] = log_level_aliases_node[ll_node]
+                            ert_node[prop_name] = log_level_aliases_node[ll_node]
                     except _ConfigurationParseError as exc:
-                        _append_error_ctx(exc, f'Event type `{ev_type_name}`')
+                        _append_error_ctx(exc, f'Event record type `{ert_name}`')
             except _ConfigurationParseError as exc:
-                _append_error_ctx(exc, f'Stream type `{stream_type_name}`')
+                _append_error_ctx(exc, f'Data stream type `{dst_name}`')
 
     # Generator of parent node and key pairs for all the nodes,
     # recursively, of `node`.
@@ -1152,34 +1154,34 @@ class _Parser(barectf_config_parse_common._Parser):
         self._target_byte_order_node = self.config_node['target-byte-order']
         self._target_byte_order = self._byte_order_from_node(self._target_byte_order_node)
 
-    # Processes the inclusions of the event type node `ev_type_node`,
-    # returning the effective node.
-    def _process_ev_type_node_include(self, ev_type_node: _MapNode) -> _MapNode:
-        # Make sure the event type node is valid for the inclusion
-        # processing stage.
-        self._schema_validator.validate(ev_type_node, 'config/3/event-type-pre-include')
+    # Processes the inclusions of the event record type node
+    # `ert_node`, returning the effective node.
+    def _process_ert_node_include(self, ert_node: _MapNode) -> _MapNode:
+        # Make sure the event record type node is valid for the
+        # inclusion processing stage.
+        self._schema_validator.validate(ert_node, 'config/3/ert-pre-include')
 
         # process inclusions
-        return self._process_node_include(ev_type_node, self._process_ev_type_node_include)
+        return self._process_node_include(ert_node, self._process_ert_node_include)
 
-    # Processes the inclusions of the stream type node
-    # `stream_type_node`, returning the effective node.
-    def _process_stream_type_node_include(self, stream_type_node: _MapNode) -> _MapNode:
-        def process_children_include(stream_type_node: _MapNode):
-            prop_name = 'event-types'
+    # Processes the inclusions of the data stream type node `dst_node`,
+    # returning the effective node.
+    def _process_dst_node_include(self, dst_node: _MapNode) -> _MapNode:
+        def process_children_include(dst_node: _MapNode):
+            prop_name = 'event-record-types'
 
-            if prop_name in stream_type_node:
-                ev_types_node = stream_type_node[prop_name]
+            if prop_name in dst_node:
+                erts_node = dst_node[prop_name]
 
-                for key in list(ev_types_node):
-                    ev_types_node[key] = self._process_ev_type_node_include(ev_types_node[key])
+                for key in list(erts_node):
+                    erts_node[key] = self._process_ert_node_include(erts_node[key])
 
-        # Make sure the stream type node is valid for the inclusion
+        # Make sure the data stream type node is valid for the inclusion
         # processing stage.
-        self._schema_validator.validate(stream_type_node, 'config/3/stream-type-pre-include')
+        self._schema_validator.validate(dst_node, 'config/3/dst-pre-include')
 
         # process inclusions
-        return self._process_node_include(stream_type_node, self._process_stream_type_node_include,
+        return self._process_node_include(dst_node, self._process_dst_node_include,
                                           process_children_include)
 
     # Processes the inclusions of the clock type node `clk_type_node`,
@@ -1204,13 +1206,13 @@ class _Parser(barectf_config_parse_common._Parser):
                 for key in list(clk_types_node):
                     clk_types_node[key] = self._process_clk_type_node_include(clk_types_node[key])
 
-            prop_name = 'stream-types'
+            prop_name = 'data-stream-types'
 
             if prop_name in trace_type_node:
-                stream_types_node = trace_type_node[prop_name]
+                dsts_node = trace_type_node[prop_name]
 
-                for key in list(stream_types_node):
-                    stream_types_node[key] = self._process_stream_type_node_include(stream_types_node[key])
+                for key in list(dsts_node):
+                    dsts_node[key] = self._process_dst_node_include(dsts_node[key])
 
         # Make sure the trace type node is valid for the inclusion
         # processing stage.
@@ -1240,10 +1242,10 @@ class _Parser(barectf_config_parse_common._Parser):
     def _process_config_includes(self):
         # Process inclusions in this order:
         #
-        # 1. Clock type node and event type nodes (the order between
-        #    those is not important).
+        # 1. Clock type node and event record type nodes (the order
+        #    between those is not important).
         #
-        # 2. Stream type nodes.
+        # 2. Data stream type nodes.
         #
         # 3. Trace type node.
         #
@@ -1252,12 +1254,12 @@ class _Parser(barectf_config_parse_common._Parser):
         # This is because:
         #
         # * A trace node can include a trace type node, clock type
-        #   nodes, stream type nodes, and event type nodes.
+        #   nodes, data stream type nodes, and event record type nodes.
         #
-        # * A trace type node can include clock type nodes, stream type
-        #   nodes, and event type nodes.
+        # * A trace type node can include clock type nodes, data stream
+        #   type nodes, and event record type nodes.
         #
-        # * A stream type node can include event type nodes.
+        # * A data stream type node can include event record type nodes.
         #
         # First, make sure the configuration node itself is valid for
         # the inclusion processing stage.
@@ -1296,9 +1298,9 @@ class _Parser(barectf_config_parse_common._Parser):
         #
         # This process:
         #
-        # 1. Replaces log level aliases in event type nodes with their
-        #    numeric equivalents as found in the `$log-level-aliases`
-        #    property of the trace type node.
+        # 1. Replaces log level aliases in event record type nodes with
+        #    their numeric equivalents as found in the
+        #    `$log-level-aliases` property of the trace type node.
         #
         # 2. Removes the `$log-level-aliases` property from the trace
         #    type node.
index 2a8649a12e32dd14166583f297580b523ac16104..b7ed73354ecec20f2a19ce7205343d10a4ee735a 100644 (file)
@@ -5,7 +5,7 @@
 #
 # * A 32-bit magic number unsigned integer field type.
 # * A UUID static array field type.
-# * An 8-bit stream type ID unsigned integer field type.
+# * An 8-bit data stream type ID unsigned integer field type.
 #
 # The trace type's UUID is automatically generated by barectf.
 
index 7cdcc6e2a9afec57c84cdca0e28a88a47eb87f3b..8cbe8d5a0acb98388aa3c4e838f7f9e9d53cc88d 100644 (file)
@@ -87,11 +87,11 @@ properties:
           packet-header-type:
             $ref: '#/definitions/partial-ft'
       streams:
-        title: Stream types object before field type expansions
+        title: Data stream types object before field type expansions
         type: object
         patternProperties:
           '.*':
-            title: Stream type object before field type expansions
+            title: Data stream type object before field type expansions
             type: object
             properties:
               packet-context-type:
index e7522b641f4bc1b5bc2fff27d057db521f240e2c..d23124ce0eb78066882a2c39e59217c40739d272 100644 (file)
@@ -185,8 +185,8 @@ definitions:
                   $ref: '#/definitions/uint-ft'
                 timestamp:
                   $ref: '#/definitions/uint-ft-ts'
-  stream-type:
-    title: Stream type object
+  dst:
+    title: Data stream type object
     type: object
     properties:
       $default:
@@ -198,19 +198,19 @@ definitions:
       event-context-type:
         $ref: '#/definitions/opt-struct-ft'
       events:
-        title: Event types object
+        title: Event record types object
         type: object
         patternProperties:
           '^[A-Za-z_][A-Za-z0-9_]*$':
-            $ref: '#/definitions/event-type'
+            $ref: '#/definitions/ert'
         additionalProperties: false
         minProperties: 1
     required:
       - packet-context-type
       - events
     additionalProperties: false
-  event-type:
-    title: Event type object
+  ert:
+    title: Event record type object
     type: object
     properties:
       log-level:
@@ -280,11 +280,11 @@ properties:
       $default-stream:
         $ref: https://barectf.org/schemas/config/common/common.json#/definitions/opt-string
       streams:
-        title: Stream types object
+        title: Data stream types object
         type: object
         patternProperties:
           '^[A-Za-z_][A-Za-z0-9_]*$':
-            $ref: '#/definitions/stream-type'
+            $ref: '#/definitions/dst'
         additionalProperties: false
         minProperties: 1
     required:
diff --git a/barectf/schemas/config/2/dst-pre-include.yaml b/barectf/schemas/config/2/dst-pre-include.yaml
new file mode 100644 (file)
index 0000000..6da5445
--- /dev/null
@@ -0,0 +1,36 @@
+# The MIT License (MIT)
+#
+# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+$schema: http://json-schema.org/draft-07/schema#
+$id: https://barectf.org/schemas/config/2/dst-pre-include.json
+title: Data stream type object before inclusions
+type: object
+properties:
+  $include:
+    $ref: https://barectf.org/schemas/config/2/include-prop.json
+  events:
+    title: Event record types object before inclusions
+    type: object
+    patternProperties:
+      '.*':
+        $ref: https://barectf.org/schemas/config/2/ert-pre-include.json
diff --git a/barectf/schemas/config/2/ert-pre-include.yaml b/barectf/schemas/config/2/ert-pre-include.yaml
new file mode 100644 (file)
index 0000000..fa2f67b
--- /dev/null
@@ -0,0 +1,30 @@
+# The MIT License (MIT)
+#
+# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+$schema: http://json-schema.org/draft-07/schema#
+$id: https://barectf.org/schemas/config/2/ert-pre-include.json
+title: Event record type object before inclusions
+type: object
+properties:
+  $include:
+    $ref: https://barectf.org/schemas/config/2/include-prop.json
diff --git a/barectf/schemas/config/2/event-type-pre-include.yaml b/barectf/schemas/config/2/event-type-pre-include.yaml
deleted file mode 100644 (file)
index ccc5d23..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-$schema: http://json-schema.org/draft-07/schema#
-$id: https://barectf.org/schemas/config/2/event-type-pre-include.json
-title: Event type object before inclusions
-type: object
-properties:
-  $include:
-    $ref: https://barectf.org/schemas/config/2/include-prop.json
index fad2816ef9cc4b24502b981edf4d4c5bd63ab73d..5eacd2edfbd0ec2b9b304c5a93035e4fbc696fd8 100644 (file)
@@ -37,8 +37,8 @@ properties:
   trace:
     $ref: https://barectf.org/schemas/config/2/trace-type-pre-include.json
   streams:
-    title: Stream types object before inclusions
+    title: Data stream types object before inclusions
     type: object
     patternProperties:
       '.*':
-        $ref: https://barectf.org/schemas/config/2/stream-type-pre-include.json
+        $ref: https://barectf.org/schemas/config/2/dst-pre-include.json
diff --git a/barectf/schemas/config/2/stream-type-pre-include.yaml b/barectf/schemas/config/2/stream-type-pre-include.yaml
deleted file mode 100644 (file)
index e550d20..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-$schema: http://json-schema.org/draft-07/schema#
-$id: https://barectf.org/schemas/config/2/stream-type-pre-include.json
-title: Stream type object before inclusions
-type: object
-properties:
-  $include:
-    $ref: https://barectf.org/schemas/config/2/include-prop.json
-  events:
-    title: Event types object before inclusions
-    type: object
-    patternProperties:
-      '.*':
-        $ref: https://barectf.org/schemas/config/2/event-type-pre-include.json
index a1e2e7023af954b2fea29e0f8072056e98cc8373..03f3b436547923b1d7ad9dca54a92c0d39c8efc5 100644 (file)
@@ -100,16 +100,16 @@ properties:
                   $ref: '#/definitions/partial-ft'
                 uuid-field-type:
                   $ref: '#/definitions/partial-ft'
-                stream-type-id-field-type:
+                data-stream-type-id-field-type:
                   $ref: '#/definitions/partial-ft'
             else:
               type: 'null'
-          stream-types:
-            title: Stream types object before field type expansions
+          data-stream-types:
+            title: Data stream types object before field type expansions
             type: object
             patternProperties:
               '.*':
-                title: Stream type object before field type expansions
+                title: Data stream type object before field type expansions
                 type: object
                 properties:
                   $features:
@@ -130,11 +130,11 @@ properties:
                                 $ref: '#/definitions/partial-ft'
                               end-time-field-type:
                                 $ref: '#/definitions/partial-ft'
-                              discarded-events-counter-field-type:
+                              discarded-event-records-counter-snapshot-field-type:
                                 $ref: '#/definitions/partial-ft'
                           else:
                             type: 'null'
-                        event:
+                        event-record:
                           if:
                             type: object
                           then:
@@ -158,14 +158,14 @@ properties:
                             $ref: '#/definitions/partial-ft'
                     else:
                       type: 'null'
-                  event-common-context-field-type:
+                  event-record-common-context-field-type:
                     $ref: '#/definitions/partial-ft'
-                  event-types:
-                    title: Event types object before field type expansions
+                  event-record-types:
+                    title: Event record types object before field type expansions
                     type: object
                     patternProperties:
                       '.*':
-                        title: Event type object before field type expansions
+                        title: Event record type object before field type expansions
                         type: object
                         properties:
                           specific-context-field-type:
@@ -173,9 +173,9 @@ properties:
                           payload-field-type:
                             $ref: '#/definitions/partial-ft'
                 required:
-                  - event-types
+                  - event-record-types
           required:
-            - stream-types
+            - data-stream-types
   required:
     - type
 required:
index 2a49a80b2efb2d2123ec1f7055b242951581bdeb..66adc526c3ac53fe5a9104a386863391521f241d 100644 (file)
@@ -36,28 +36,28 @@ properties:
         properties:
           $log-level-aliases:
             $ref: https://barectf.org/schemas/config/common/common.json#/definitions/opt-log-level-aliases-prop
-          stream-types:
-            title: Stream types object before log level alias substitutions
+          data-stream-types:
+            title: Data stream types object before log level alias substitutions
             type: object
             patternProperties:
               '.*':
-                title: Stream type object before log level alias substitutions
+                title: Data stream type object before log level alias substitutions
                 type: object
                 properties:
-                  event-types:
-                    title: Event types object before log level alias substitutions
+                  event-record-types:
+                    title: Event record types object before log level alias substitutions
                     type: object
                     patternProperties:
                       '.*':
-                        title: Event type object before log level alias substitutions
+                        title: Event record type object before log level alias substitutions
                         type: object
                         properties:
                           log-level:
                             $ref: https://barectf.org/schemas/config/common/common.json#/definitions/opt-log-level-or-alias-prop
                 required:
-                  - event-types
+                  - event-record-types
           required:
-            - stream-types
+            - data-stream-types
     required:
       - type
 required:
index 306fed4f8063c9ef668bf6c884047135519ff6d4..84e93e66a819885b6e77d77a99d88686b0d4d3ee 100644 (file)
@@ -134,7 +134,7 @@ definitions:
                   const: false
                 else:
                   type: 'null'
-            stream-type-id-field-type:
+            data-stream-type-id-field-type:
               $ref: '#/definitions/opt-or-def-feature-uint-ft'
           additionalProperties: false
         else:
@@ -146,16 +146,16 @@ definitions:
           '^[A-Za-z_][A-Za-z0-9_]*$':
             $ref: '#/definitions/clock-type'
         additionalProperties: false
-      stream-types:
-        title: Stream types object
+      data-stream-types:
+        title: Data stream types object
         type: object
         patternProperties:
           '^[A-Za-z_][A-Za-z0-9_]*$':
-            $ref: '#/definitions/stream-type'
+            $ref: '#/definitions/dst'
         additionalProperties: false
         minProperties: 1
     required:
-      - stream-types
+      - data-stream-types
     additionalProperties: false
   clock-type:
     title: Clock type object
@@ -176,8 +176,8 @@ definitions:
       $c-type:
         $ref: https://barectf.org/schemas/config/common/common.json#/definitions/opt-string
     additionalProperties: false
-  stream-type:
-    title: Stream type object
+  dst:
+    title: Data stream type object
     type: object
     properties:
       $is-default:
@@ -207,12 +207,12 @@ definitions:
                     $ref: '#/definitions/opt-or-def-feature-uint-ft'
                   end-time-field-type:
                     $ref: '#/definitions/opt-or-def-feature-uint-ft'
-                  discarded-events-counter-field-type:
+                  discarded-event-records-counter-snapshot-field-type:
                     $ref: '#/definitions/opt-or-def-feature-uint-ft'
                 additionalProperties: false
               else:
                 type: 'null'
-            event:
+            event-record:
               if:
                 type: object
               then:
@@ -234,21 +234,21 @@ definitions:
           $ref: https://barectf.org/schemas/config/3/field-type.json#/definitions/struct-ft-members
         else:
           type: 'null'
-      event-common-context-field-type:
+      event-record-common-context-field-type:
         $ref: '#/definitions/opt-struct-ft'
-      event-types:
-        title: Event types object
+      event-record-types:
+        title: Event record types object
         type: object
         patternProperties:
           '^[A-Za-z_][A-Za-z0-9_]*$':
-            $ref: '#/definitions/event-type'
+            $ref: '#/definitions/ert'
         additionalProperties: false
         minProperties: 1
     required:
-      - event-types
+      - event-record-types
     additionalProperties: false
-  event-type:
-    title: Event type object
+  ert:
+    title: Event record type object
     type: object
     properties:
       log-level:
@@ -290,7 +290,7 @@ properties:
             properties:
               identifier-prefix-definition:
                 type: boolean
-              default-stream-type-name-definition:
+              default-data-stream-type-name-definition:
                 type: boolean
             additionalProperties: false
         additionalProperties: false
diff --git a/barectf/schemas/config/3/dst-pre-include.yaml b/barectf/schemas/config/3/dst-pre-include.yaml
new file mode 100644 (file)
index 0000000..0ab43ca
--- /dev/null
@@ -0,0 +1,36 @@
+# The MIT License (MIT)
+#
+# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+$schema: http://json-schema.org/draft-07/schema#
+$id: https://barectf.org/schemas/config/3/dst-pre-include.json
+title: Data stream type object before inclusions
+type: object
+properties:
+  $include:
+    $ref: https://barectf.org/schemas/config/3/include-prop.json
+  event-record-types:
+    title: Event record types object before inclusions
+    type: object
+    patternProperties:
+      '.*':
+        $ref: https://barectf.org/schemas/config/3/ert-pre-include.json
diff --git a/barectf/schemas/config/3/ert-pre-include.yaml b/barectf/schemas/config/3/ert-pre-include.yaml
new file mode 100644 (file)
index 0000000..030fa39
--- /dev/null
@@ -0,0 +1,30 @@
+# The MIT License (MIT)
+#
+# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+$schema: http://json-schema.org/draft-07/schema#
+$id: https://barectf.org/schemas/config/3/ert-pre-include.json
+title: Event record type object before inclusions
+type: object
+properties:
+  $include:
+    $ref: https://barectf.org/schemas/config/3/include-prop.json
diff --git a/barectf/schemas/config/3/event-type-pre-include.yaml b/barectf/schemas/config/3/event-type-pre-include.yaml
deleted file mode 100644 (file)
index 9e16937..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-$schema: http://json-schema.org/draft-07/schema#
-$id: https://barectf.org/schemas/config/3/event-type-pre-include.json
-title: Event type object before inclusions
-type: object
-properties:
-  $include:
-    $ref: https://barectf.org/schemas/config/3/include-prop.json
diff --git a/barectf/schemas/config/3/stream-type-pre-include.yaml b/barectf/schemas/config/3/stream-type-pre-include.yaml
deleted file mode 100644 (file)
index ba9defb..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-# The MIT License (MIT)
-#
-# Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files (the
-# "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to
-# permit persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-$schema: http://json-schema.org/draft-07/schema#
-$id: https://barectf.org/schemas/config/3/stream-type-pre-include.json
-title: Stream type object before inclusions
-type: object
-properties:
-  $include:
-    $ref: https://barectf.org/schemas/config/3/include-prop.json
-  event-types:
-    title: Event types object before inclusions
-    type: object
-    patternProperties:
-      '.*':
-        $ref: https://barectf.org/schemas/config/3/event-type-pre-include.json
index 32499776e7a5c8cfc307177ede0fe61acbebeb68..a445f241ac2bcc591f72434a4401089f74948160 100644 (file)
@@ -34,9 +34,9 @@ properties:
     patternProperties:
       '.*':
         $ref: https://barectf.org/schemas/config/3/clock-type-pre-include.json
-  stream-types:
-    title: Stream types object before inclusions
+  data-stream-types:
+    title: Data stream types object before inclusions
     type: object
     patternProperties:
       '.*':
-        $ref: https://barectf.org/schemas/config/3/stream-type-pre-include.json
+        $ref: https://barectf.org/schemas/config/3/dst-pre-include.json
index ca92b64949846743e366e394892d63639c533661..44e2d0e4429aaa952aaacce2f3088fea0ab55209 100644 (file)
 
 {#
  # Generates the preamble of the packet opening/closing functions for
- # the stream type `stream_type`.
+ # the data stream type `dst`.
  #}
-{% macro open_close_func_preamble(stream_type) %}
+{% macro open_close_func_preamble(dst) %}
 struct {{ prefix }}ctx * const ctx = &sctx->parent;
-{% if stream_type.default_clock_type %}
-const {{ cg_opts.clock_type_c_types[stream_type.default_clock_type] }} ts = ctx->use_cur_last_event_ts ?
+{% if dst.default_clock_type %}
+const {{ cg_opts.clock_type_c_types[dst.default_clock_type] }} ts = ctx->use_cur_last_event_ts ?
        sctx->cur_last_event_ts :
-       ctx->cbs.{{ stream_type.default_clock_type.name }}_clock_get_value(ctx->data);
+       ctx->cbs.{{ dst.default_clock_type.name }}_clock_get_value(ctx->data);
 {% endif %}
 const int saved_in_tracing_section = ctx->in_tracing_section;
 {%- endmacro %}
@@ -53,7 +53,7 @@ const int saved_in_tracing_section = ctx->in_tracing_section;
  #
  # Example:
  #
- #     , ecc_peer_id, ecc_addr, p_msg_id, p_msg
+ #     , ercc_peer_id, ercc_addr, p_msg_id, p_msg
  #}
 {% macro ft_call_params(param_prefix, ft, only_dyn=false) %}
 {% if ft %}
index 7ad777a11d18882c16349ee4968210ce1d072002..0146c1f55c44c0a7cd5afceecdac6c4f91c1a848 100644 (file)
@@ -146,13 +146,13 @@ void _write_c_str(struct {{ ctx_struct_name }} * const ctx, const char * const s
 }
 
 static
-int _reserve_ev_space(void * const vctx, const uint32_t ev_size)
+int _reserve_er_space(void * const vctx, const uint32_t er_size)
 {
        int ret;
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
 
        /* Event _cannot_ fit? */
-       if (ev_size > (ctx->packet_size - ctx->off_content)) {
+       if (er_size > (ctx->packet_size - ctx->off_content)) {
                goto no_space;
        }
 
@@ -160,7 +160,7 @@ int _reserve_ev_space(void * const vctx, const uint32_t ev_size)
        if ({{ prefix }}packet_is_full(ctx)) {
                /* Yes: is the back-end full? */
                if (ctx->cbs.is_backend_full(ctx->data)) {
-                       /* Yes: discard event */
+                       /* Yes: discard event record */
                        goto no_space;
                }
 
@@ -171,7 +171,7 @@ int _reserve_ev_space(void * const vctx, const uint32_t ev_size)
        }
 
        /* Event fits the current packet? */
-       if (ev_size > (ctx->packet_size - ctx->at)) {
+       if (er_size > (ctx->packet_size - ctx->at)) {
                /* No: close packet now */
                ctx->use_cur_last_event_ts = 1;
                ctx->cbs.close_packet(ctx->data);
@@ -179,7 +179,7 @@ int _reserve_ev_space(void * const vctx, const uint32_t ev_size)
 
                /* Is the back-end full? */
                if (ctx->cbs.is_backend_full(ctx->data)) {
-                       /* Yes: discard event */
+                       /* Yes: discard event record */
                        goto no_space;
                }
 
@@ -187,7 +187,7 @@ int _reserve_ev_space(void * const vctx, const uint32_t ev_size)
                ctx->use_cur_last_event_ts = 1;
                ctx->cbs.open_packet(ctx->data);
                ctx->use_cur_last_event_ts = 0;
-               assert(ev_size <= (ctx->packet_size - ctx->at));
+               assert(er_size <= (ctx->packet_size - ctx->at));
        }
 
        ret = 1;
@@ -202,7 +202,7 @@ end:
 }
 
 static
-void _commit_ev(void * const vctx)
+void _commit_er(void * const vctx)
 {
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
 
@@ -229,14 +229,14 @@ void _commit_ev(void * const vctx)
        ctx->use_cur_last_event_ts = 0;
 }
 
-{% for stream_type in cfg.trace.type.stream_types | sort %}
-       {% set def_clk_type = stream_type.default_clock_type %}
-       {% set sctx_name %}{{ prefix }}{{ stream_type.name }}{% endset %}
-       {% set this_stream_ops = stream_ops[stream_type] %}
+{% for dst in cfg.trace.type.data_stream_types | sort %}
+       {% set def_clk_type = dst.default_clock_type %}
+       {% set sctx_name %}{{ prefix }}{{ dst.name }}{% endset %}
+       {% set this_ds_ops = ds_ops[dst] %}
        {% include 'c/open-func-proto.j2' %}
 
 {
-       {{ macros.open_close_func_preamble(stream_type) | indent_tab }}
+       {{ macros.open_close_func_preamble(dst) | indent_tab }}
 
        /*
         * This function is either called by a tracing function, or
@@ -268,13 +268,13 @@ void _commit_ev(void * const vctx)
        }
 
        ctx->at = 0;
-       {% set pkt_header_op = this_stream_ops.pkt_header_op %}
+       {% set pkt_header_op = this_ds_ops.pkt_header_op %}
        {% if pkt_header_op %}
 
-       {{ pkt_header_op.serialize_str(stream_type=stream_type) | indent_tab }}
+       {{ pkt_header_op.serialize_str(dst=dst) | indent_tab }}
        {% endif %}
 
-       {{ this_stream_ops.pkt_ctx_op.serialize_str(stream_type=stream_type) | indent_tab }}
+       {{ this_ds_ops.pkt_ctx_op.serialize_str(dst=dst) | indent_tab }}
 
        /* Save content beginning's offset */
        ctx->off_content = ctx->at;
@@ -292,7 +292,7 @@ end:
        {% include 'c/close-func-proto.j2' %}
 
 {
-       {{ macros.open_close_func_preamble(stream_type) | indent_tab }}
+       {{ macros.open_close_func_preamble(dst) | indent_tab }}
 
        /*
         * This function is either called by a tracing function, or
@@ -326,8 +326,8 @@ end:
        /* Save content size */
        ctx->content_size = ctx->at;
        {% set name = 'timestamp_end' %}
-       {% if name in stream_type._pkt_ctx_ft.members %}
-               {% set op = stream_op_pkt_ctx_op(stream_type, name) %}
+       {% if name in dst._pkt_ctx_ft.members %}
+               {% set op = ds_op_pkt_ctx_op(dst, name) %}
 
        /* Go back to `timestamp_end` field offset */
        ctx->at = sctx->off_{{ op | op_src_var_name }};
@@ -339,8 +339,8 @@ end:
                {% endfilter %}
        {% endif %}
        {% set name = 'content_size' %}
-       {% if name in stream_type._pkt_ctx_ft.members %}
-               {% set op = stream_op_pkt_ctx_op(stream_type, name) %}
+       {% if name in dst._pkt_ctx_ft.members %}
+               {% set op = ds_op_pkt_ctx_op(dst, name) %}
 
        /* Go back to `content_size` field offset */
        ctx->at = sctx->off_{{ op | op_src_var_name }};
@@ -352,8 +352,8 @@ end:
                {% endfilter %}
        {% endif %}
        {% set name = 'events_discarded' %}
-       {% if name in stream_type._pkt_ctx_ft.members %}
-               {% set op = stream_op_pkt_ctx_op(stream_type, name) %}
+       {% if name in dst._pkt_ctx_ft.members %}
+               {% set op = ds_op_pkt_ctx_op(dst, name) %}
 
        /* Go back to `events_discarded` field offset */
        ctx->at = sctx->off_{{ op | op_src_var_name }};
@@ -377,10 +377,10 @@ end:
 end:
        return;
 }
-       {% if stream_type._ev_header_ft %}
+       {% if dst._er_header_ft %}
 
-static void _serialize_ev_header_{{ stream_type.name }}(void * const vctx,
-       const uint32_t ev_type_id)
+static void _serialize_er_header_{{ dst.name }}(void * const vctx,
+       const uint32_t ert_id)
 {
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
                {% if def_clk_type %}
@@ -388,82 +388,82 @@ static void _serialize_ev_header_{{ stream_type.name }}(void * const vctx,
        const {{ cg_opts.clock_type_c_types[def_clk_type] }} ts = sctx->cur_last_event_ts;
                {% endif %}
 
-       {{ this_stream_ops.ev_header_op.serialize_str(stream_type=stream_type) | indent_tab }}
+       {{ this_ds_ops.er_header_op.serialize_str(dst=dst) | indent_tab }}
 }
        {% endif %}
-       {% if stream_type.event_common_context_field_type %}
+       {% if dst.event_record_common_context_field_type %}
 
-static void _serialize_ev_common_ctx_{{ stream_type.name }}(void * const vctx{{ stream_type | serialize_ev_common_ctx_func_params_str(const_params) }})
+static void _serialize_er_common_ctx_{{ dst.name }}(void * const vctx{{ dst | serialize_er_common_ctx_func_params_str(const_params) }})
 {
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
 
-       {{ this_stream_ops.ev_common_ctx_op.serialize_str(stream_type=stream_type) | indent_tab }}
+       {{ this_ds_ops.er_common_ctx_op.serialize_str(dst=dst) | indent_tab }}
 }
        {% endif %}
        {# internal serialization functions #}
-       {% for ev_type in stream_type.event_types | sort %}
+       {% for ert in dst.event_record_types | sort %}
 
-static void _serialize_ev_{{ stream_type.name }}_{{ ev_type.name }}(void * const vctx{{ (stream_type, ev_type) | trace_func_params_str(const_params) }})
+static void _serialize_er_{{ dst.name }}_{{ ert.name }}(void * const vctx{{ (dst, ert) | trace_func_params_str(const_params) }})
 {
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
-               {% if stream_type._ev_header_ft %}
+               {% if dst._er_header_ft %}
 
        /* Serialize header */
-       _serialize_ev_header_{{ stream_type.name }}(ctx, {{ ev_type.id }});
+       _serialize_er_header_{{ dst.name }}(ctx, {{ ert.id }});
                {% endif %}
-               {% if stream_type.event_common_context_field_type %}
+               {% if dst.event_record_common_context_field_type %}
 
        /* Serialize common context */
-                       {% set params = macros.ft_call_params(root_ft_prefixes.ECC, stream_type.event_common_context_field_type) %}
-       _serialize_ev_common_ctx_{{ stream_type.name }}(ctx{{ params }});
+                       {% set params = macros.ft_call_params(root_ft_prefixes.ERCC, dst.event_record_common_context_field_type) %}
+       _serialize_er_common_ctx_{{ dst.name }}(ctx{{ params }});
                {% endif %}
-               {% set this_ev_ops = this_stream_ops.ev_ops[ev_type] %}
-               {% if this_ev_ops.spec_ctx_op %}
+               {% set this_er_ops = this_ds_ops.er_ops[ert] %}
+               {% if this_er_ops.spec_ctx_op %}
 
-       {{ this_ev_ops.spec_ctx_op.serialize_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ this_er_ops.spec_ctx_op.serialize_str(dst=dst, ert=ert) | indent_tab }}
                {% endif %}
-               {% if this_ev_ops.payload_op %}
+               {% if this_er_ops.payload_op %}
 
-       {{ this_ev_ops.payload_op.serialize_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ this_er_ops.payload_op.serialize_str(dst=dst, ert=ert) | indent_tab }}
                {% endif %}
 }
        {% endfor %}
        {# internal size functions #}
-       {% for ev_type in stream_type.event_types | sort %}
-               {% set this_ev_ops = this_stream_ops.ev_ops[ev_type] %}
+       {% for ert in dst.event_record_types | sort %}
+               {% set this_er_ops = this_ds_ops.er_ops[ert] %}
 
-static uint32_t _ev_size_{{ stream_type.name }}_{{ ev_type.name }}(void * const vctx{{ (stream_type, ev_type) | trace_func_params_str(const_params, only_dyn=true) }})
+static uint32_t _er_size_{{ dst.name }}_{{ ert.name }}(void * const vctx{{ (dst, ert) | trace_func_params_str(const_params, only_dyn=true) }})
 {
        struct {{ ctx_struct_name }} * const ctx = _FROM_VOID_PTR(struct {{ ctx_struct_name }}, vctx);
        uint32_t at = ctx->at;
-               {% if this_stream_ops.ev_header_op %}
+               {% if this_ds_ops.er_header_op %}
 
-       {{ this_stream_ops.ev_header_op.size_str(stream_type=stream_type) | indent_tab }}
+       {{ this_ds_ops.er_header_op.size_str(dst=dst) | indent_tab }}
                {% endif %}
-               {% if this_stream_ops.ev_common_ctx_op %}
+               {% if this_ds_ops.er_common_ctx_op %}
 
-       {{ this_stream_ops.ev_common_ctx_op.size_str(stream_type=stream_type) | indent_tab }}
+       {{ this_ds_ops.er_common_ctx_op.size_str(dst=dst) | indent_tab }}
                {% endif %}
-               {% if this_ev_ops.spec_ctx_op %}
+               {% if this_er_ops.spec_ctx_op %}
 
-       {{ this_ev_ops.spec_ctx_op.size_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ this_er_ops.spec_ctx_op.size_str(dst=dst, ert=ert) | indent_tab }}
                {% endif %}
-               {% if this_ev_ops.payload_op %}
+               {% if this_er_ops.payload_op %}
 
-       {{ this_ev_ops.payload_op.size_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ this_er_ops.payload_op.size_str(dst=dst, ert=ert) | indent_tab }}
                {% endif %}
 
        return at - ctx->at;
 }
        {% endfor %}
        {# public tracing functions #}
-       {% for ev_type in stream_type.event_types | sort %}
+       {% for ert in dst.event_record_types | sort %}
 
                {% include 'c/trace-func-proto.j2' %}
 
 {
        struct {{ ctx_struct_name }} * const ctx = &sctx->parent;
-       uint32_t ev_size;
+       uint32_t er_size;
 
                {% if def_clk_type %}
        /* Save time */
@@ -477,29 +477,29 @@ static uint32_t _ev_size_{{ stream_type.name }}_{{ ev_type.name }}(void * const
        /* We can alter the packet */
        ctx->in_tracing_section = 1;
 
-       /* Compute event size */
-               {% set ev_common_ctx_params = macros.ft_call_params(root_ft_prefixes.ECC, stream_type.event_common_context_field_type, true) %}
-               {% set spec_ctx_params = macros.ft_call_params(root_ft_prefixes.SC, ev_type.specific_context_field_type, true) %}
-               {% set payload_params = macros.ft_call_params(root_ft_prefixes.P, ev_type.payload_field_type, true) %}
-               {% set params %}{{ ev_common_ctx_params }}{{ spec_ctx_params }}{{ payload_params }}{% endset %}
-       ev_size = _ev_size_{{ stream_type.name }}_{{ ev_type.name }}(_TO_VOID_PTR(ctx){{ params }});
+       /* Compute event record size */
+               {% set er_common_ctx_params = macros.ft_call_params(root_ft_prefixes.ERCC, dst.event_record_common_context_field_type, true) %}
+               {% set spec_ctx_params = macros.ft_call_params(root_ft_prefixes.SC, ert.specific_context_field_type, true) %}
+               {% set payload_params = macros.ft_call_params(root_ft_prefixes.P, ert.payload_field_type, true) %}
+               {% set params %}{{ er_common_ctx_params }}{{ spec_ctx_params }}{{ payload_params }}{% endset %}
+       er_size = _er_size_{{ dst.name }}_{{ ert.name }}(_TO_VOID_PTR(ctx){{ params }});
 
        /* Is there enough space to serialize? */
-       if (!_reserve_ev_space(_TO_VOID_PTR(ctx), ev_size)) {
+       if (!_reserve_er_space(_TO_VOID_PTR(ctx), er_size)) {
                /* no: forget this */
                ctx->in_tracing_section = 0;
                goto end;
        }
 
-       /* Serialize event */
-               {% set ev_common_ctx_params = macros.ft_call_params(root_ft_prefixes.ECC, stream_type.event_common_context_field_type) %}
-               {% set spec_ctx_params = macros.ft_call_params(root_ft_prefixes.SC, ev_type.specific_context_field_type) %}
-               {% set payload_params = macros.ft_call_params(root_ft_prefixes.P, ev_type.payload_field_type) %}
-               {% set params %}{{ ev_common_ctx_params }}{{ spec_ctx_params }}{{ payload_params }}{% endset %}
-       _serialize_ev_{{ stream_type.name }}_{{ ev_type.name }}(_TO_VOID_PTR(ctx){{ params }});
+       /* Serialize event record */
+               {% set er_common_ctx_params = macros.ft_call_params(root_ft_prefixes.ERCC, dst.event_record_common_context_field_type) %}
+               {% set spec_ctx_params = macros.ft_call_params(root_ft_prefixes.SC, ert.specific_context_field_type) %}
+               {% set payload_params = macros.ft_call_params(root_ft_prefixes.P, ert.payload_field_type) %}
+               {% set params %}{{ er_common_ctx_params }}{{ spec_ctx_params }}{{ payload_params }}{% endset %}
+       _serialize_er_{{ dst.name }}_{{ ert.name }}(_TO_VOID_PTR(ctx){{ params }});
 
-       /* Commit event */
-       _commit_ev(_TO_VOID_PTR(ctx));
+       /* Commit event record */
+       _commit_er(_TO_VOID_PTR(ctx));
 
        /* Not tracing anymore */
        ctx->in_tracing_section = 0;
index 1cea42d514bc5efcbb57718a15919fd099c111da..ccc5dc2ca72b64701747136e21dbee401e943977 100644 (file)
@@ -28,7 +28,7 @@
 {% set ucprefix = common.ucprefix %}
 {% set trace_type = cfg.trace.type %}
 {% set cg_opts = cfg.options.code_generation_options %}
-{% set def_stream_type = cg_opts.default_stream_type %}
+{% set def_dst = cg_opts.default_data_stream_type %}
 {% set header_opts = cg_opts.header_options %}
 {% set const_params = false %}
 #ifndef _{{ ucprefix }}H
@@ -46,13 +46,13 @@ extern "C" {
 {% if header_opts.identifier_prefix_definition %}
 #define _BARECTF_PREFIX {{ prefix }}
 {% endif %}
-{% if def_stream_type and header_opts.default_stream_type_name_definition %}
-#define _BARECTF_DEFAULT_STREAM {{ def_stream_type.name }}
+{% if def_dst and header_opts.default_data_stream_type_name_definition %}
+#define _BARECTF_DEFAULT_STREAM {{ def_dst.name }}
 {% endif %}
-{% if def_stream_type %}
+{% if def_dst %}
 
-       {% for ev_type in def_stream_type.event_types | sort %}
-#define {{ prefix }}trace_{{ ev_type.name }} {{ c_common.trace_func_name(def_stream_type, ev_type) }}
+       {% for ert in def_dst.event_record_types | sort %}
+#define {{ prefix }}trace_{{ ert.name }} {{ c_common.trace_func_name(def_dst, ert) }}
        {% endfor %}
 {% endif %}
 
@@ -114,7 +114,7 @@ struct {{ prefix }}ctx {
        /* Size of packet header + context fields (content offset) */
        uint32_t off_content;
 
-       /* Discarded event counter */
+       /* Discarded event records counter snapshot */
        uint32_t events_discarded;
 
        /* Current packet is open? */
@@ -126,13 +126,13 @@ struct {{ prefix }}ctx {
        /* Tracing is enabled? */
        volatile int is_tracing_enabled;
 
-       /* Use current/last event time when opening/closing packets */
+       /* Use current/last event record time when opening/closing packets */
        int use_cur_last_event_ts;
 };
 
-{% for stream_type in trace_type.stream_types | sort %}
-/* Context for stream type `{{ stream_type.name }}` */
-struct {{ prefix }}{{ stream_type.name }}_ctx {
+{% for dst in trace_type.data_stream_types | sort %}
+/* Context for data stream type `{{ dst.name }}` */
+struct {{ prefix }}{{ dst.name }}_ctx {
        /* Parent */
        struct {{ prefix }}ctx parent;
 
@@ -142,22 +142,22 @@ struct {{ prefix }}{{ stream_type.name }}_ctx {
        uint32_t off_ph_{{ member_name }};
                {% endfor %}
        {% endif %}
-       {% for member_name in stream_type._pkt_ctx_ft.members %}
+       {% for member_name in dst._pkt_ctx_ft.members %}
        uint32_t off_pc_{{ member_name }};
        {% endfor %}
-       {% if stream_type.default_clock_type %}
-       {{ cg_opts.clock_type_c_types[stream_type.default_clock_type] }} cur_last_event_ts;
+       {% if dst.default_clock_type %}
+       {{ cg_opts.clock_type_c_types[dst.default_clock_type] }} cur_last_event_ts;
        {% endif %}
 };
 
 {% endfor %}
 {% include 'c/ctx-init-func-proto.j2' %};
 
-{% for stream_type in trace_type.stream_types | sort %}
+{% for dst in trace_type.data_stream_types | sort %}
        {% include 'c/open-func-proto.j2' %};
 
        {% include 'c/close-func-proto.j2' %};
-       {% for ev_type in stream_type.event_types | sort %}
+       {% for ert in dst.event_record_types | sort %}
 
                {% include 'c/trace-func-proto.j2' %};
        {% endfor %}
index ef93bb71130ffd2ebe185cf8672590805a95c251..594e6da0c710288f4f8ae9b94b7a824c5656bc7d 100644 (file)
@@ -24,5 +24,5 @@
  #}
 {% import 'common.j2' as common %}
 {% import 'c/common.j2' as c_common %}
-/* Close packet for stream type `{{ stream_type.name }}` */
-void {{ common.prefix }}{{ stream_type.name }}_close_packet(struct {{ common.prefix }}{{ stream_type.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx)
+/* Close packet for data stream type `{{ dst.name }}` */
+void {{ common.prefix }}{{ dst.name }}_close_packet(struct {{ common.prefix }}{{ dst.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx)
index c22292e0fa65491c1c94a2edb859e5e3e26a73fc..09572669ca570bd4ee7eb044071b5bdada75f978 100644 (file)
 {% set ctx_struct_name %}{{ common.prefix }}ctx{% endset %}
 
 {#
- # Generates the name of a tracing function for the stream type
- # `stream_type` and the event type `ev_type`.
+ # Generates the name of a tracing function for the data stream type
+ # `dst` and the event record type `ert`.
  #
  # Example:
  #
  #     barectf_my_stream_trace_my_event
  #}
-{% macro trace_func_name(stream_type, ev_type) %}
-{{ common.prefix }}{{ stream_type.name }}_trace_{{ ev_type.name }}
+{% macro trace_func_name(dst, ert) %}
+{{ common.prefix }}{{ dst.name }}_trace_{{ ert.name }}
 {%- endmacro %}
 
 {#
index fc7e7ec09ed6bd77e32b7415ef6765420e9d0ece..f325afc3aafd6fa8485b1ee2cf5b004f049b2918 100644 (file)
@@ -24,6 +24,6 @@
  #}
 {% import 'common.j2' as common %}
 {% import 'c/common.j2' as c_common %}
-/* Open packet for stream type `{{ stream_type.name }}` */
-void {{ common.prefix }}{{ stream_type.name }}_open_packet(
-       struct {{ common.prefix }}{{ stream_type.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx{{ stream_type | open_func_params_str(const_params) }})
+/* Open packet for data stream type `{{ dst.name }}` */
+void {{ common.prefix }}{{ dst.name }}_open_packet(
+       struct {{ common.prefix }}{{ dst.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx{{ dst | open_func_params_str(const_params) }})
index c12c040eb60e30acd0f815170a74d71d3c6053c5..b532be1bbe736e6bf4787228ab3a8783fcf41c78 100644 (file)
@@ -31,7 +31,7 @@
 
        for ({{ var_name }} = 0; {{ var_name }} < (uint32_t) {{ length_src }}; ++{{ var_name }}) {
 {% for subop in op.subops %}
-               {{ subop.serialize_str(stream_type=stream_type, ev_type=ev_type) | indent_tab(2) }}
+               {{ subop.serialize_str(dst=dst, ert=ert) | indent_tab(2) }}
 
 {% endfor %}
        }
diff --git a/barectf/templates/c/serialize-write-dst-id-statements.j2 b/barectf/templates/c/serialize-write-dst-id-statements.j2
new file mode 100644 (file)
index 0000000..ab0e6f6
--- /dev/null
@@ -0,0 +1,28 @@
+{#
+ # The MIT License (MIT)
+ #
+ # Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining
+ # a copy of this software and associated documentation files (the
+ # "Software"), to deal in the Software without restriction, including
+ # without limitation the rights to use, copy, modify, merge, publish,
+ # distribute, sublicense, and/or sell copies of the Software, and to
+ # permit persons to whom the Software is furnished to do so, subject to
+ # the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be
+ # included in all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ #}
+{% set c_type = op.ft | ft_c_type %}
+{% set src = dst.id %}
+/* Write data stream type ID field */
+{% include 'c/serialize-write-bit-array-statements.j2' %}
diff --git a/barectf/templates/c/serialize-write-ert-id-statements.j2 b/barectf/templates/c/serialize-write-ert-id-statements.j2
new file mode 100644 (file)
index 0000000..7aaec03
--- /dev/null
@@ -0,0 +1,28 @@
+{#
+ # The MIT License (MIT)
+ #
+ # Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining
+ # a copy of this software and associated documentation files (the
+ # "Software"), to deal in the Software without restriction, including
+ # without limitation the rights to use, copy, modify, merge, publish,
+ # distribute, sublicense, and/or sell copies of the Software, and to
+ # permit persons to whom the Software is furnished to do so, subject to
+ # the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be
+ # included in all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ #}
+{% set c_type = op.ft | ft_c_type %}
+{% set src = 'ert_id' %}
+/* Write event record type ID field */
+{% include 'c/serialize-write-bit-array-statements.j2' %}
diff --git a/barectf/templates/c/serialize-write-ev-type-id-statements.j2 b/barectf/templates/c/serialize-write-ev-type-id-statements.j2
deleted file mode 100644 (file)
index cd3be35..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-{#
- # The MIT License (MIT)
- #
- # Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
- #
- # Permission is hereby granted, free of charge, to any person obtaining
- # a copy of this software and associated documentation files (the
- # "Software"), to deal in the Software without restriction, including
- # without limitation the rights to use, copy, modify, merge, publish,
- # distribute, sublicense, and/or sell copies of the Software, and to
- # permit persons to whom the Software is furnished to do so, subject to
- # the following conditions:
- #
- # The above copyright notice and this permission notice shall be
- # included in all copies or substantial portions of the Software.
- #
- # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
- # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- #}
-{% set c_type = op.ft | ft_c_type %}
-{% set src = 'ev_type_id' %}
-/* Write event type ID field */
-{% include 'c/serialize-write-bit-array-statements.j2' %}
diff --git a/barectf/templates/c/serialize-write-stream-type-id-statements.j2 b/barectf/templates/c/serialize-write-stream-type-id-statements.j2
deleted file mode 100644 (file)
index ea9eb44..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-{#
- # The MIT License (MIT)
- #
- # Copyright (c) 2020 Philippe Proulx <pproulx@efficios.com>
- #
- # Permission is hereby granted, free of charge, to any person obtaining
- # a copy of this software and associated documentation files (the
- # "Software"), to deal in the Software without restriction, including
- # without limitation the rights to use, copy, modify, merge, publish,
- # distribute, sublicense, and/or sell copies of the Software, and to
- # permit persons to whom the Software is furnished to do so, subject to
- # the following conditions:
- #
- # The above copyright notice and this permission notice shall be
- # included in all copies or substantial portions of the Software.
- #
- # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
- # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- #}
-{% set c_type = op.ft | ft_c_type %}
-{% set src = stream_type.id %}
-/* Write stream type ID field */
-{% include 'c/serialize-write-bit-array-statements.j2' %}
index 52664a5264752c937cfac5b4eeb27c589245f476..16e3d8609b51b6a26e621013d7800107fea1bcee 100644 (file)
@@ -31,7 +31,7 @@
 {% endif %}
 {
 {% for subop in op.subops %}
-       {{ subop.serialize_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ subop.serialize_str(dst=dst, ert=ert) | indent_tab }}
 
 {% endfor %}
 }
index c4ba1160f34a254a49961a633b412a98b077540c..6340e853d3dd1d6a82cfaeef125bd4ccf114683d 100644 (file)
@@ -31,7 +31,7 @@
 
        for ({{ var_name }} = 0; {{ var_name }} < (uint32_t) {{ length_src }}; ++{{ var_name }}) {
 {% for subop in op.subops %}
-               {{ subop.size_str(stream_type=stream_type, ev_type=ev_type) | indent_tab(2) }}
+               {{ subop.size_str(dst=dst, ert=ert) | indent_tab(2) }}
 
 {% endfor %}
        }
index 0dc14c4660665bd87dda8eb2d22e8c5e60732d1c..d235299745e184513b776fddb85e2b1b40e3982f 100644 (file)
@@ -31,7 +31,7 @@
 {% endif %}
 {
 {% for subop in op.subops %}
-       {{ subop.size_str(stream_type=stream_type, ev_type=ev_type) | indent_tab }}
+       {{ subop.size_str(dst=dst, ert=ert) | indent_tab }}
 
 {% endfor %}
 }
index 806f2fee8e9005dd0e91f2660f01e42e8d607d6e..9f63519232d35a4026cef9d9759a63481db33a02 100644 (file)
@@ -24,5 +24,5 @@
  #}
 {% import 'common.j2' as common %}
 {% import 'c/common.j2' as c_common %}
-/* Trace (stream type `{{ stream_type.name }}`, event type `{{ ev_type.name }}`) */
-void {{ common.prefix }}{{ stream_type.name }}_trace_{{ ev_type.name }}(struct {{ common.prefix }}{{ stream_type.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx{{ (stream_type, ev_type) | trace_func_params_str(const_params) }})
+/* Trace (data stream type `{{ dst.name }}`, event record type `{{ ert.name }}`) */
+void {{ common.prefix }}{{ dst.name }}_trace_{{ ert.name }}(struct {{ common.prefix }}{{ dst.name }}_ctx *{{ c_common.const_ptr_str(const_params) }}sctx{{ (dst, ert) | trace_func_params_str(const_params) }})
index 6cc8c6f7bc230b8b49c8cff6c642dd048e1ad5a3..d4ec4d36f2dfcc5259b926d7a744e893e40f8ec8 100644 (file)
@@ -54,7 +54,7 @@ env {
 {% endfor %}
 };
 
-{# all clock types (stream types's default clock types) #}
+{# all clock types (data stream types' default clock types) #}
 {% for clk_type in cfg.trace.type.clock_types | sort %}
 clock {
        name = {{ clk_type.name }};
@@ -72,34 +72,34 @@ clock {
 };
 
 {% endfor %}
-{# stream types and their event types #}
-{% for stream_type in cfg.trace.type.stream_types | sort %}
-/* Stream type `{{ stream_type.name }}` */
+{# data stream types and their event record types #}
+{% for dst in cfg.trace.type.data_stream_types | sort %}
+/* Data stream type `{{ dst.name }}` */
 stream {
-       id = {{ stream_type.id }};
-       {{ root_ft('packet.context', stream_type._pkt_ctx_ft) | indent_tab }}
-       {% if stream_type._ev_header_ft %}
-       {{ root_ft('event.header', stream_type._ev_header_ft) | indent_tab }}
+       id = {{ dst.id }};
+       {{ root_ft('packet.context', dst._pkt_ctx_ft) | indent_tab }}
+       {% if dst._er_header_ft %}
+       {{ root_ft('event.header', dst._er_header_ft) | indent_tab }}
        {% endif %}
-       {% if stream_type.event_common_context_field_type %}
-       {{ root_ft('event.context', stream_type.event_common_context_field_type) | indent_tab }}
+       {% if dst.event_record_common_context_field_type %}
+       {{ root_ft('event.context', dst.event_record_common_context_field_type) | indent_tab }}
        {% endif %}
 };
 
-       {# stream type's event types #}
-       {% for ev_type in stream_type.event_types | sort %}
+       {# data stream type's event record types #}
+       {% for ert in dst.event_record_types | sort %}
 event {
-       stream_id = {{ stream_type.id }};
-       id = {{ ev_type.id }};
-       name = "{{ ev_type.name }}";
-               {% if ev_type.log_level %}
-       loglevel = {{ ev_type.log_level }};
+       stream_id = {{ dst.id }};
+       id = {{ ert.id }};
+       name = "{{ ert.name }}";
+               {% if ert.log_level %}
+       loglevel = {{ ert.log_level }};
                {% endif %}
-               {% if ev_type.specific_context_field_type %}
-       {{ root_ft('context', ev_type.specific_context_field_type) | indent_tab }}
+               {% if ert.specific_context_field_type %}
+       {{ root_ft('context', ert.specific_context_field_type) | indent_tab }}
                {% endif %}
-               {% if ev_type.payload_field_type %}
-       {{ root_ft('fields', ev_type.payload_field_type) | indent_tab }}
+               {% if ert.payload_field_type %}
+       {{ root_ft('fields', ert.payload_field_type) | indent_tab }}
                {% endif %}
 };
 
index a9ef6e3a22cfbbcde04b42c60b7eca69ca41d837..3f9bdf245512c1f5b6aafabf192d130d059d7a2b 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 2f22929971f2b64482e70610ff4ba153191dc325..aad6a07669d990804d2dd5c3cee8f9d1bcd8eee4 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdreal.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index b482d305a1e9f9b1e5ded8d923c72862c13f0710..08a6c4d3793625a4af969c4312a6b24b3e7dacaf 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdreal.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 1b3b408073030b925acc7a18b69359fff4490276..fa190e77b5fcdeda30f72b045a2df2007db7b692 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index f8e5d74524b406acfdf828869154c0f1aabb4f13..16dd6e7942b17d3dd3ababd1e8e1cfbac6daa709 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index b784457f755c7c83c238d47539d705f680f51114..de633b6b7cfd90a4a8653ab27a2360bfad5e842f 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index f920d82fe039a7fb3ff99566732ec4400ce41600..30891b2d7789214361b7dec65813d0a03969c733 100644 (file)
@@ -27,10 +27,10 @@ trace:
     $include:
       - stdint.yaml
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index fc3c399c63556f0bc36f1455fb0c0ad1da894720..9dc27141597925ae2c8c5283853677dc8f78a4f1 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 4f6d76a8c0affb041b47b7ad07ffe1a2afab9c48..85e7fff4d7bd81bfa1445bc87e8d359b9d97893f 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index c7874d842357e122c497aab976293df4964fdbaf..22b53d4d46b9ef75496a13a732526daefda78134 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 85c3d0f9cdc83c1688dfa1f7fe54942f556ae705..c252dfc6346377d8983e6d5cc43739da2c8cd075 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index d382847be57a95bf63eb59fa5cd78ad01e72fd76..0904475c613537bd485994f1ac43ce0225ed5aed 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdreal.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 7dcf80e800bd38cb07e15e3c1ff5c00467becc0b..728b7f79a5afec8d7dc705641d61f3059af02c57 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdreal.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 124828a7d59bf898f5c928d5c70b81f816bd5a17..e01faef9b700b686cb3c5512151ec7fcea2cc147 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index daf11f68adf0579806962e26777969217e572bdb..c0e4c8a5dda7f863f09d4a7237e5ebb208c8bef1 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 8257add6b424002983457f9ef1f27e4d9f900688..9928d97823d577a7f8d22133ab7f8a184d0ebef8 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 2a030307a481cca0b977bd6712e48c16d04f415b..99c9c1a537903dfc9b47c681abfca9442e9d924f 100644 (file)
@@ -27,10 +27,10 @@ trace:
     $include:
       - stdint.yaml
       - stdmisc.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 5859a61ba8a657c49c744de99f7a413d5548bf0e..46d7aa805fc3c1e1a76f84edfa0f634563e1804a 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 31c8b2fcd386d1b6cc49101f0dae2eb410440a4e..b62ca4d36c77e59cafc3c2c4711571d30be3a54b 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 92b34a1145b37ddc70949d423d5ebc14474822af..b09e43981c32c547b7e5bc5196992814ad8ec13a 100644 (file)
@@ -26,10 +26,10 @@ trace:
   type:
     $include:
       - stdint.yaml
-    stream-types:
+    data-stream-types:
       default:
         $is-default: yes
-        event-types:
+        event-record-types:
           ev:
             payload-field-type:
               class: struct
index 7ac05b23e9614b9b4cc289451792fa20926e8871..45b613f855c97565f4657167f8339f2c574436a8 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index bcaecdcae98451987755e214707f5039bd63162a..c503db0b08e308d59e94ea6c0a7f94c5d90f4fb4 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index b3a476040712e5ef2b2c72757df0f2101e9f8a2a..6b82319c1405189018ec27911c553986ee22c534 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 6af1bd103ee93798bb0904546b9ba13b7254defe..163743ace4f93c42f4e1dbe752d8bd3ac9e45d2b 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index b1b0685c0a2ecac09f96d284909fedad294cb558..7eda6f21b934fed611d7d15103bb8d50ddea15cb 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 345d7ce82bedbfde8669120bee06e4f8387a90ca..094bcfcb59fa00bfd5a92db09f936e89787f2ff5 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 8b1f1781038d76ddf7b748934ad3a7a738c93ff9..9498de670562cce48e567288c4fec37d20238562 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index c804e6d482a83c2c964b11d2c0d53e8134b31856..9a6da78de70c6614e976e46213d89287208fed1d 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index ad2a593550b8c1bc0318637f0df12d4df0e5cb12..71582eee6e268c427fa1aa451ab9ae892282412b 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 2386313c4040a001ba2cd96bd237e749be9a4118..9d35f75b0626a433e12e0df6ad8441012e872aa8 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index a60e1a6e630cfa2cac64be8d4d1a20ee84886036..09000e20c9a051057d3db642ed4587662cfd73ac 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 59e206d6b1dfc2b27564c94caffa3de4c868da13..aa9d31a409a0e6eae86ed909c3ed024566e2b5c4 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 79d17b943b8c5f8f520e59cc3eaba6e799ed348c..17393f9003010d596c1b063b89eb769a9aa5fac5 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index d12e86368f292c65171d4afb73a9186e4559cb8f..36ac9186b468d87f78ca24797c1314f0bacf62ac 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 64db479c2b0e5d3d147372dfe33ffb450d15379a..80ea4e0e54eb86e5f93659815830da3414244400 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index e924729b27ae09226405b52e7e74e3ddffaecd3b..cc119683d8c31fd99e2c605c8795684b5949bc40 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 282a2f04019ea788cda1a15e17094d7b17383434..cce6b5af3a2bd8d5613fc411b863f5d59a5dc4c3 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 618f6e5a8e36eb9441c6fb1278044428b70327e8..d40c895368d79f9a6664398a11e5f49304644652 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 50f30da01aa2861e3ebabbcb1290cc812a112859..82e2c2c41283dea01bfdafc70f2d05683b8b3bfb 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
index 20c29701559caaa6aff191aab938f298f51b3850..d5b1c2158b8b417f10ce5e0673e0587ffeb35e4b 100644 (file)
@@ -57,7 +57,7 @@ env {
        tracer_name = "barectf";
 };
 
-/* Stream type `default` */
+/* Data stream type `default` */
 stream {
        id = 0;
        packet.context := struct {
This page took 0.160432 seconds and 4 git commands to generate.