/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright 2018 Philippe Proulx <pproulx@efficios.com>
 */
#include <babeltrace2/babeltrace.h>

#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "common/assert.h"
#include "common/macros.h"

#include "ctf-meta-visitors.hpp"
22 bt_self_component
*self_comp
;
23 bt_trace_class
*ir_tc
;
24 bt_stream_class
*ir_sc
;
25 struct ctf_trace_class
*tc
;
26 struct ctf_stream_class
*sc
;
27 struct ctf_event_class
*ec
;
31 static inline bt_field_class
*ctf_field_class_to_ir(struct ctx
*ctx
, struct ctf_field_class
*fc
);
33 static inline void ctf_field_class_int_set_props(struct ctf_field_class_int
*fc
,
34 bt_field_class
*ir_fc
)
36 bt_field_class_integer_set_field_value_range(ir_fc
, fc
->base
.size
);
37 bt_field_class_integer_set_preferred_display_base(ir_fc
, fc
->disp_base
);
40 static inline bt_field_class
*ctf_field_class_int_to_ir(struct ctx
*ctx
,
41 struct ctf_field_class_int
*fc
)
43 bt_field_class
*ir_fc
;
46 ir_fc
= bt_field_class_integer_signed_create(ctx
->ir_tc
);
48 ir_fc
= bt_field_class_integer_unsigned_create(ctx
->ir_tc
);
52 ctf_field_class_int_set_props(fc
, ir_fc
);
56 static inline bt_field_class
*ctf_field_class_enum_to_ir(struct ctx
*ctx
,
57 struct ctf_field_class_enum
*fc
)
60 bt_field_class
*ir_fc
;
63 if (fc
->base
.is_signed
) {
64 ir_fc
= bt_field_class_enumeration_signed_create(ctx
->ir_tc
);
66 ir_fc
= bt_field_class_enumeration_unsigned_create(ctx
->ir_tc
);
70 ctf_field_class_int_set_props(&fc
->base
, ir_fc
);
72 for (i
= 0; i
< fc
->mappings
->len
; i
++) {
73 struct ctf_field_class_enum_mapping
*mapping
=
74 ctf_field_class_enum_borrow_mapping_by_index(fc
, i
);
75 bt_integer_range_set_signed
*range_set_signed
= NULL
;
76 bt_integer_range_set_unsigned
*range_set_unsigned
= NULL
;
79 if (fc
->base
.is_signed
) {
80 range_set_signed
= bt_integer_range_set_signed_create();
81 BT_ASSERT(range_set_signed
);
83 range_set_unsigned
= bt_integer_range_set_unsigned_create();
84 BT_ASSERT(range_set_unsigned
);
87 for (range_i
= 0; range_i
< mapping
->ranges
->len
; range_i
++) {
88 struct ctf_range
*range
=
89 ctf_field_class_enum_mapping_borrow_range_by_index(mapping
, range_i
);
91 if (fc
->base
.is_signed
) {
92 ret
= bt_integer_range_set_signed_add_range(range_set_signed
, range
->lower
.i
,
95 ret
= bt_integer_range_set_unsigned_add_range(range_set_unsigned
, range
->lower
.u
,
102 if (fc
->base
.is_signed
) {
103 ret
= bt_field_class_enumeration_signed_add_mapping(ir_fc
, mapping
->label
->str
,
105 BT_INTEGER_RANGE_SET_SIGNED_PUT_REF_AND_RESET(range_set_signed
);
107 ret
= bt_field_class_enumeration_unsigned_add_mapping(ir_fc
, mapping
->label
->str
,
109 BT_INTEGER_RANGE_SET_UNSIGNED_PUT_REF_AND_RESET(range_set_unsigned
);
118 static inline bt_field_class
*ctf_field_class_float_to_ir(struct ctx
*ctx
,
119 struct ctf_field_class_float
*fc
)
121 bt_field_class
*ir_fc
;
123 if (fc
->base
.size
== 32) {
124 ir_fc
= bt_field_class_real_single_precision_create(ctx
->ir_tc
);
126 ir_fc
= bt_field_class_real_double_precision_create(ctx
->ir_tc
);
133 static inline bt_field_class
*ctf_field_class_string_to_ir(struct ctx
*ctx
,
134 struct ctf_field_class_string
*)
136 bt_field_class
*ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
142 static inline void translate_struct_field_class_members(struct ctx
*ctx
,
143 struct ctf_field_class_struct
*fc
,
144 bt_field_class
*ir_fc
, bool,
145 struct ctf_field_class_struct
*)
150 for (i
= 0; i
< fc
->members
->len
; i
++) {
151 struct ctf_named_field_class
*named_fc
=
152 ctf_field_class_struct_borrow_member_by_index(fc
, i
);
153 bt_field_class
*member_ir_fc
;
154 const char *name
= named_fc
->name
->str
;
156 if (!named_fc
->fc
->in_ir
) {
160 member_ir_fc
= ctf_field_class_to_ir(ctx
, named_fc
->fc
);
161 BT_ASSERT(member_ir_fc
);
162 ret
= bt_field_class_structure_append_member(ir_fc
, name
, member_ir_fc
);
164 bt_field_class_put_ref(member_ir_fc
);
168 static inline bt_field_class
*ctf_field_class_struct_to_ir(struct ctx
*ctx
,
169 struct ctf_field_class_struct
*fc
)
171 bt_field_class
*ir_fc
= bt_field_class_structure_create(ctx
->ir_tc
);
174 translate_struct_field_class_members(ctx
, fc
, ir_fc
, false, NULL
);
178 static inline bt_field_class
*borrow_ir_fc_from_field_path(struct ctx
*ctx
,
179 struct ctf_field_path
*field_path
)
181 bt_field_class
*ir_fc
= NULL
;
182 struct ctf_field_class
*fc
=
183 ctf_field_path_borrow_field_class(field_path
, ctx
->tc
, ctx
->sc
, ctx
->ec
);
194 static inline const bt_field_class_enumeration_mapping
*
195 find_ir_enum_field_class_mapping_by_label(const bt_field_class
*fc
, const char *label
,
198 const bt_field_class_enumeration_mapping
*mapping
= NULL
;
201 for (i
= 0; i
< bt_field_class_enumeration_get_mapping_count(fc
); i
++) {
202 const bt_field_class_enumeration_mapping
*this_mapping
;
203 const bt_field_class_enumeration_signed_mapping
*signed_this_mapping
= NULL
;
204 const bt_field_class_enumeration_unsigned_mapping
*unsigned_this_mapping
= NULL
;
207 signed_this_mapping
=
208 bt_field_class_enumeration_signed_borrow_mapping_by_index_const(fc
, i
);
209 BT_ASSERT(signed_this_mapping
);
211 bt_field_class_enumeration_signed_mapping_as_mapping_const(signed_this_mapping
);
213 unsigned_this_mapping
=
214 bt_field_class_enumeration_unsigned_borrow_mapping_by_index_const(fc
, i
);
215 BT_ASSERT(unsigned_this_mapping
);
217 bt_field_class_enumeration_unsigned_mapping_as_mapping_const(unsigned_this_mapping
);
220 BT_ASSERT(this_mapping
);
222 if (strcmp(bt_field_class_enumeration_mapping_get_label(this_mapping
), label
) == 0) {
223 mapping
= this_mapping
;
232 static inline bt_field_class
*ctf_field_class_variant_to_ir(struct ctx
*ctx
,
233 struct ctf_field_class_variant
*fc
)
236 bt_field_class
*ir_fc
;
238 bt_field_class
*ir_tag_fc
= NULL
;
240 if (fc
->tag_path
.root
!= CTF_SCOPE_PACKET_HEADER
&&
241 fc
->tag_path
.root
!= CTF_SCOPE_EVENT_HEADER
) {
242 ir_tag_fc
= borrow_ir_fc_from_field_path(ctx
, &fc
->tag_path
);
243 BT_ASSERT(ir_tag_fc
);
246 ir_fc
= bt_field_class_variant_create(ctx
->ir_tc
, ir_tag_fc
);
249 for (i
= 0; i
< fc
->options
->len
; i
++) {
250 struct ctf_named_field_class
*named_fc
=
251 ctf_field_class_variant_borrow_option_by_index(fc
, i
);
252 bt_field_class
*option_ir_fc
;
254 BT_ASSERT(named_fc
->fc
->in_ir
);
255 option_ir_fc
= ctf_field_class_to_ir(ctx
, named_fc
->fc
);
256 BT_ASSERT(option_ir_fc
);
260 * At this point the trace IR selector
261 * (enumeration) field class already exists if
262 * the variant is tagged (`ir_tag_fc`). This one
263 * already contains range sets for its mappings,
264 * so we just reuse the same, finding them by
265 * matching a variant field class's option's
266 * _original_ name (with a leading underscore,
267 * possibly) with a selector field class's
270 if (fc
->tag_fc
->base
.is_signed
) {
271 const bt_field_class_enumeration_signed_mapping
*mapping
=
272 (bt_field_class_enumeration_signed_mapping
*)
273 find_ir_enum_field_class_mapping_by_label(ir_tag_fc
,
274 named_fc
->orig_name
->str
, true);
275 const bt_integer_range_set_signed
*range_set
;
278 range_set
= bt_field_class_enumeration_signed_mapping_borrow_ranges_const(mapping
);
279 BT_ASSERT(range_set
);
280 ret
= bt_field_class_variant_with_selector_field_integer_signed_append_option(
281 ir_fc
, named_fc
->name
->str
, option_ir_fc
, range_set
);
283 const bt_field_class_enumeration_unsigned_mapping
*mapping
=
284 (bt_field_class_enumeration_unsigned_mapping
*)
285 find_ir_enum_field_class_mapping_by_label(ir_tag_fc
,
286 named_fc
->orig_name
->str
, false);
287 const bt_integer_range_set_unsigned
*range_set
;
291 bt_field_class_enumeration_unsigned_mapping_borrow_ranges_const(mapping
);
292 BT_ASSERT(range_set
);
293 ret
= bt_field_class_variant_with_selector_field_integer_unsigned_append_option(
294 ir_fc
, named_fc
->name
->str
, option_ir_fc
, range_set
);
297 ret
= bt_field_class_variant_without_selector_append_option(ir_fc
, named_fc
->name
->str
,
302 bt_field_class_put_ref(option_ir_fc
);
308 static inline bt_field_class
*ctf_field_class_array_to_ir(struct ctx
*ctx
,
309 struct ctf_field_class_array
*fc
)
311 bt_field_class
*ir_fc
;
312 bt_field_class
*elem_ir_fc
;
314 if (fc
->base
.is_text
) {
315 ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
320 elem_ir_fc
= ctf_field_class_to_ir(ctx
, fc
->base
.elem_fc
);
321 BT_ASSERT(elem_ir_fc
);
322 ir_fc
= bt_field_class_array_static_create(ctx
->ir_tc
, elem_ir_fc
, fc
->length
);
324 bt_field_class_put_ref(elem_ir_fc
);
330 static inline bt_field_class
*ctf_field_class_sequence_to_ir(struct ctx
*ctx
,
331 struct ctf_field_class_sequence
*fc
)
333 bt_field_class
*ir_fc
;
334 bt_field_class
*elem_ir_fc
;
335 bt_field_class
*length_fc
= NULL
;
337 if (fc
->base
.is_text
) {
338 ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
343 elem_ir_fc
= ctf_field_class_to_ir(ctx
, fc
->base
.elem_fc
);
344 BT_ASSERT(elem_ir_fc
);
346 if (fc
->length_path
.root
!= CTF_SCOPE_PACKET_HEADER
&&
347 fc
->length_path
.root
!= CTF_SCOPE_EVENT_HEADER
) {
348 length_fc
= borrow_ir_fc_from_field_path(ctx
, &fc
->length_path
);
349 BT_ASSERT(length_fc
);
352 ir_fc
= bt_field_class_array_dynamic_create(ctx
->ir_tc
, elem_ir_fc
, length_fc
);
354 bt_field_class_put_ref(elem_ir_fc
);
361 static inline bt_field_class
*ctf_field_class_to_ir(struct ctx
*ctx
, struct ctf_field_class
*fc
)
363 bt_field_class
*ir_fc
= NULL
;
366 BT_ASSERT(fc
->in_ir
);
369 case CTF_FIELD_CLASS_TYPE_INT
:
370 ir_fc
= ctf_field_class_int_to_ir(ctx
, ctf_field_class_as_int(fc
));
372 case CTF_FIELD_CLASS_TYPE_ENUM
:
373 ir_fc
= ctf_field_class_enum_to_ir(ctx
, ctf_field_class_as_enum(fc
));
375 case CTF_FIELD_CLASS_TYPE_FLOAT
:
376 ir_fc
= ctf_field_class_float_to_ir(ctx
, ctf_field_class_as_float(fc
));
378 case CTF_FIELD_CLASS_TYPE_STRING
:
379 ir_fc
= ctf_field_class_string_to_ir(ctx
, ctf_field_class_as_string(fc
));
381 case CTF_FIELD_CLASS_TYPE_STRUCT
:
382 ir_fc
= ctf_field_class_struct_to_ir(ctx
, ctf_field_class_as_struct(fc
));
384 case CTF_FIELD_CLASS_TYPE_ARRAY
:
385 ir_fc
= ctf_field_class_array_to_ir(ctx
, ctf_field_class_as_array(fc
));
387 case CTF_FIELD_CLASS_TYPE_SEQUENCE
:
388 ir_fc
= ctf_field_class_sequence_to_ir(ctx
, ctf_field_class_as_sequence(fc
));
390 case CTF_FIELD_CLASS_TYPE_VARIANT
:
391 ir_fc
= ctf_field_class_variant_to_ir(ctx
, ctf_field_class_as_variant(fc
));
402 ctf_field_class_struct_has_immediate_member_in_ir(struct ctf_field_class_struct
*fc
)
405 bool has_immediate_member_in_ir
= false;
408 * If the structure field class has no members at all, then it
409 * was an empty structure in the beginning, so leave it existing
412 if (fc
->members
->len
== 0) {
413 has_immediate_member_in_ir
= true;
417 for (i
= 0; i
< fc
->members
->len
; i
++) {
418 struct ctf_named_field_class
*named_fc
=
419 ctf_field_class_struct_borrow_member_by_index(fc
, i
);
421 if (named_fc
->fc
->in_ir
) {
422 has_immediate_member_in_ir
= true;
428 return has_immediate_member_in_ir
;
431 static inline bt_field_class
*scope_ctf_field_class_to_ir(struct ctx
*ctx
)
433 bt_field_class
*ir_fc
= NULL
;
434 struct ctf_field_class
*fc
= NULL
;
436 switch (ctx
->scope
) {
437 case CTF_SCOPE_PACKET_CONTEXT
:
438 fc
= ctx
->sc
->packet_context_fc
;
440 case CTF_SCOPE_EVENT_COMMON_CONTEXT
:
441 fc
= ctx
->sc
->event_common_context_fc
;
443 case CTF_SCOPE_EVENT_SPECIFIC_CONTEXT
:
444 fc
= ctx
->ec
->spec_context_fc
;
446 case CTF_SCOPE_EVENT_PAYLOAD
:
447 fc
= ctx
->ec
->payload_fc
;
453 if (fc
&& ctf_field_class_struct_has_immediate_member_in_ir(ctf_field_class_as_struct(fc
))) {
454 ir_fc
= ctf_field_class_to_ir(ctx
, fc
);
460 static inline void ctf_event_class_to_ir(struct ctx
*ctx
)
463 bt_event_class
*ir_ec
= NULL
;
464 bt_field_class
*ir_fc
;
468 if (ctx
->ec
->is_translated
) {
469 ir_ec
= bt_stream_class_borrow_event_class_by_id(ctx
->ir_sc
, ctx
->ec
->id
);
474 ir_ec
= bt_event_class_create_with_id(ctx
->ir_sc
, ctx
->ec
->id
);
476 bt_event_class_put_ref(ir_ec
);
477 ctx
->scope
= CTF_SCOPE_EVENT_SPECIFIC_CONTEXT
;
478 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
480 ret
= bt_event_class_set_specific_context_field_class(ir_ec
, ir_fc
);
482 bt_field_class_put_ref(ir_fc
);
485 ctx
->scope
= CTF_SCOPE_EVENT_PAYLOAD
;
486 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
488 ret
= bt_event_class_set_payload_field_class(ir_ec
, ir_fc
);
490 bt_field_class_put_ref(ir_fc
);
493 if (ctx
->ec
->name
->len
> 0) {
494 ret
= bt_event_class_set_name(ir_ec
, ctx
->ec
->name
->str
);
498 if (ctx
->ec
->emf_uri
->len
> 0) {
499 ret
= bt_event_class_set_emf_uri(ir_ec
, ctx
->ec
->emf_uri
->str
);
503 if (ctx
->ec
->is_log_level_set
) {
504 bt_event_class_set_log_level(ir_ec
, ctx
->ec
->log_level
);
507 ctx
->ec
->is_translated
= true;
508 ctx
->ec
->ir_ec
= ir_ec
;
514 static inline void ctf_stream_class_to_ir(struct ctx
*ctx
)
517 bt_field_class
*ir_fc
;
521 if (ctx
->sc
->is_translated
) {
522 ctx
->ir_sc
= bt_trace_class_borrow_stream_class_by_id(ctx
->ir_tc
, ctx
->sc
->id
);
523 BT_ASSERT(ctx
->ir_sc
);
527 ctx
->ir_sc
= bt_stream_class_create_with_id(ctx
->ir_tc
, ctx
->sc
->id
);
528 BT_ASSERT(ctx
->ir_sc
);
529 bt_stream_class_put_ref(ctx
->ir_sc
);
531 if (ctx
->sc
->default_clock_class
) {
532 BT_ASSERT(ctx
->sc
->default_clock_class
->ir_cc
);
533 ret
= bt_stream_class_set_default_clock_class(ctx
->ir_sc
,
534 ctx
->sc
->default_clock_class
->ir_cc
);
538 bt_stream_class_set_supports_packets(ctx
->ir_sc
, BT_TRUE
, ctx
->sc
->packets_have_ts_begin
,
539 ctx
->sc
->packets_have_ts_end
);
540 bt_stream_class_set_supports_discarded_events(ctx
->ir_sc
, ctx
->sc
->has_discarded_events
,
541 ctx
->sc
->discarded_events_have_default_cs
);
542 bt_stream_class_set_supports_discarded_packets(ctx
->ir_sc
, ctx
->sc
->has_discarded_packets
,
543 ctx
->sc
->discarded_packets_have_default_cs
);
544 ctx
->scope
= CTF_SCOPE_PACKET_CONTEXT
;
545 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
547 ret
= bt_stream_class_set_packet_context_field_class(ctx
->ir_sc
, ir_fc
);
549 bt_field_class_put_ref(ir_fc
);
552 ctx
->scope
= CTF_SCOPE_EVENT_COMMON_CONTEXT
;
553 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
555 ret
= bt_stream_class_set_event_common_context_field_class(ctx
->ir_sc
, ir_fc
);
557 bt_field_class_put_ref(ir_fc
);
560 bt_stream_class_set_assigns_automatic_event_class_id(ctx
->ir_sc
, BT_FALSE
);
561 bt_stream_class_set_assigns_automatic_stream_id(ctx
->ir_sc
, BT_FALSE
);
563 ctx
->sc
->is_translated
= true;
564 ctx
->sc
->ir_sc
= ctx
->ir_sc
;
570 static inline void ctf_clock_class_to_ir(bt_clock_class
*ir_cc
, struct ctf_clock_class
*cc
)
574 if (strlen(cc
->name
->str
) > 0) {
575 ret
= bt_clock_class_set_name(ir_cc
, cc
->name
->str
);
579 if (strlen(cc
->description
->str
) > 0) {
580 ret
= bt_clock_class_set_description(ir_cc
, cc
->description
->str
);
584 bt_clock_class_set_frequency(ir_cc
, cc
->frequency
);
585 bt_clock_class_set_precision(ir_cc
, cc
->precision
);
586 bt_clock_class_set_offset(ir_cc
, cc
->offset_seconds
, cc
->offset_cycles
);
589 bt_clock_class_set_uuid(ir_cc
, cc
->uuid
);
592 bt_clock_class_set_origin_is_unix_epoch(ir_cc
, cc
->is_absolute
);
595 static inline int ctf_trace_class_to_ir(struct ctx
*ctx
)
601 BT_ASSERT(ctx
->ir_tc
);
603 if (ctx
->tc
->is_translated
) {
607 for (i
= 0; i
< ctx
->tc
->clock_classes
->len
; i
++) {
608 ctf_clock_class
*cc
= (ctf_clock_class
*) ctx
->tc
->clock_classes
->pdata
[i
];
610 cc
->ir_cc
= bt_clock_class_create(ctx
->self_comp
);
611 ctf_clock_class_to_ir(cc
->ir_cc
, cc
);
614 bt_trace_class_set_assigns_automatic_stream_class_id(ctx
->ir_tc
, BT_FALSE
);
615 ctx
->tc
->is_translated
= true;
616 ctx
->tc
->ir_tc
= ctx
->ir_tc
;
622 int ctf_trace_class_translate(bt_self_component
*self_comp
, bt_trace_class
*ir_tc
,
623 struct ctf_trace_class
*tc
)
629 ctx
.self_comp
= self_comp
;
632 ret
= ctf_trace_class_to_ir(&ctx
);
637 for (i
= 0; i
< tc
->stream_classes
->len
; i
++) {
639 ctx
.sc
= (ctf_stream_class
*) tc
->stream_classes
->pdata
[i
];
641 ctf_stream_class_to_ir(&ctx
);
643 for (j
= 0; j
< ctx
.sc
->event_classes
->len
; j
++) {
644 ctx
.ec
= (ctf_event_class
*) ctx
.sc
->event_classes
->pdata
[j
];
646 ctf_event_class_to_ir(&ctx
);