/*
 * lttng-bytecode-interpreter.c
 *
 * LTTng UST bytecode interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include <assert.h>
#include <errno.h>
#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include <urcu-pointer.h>

#include <lttng/ust-endian.h>
#include <lttng/ust-events.h>

#include "lttng-bytecode.h"
#include "string-utils.h"
/*
 * Consume escape sequences in a plain string literal.
 *
 * Return values:
 *   0: normal character (on '\', *p is advanced past the backslash so
 *      the caller compares the escaped character itself).
 *  -2: unknown escape char ('\' followed by anything other than '\' or '*').
 *
 * NOTE(review): body reconstructed from the surviving return-value
 * comment and the call sites in stack_strcmp — confirm against upstream.
 */
static
int parse_char(const char **p)
{
	switch (**p) {
	case '\\':
		(*p)++;
		switch (**p) {
		case '\\':
		case '*':
			return 0;
		default:
			return -2;
		}
	default:
		return 0;
	}
}
67 * Returns SIZE_MAX if the string is null-terminated, or the number of
71 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
73 return entry
->u
.s
.seq_len
;
77 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
80 const char *candidate
;
84 /* Find out which side is the pattern vs. the candidate. */
85 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
86 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
87 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
88 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
89 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
91 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
92 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
93 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
94 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
97 /* Perform the match. Returns 0 when the result is true. */
98 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
103 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
105 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
112 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
113 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
116 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
117 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
118 ret
= parse_char(&q
);
125 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
126 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
127 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
128 ret
= parse_char(&p
);
134 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
135 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
136 ret
= parse_char(&p
);
139 } else if (ret
== -2) {
142 /* else compare both char */
144 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
145 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
146 ret
= parse_char(&q
);
149 } else if (ret
== -2) {
169 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
170 const char *filter_stack_data
)
172 return LTTNG_INTERPRETER_DISCARD
;
175 #ifdef INTERPRETER_USE_SWITCH
178 * Fallback for compilers that do not support taking address of labels.
182 start_pc = &bytecode->data[0]; \
183 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
185 dbg_printf("Executing op %s (%u)\n", \
186 print_op((unsigned int) *(bytecode_opcode_t *) pc), \
187 (unsigned int) *(bytecode_opcode_t *) pc); \
188 switch (*(bytecode_opcode_t *) pc) {
190 #define OP(name) jump_target_##name: __attribute__((unused)); \
198 #define JUMP_TO(name) \
199 goto jump_target_##name
204 * Dispatch-table based interpreter.
208 start_pc = &bytecode->code[0]; \
209 pc = next_pc = start_pc; \
210 if (unlikely(pc - start_pc >= bytecode->len)) \
212 goto *dispatch[*(bytecode_opcode_t *) pc];
219 goto *dispatch[*(bytecode_opcode_t *) pc];
223 #define JUMP_TO(name) \
228 #define IS_INTEGER_REGISTER(reg_type) \
229 (reg_type == REG_U64 || reg_type == REG_S64)
231 static int context_get_index(struct lttng_ctx
*ctx
,
232 struct load_ptr
*ptr
,
236 struct lttng_ctx_field
*ctx_field
;
237 struct lttng_event_field
*field
;
238 struct lttng_ctx_value v
;
240 ctx_field
= &ctx
->fields
[idx
];
241 field
= &ctx_field
->event_field
;
242 ptr
->type
= LOAD_OBJECT
;
245 switch (field
->type
.atype
) {
247 ctx_field
->get_value(ctx_field
, &v
);
248 if (field
->type
.u
.integer
.signedness
) {
249 ptr
->object_type
= OBJECT_TYPE_S64
;
250 ptr
->u
.s64
= v
.u
.s64
;
251 ptr
->ptr
= &ptr
->u
.s64
;
253 ptr
->object_type
= OBJECT_TYPE_U64
;
254 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
255 ptr
->ptr
= &ptr
->u
.u64
;
258 case atype_enum
: /* Fall-through */
259 case atype_enum_nestable
:
261 const struct lttng_integer_type
*itype
;
263 if (field
->type
.atype
== atype_enum
) {
264 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
266 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
268 ctx_field
->get_value(ctx_field
, &v
);
269 if (itype
->signedness
) {
270 ptr
->object_type
= OBJECT_TYPE_S64
;
271 ptr
->u
.s64
= v
.u
.s64
;
272 ptr
->ptr
= &ptr
->u
.s64
;
274 ptr
->object_type
= OBJECT_TYPE_U64
;
275 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
276 ptr
->ptr
= &ptr
->u
.u64
;
281 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
282 ERR("Array nesting only supports integer types.");
285 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
286 ERR("Only string arrays are supported for contexts.");
289 ptr
->object_type
= OBJECT_TYPE_STRING
;
290 ctx_field
->get_value(ctx_field
, &v
);
293 case atype_array_nestable
:
294 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
295 ERR("Array nesting only supports integer types.");
298 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
299 ERR("Only string arrays are supported for contexts.");
302 ptr
->object_type
= OBJECT_TYPE_STRING
;
303 ctx_field
->get_value(ctx_field
, &v
);
307 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
308 ERR("Sequence nesting only supports integer types.");
311 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
312 ERR("Only string sequences are supported for contexts.");
315 ptr
->object_type
= OBJECT_TYPE_STRING
;
316 ctx_field
->get_value(ctx_field
, &v
);
319 case atype_sequence_nestable
:
320 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
321 ERR("Sequence nesting only supports integer types.");
324 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
325 ERR("Only string sequences are supported for contexts.");
328 ptr
->object_type
= OBJECT_TYPE_STRING
;
329 ctx_field
->get_value(ctx_field
, &v
);
333 ptr
->object_type
= OBJECT_TYPE_STRING
;
334 ctx_field
->get_value(ctx_field
, &v
);
338 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
339 ctx_field
->get_value(ctx_field
, &v
);
341 ptr
->ptr
= &ptr
->u
.d
;
344 ctx_field
->get_value(ctx_field
, &v
);
346 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
348 case LTTNG_UST_DYNAMIC_TYPE_S64
:
349 ptr
->object_type
= OBJECT_TYPE_S64
;
350 ptr
->u
.s64
= v
.u
.s64
;
351 ptr
->ptr
= &ptr
->u
.s64
;
352 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
354 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
355 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
357 ptr
->ptr
= &ptr
->u
.d
;
358 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
360 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
361 ptr
->object_type
= OBJECT_TYPE_STRING
;
363 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
366 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
371 ERR("Structure type cannot be loaded.");
374 ERR("Unknown type: %d", (int) field
->type
.atype
);
380 static int dynamic_get_index(struct lttng_ctx
*ctx
,
381 struct bytecode_runtime
*runtime
,
382 uint64_t index
, struct estack_entry
*stack_top
)
385 const struct bytecode_get_index_data
*gid
;
387 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
388 switch (stack_top
->u
.ptr
.type
) {
390 switch (stack_top
->u
.ptr
.object_type
) {
391 case OBJECT_TYPE_ARRAY
:
395 assert(gid
->offset
< gid
->array_len
);
396 /* Skip count (unsigned long) */
397 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
398 ptr
= ptr
+ gid
->offset
;
399 stack_top
->u
.ptr
.ptr
= ptr
;
400 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
401 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
402 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_array
||
403 stack_top
->u
.ptr
.field
->type
.atype
== atype_array_nestable
);
404 stack_top
->u
.ptr
.field
= NULL
;
407 case OBJECT_TYPE_SEQUENCE
:
412 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
413 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
414 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
418 ptr
= ptr
+ gid
->offset
;
419 stack_top
->u
.ptr
.ptr
= ptr
;
420 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
421 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
422 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence
||
423 stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence_nestable
);
424 stack_top
->u
.ptr
.field
= NULL
;
427 case OBJECT_TYPE_STRUCT
:
428 ERR("Nested structures are not supported yet.");
431 case OBJECT_TYPE_VARIANT
:
433 ERR("Unexpected get index type %d",
434 (int) stack_top
->u
.ptr
.object_type
);
439 case LOAD_ROOT_CONTEXT
:
440 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
442 ret
= context_get_index(ctx
,
450 case LOAD_ROOT_PAYLOAD
:
451 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
452 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
453 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
454 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
455 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
456 stack_top
->u
.ptr
.field
= gid
->field
;
457 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
461 stack_top
->type
= REG_PTR
;
469 static int dynamic_load_field(struct estack_entry
*stack_top
)
473 switch (stack_top
->u
.ptr
.type
) {
476 case LOAD_ROOT_CONTEXT
:
477 case LOAD_ROOT_APP_CONTEXT
:
478 case LOAD_ROOT_PAYLOAD
:
480 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
484 switch (stack_top
->u
.ptr
.object_type
) {
486 dbg_printf("op load field s8\n");
487 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
488 stack_top
->type
= REG_S64
;
490 case OBJECT_TYPE_S16
:
494 dbg_printf("op load field s16\n");
495 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
496 if (stack_top
->u
.ptr
.rev_bo
)
498 stack_top
->u
.v
= tmp
;
499 stack_top
->type
= REG_S64
;
502 case OBJECT_TYPE_S32
:
506 dbg_printf("op load field s32\n");
507 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
508 if (stack_top
->u
.ptr
.rev_bo
)
510 stack_top
->u
.v
= tmp
;
511 stack_top
->type
= REG_S64
;
514 case OBJECT_TYPE_S64
:
518 dbg_printf("op load field s64\n");
519 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
520 if (stack_top
->u
.ptr
.rev_bo
)
522 stack_top
->u
.v
= tmp
;
523 stack_top
->type
= REG_S64
;
527 dbg_printf("op load field u8\n");
528 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
529 stack_top
->type
= REG_U64
;
531 case OBJECT_TYPE_U16
:
535 dbg_printf("op load field u16\n");
536 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
537 if (stack_top
->u
.ptr
.rev_bo
)
539 stack_top
->u
.v
= tmp
;
540 stack_top
->type
= REG_U64
;
543 case OBJECT_TYPE_U32
:
547 dbg_printf("op load field u32\n");
548 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
549 if (stack_top
->u
.ptr
.rev_bo
)
551 stack_top
->u
.v
= tmp
;
552 stack_top
->type
= REG_U64
;
555 case OBJECT_TYPE_U64
:
559 dbg_printf("op load field u64\n");
560 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
561 if (stack_top
->u
.ptr
.rev_bo
)
563 stack_top
->u
.v
= tmp
;
564 stack_top
->type
= REG_U64
;
567 case OBJECT_TYPE_DOUBLE
:
568 memcpy(&stack_top
->u
.d
,
569 stack_top
->u
.ptr
.ptr
,
570 sizeof(struct literal_double
));
571 stack_top
->type
= REG_DOUBLE
;
573 case OBJECT_TYPE_STRING
:
577 dbg_printf("op load field string\n");
578 str
= (const char *) stack_top
->u
.ptr
.ptr
;
579 stack_top
->u
.s
.str
= str
;
580 if (unlikely(!stack_top
->u
.s
.str
)) {
581 dbg_printf("Interpreter warning: loading a NULL string.\n");
585 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
586 stack_top
->u
.s
.literal_type
=
587 ESTACK_STRING_LITERAL_TYPE_NONE
;
588 stack_top
->type
= REG_STRING
;
591 case OBJECT_TYPE_STRING_SEQUENCE
:
595 dbg_printf("op load field string sequence\n");
596 ptr
= stack_top
->u
.ptr
.ptr
;
597 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
598 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
599 stack_top
->type
= REG_STRING
;
600 if (unlikely(!stack_top
->u
.s
.str
)) {
601 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
605 stack_top
->u
.s
.literal_type
=
606 ESTACK_STRING_LITERAL_TYPE_NONE
;
609 case OBJECT_TYPE_DYNAMIC
:
611 * Dynamic types in context are looked up
612 * by context get index.
616 case OBJECT_TYPE_SEQUENCE
:
617 case OBJECT_TYPE_ARRAY
:
618 case OBJECT_TYPE_STRUCT
:
619 case OBJECT_TYPE_VARIANT
:
620 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
631 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
632 struct lttng_interpreter_output
*output
)
639 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
640 output
->u
.s
= ax
->u
.v
;
643 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
644 output
->u
.u
= (uint64_t) ax
->u
.v
;
647 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
648 output
->u
.d
= ax
->u
.d
;
651 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
652 output
->u
.str
.str
= ax
->u
.s
.str
;
653 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
656 switch (ax
->u
.ptr
.object_type
) {
658 case OBJECT_TYPE_S16
:
659 case OBJECT_TYPE_S32
:
660 case OBJECT_TYPE_S64
:
662 case OBJECT_TYPE_U16
:
663 case OBJECT_TYPE_U32
:
664 case OBJECT_TYPE_U64
:
665 case OBJECT_TYPE_DOUBLE
:
666 case OBJECT_TYPE_STRING
:
667 case OBJECT_TYPE_STRING_SEQUENCE
:
668 ret
= dynamic_load_field(ax
);
671 /* Retry after loading ptr into stack top. */
673 case OBJECT_TYPE_SEQUENCE
:
674 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
675 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
676 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
677 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
679 case OBJECT_TYPE_ARRAY
:
680 /* Skip count (unsigned long) */
681 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
682 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
683 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
684 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
686 case OBJECT_TYPE_STRUCT
:
687 case OBJECT_TYPE_VARIANT
:
693 case REG_STAR_GLOB_STRING
:
699 return LTTNG_INTERPRETER_RECORD_FLAG
;
703 * Return 0 (discard), or raise the 0x1 flag (log event).
704 * Currently, other flags are kept for future extensions and have no
708 uint64_t bytecode_interpret(void *interpreter_data
,
709 const char *interpreter_stack_data
,
710 struct lttng_interpreter_output
*output
)
712 struct bytecode_runtime
*bytecode
= interpreter_data
;
713 struct lttng_ctx
*ctx
= rcu_dereference(*bytecode
->p
.pctx
);
714 void *pc
, *next_pc
, *start_pc
;
717 struct estack _stack
;
718 struct estack
*stack
= &_stack
;
719 register int64_t ax
= 0, bx
= 0;
720 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
721 register int top
= INTERPRETER_STACK_EMPTY
;
722 #ifndef INTERPRETER_USE_SWITCH
723 static void *dispatch
[NR_BYTECODE_OPS
] = {
724 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
726 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
729 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
730 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
731 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
732 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
733 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
734 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
735 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
736 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
737 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
738 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
740 /* binary comparators */
741 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
742 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
743 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
744 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
745 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
746 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
748 /* string binary comparator */
749 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
750 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
751 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
752 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
753 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
754 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
756 /* globbing pattern binary comparator */
757 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
758 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
760 /* s64 binary comparator */
761 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
762 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
763 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
764 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
765 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
766 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
768 /* double binary comparator */
769 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
770 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
771 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
772 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
773 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
774 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
776 /* Mixed S64-double binary comparators */
777 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
778 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
779 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
780 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
781 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
782 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
784 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
785 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
786 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
787 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
788 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
789 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
792 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
793 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
794 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
795 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
796 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
797 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
798 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
799 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
800 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
803 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
804 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
807 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
808 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
809 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
810 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
811 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
813 /* load from immediate operand */
814 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
815 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
816 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
817 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
820 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
821 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
822 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
824 /* get context ref */
825 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
826 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
827 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
828 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
830 /* Instructions for recursive traversal through composed types. */
831 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
832 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
833 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
835 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
836 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
837 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
838 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
840 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
841 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
842 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
843 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
844 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
845 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
846 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
847 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
848 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
849 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
850 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
851 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
853 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
855 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
857 #endif /* #ifndef INTERPRETER_USE_SWITCH */
861 OP(BYTECODE_OP_UNKNOWN
):
862 OP(BYTECODE_OP_LOAD_FIELD_REF
):
863 #ifdef INTERPRETER_USE_SWITCH
865 #endif /* INTERPRETER_USE_SWITCH */
866 ERR("unknown bytecode op %u",
867 (unsigned int) *(bytecode_opcode_t
*) pc
);
871 OP(BYTECODE_OP_RETURN
):
872 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
873 /* Handle dynamic typing. */
874 switch (estack_ax_t
) {
877 retval
= !!estack_ax_v
;
888 case REG_STAR_GLOB_STRING
:
897 OP(BYTECODE_OP_RETURN_S64
):
898 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
899 retval
= !!estack_ax_v
;
907 OP(BYTECODE_OP_PLUS
):
908 OP(BYTECODE_OP_MINUS
):
909 ERR("unsupported bytecode op %u",
910 (unsigned int) *(bytecode_opcode_t
*) pc
);
916 /* Dynamic typing. */
917 switch (estack_ax_t
) {
918 case REG_S64
: /* Fall-through */
920 switch (estack_bx_t
) {
921 case REG_S64
: /* Fall-through */
923 JUMP_TO(BYTECODE_OP_EQ_S64
);
925 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
926 case REG_STRING
: /* Fall-through */
927 case REG_STAR_GLOB_STRING
:
931 ERR("Unknown interpreter register type (%d)",
938 switch (estack_bx_t
) {
939 case REG_S64
: /* Fall-through */
941 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
943 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
944 case REG_STRING
: /* Fall-through */
945 case REG_STAR_GLOB_STRING
:
949 ERR("Unknown interpreter register type (%d)",
956 switch (estack_bx_t
) {
957 case REG_S64
: /* Fall-through */
958 case REG_U64
: /* Fall-through */
963 JUMP_TO(BYTECODE_OP_EQ_STRING
);
964 case REG_STAR_GLOB_STRING
:
965 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
967 ERR("Unknown interpreter register type (%d)",
973 case REG_STAR_GLOB_STRING
:
974 switch (estack_bx_t
) {
975 case REG_S64
: /* Fall-through */
976 case REG_U64
: /* Fall-through */
981 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
982 case REG_STAR_GLOB_STRING
:
986 ERR("Unknown interpreter register type (%d)",
993 ERR("Unknown interpreter register type (%d)",
1001 /* Dynamic typing. */
1002 switch (estack_ax_t
) {
1003 case REG_S64
: /* Fall-through */
1005 switch (estack_bx_t
) {
1006 case REG_S64
: /* Fall-through */
1008 JUMP_TO(BYTECODE_OP_NE_S64
);
1010 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1011 case REG_STRING
: /* Fall-through */
1012 case REG_STAR_GLOB_STRING
:
1016 ERR("Unknown interpreter register type (%d)",
1023 switch (estack_bx_t
) {
1024 case REG_S64
: /* Fall-through */
1026 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1028 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1029 case REG_STRING
: /* Fall-through */
1030 case REG_STAR_GLOB_STRING
:
1034 ERR("Unknown interpreter register type (%d)",
1041 switch (estack_bx_t
) {
1042 case REG_S64
: /* Fall-through */
1048 JUMP_TO(BYTECODE_OP_NE_STRING
);
1049 case REG_STAR_GLOB_STRING
:
1050 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1052 ERR("Unknown interpreter register type (%d)",
1058 case REG_STAR_GLOB_STRING
:
1059 switch (estack_bx_t
) {
1060 case REG_S64
: /* Fall-through */
1066 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1067 case REG_STAR_GLOB_STRING
:
1071 ERR("Unknown interpreter register type (%d)",
1078 ERR("Unknown interpreter register type (%d)",
1086 /* Dynamic typing. */
1087 switch (estack_ax_t
) {
1088 case REG_S64
: /* Fall-through */
1090 switch (estack_bx_t
) {
1091 case REG_S64
: /* Fall-through */
1093 JUMP_TO(BYTECODE_OP_GT_S64
);
1095 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1096 case REG_STRING
: /* Fall-through */
1097 case REG_STAR_GLOB_STRING
:
1101 ERR("Unknown interpreter register type (%d)",
1108 switch (estack_bx_t
) {
1109 case REG_S64
: /* Fall-through */
1111 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1113 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1114 case REG_STRING
: /* Fall-through */
1115 case REG_STAR_GLOB_STRING
:
1119 ERR("Unknown interpreter register type (%d)",
1126 switch (estack_bx_t
) {
1127 case REG_S64
: /* Fall-through */
1128 case REG_U64
: /* Fall-through */
1129 case REG_DOUBLE
: /* Fall-through */
1130 case REG_STAR_GLOB_STRING
:
1134 JUMP_TO(BYTECODE_OP_GT_STRING
);
1136 ERR("Unknown interpreter register type (%d)",
1143 ERR("Unknown interpreter register type (%d)",
1151 /* Dynamic typing. */
1152 switch (estack_ax_t
) {
1153 case REG_S64
: /* Fall-through */
1155 switch (estack_bx_t
) {
1156 case REG_S64
: /* Fall-through */
1158 JUMP_TO(BYTECODE_OP_LT_S64
);
1160 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1161 case REG_STRING
: /* Fall-through */
1162 case REG_STAR_GLOB_STRING
:
1166 ERR("Unknown interpreter register type (%d)",
1173 switch (estack_bx_t
) {
1174 case REG_S64
: /* Fall-through */
1176 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1178 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1179 case REG_STRING
: /* Fall-through */
1180 case REG_STAR_GLOB_STRING
:
1184 ERR("Unknown interpreter register type (%d)",
1191 switch (estack_bx_t
) {
1192 case REG_S64
: /* Fall-through */
1193 case REG_U64
: /* Fall-through */
1194 case REG_DOUBLE
: /* Fall-through */
1195 case REG_STAR_GLOB_STRING
:
1199 JUMP_TO(BYTECODE_OP_LT_STRING
);
1201 ERR("Unknown interpreter register type (%d)",
1208 ERR("Unknown interpreter register type (%d)",
1216 /* Dynamic typing. */
1217 switch (estack_ax_t
) {
1218 case REG_S64
: /* Fall-through */
1220 switch (estack_bx_t
) {
1221 case REG_S64
: /* Fall-through */
1223 JUMP_TO(BYTECODE_OP_GE_S64
);
1225 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1226 case REG_STRING
: /* Fall-through */
1227 case REG_STAR_GLOB_STRING
:
1231 ERR("Unknown interpreter register type (%d)",
1238 switch (estack_bx_t
) {
1239 case REG_S64
: /* Fall-through */
1241 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1243 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1244 case REG_STRING
: /* Fall-through */
1245 case REG_STAR_GLOB_STRING
:
1249 ERR("Unknown interpreter register type (%d)",
1256 switch (estack_bx_t
) {
1257 case REG_S64
: /* Fall-through */
1258 case REG_U64
: /* Fall-through */
1259 case REG_DOUBLE
: /* Fall-through */
1260 case REG_STAR_GLOB_STRING
:
1264 JUMP_TO(BYTECODE_OP_GE_STRING
);
1266 ERR("Unknown interpreter register type (%d)",
1273 ERR("Unknown interpreter register type (%d)",
1281 /* Dynamic typing. */
1282 switch (estack_ax_t
) {
1283 case REG_S64
: /* Fall-through */
1285 switch (estack_bx_t
) {
1286 case REG_S64
: /* Fall-through */
1288 JUMP_TO(BYTECODE_OP_LE_S64
);
1290 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1291 case REG_STRING
: /* Fall-through */
1292 case REG_STAR_GLOB_STRING
:
1296 ERR("Unknown interpreter register type (%d)",
1303 switch (estack_bx_t
) {
1304 case REG_S64
: /* Fall-through */
1306 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1308 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1309 case REG_STRING
: /* Fall-through */
1310 case REG_STAR_GLOB_STRING
:
1314 ERR("Unknown interpreter register type (%d)",
1321 switch (estack_bx_t
) {
1322 case REG_S64
: /* Fall-through */
1323 case REG_U64
: /* Fall-through */
1324 case REG_DOUBLE
: /* Fall-through */
1325 case REG_STAR_GLOB_STRING
:
1329 JUMP_TO(BYTECODE_OP_LE_STRING
);
1331 ERR("Unknown interpreter register type (%d)",
1338 ERR("Unknown interpreter register type (%d)",
1345 OP(BYTECODE_OP_EQ_STRING
):
1349 res
= (stack_strcmp(stack
, top
, "==") == 0);
1350 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1352 estack_ax_t
= REG_S64
;
1353 next_pc
+= sizeof(struct binary_op
);
1356 OP(BYTECODE_OP_NE_STRING
):
1360 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1361 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1363 estack_ax_t
= REG_S64
;
1364 next_pc
+= sizeof(struct binary_op
);
1367 OP(BYTECODE_OP_GT_STRING
):
1371 res
= (stack_strcmp(stack
, top
, ">") > 0);
1372 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1374 estack_ax_t
= REG_S64
;
1375 next_pc
+= sizeof(struct binary_op
);
1378 OP(BYTECODE_OP_LT_STRING
):
1382 res
= (stack_strcmp(stack
, top
, "<") < 0);
1383 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1385 estack_ax_t
= REG_S64
;
1386 next_pc
+= sizeof(struct binary_op
);
1389 OP(BYTECODE_OP_GE_STRING
):
1393 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1394 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1396 estack_ax_t
= REG_S64
;
1397 next_pc
+= sizeof(struct binary_op
);
1400 OP(BYTECODE_OP_LE_STRING
):
1404 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1405 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1407 estack_ax_t
= REG_S64
;
1408 next_pc
+= sizeof(struct binary_op
);
1412 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1416 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1417 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1419 estack_ax_t
= REG_S64
;
1420 next_pc
+= sizeof(struct binary_op
);
1423 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1427 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1428 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1430 estack_ax_t
= REG_S64
;
1431 next_pc
+= sizeof(struct binary_op
);
1435 OP(BYTECODE_OP_EQ_S64
):
1439 res
= (estack_bx_v
== estack_ax_v
);
1440 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1442 estack_ax_t
= REG_S64
;
1443 next_pc
+= sizeof(struct binary_op
);
1446 OP(BYTECODE_OP_NE_S64
):
1450 res
= (estack_bx_v
!= estack_ax_v
);
1451 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1453 estack_ax_t
= REG_S64
;
1454 next_pc
+= sizeof(struct binary_op
);
1457 OP(BYTECODE_OP_GT_S64
):
1461 res
= (estack_bx_v
> estack_ax_v
);
1462 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1464 estack_ax_t
= REG_S64
;
1465 next_pc
+= sizeof(struct binary_op
);
1468 OP(BYTECODE_OP_LT_S64
):
1472 res
= (estack_bx_v
< estack_ax_v
);
1473 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1475 estack_ax_t
= REG_S64
;
1476 next_pc
+= sizeof(struct binary_op
);
1479 OP(BYTECODE_OP_GE_S64
):
1483 res
= (estack_bx_v
>= estack_ax_v
);
1484 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1486 estack_ax_t
= REG_S64
;
1487 next_pc
+= sizeof(struct binary_op
);
1490 OP(BYTECODE_OP_LE_S64
):
1494 res
= (estack_bx_v
<= estack_ax_v
);
1495 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1497 estack_ax_t
= REG_S64
;
1498 next_pc
+= sizeof(struct binary_op
);
1502 OP(BYTECODE_OP_EQ_DOUBLE
):
1506 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1507 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1509 estack_ax_t
= REG_S64
;
1510 next_pc
+= sizeof(struct binary_op
);
1513 OP(BYTECODE_OP_NE_DOUBLE
):
1517 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1518 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1520 estack_ax_t
= REG_S64
;
1521 next_pc
+= sizeof(struct binary_op
);
1524 OP(BYTECODE_OP_GT_DOUBLE
):
1528 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1529 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1531 estack_ax_t
= REG_S64
;
1532 next_pc
+= sizeof(struct binary_op
);
1535 OP(BYTECODE_OP_LT_DOUBLE
):
1539 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1540 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1542 estack_ax_t
= REG_S64
;
1543 next_pc
+= sizeof(struct binary_op
);
1546 OP(BYTECODE_OP_GE_DOUBLE
):
1550 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1551 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1553 estack_ax_t
= REG_S64
;
1554 next_pc
+= sizeof(struct binary_op
);
1557 OP(BYTECODE_OP_LE_DOUBLE
):
1561 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1562 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1564 estack_ax_t
= REG_S64
;
1565 next_pc
+= sizeof(struct binary_op
);
1569 /* Mixed S64-double binary comparators */
1570 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1574 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1575 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1577 estack_ax_t
= REG_S64
;
1578 next_pc
+= sizeof(struct binary_op
);
1581 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1585 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1586 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1588 estack_ax_t
= REG_S64
;
1589 next_pc
+= sizeof(struct binary_op
);
1592 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1596 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1597 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1599 estack_ax_t
= REG_S64
;
1600 next_pc
+= sizeof(struct binary_op
);
1603 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1607 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1608 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1610 estack_ax_t
= REG_S64
;
1611 next_pc
+= sizeof(struct binary_op
);
1614 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1618 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1619 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1621 estack_ax_t
= REG_S64
;
1622 next_pc
+= sizeof(struct binary_op
);
1625 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1629 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1630 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1632 estack_ax_t
= REG_S64
;
1633 next_pc
+= sizeof(struct binary_op
);
1637 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1641 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1642 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1644 estack_ax_t
= REG_S64
;
1645 next_pc
+= sizeof(struct binary_op
);
1648 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1652 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1653 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1655 estack_ax_t
= REG_S64
;
1656 next_pc
+= sizeof(struct binary_op
);
1659 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1663 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1664 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1666 estack_ax_t
= REG_S64
;
1667 next_pc
+= sizeof(struct binary_op
);
1670 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1674 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1675 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1677 estack_ax_t
= REG_S64
;
1678 next_pc
+= sizeof(struct binary_op
);
1681 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1685 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1686 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1688 estack_ax_t
= REG_S64
;
1689 next_pc
+= sizeof(struct binary_op
);
1692 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1696 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1697 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1699 estack_ax_t
= REG_S64
;
1700 next_pc
+= sizeof(struct binary_op
);
1703 OP(BYTECODE_OP_BIT_RSHIFT
):
1707 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1712 /* Catch undefined behavior. */
1713 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1717 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1718 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1720 estack_ax_t
= REG_U64
;
1721 next_pc
+= sizeof(struct binary_op
);
1724 OP(BYTECODE_OP_BIT_LSHIFT
):
1728 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1733 /* Catch undefined behavior. */
1734 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1738 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1739 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1741 estack_ax_t
= REG_U64
;
1742 next_pc
+= sizeof(struct binary_op
);
1745 OP(BYTECODE_OP_BIT_AND
):
1749 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1754 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1755 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1757 estack_ax_t
= REG_U64
;
1758 next_pc
+= sizeof(struct binary_op
);
1761 OP(BYTECODE_OP_BIT_OR
):
1765 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1770 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1771 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1773 estack_ax_t
= REG_U64
;
1774 next_pc
+= sizeof(struct binary_op
);
1777 OP(BYTECODE_OP_BIT_XOR
):
1781 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1786 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1787 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1789 estack_ax_t
= REG_U64
;
1790 next_pc
+= sizeof(struct binary_op
);
1795 OP(BYTECODE_OP_UNARY_PLUS
):
1797 /* Dynamic typing. */
1798 switch (estack_ax_t
) {
1799 case REG_S64
: /* Fall-through. */
1801 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1803 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1804 case REG_STRING
: /* Fall-through */
1805 case REG_STAR_GLOB_STRING
:
1809 ERR("Unknown interpreter register type (%d)",
1815 OP(BYTECODE_OP_UNARY_MINUS
):
1817 /* Dynamic typing. */
1818 switch (estack_ax_t
) {
1819 case REG_S64
: /* Fall-through. */
1821 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1823 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1824 case REG_STRING
: /* Fall-through */
1825 case REG_STAR_GLOB_STRING
:
1829 ERR("Unknown interpreter register type (%d)",
1835 OP(BYTECODE_OP_UNARY_NOT
):
1837 /* Dynamic typing. */
1838 switch (estack_ax_t
) {
1839 case REG_S64
: /* Fall-through. */
1841 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1843 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1844 case REG_STRING
: /* Fall-through */
1845 case REG_STAR_GLOB_STRING
:
1849 ERR("Unknown interpreter register type (%d)",
1854 next_pc
+= sizeof(struct unary_op
);
1858 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1860 /* Dynamic typing. */
1861 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1866 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1867 estack_ax_t
= REG_U64
;
1868 next_pc
+= sizeof(struct unary_op
);
1872 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1873 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1875 next_pc
+= sizeof(struct unary_op
);
1878 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1880 estack_ax_v
= -estack_ax_v
;
1881 next_pc
+= sizeof(struct unary_op
);
1884 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1886 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1887 next_pc
+= sizeof(struct unary_op
);
1890 OP(BYTECODE_OP_UNARY_NOT_S64
):
1892 estack_ax_v
= !estack_ax_v
;
1893 estack_ax_t
= REG_S64
;
1894 next_pc
+= sizeof(struct unary_op
);
1897 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1899 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1900 estack_ax_t
= REG_S64
;
1901 next_pc
+= sizeof(struct unary_op
);
1906 OP(BYTECODE_OP_AND
):
1908 struct logical_op
*insn
= (struct logical_op
*) pc
;
1910 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1914 /* If AX is 0, skip and evaluate to 0 */
1915 if (unlikely(estack_ax_v
== 0)) {
1916 dbg_printf("Jumping to bytecode offset %u\n",
1917 (unsigned int) insn
->skip_offset
);
1918 next_pc
= start_pc
+ insn
->skip_offset
;
1920 /* Pop 1 when jump not taken */
1921 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1922 next_pc
+= sizeof(struct logical_op
);
1928 struct logical_op
*insn
= (struct logical_op
*) pc
;
1930 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1934 /* If AX is nonzero, skip and evaluate to 1 */
1935 if (unlikely(estack_ax_v
!= 0)) {
1937 dbg_printf("Jumping to bytecode offset %u\n",
1938 (unsigned int) insn
->skip_offset
);
1939 next_pc
= start_pc
+ insn
->skip_offset
;
1941 /* Pop 1 when jump not taken */
1942 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1943 next_pc
+= sizeof(struct logical_op
);
1949 /* load field ref */
1950 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1952 struct load_op
*insn
= (struct load_op
*) pc
;
1953 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1955 dbg_printf("load field ref offset %u type string\n",
1957 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1958 estack_ax(stack
, top
)->u
.s
.str
=
1959 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1960 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1961 dbg_printf("Interpreter warning: loading a NULL string.\n");
1965 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1966 estack_ax(stack
, top
)->u
.s
.literal_type
=
1967 ESTACK_STRING_LITERAL_TYPE_NONE
;
1968 estack_ax_t
= REG_STRING
;
1969 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1970 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1974 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
1976 struct load_op
*insn
= (struct load_op
*) pc
;
1977 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1979 dbg_printf("load field ref offset %u type sequence\n",
1981 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1982 estack_ax(stack
, top
)->u
.s
.seq_len
=
1983 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1984 estack_ax(stack
, top
)->u
.s
.str
=
1985 *(const char **) (&interpreter_stack_data
[ref
->offset
1986 + sizeof(unsigned long)]);
1987 estack_ax_t
= REG_STRING
;
1988 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1989 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
1993 estack_ax(stack
, top
)->u
.s
.literal_type
=
1994 ESTACK_STRING_LITERAL_TYPE_NONE
;
1995 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1999 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2001 struct load_op
*insn
= (struct load_op
*) pc
;
2002 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2004 dbg_printf("load field ref offset %u type s64\n",
2006 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2008 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2009 estack_ax_t
= REG_S64
;
2010 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2011 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2015 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2017 struct load_op
*insn
= (struct load_op
*) pc
;
2018 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2020 dbg_printf("load field ref offset %u type double\n",
2022 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2023 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2024 sizeof(struct literal_double
));
2025 estack_ax_t
= REG_DOUBLE
;
2026 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2027 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2031 /* load from immediate operand */
2032 OP(BYTECODE_OP_LOAD_STRING
):
2034 struct load_op
*insn
= (struct load_op
*) pc
;
2036 dbg_printf("load string %s\n", insn
->data
);
2037 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2038 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2039 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2040 estack_ax(stack
, top
)->u
.s
.literal_type
=
2041 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2042 estack_ax_t
= REG_STRING
;
2043 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2047 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2049 struct load_op
*insn
= (struct load_op
*) pc
;
2051 dbg_printf("load globbing pattern %s\n", insn
->data
);
2052 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2053 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2054 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2055 estack_ax(stack
, top
)->u
.s
.literal_type
=
2056 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2057 estack_ax_t
= REG_STAR_GLOB_STRING
;
2058 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2062 OP(BYTECODE_OP_LOAD_S64
):
2064 struct load_op
*insn
= (struct load_op
*) pc
;
2066 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2067 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2068 estack_ax_t
= REG_S64
;
2069 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2070 next_pc
+= sizeof(struct load_op
)
2071 + sizeof(struct literal_numeric
);
2075 OP(BYTECODE_OP_LOAD_DOUBLE
):
2077 struct load_op
*insn
= (struct load_op
*) pc
;
2079 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2080 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2081 sizeof(struct literal_double
));
2082 estack_ax_t
= REG_DOUBLE
;
2083 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2084 next_pc
+= sizeof(struct load_op
)
2085 + sizeof(struct literal_double
);
2090 OP(BYTECODE_OP_CAST_TO_S64
):
2092 /* Dynamic typing. */
2093 switch (estack_ax_t
) {
2095 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2097 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2099 estack_ax_t
= REG_S64
;
2100 next_pc
+= sizeof(struct cast_op
);
2101 case REG_STRING
: /* Fall-through */
2102 case REG_STAR_GLOB_STRING
:
2106 ERR("Unknown interpreter register type (%d)",
2113 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2115 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2116 estack_ax_t
= REG_S64
;
2117 next_pc
+= sizeof(struct cast_op
);
2121 OP(BYTECODE_OP_CAST_NOP
):
2123 next_pc
+= sizeof(struct cast_op
);
2127 /* get context ref */
2128 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2130 struct load_op
*insn
= (struct load_op
*) pc
;
2131 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2132 struct lttng_ctx_field
*ctx_field
;
2133 struct lttng_ctx_value v
;
2135 dbg_printf("get context ref offset %u type dynamic\n",
2137 ctx_field
= &ctx
->fields
[ref
->offset
];
2138 ctx_field
->get_value(ctx_field
, &v
);
2139 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2141 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2144 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2145 estack_ax_v
= v
.u
.s64
;
2146 estack_ax_t
= REG_S64
;
2147 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2149 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2150 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2151 estack_ax_t
= REG_DOUBLE
;
2152 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2154 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2155 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2156 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2157 dbg_printf("Interpreter warning: loading a NULL string.\n");
2161 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2162 estack_ax(stack
, top
)->u
.s
.literal_type
=
2163 ESTACK_STRING_LITERAL_TYPE_NONE
;
2164 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2165 estack_ax_t
= REG_STRING
;
2168 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2172 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2176 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2178 struct load_op
*insn
= (struct load_op
*) pc
;
2179 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2180 struct lttng_ctx_field
*ctx_field
;
2181 struct lttng_ctx_value v
;
2183 dbg_printf("get context ref offset %u type string\n",
2185 ctx_field
= &ctx
->fields
[ref
->offset
];
2186 ctx_field
->get_value(ctx_field
, &v
);
2187 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2188 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2189 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2190 dbg_printf("Interpreter warning: loading a NULL string.\n");
2194 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2195 estack_ax(stack
, top
)->u
.s
.literal_type
=
2196 ESTACK_STRING_LITERAL_TYPE_NONE
;
2197 estack_ax_t
= REG_STRING
;
2198 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2199 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2203 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2205 struct load_op
*insn
= (struct load_op
*) pc
;
2206 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2207 struct lttng_ctx_field
*ctx_field
;
2208 struct lttng_ctx_value v
;
2210 dbg_printf("get context ref offset %u type s64\n",
2212 ctx_field
= &ctx
->fields
[ref
->offset
];
2213 ctx_field
->get_value(ctx_field
, &v
);
2214 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2215 estack_ax_v
= v
.u
.s64
;
2216 estack_ax_t
= REG_S64
;
2217 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2218 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2222 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2224 struct load_op
*insn
= (struct load_op
*) pc
;
2225 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2226 struct lttng_ctx_field
*ctx_field
;
2227 struct lttng_ctx_value v
;
2229 dbg_printf("get context ref offset %u type double\n",
2231 ctx_field
= &ctx
->fields
[ref
->offset
];
2232 ctx_field
->get_value(ctx_field
, &v
);
2233 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2234 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2235 estack_ax_t
= REG_DOUBLE
;
2236 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2237 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2241 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2243 dbg_printf("op get context root\n");
2244 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2245 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2246 /* "field" only needed for variants. */
2247 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2248 estack_ax_t
= REG_PTR
;
2249 next_pc
+= sizeof(struct load_op
);
2253 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2255 dbg_printf("op get app context root\n");
2256 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2257 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2258 /* "field" only needed for variants. */
2259 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2260 estack_ax_t
= REG_PTR
;
2261 next_pc
+= sizeof(struct load_op
);
2265 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2267 dbg_printf("op get app payload root\n");
2268 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2269 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2270 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2271 /* "field" only needed for variants. */
2272 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2273 estack_ax_t
= REG_PTR
;
2274 next_pc
+= sizeof(struct load_op
);
2278 OP(BYTECODE_OP_GET_SYMBOL
):
2280 dbg_printf("op get symbol\n");
2281 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2283 ERR("Nested fields not implemented yet.");
2286 case LOAD_ROOT_CONTEXT
:
2287 case LOAD_ROOT_APP_CONTEXT
:
2288 case LOAD_ROOT_PAYLOAD
:
2290 * symbol lookup is performed by
2296 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2300 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2303 * Used for first variant encountered in a
2304 * traversal. Variants are not implemented yet.
2310 OP(BYTECODE_OP_GET_INDEX_U16
):
2312 struct load_op
*insn
= (struct load_op
*) pc
;
2313 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2315 dbg_printf("op get index u16\n");
2316 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2319 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2320 estack_ax_t
= estack_ax(stack
, top
)->type
;
2321 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2325 OP(BYTECODE_OP_GET_INDEX_U64
):
2327 struct load_op
*insn
= (struct load_op
*) pc
;
2328 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2330 dbg_printf("op get index u64\n");
2331 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2334 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2335 estack_ax_t
= estack_ax(stack
, top
)->type
;
2336 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2340 OP(BYTECODE_OP_LOAD_FIELD
):
2342 dbg_printf("op load field\n");
2343 ret
= dynamic_load_field(estack_ax(stack
, top
));
2346 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2347 estack_ax_t
= estack_ax(stack
, top
)->type
;
2348 next_pc
+= sizeof(struct load_op
);
2352 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2354 dbg_printf("op load field s8\n");
2356 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2357 estack_ax_t
= REG_S64
;
2358 next_pc
+= sizeof(struct load_op
);
2361 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2363 dbg_printf("op load field s16\n");
2365 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2366 estack_ax_t
= REG_S64
;
2367 next_pc
+= sizeof(struct load_op
);
2370 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2372 dbg_printf("op load field s32\n");
2374 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2375 estack_ax_t
= REG_S64
;
2376 next_pc
+= sizeof(struct load_op
);
2379 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2381 dbg_printf("op load field s64\n");
2383 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2384 estack_ax_t
= REG_S64
;
2385 next_pc
+= sizeof(struct load_op
);
2388 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2390 dbg_printf("op load field u8\n");
2392 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2393 estack_ax_t
= REG_U64
;
2394 next_pc
+= sizeof(struct load_op
);
2397 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2399 dbg_printf("op load field u16\n");
2401 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2402 estack_ax_t
= REG_U64
;
2403 next_pc
+= sizeof(struct load_op
);
2406 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2408 dbg_printf("op load field u32\n");
2410 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2411 estack_ax_t
= REG_U64
;
2412 next_pc
+= sizeof(struct load_op
);
2415 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2417 dbg_printf("op load field u64\n");
2419 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2420 estack_ax_t
= REG_U64
;
2421 next_pc
+= sizeof(struct load_op
);
2424 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2426 dbg_printf("op load field double\n");
2428 memcpy(&estack_ax(stack
, top
)->u
.d
,
2429 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2430 sizeof(struct literal_double
));
2431 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2432 next_pc
+= sizeof(struct load_op
);
2436 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2440 dbg_printf("op load field string\n");
2441 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2442 estack_ax(stack
, top
)->u
.s
.str
= str
;
2443 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2444 dbg_printf("Interpreter warning: loading a NULL string.\n");
2448 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2449 estack_ax(stack
, top
)->u
.s
.literal_type
=
2450 ESTACK_STRING_LITERAL_TYPE_NONE
;
2451 estack_ax(stack
, top
)->type
= REG_STRING
;
2452 next_pc
+= sizeof(struct load_op
);
2456 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2460 dbg_printf("op load field string sequence\n");
2461 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2462 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2463 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2464 estack_ax(stack
, top
)->type
= REG_STRING
;
2465 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2466 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2470 estack_ax(stack
, top
)->u
.s
.literal_type
=
2471 ESTACK_STRING_LITERAL_TYPE_NONE
;
2472 next_pc
+= sizeof(struct load_op
);
2478 /* Return _DISCARD on error. */
2480 return LTTNG_INTERPRETER_DISCARD
;
2483 return lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
/*
 * Public entry point for filter bytecode evaluation.
 *
 * Delegates to the common bytecode interpreter with a NULL output
 * argument: filters only need the verdict (record / discard), not a
 * captured interpreter output value.
 */
uint64_t lttng_bytecode_filter_interpret(void *filter_data,
		const char *filter_stack_data)
{
	return bytecode_interpret(filter_data, filter_stack_data, NULL);
}