/*
 * lttng-bytecode-interpreter.c
 *
 * LTTng UST bytecode interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
30 #include <urcu-pointer.h>
33 #include <lttng/ust-endian.h>
34 #include <lttng/ust-events.h>
36 #include "lttng-bytecode.h"
37 #include "string-utils.h"
42 * -2: unknown escape char.
47 int parse_char(const char **p
)
67 * Returns SIZE_MAX if the string is null-terminated, or the number of
71 size_t get_str_or_seq_len(const struct estack_entry
*entry
)
73 return entry
->u
.s
.seq_len
;
77 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
80 const char *candidate
;
84 /* Find out which side is the pattern vs. the candidate. */
85 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
86 pattern
= estack_ax(stack
, top
)->u
.s
.str
;
87 pattern_len
= get_str_or_seq_len(estack_ax(stack
, top
));
88 candidate
= estack_bx(stack
, top
)->u
.s
.str
;
89 candidate_len
= get_str_or_seq_len(estack_bx(stack
, top
));
91 pattern
= estack_bx(stack
, top
)->u
.s
.str
;
92 pattern_len
= get_str_or_seq_len(estack_bx(stack
, top
));
93 candidate
= estack_ax(stack
, top
)->u
.s
.str
;
94 candidate_len
= get_str_or_seq_len(estack_ax(stack
, top
));
97 /* Perform the match. Returns 0 when the result is true. */
98 return !strutils_star_glob_match(pattern
, pattern_len
, candidate
,
103 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
105 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
112 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
>= estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
113 if (q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
116 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
117 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
118 ret
= parse_char(&q
);
125 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
>= estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
126 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
127 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
128 ret
= parse_char(&p
);
134 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
135 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
136 ret
= parse_char(&p
);
139 } else if (ret
== -2) {
142 /* else compare both char */
144 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
145 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
146 ret
= parse_char(&q
);
149 } else if (ret
== -2) {
169 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data
,
170 const char *filter_stack_data
)
172 return LTTNG_INTERPRETER_DISCARD
;
175 #ifdef INTERPRETER_USE_SWITCH
178 * Fallback for compilers that do not support taking address of labels.
182 start_pc = &bytecode->data[0]; \
183 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
185 dbg_printf("Executing op %s (%u)\n", \
186 print_op((unsigned int) *(bytecode_opcode_t *) pc), \
187 (unsigned int) *(bytecode_opcode_t *) pc); \
188 switch (*(bytecode_opcode_t *) pc) {
190 #define OP(name) jump_target_##name: __attribute__((unused)); \
198 #define JUMP_TO(name) \
199 goto jump_target_##name
204 * Dispatch-table based interpreter.
208 start_pc = &bytecode->code[0]; \
209 pc = next_pc = start_pc; \
210 if (unlikely(pc - start_pc >= bytecode->len)) \
212 goto *dispatch[*(bytecode_opcode_t *) pc];
219 goto *dispatch[*(bytecode_opcode_t *) pc];
223 #define JUMP_TO(name) \
228 #define IS_INTEGER_REGISTER(reg_type) \
229 (reg_type == REG_U64 || reg_type == REG_S64)
231 static int context_get_index(struct lttng_ctx
*ctx
,
232 struct load_ptr
*ptr
,
236 struct lttng_ctx_field
*ctx_field
;
237 struct lttng_event_field
*field
;
238 struct lttng_ctx_value v
;
240 ctx_field
= &ctx
->fields
[idx
];
241 field
= &ctx_field
->event_field
;
242 ptr
->type
= LOAD_OBJECT
;
245 switch (field
->type
.atype
) {
247 ctx_field
->get_value(ctx_field
, &v
);
248 if (field
->type
.u
.integer
.signedness
) {
249 ptr
->object_type
= OBJECT_TYPE_S64
;
250 ptr
->u
.s64
= v
.u
.s64
;
251 ptr
->ptr
= &ptr
->u
.s64
;
253 ptr
->object_type
= OBJECT_TYPE_U64
;
254 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
255 ptr
->ptr
= &ptr
->u
.u64
;
258 case atype_enum
: /* Fall-through */
259 case atype_enum_nestable
:
261 const struct lttng_integer_type
*itype
;
263 if (field
->type
.atype
== atype_enum
) {
264 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
266 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
268 ctx_field
->get_value(ctx_field
, &v
);
269 if (itype
->signedness
) {
270 ptr
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
271 ptr
->u
.s64
= v
.u
.s64
;
272 ptr
->ptr
= &ptr
->u
.s64
;
274 ptr
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
275 ptr
->u
.u64
= v
.u
.s64
; /* Cast. */
276 ptr
->ptr
= &ptr
->u
.u64
;
281 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
282 ERR("Array nesting only supports integer types.");
285 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
286 ERR("Only string arrays are supported for contexts.");
289 ptr
->object_type
= OBJECT_TYPE_STRING
;
290 ctx_field
->get_value(ctx_field
, &v
);
293 case atype_array_nestable
:
294 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
295 ERR("Array nesting only supports integer types.");
298 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
299 ERR("Only string arrays are supported for contexts.");
302 ptr
->object_type
= OBJECT_TYPE_STRING
;
303 ctx_field
->get_value(ctx_field
, &v
);
307 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
308 ERR("Sequence nesting only supports integer types.");
311 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
312 ERR("Only string sequences are supported for contexts.");
315 ptr
->object_type
= OBJECT_TYPE_STRING
;
316 ctx_field
->get_value(ctx_field
, &v
);
319 case atype_sequence_nestable
:
320 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
321 ERR("Sequence nesting only supports integer types.");
324 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
325 ERR("Only string sequences are supported for contexts.");
328 ptr
->object_type
= OBJECT_TYPE_STRING
;
329 ctx_field
->get_value(ctx_field
, &v
);
333 ptr
->object_type
= OBJECT_TYPE_STRING
;
334 ctx_field
->get_value(ctx_field
, &v
);
338 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
339 ctx_field
->get_value(ctx_field
, &v
);
341 ptr
->ptr
= &ptr
->u
.d
;
344 ctx_field
->get_value(ctx_field
, &v
);
346 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
348 case LTTNG_UST_DYNAMIC_TYPE_S64
:
349 ptr
->object_type
= OBJECT_TYPE_S64
;
350 ptr
->u
.s64
= v
.u
.s64
;
351 ptr
->ptr
= &ptr
->u
.s64
;
352 dbg_printf("context get index dynamic s64 %" PRIi64
"\n", ptr
->u
.s64
);
354 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
355 ptr
->object_type
= OBJECT_TYPE_DOUBLE
;
357 ptr
->ptr
= &ptr
->u
.d
;
358 dbg_printf("context get index dynamic double %g\n", ptr
->u
.d
);
360 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
361 ptr
->object_type
= OBJECT_TYPE_STRING
;
363 dbg_printf("context get index dynamic string %s\n", (const char *) ptr
->ptr
);
366 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
371 ERR("Structure type cannot be loaded.");
374 ERR("Unknown type: %d", (int) field
->type
.atype
);
380 static int dynamic_get_index(struct lttng_ctx
*ctx
,
381 struct bytecode_runtime
*runtime
,
382 uint64_t index
, struct estack_entry
*stack_top
)
385 const struct bytecode_get_index_data
*gid
;
387 gid
= (const struct bytecode_get_index_data
*) &runtime
->data
[index
];
388 switch (stack_top
->u
.ptr
.type
) {
390 switch (stack_top
->u
.ptr
.object_type
) {
391 case OBJECT_TYPE_ARRAY
:
395 assert(gid
->offset
< gid
->array_len
);
396 /* Skip count (unsigned long) */
397 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
398 ptr
= ptr
+ gid
->offset
;
399 stack_top
->u
.ptr
.ptr
= ptr
;
400 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
401 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
402 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_array
||
403 stack_top
->u
.ptr
.field
->type
.atype
== atype_array_nestable
);
404 stack_top
->u
.ptr
.field
= NULL
;
407 case OBJECT_TYPE_SEQUENCE
:
412 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
413 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
414 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
418 ptr
= ptr
+ gid
->offset
;
419 stack_top
->u
.ptr
.ptr
= ptr
;
420 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
421 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
422 assert(stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence
||
423 stack_top
->u
.ptr
.field
->type
.atype
== atype_sequence_nestable
);
424 stack_top
->u
.ptr
.field
= NULL
;
427 case OBJECT_TYPE_STRUCT
:
428 ERR("Nested structures are not supported yet.");
431 case OBJECT_TYPE_VARIANT
:
433 ERR("Unexpected get index type %d",
434 (int) stack_top
->u
.ptr
.object_type
);
439 case LOAD_ROOT_CONTEXT
:
440 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
442 ret
= context_get_index(ctx
,
450 case LOAD_ROOT_PAYLOAD
:
451 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
452 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
453 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
454 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
455 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
456 stack_top
->u
.ptr
.field
= gid
->field
;
457 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
461 stack_top
->type
= REG_PTR
;
469 static int dynamic_load_field(struct estack_entry
*stack_top
)
473 switch (stack_top
->u
.ptr
.type
) {
476 case LOAD_ROOT_CONTEXT
:
477 case LOAD_ROOT_APP_CONTEXT
:
478 case LOAD_ROOT_PAYLOAD
:
480 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
484 switch (stack_top
->u
.ptr
.object_type
) {
486 dbg_printf("op load field s8\n");
487 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
488 stack_top
->type
= REG_S64
;
490 case OBJECT_TYPE_S16
:
494 dbg_printf("op load field s16\n");
495 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
496 if (stack_top
->u
.ptr
.rev_bo
)
498 stack_top
->u
.v
= tmp
;
499 stack_top
->type
= REG_S64
;
502 case OBJECT_TYPE_S32
:
506 dbg_printf("op load field s32\n");
507 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
508 if (stack_top
->u
.ptr
.rev_bo
)
510 stack_top
->u
.v
= tmp
;
511 stack_top
->type
= REG_S64
;
514 case OBJECT_TYPE_S64
:
518 dbg_printf("op load field s64\n");
519 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
520 if (stack_top
->u
.ptr
.rev_bo
)
522 stack_top
->u
.v
= tmp
;
523 stack_top
->type
= REG_S64
;
526 case OBJECT_TYPE_SIGNED_ENUM
:
530 dbg_printf("op load field signed enumeration\n");
531 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
532 if (stack_top
->u
.ptr
.rev_bo
)
534 stack_top
->u
.v
= tmp
;
535 stack_top
->type
= REG_S64
;
539 dbg_printf("op load field u8\n");
540 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
541 stack_top
->type
= REG_U64
;
543 case OBJECT_TYPE_U16
:
547 dbg_printf("op load field u16\n");
548 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
549 if (stack_top
->u
.ptr
.rev_bo
)
551 stack_top
->u
.v
= tmp
;
552 stack_top
->type
= REG_U64
;
555 case OBJECT_TYPE_U32
:
559 dbg_printf("op load field u32\n");
560 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
561 if (stack_top
->u
.ptr
.rev_bo
)
563 stack_top
->u
.v
= tmp
;
564 stack_top
->type
= REG_U64
;
567 case OBJECT_TYPE_U64
:
571 dbg_printf("op load field u64\n");
572 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
573 if (stack_top
->u
.ptr
.rev_bo
)
575 stack_top
->u
.v
= tmp
;
576 stack_top
->type
= REG_U64
;
579 case OBJECT_TYPE_UNSIGNED_ENUM
:
583 dbg_printf("op load field unsigned enumeration\n");
584 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
585 if (stack_top
->u
.ptr
.rev_bo
)
587 stack_top
->u
.v
= tmp
;
588 stack_top
->type
= REG_U64
;
591 case OBJECT_TYPE_DOUBLE
:
592 memcpy(&stack_top
->u
.d
,
593 stack_top
->u
.ptr
.ptr
,
594 sizeof(struct literal_double
));
595 stack_top
->type
= REG_DOUBLE
;
597 case OBJECT_TYPE_STRING
:
601 dbg_printf("op load field string\n");
602 str
= (const char *) stack_top
->u
.ptr
.ptr
;
603 stack_top
->u
.s
.str
= str
;
604 if (unlikely(!stack_top
->u
.s
.str
)) {
605 dbg_printf("Interpreter warning: loading a NULL string.\n");
609 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
610 stack_top
->u
.s
.literal_type
=
611 ESTACK_STRING_LITERAL_TYPE_NONE
;
612 stack_top
->type
= REG_STRING
;
615 case OBJECT_TYPE_STRING_SEQUENCE
:
619 dbg_printf("op load field string sequence\n");
620 ptr
= stack_top
->u
.ptr
.ptr
;
621 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
622 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
623 stack_top
->type
= REG_STRING
;
624 if (unlikely(!stack_top
->u
.s
.str
)) {
625 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
629 stack_top
->u
.s
.literal_type
=
630 ESTACK_STRING_LITERAL_TYPE_NONE
;
633 case OBJECT_TYPE_DYNAMIC
:
635 * Dynamic types in context are looked up
636 * by context get index.
640 case OBJECT_TYPE_SEQUENCE
:
641 case OBJECT_TYPE_ARRAY
:
642 case OBJECT_TYPE_STRUCT
:
643 case OBJECT_TYPE_VARIANT
:
644 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
655 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
656 struct lttng_interpreter_output
*output
)
663 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
664 output
->u
.s
= ax
->u
.v
;
667 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
668 output
->u
.u
= (uint64_t) ax
->u
.v
;
671 output
->type
= LTTNG_INTERPRETER_TYPE_DOUBLE
;
672 output
->u
.d
= ax
->u
.d
;
675 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
676 output
->u
.str
.str
= ax
->u
.s
.str
;
677 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
680 switch (ax
->u
.ptr
.object_type
) {
682 case OBJECT_TYPE_S16
:
683 case OBJECT_TYPE_S32
:
684 case OBJECT_TYPE_S64
:
686 case OBJECT_TYPE_U16
:
687 case OBJECT_TYPE_U32
:
688 case OBJECT_TYPE_U64
:
689 case OBJECT_TYPE_DOUBLE
:
690 case OBJECT_TYPE_STRING
:
691 case OBJECT_TYPE_STRING_SEQUENCE
:
692 ret
= dynamic_load_field(ax
);
695 /* Retry after loading ptr into stack top. */
697 case OBJECT_TYPE_SEQUENCE
:
698 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
699 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
700 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
701 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
703 case OBJECT_TYPE_ARRAY
:
704 /* Skip count (unsigned long) */
705 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
706 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
707 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
708 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
710 case OBJECT_TYPE_SIGNED_ENUM
:
711 ret
= dynamic_load_field(ax
);
714 output
->type
= LTTNG_INTERPRETER_TYPE_SIGNED_ENUM
;
715 output
->u
.s
= ax
->u
.v
;
717 case OBJECT_TYPE_UNSIGNED_ENUM
:
718 ret
= dynamic_load_field(ax
);
721 output
->type
= LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM
;
722 output
->u
.u
= ax
->u
.v
;
724 case OBJECT_TYPE_STRUCT
:
725 case OBJECT_TYPE_VARIANT
:
731 case REG_STAR_GLOB_STRING
:
737 return LTTNG_INTERPRETER_RECORD_FLAG
;
741 * Return 0 (discard), or raise the 0x1 flag (log event).
742 * Currently, other flags are kept for future extensions and have no
746 uint64_t bytecode_interpret(void *interpreter_data
,
747 const char *interpreter_stack_data
,
748 struct lttng_interpreter_output
*output
)
750 struct bytecode_runtime
*bytecode
= interpreter_data
;
751 struct lttng_ctx
*ctx
= rcu_dereference(*bytecode
->p
.pctx
);
752 void *pc
, *next_pc
, *start_pc
;
755 struct estack _stack
;
756 struct estack
*stack
= &_stack
;
757 register int64_t ax
= 0, bx
= 0;
758 register enum entry_type ax_t
= REG_UNKNOWN
, bx_t
= REG_UNKNOWN
;
759 register int top
= INTERPRETER_STACK_EMPTY
;
760 #ifndef INTERPRETER_USE_SWITCH
761 static void *dispatch
[NR_BYTECODE_OPS
] = {
762 [ BYTECODE_OP_UNKNOWN
] = &&LABEL_BYTECODE_OP_UNKNOWN
,
764 [ BYTECODE_OP_RETURN
] = &&LABEL_BYTECODE_OP_RETURN
,
767 [ BYTECODE_OP_MUL
] = &&LABEL_BYTECODE_OP_MUL
,
768 [ BYTECODE_OP_DIV
] = &&LABEL_BYTECODE_OP_DIV
,
769 [ BYTECODE_OP_MOD
] = &&LABEL_BYTECODE_OP_MOD
,
770 [ BYTECODE_OP_PLUS
] = &&LABEL_BYTECODE_OP_PLUS
,
771 [ BYTECODE_OP_MINUS
] = &&LABEL_BYTECODE_OP_MINUS
,
772 [ BYTECODE_OP_BIT_RSHIFT
] = &&LABEL_BYTECODE_OP_BIT_RSHIFT
,
773 [ BYTECODE_OP_BIT_LSHIFT
] = &&LABEL_BYTECODE_OP_BIT_LSHIFT
,
774 [ BYTECODE_OP_BIT_AND
] = &&LABEL_BYTECODE_OP_BIT_AND
,
775 [ BYTECODE_OP_BIT_OR
] = &&LABEL_BYTECODE_OP_BIT_OR
,
776 [ BYTECODE_OP_BIT_XOR
] = &&LABEL_BYTECODE_OP_BIT_XOR
,
778 /* binary comparators */
779 [ BYTECODE_OP_EQ
] = &&LABEL_BYTECODE_OP_EQ
,
780 [ BYTECODE_OP_NE
] = &&LABEL_BYTECODE_OP_NE
,
781 [ BYTECODE_OP_GT
] = &&LABEL_BYTECODE_OP_GT
,
782 [ BYTECODE_OP_LT
] = &&LABEL_BYTECODE_OP_LT
,
783 [ BYTECODE_OP_GE
] = &&LABEL_BYTECODE_OP_GE
,
784 [ BYTECODE_OP_LE
] = &&LABEL_BYTECODE_OP_LE
,
786 /* string binary comparator */
787 [ BYTECODE_OP_EQ_STRING
] = &&LABEL_BYTECODE_OP_EQ_STRING
,
788 [ BYTECODE_OP_NE_STRING
] = &&LABEL_BYTECODE_OP_NE_STRING
,
789 [ BYTECODE_OP_GT_STRING
] = &&LABEL_BYTECODE_OP_GT_STRING
,
790 [ BYTECODE_OP_LT_STRING
] = &&LABEL_BYTECODE_OP_LT_STRING
,
791 [ BYTECODE_OP_GE_STRING
] = &&LABEL_BYTECODE_OP_GE_STRING
,
792 [ BYTECODE_OP_LE_STRING
] = &&LABEL_BYTECODE_OP_LE_STRING
,
794 /* globbing pattern binary comparator */
795 [ BYTECODE_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING
,
796 [ BYTECODE_OP_NE_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING
,
798 /* s64 binary comparator */
799 [ BYTECODE_OP_EQ_S64
] = &&LABEL_BYTECODE_OP_EQ_S64
,
800 [ BYTECODE_OP_NE_S64
] = &&LABEL_BYTECODE_OP_NE_S64
,
801 [ BYTECODE_OP_GT_S64
] = &&LABEL_BYTECODE_OP_GT_S64
,
802 [ BYTECODE_OP_LT_S64
] = &&LABEL_BYTECODE_OP_LT_S64
,
803 [ BYTECODE_OP_GE_S64
] = &&LABEL_BYTECODE_OP_GE_S64
,
804 [ BYTECODE_OP_LE_S64
] = &&LABEL_BYTECODE_OP_LE_S64
,
806 /* double binary comparator */
807 [ BYTECODE_OP_EQ_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE
,
808 [ BYTECODE_OP_NE_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_DOUBLE
,
809 [ BYTECODE_OP_GT_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_DOUBLE
,
810 [ BYTECODE_OP_LT_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_DOUBLE
,
811 [ BYTECODE_OP_GE_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_DOUBLE
,
812 [ BYTECODE_OP_LE_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_DOUBLE
,
814 /* Mixed S64-double binary comparators */
815 [ BYTECODE_OP_EQ_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64
,
816 [ BYTECODE_OP_NE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64
,
817 [ BYTECODE_OP_GT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64
,
818 [ BYTECODE_OP_LT_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64
,
819 [ BYTECODE_OP_GE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64
,
820 [ BYTECODE_OP_LE_DOUBLE_S64
] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64
,
822 [ BYTECODE_OP_EQ_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE
,
823 [ BYTECODE_OP_NE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE
,
824 [ BYTECODE_OP_GT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE
,
825 [ BYTECODE_OP_LT_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE
,
826 [ BYTECODE_OP_GE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE
,
827 [ BYTECODE_OP_LE_S64_DOUBLE
] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE
,
830 [ BYTECODE_OP_UNARY_PLUS
] = &&LABEL_BYTECODE_OP_UNARY_PLUS
,
831 [ BYTECODE_OP_UNARY_MINUS
] = &&LABEL_BYTECODE_OP_UNARY_MINUS
,
832 [ BYTECODE_OP_UNARY_NOT
] = &&LABEL_BYTECODE_OP_UNARY_NOT
,
833 [ BYTECODE_OP_UNARY_PLUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64
,
834 [ BYTECODE_OP_UNARY_MINUS_S64
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64
,
835 [ BYTECODE_OP_UNARY_NOT_S64
] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64
,
836 [ BYTECODE_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE
,
837 [ BYTECODE_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE
,
838 [ BYTECODE_OP_UNARY_NOT_DOUBLE
] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE
,
841 [ BYTECODE_OP_AND
] = &&LABEL_BYTECODE_OP_AND
,
842 [ BYTECODE_OP_OR
] = &&LABEL_BYTECODE_OP_OR
,
845 [ BYTECODE_OP_LOAD_FIELD_REF
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF
,
846 [ BYTECODE_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING
,
847 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
,
848 [ BYTECODE_OP_LOAD_FIELD_REF_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64
,
849 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
,
851 /* load from immediate operand */
852 [ BYTECODE_OP_LOAD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STRING
,
853 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING
,
854 [ BYTECODE_OP_LOAD_S64
] = &&LABEL_BYTECODE_OP_LOAD_S64
,
855 [ BYTECODE_OP_LOAD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE
,
858 [ BYTECODE_OP_CAST_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_TO_S64
,
859 [ BYTECODE_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64
,
860 [ BYTECODE_OP_CAST_NOP
] = &&LABEL_BYTECODE_OP_CAST_NOP
,
862 /* get context ref */
863 [ BYTECODE_OP_GET_CONTEXT_REF
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF
,
864 [ BYTECODE_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING
,
865 [ BYTECODE_OP_GET_CONTEXT_REF_S64
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64
,
866 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
,
868 /* Instructions for recursive traversal through composed types. */
869 [ BYTECODE_OP_GET_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT
,
870 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT
,
871 [ BYTECODE_OP_GET_PAYLOAD_ROOT
] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT
,
873 [ BYTECODE_OP_GET_SYMBOL
] = &&LABEL_BYTECODE_OP_GET_SYMBOL
,
874 [ BYTECODE_OP_GET_SYMBOL_FIELD
] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD
,
875 [ BYTECODE_OP_GET_INDEX_U16
] = &&LABEL_BYTECODE_OP_GET_INDEX_U16
,
876 [ BYTECODE_OP_GET_INDEX_U64
] = &&LABEL_BYTECODE_OP_GET_INDEX_U64
,
878 [ BYTECODE_OP_LOAD_FIELD
] = &&LABEL_BYTECODE_OP_LOAD_FIELD
,
879 [ BYTECODE_OP_LOAD_FIELD_S8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8
,
880 [ BYTECODE_OP_LOAD_FIELD_S16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16
,
881 [ BYTECODE_OP_LOAD_FIELD_S32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32
,
882 [ BYTECODE_OP_LOAD_FIELD_S64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64
,
883 [ BYTECODE_OP_LOAD_FIELD_U8
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8
,
884 [ BYTECODE_OP_LOAD_FIELD_U16
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16
,
885 [ BYTECODE_OP_LOAD_FIELD_U32
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32
,
886 [ BYTECODE_OP_LOAD_FIELD_U64
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64
,
887 [ BYTECODE_OP_LOAD_FIELD_STRING
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING
,
888 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE
,
889 [ BYTECODE_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE
,
891 [ BYTECODE_OP_UNARY_BIT_NOT
] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT
,
893 [ BYTECODE_OP_RETURN_S64
] = &&LABEL_BYTECODE_OP_RETURN_S64
,
895 #endif /* #ifndef INTERPRETER_USE_SWITCH */
899 OP(BYTECODE_OP_UNKNOWN
):
900 OP(BYTECODE_OP_LOAD_FIELD_REF
):
901 #ifdef INTERPRETER_USE_SWITCH
903 #endif /* INTERPRETER_USE_SWITCH */
904 ERR("unknown bytecode op %u",
905 (unsigned int) *(bytecode_opcode_t
*) pc
);
909 OP(BYTECODE_OP_RETURN
):
910 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
911 /* Handle dynamic typing. */
912 switch (estack_ax_t
) {
915 retval
= !!estack_ax_v
;
926 case REG_STAR_GLOB_STRING
:
935 OP(BYTECODE_OP_RETURN_S64
):
936 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
937 retval
= !!estack_ax_v
;
945 OP(BYTECODE_OP_PLUS
):
946 OP(BYTECODE_OP_MINUS
):
947 ERR("unsupported bytecode op %u",
948 (unsigned int) *(bytecode_opcode_t
*) pc
);
954 /* Dynamic typing. */
955 switch (estack_ax_t
) {
956 case REG_S64
: /* Fall-through */
958 switch (estack_bx_t
) {
959 case REG_S64
: /* Fall-through */
961 JUMP_TO(BYTECODE_OP_EQ_S64
);
963 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64
);
964 case REG_STRING
: /* Fall-through */
965 case REG_STAR_GLOB_STRING
:
969 ERR("Unknown interpreter register type (%d)",
976 switch (estack_bx_t
) {
977 case REG_S64
: /* Fall-through */
979 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE
);
981 JUMP_TO(BYTECODE_OP_EQ_DOUBLE
);
982 case REG_STRING
: /* Fall-through */
983 case REG_STAR_GLOB_STRING
:
987 ERR("Unknown interpreter register type (%d)",
994 switch (estack_bx_t
) {
995 case REG_S64
: /* Fall-through */
996 case REG_U64
: /* Fall-through */
1001 JUMP_TO(BYTECODE_OP_EQ_STRING
);
1002 case REG_STAR_GLOB_STRING
:
1003 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1005 ERR("Unknown interpreter register type (%d)",
1011 case REG_STAR_GLOB_STRING
:
1012 switch (estack_bx_t
) {
1013 case REG_S64
: /* Fall-through */
1014 case REG_U64
: /* Fall-through */
1019 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING
);
1020 case REG_STAR_GLOB_STRING
:
1024 ERR("Unknown interpreter register type (%d)",
1031 ERR("Unknown interpreter register type (%d)",
1039 /* Dynamic typing. */
1040 switch (estack_ax_t
) {
1041 case REG_S64
: /* Fall-through */
1043 switch (estack_bx_t
) {
1044 case REG_S64
: /* Fall-through */
1046 JUMP_TO(BYTECODE_OP_NE_S64
);
1048 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64
);
1049 case REG_STRING
: /* Fall-through */
1050 case REG_STAR_GLOB_STRING
:
1054 ERR("Unknown interpreter register type (%d)",
1061 switch (estack_bx_t
) {
1062 case REG_S64
: /* Fall-through */
1064 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE
);
1066 JUMP_TO(BYTECODE_OP_NE_DOUBLE
);
1067 case REG_STRING
: /* Fall-through */
1068 case REG_STAR_GLOB_STRING
:
1072 ERR("Unknown interpreter register type (%d)",
1079 switch (estack_bx_t
) {
1080 case REG_S64
: /* Fall-through */
1086 JUMP_TO(BYTECODE_OP_NE_STRING
);
1087 case REG_STAR_GLOB_STRING
:
1088 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1090 ERR("Unknown interpreter register type (%d)",
1096 case REG_STAR_GLOB_STRING
:
1097 switch (estack_bx_t
) {
1098 case REG_S64
: /* Fall-through */
1104 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING
);
1105 case REG_STAR_GLOB_STRING
:
1109 ERR("Unknown interpreter register type (%d)",
1116 ERR("Unknown interpreter register type (%d)",
1124 /* Dynamic typing. */
1125 switch (estack_ax_t
) {
1126 case REG_S64
: /* Fall-through */
1128 switch (estack_bx_t
) {
1129 case REG_S64
: /* Fall-through */
1131 JUMP_TO(BYTECODE_OP_GT_S64
);
1133 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64
);
1134 case REG_STRING
: /* Fall-through */
1135 case REG_STAR_GLOB_STRING
:
1139 ERR("Unknown interpreter register type (%d)",
1146 switch (estack_bx_t
) {
1147 case REG_S64
: /* Fall-through */
1149 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE
);
1151 JUMP_TO(BYTECODE_OP_GT_DOUBLE
);
1152 case REG_STRING
: /* Fall-through */
1153 case REG_STAR_GLOB_STRING
:
1157 ERR("Unknown interpreter register type (%d)",
1164 switch (estack_bx_t
) {
1165 case REG_S64
: /* Fall-through */
1166 case REG_U64
: /* Fall-through */
1167 case REG_DOUBLE
: /* Fall-through */
1168 case REG_STAR_GLOB_STRING
:
1172 JUMP_TO(BYTECODE_OP_GT_STRING
);
1174 ERR("Unknown interpreter register type (%d)",
1181 ERR("Unknown interpreter register type (%d)",
1189 /* Dynamic typing. */
1190 switch (estack_ax_t
) {
1191 case REG_S64
: /* Fall-through */
1193 switch (estack_bx_t
) {
1194 case REG_S64
: /* Fall-through */
1196 JUMP_TO(BYTECODE_OP_LT_S64
);
1198 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64
);
1199 case REG_STRING
: /* Fall-through */
1200 case REG_STAR_GLOB_STRING
:
1204 ERR("Unknown interpreter register type (%d)",
1211 switch (estack_bx_t
) {
1212 case REG_S64
: /* Fall-through */
1214 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE
);
1216 JUMP_TO(BYTECODE_OP_LT_DOUBLE
);
1217 case REG_STRING
: /* Fall-through */
1218 case REG_STAR_GLOB_STRING
:
1222 ERR("Unknown interpreter register type (%d)",
1229 switch (estack_bx_t
) {
1230 case REG_S64
: /* Fall-through */
1231 case REG_U64
: /* Fall-through */
1232 case REG_DOUBLE
: /* Fall-through */
1233 case REG_STAR_GLOB_STRING
:
1237 JUMP_TO(BYTECODE_OP_LT_STRING
);
1239 ERR("Unknown interpreter register type (%d)",
1246 ERR("Unknown interpreter register type (%d)",
1254 /* Dynamic typing. */
1255 switch (estack_ax_t
) {
1256 case REG_S64
: /* Fall-through */
1258 switch (estack_bx_t
) {
1259 case REG_S64
: /* Fall-through */
1261 JUMP_TO(BYTECODE_OP_GE_S64
);
1263 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64
);
1264 case REG_STRING
: /* Fall-through */
1265 case REG_STAR_GLOB_STRING
:
1269 ERR("Unknown interpreter register type (%d)",
1276 switch (estack_bx_t
) {
1277 case REG_S64
: /* Fall-through */
1279 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE
);
1281 JUMP_TO(BYTECODE_OP_GE_DOUBLE
);
1282 case REG_STRING
: /* Fall-through */
1283 case REG_STAR_GLOB_STRING
:
1287 ERR("Unknown interpreter register type (%d)",
1294 switch (estack_bx_t
) {
1295 case REG_S64
: /* Fall-through */
1296 case REG_U64
: /* Fall-through */
1297 case REG_DOUBLE
: /* Fall-through */
1298 case REG_STAR_GLOB_STRING
:
1302 JUMP_TO(BYTECODE_OP_GE_STRING
);
1304 ERR("Unknown interpreter register type (%d)",
1311 ERR("Unknown interpreter register type (%d)",
1319 /* Dynamic typing. */
1320 switch (estack_ax_t
) {
1321 case REG_S64
: /* Fall-through */
1323 switch (estack_bx_t
) {
1324 case REG_S64
: /* Fall-through */
1326 JUMP_TO(BYTECODE_OP_LE_S64
);
1328 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64
);
1329 case REG_STRING
: /* Fall-through */
1330 case REG_STAR_GLOB_STRING
:
1334 ERR("Unknown interpreter register type (%d)",
1341 switch (estack_bx_t
) {
1342 case REG_S64
: /* Fall-through */
1344 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE
);
1346 JUMP_TO(BYTECODE_OP_LE_DOUBLE
);
1347 case REG_STRING
: /* Fall-through */
1348 case REG_STAR_GLOB_STRING
:
1352 ERR("Unknown interpreter register type (%d)",
1359 switch (estack_bx_t
) {
1360 case REG_S64
: /* Fall-through */
1361 case REG_U64
: /* Fall-through */
1362 case REG_DOUBLE
: /* Fall-through */
1363 case REG_STAR_GLOB_STRING
:
1367 JUMP_TO(BYTECODE_OP_LE_STRING
);
1369 ERR("Unknown interpreter register type (%d)",
1376 ERR("Unknown interpreter register type (%d)",
1383 OP(BYTECODE_OP_EQ_STRING
):
1387 res
= (stack_strcmp(stack
, top
, "==") == 0);
1388 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1390 estack_ax_t
= REG_S64
;
1391 next_pc
+= sizeof(struct binary_op
);
1394 OP(BYTECODE_OP_NE_STRING
):
1398 res
= (stack_strcmp(stack
, top
, "!=") != 0);
1399 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1401 estack_ax_t
= REG_S64
;
1402 next_pc
+= sizeof(struct binary_op
);
1405 OP(BYTECODE_OP_GT_STRING
):
1409 res
= (stack_strcmp(stack
, top
, ">") > 0);
1410 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1412 estack_ax_t
= REG_S64
;
1413 next_pc
+= sizeof(struct binary_op
);
1416 OP(BYTECODE_OP_LT_STRING
):
1420 res
= (stack_strcmp(stack
, top
, "<") < 0);
1421 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1423 estack_ax_t
= REG_S64
;
1424 next_pc
+= sizeof(struct binary_op
);
1427 OP(BYTECODE_OP_GE_STRING
):
1431 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
1432 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1434 estack_ax_t
= REG_S64
;
1435 next_pc
+= sizeof(struct binary_op
);
1438 OP(BYTECODE_OP_LE_STRING
):
1442 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
1443 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1445 estack_ax_t
= REG_S64
;
1446 next_pc
+= sizeof(struct binary_op
);
1450 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING
):
1454 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
1455 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1457 estack_ax_t
= REG_S64
;
1458 next_pc
+= sizeof(struct binary_op
);
1461 OP(BYTECODE_OP_NE_STAR_GLOB_STRING
):
1465 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
1466 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1468 estack_ax_t
= REG_S64
;
1469 next_pc
+= sizeof(struct binary_op
);
1473 OP(BYTECODE_OP_EQ_S64
):
1477 res
= (estack_bx_v
== estack_ax_v
);
1478 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1480 estack_ax_t
= REG_S64
;
1481 next_pc
+= sizeof(struct binary_op
);
1484 OP(BYTECODE_OP_NE_S64
):
1488 res
= (estack_bx_v
!= estack_ax_v
);
1489 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1491 estack_ax_t
= REG_S64
;
1492 next_pc
+= sizeof(struct binary_op
);
1495 OP(BYTECODE_OP_GT_S64
):
1499 res
= (estack_bx_v
> estack_ax_v
);
1500 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1502 estack_ax_t
= REG_S64
;
1503 next_pc
+= sizeof(struct binary_op
);
1506 OP(BYTECODE_OP_LT_S64
):
1510 res
= (estack_bx_v
< estack_ax_v
);
1511 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1513 estack_ax_t
= REG_S64
;
1514 next_pc
+= sizeof(struct binary_op
);
1517 OP(BYTECODE_OP_GE_S64
):
1521 res
= (estack_bx_v
>= estack_ax_v
);
1522 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1524 estack_ax_t
= REG_S64
;
1525 next_pc
+= sizeof(struct binary_op
);
1528 OP(BYTECODE_OP_LE_S64
):
1532 res
= (estack_bx_v
<= estack_ax_v
);
1533 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1535 estack_ax_t
= REG_S64
;
1536 next_pc
+= sizeof(struct binary_op
);
1540 OP(BYTECODE_OP_EQ_DOUBLE
):
1544 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
1545 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1547 estack_ax_t
= REG_S64
;
1548 next_pc
+= sizeof(struct binary_op
);
1551 OP(BYTECODE_OP_NE_DOUBLE
):
1555 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
1556 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1558 estack_ax_t
= REG_S64
;
1559 next_pc
+= sizeof(struct binary_op
);
1562 OP(BYTECODE_OP_GT_DOUBLE
):
1566 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
1567 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1569 estack_ax_t
= REG_S64
;
1570 next_pc
+= sizeof(struct binary_op
);
1573 OP(BYTECODE_OP_LT_DOUBLE
):
1577 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
1578 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1580 estack_ax_t
= REG_S64
;
1581 next_pc
+= sizeof(struct binary_op
);
1584 OP(BYTECODE_OP_GE_DOUBLE
):
1588 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
1589 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1591 estack_ax_t
= REG_S64
;
1592 next_pc
+= sizeof(struct binary_op
);
1595 OP(BYTECODE_OP_LE_DOUBLE
):
1599 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
1600 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1602 estack_ax_t
= REG_S64
;
1603 next_pc
+= sizeof(struct binary_op
);
1607 /* Mixed S64-double binary comparators */
1608 OP(BYTECODE_OP_EQ_DOUBLE_S64
):
1612 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
1613 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1615 estack_ax_t
= REG_S64
;
1616 next_pc
+= sizeof(struct binary_op
);
1619 OP(BYTECODE_OP_NE_DOUBLE_S64
):
1623 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
1624 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1626 estack_ax_t
= REG_S64
;
1627 next_pc
+= sizeof(struct binary_op
);
1630 OP(BYTECODE_OP_GT_DOUBLE_S64
):
1634 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
1635 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1637 estack_ax_t
= REG_S64
;
1638 next_pc
+= sizeof(struct binary_op
);
1641 OP(BYTECODE_OP_LT_DOUBLE_S64
):
1645 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
1646 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1648 estack_ax_t
= REG_S64
;
1649 next_pc
+= sizeof(struct binary_op
);
1652 OP(BYTECODE_OP_GE_DOUBLE_S64
):
1656 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
1657 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1659 estack_ax_t
= REG_S64
;
1660 next_pc
+= sizeof(struct binary_op
);
1663 OP(BYTECODE_OP_LE_DOUBLE_S64
):
1667 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
1668 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1670 estack_ax_t
= REG_S64
;
1671 next_pc
+= sizeof(struct binary_op
);
1675 OP(BYTECODE_OP_EQ_S64_DOUBLE
):
1679 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
1680 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1682 estack_ax_t
= REG_S64
;
1683 next_pc
+= sizeof(struct binary_op
);
1686 OP(BYTECODE_OP_NE_S64_DOUBLE
):
1690 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
1691 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1693 estack_ax_t
= REG_S64
;
1694 next_pc
+= sizeof(struct binary_op
);
1697 OP(BYTECODE_OP_GT_S64_DOUBLE
):
1701 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
1702 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1704 estack_ax_t
= REG_S64
;
1705 next_pc
+= sizeof(struct binary_op
);
1708 OP(BYTECODE_OP_LT_S64_DOUBLE
):
1712 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
1713 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1715 estack_ax_t
= REG_S64
;
1716 next_pc
+= sizeof(struct binary_op
);
1719 OP(BYTECODE_OP_GE_S64_DOUBLE
):
1723 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
1724 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1726 estack_ax_t
= REG_S64
;
1727 next_pc
+= sizeof(struct binary_op
);
1730 OP(BYTECODE_OP_LE_S64_DOUBLE
):
1734 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
1735 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1737 estack_ax_t
= REG_S64
;
1738 next_pc
+= sizeof(struct binary_op
);
1741 OP(BYTECODE_OP_BIT_RSHIFT
):
1745 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1750 /* Catch undefined behavior. */
1751 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1755 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1756 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1758 estack_ax_t
= REG_U64
;
1759 next_pc
+= sizeof(struct binary_op
);
1762 OP(BYTECODE_OP_BIT_LSHIFT
):
1766 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1771 /* Catch undefined behavior. */
1772 if (caa_unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1776 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1777 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1779 estack_ax_t
= REG_U64
;
1780 next_pc
+= sizeof(struct binary_op
);
1783 OP(BYTECODE_OP_BIT_AND
):
1787 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1792 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1793 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1795 estack_ax_t
= REG_U64
;
1796 next_pc
+= sizeof(struct binary_op
);
1799 OP(BYTECODE_OP_BIT_OR
):
1803 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1808 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1809 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1811 estack_ax_t
= REG_U64
;
1812 next_pc
+= sizeof(struct binary_op
);
1815 OP(BYTECODE_OP_BIT_XOR
):
1819 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1824 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1825 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1827 estack_ax_t
= REG_U64
;
1828 next_pc
+= sizeof(struct binary_op
);
1833 OP(BYTECODE_OP_UNARY_PLUS
):
1835 /* Dynamic typing. */
1836 switch (estack_ax_t
) {
1837 case REG_S64
: /* Fall-through. */
1839 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64
);
1841 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE
);
1842 case REG_STRING
: /* Fall-through */
1843 case REG_STAR_GLOB_STRING
:
1847 ERR("Unknown interpreter register type (%d)",
1853 OP(BYTECODE_OP_UNARY_MINUS
):
1855 /* Dynamic typing. */
1856 switch (estack_ax_t
) {
1857 case REG_S64
: /* Fall-through. */
1859 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64
);
1861 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE
);
1862 case REG_STRING
: /* Fall-through */
1863 case REG_STAR_GLOB_STRING
:
1867 ERR("Unknown interpreter register type (%d)",
1873 OP(BYTECODE_OP_UNARY_NOT
):
1875 /* Dynamic typing. */
1876 switch (estack_ax_t
) {
1877 case REG_S64
: /* Fall-through. */
1879 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64
);
1881 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE
);
1882 case REG_STRING
: /* Fall-through */
1883 case REG_STAR_GLOB_STRING
:
1887 ERR("Unknown interpreter register type (%d)",
1892 next_pc
+= sizeof(struct unary_op
);
1896 OP(BYTECODE_OP_UNARY_BIT_NOT
):
1898 /* Dynamic typing. */
1899 if (!IS_INTEGER_REGISTER(estack_ax_t
)) {
1904 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1905 estack_ax_t
= REG_U64
;
1906 next_pc
+= sizeof(struct unary_op
);
1910 OP(BYTECODE_OP_UNARY_PLUS_S64
):
1911 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE
):
1913 next_pc
+= sizeof(struct unary_op
);
1916 OP(BYTECODE_OP_UNARY_MINUS_S64
):
1918 estack_ax_v
= -estack_ax_v
;
1919 next_pc
+= sizeof(struct unary_op
);
1922 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE
):
1924 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
1925 next_pc
+= sizeof(struct unary_op
);
1928 OP(BYTECODE_OP_UNARY_NOT_S64
):
1930 estack_ax_v
= !estack_ax_v
;
1931 estack_ax_t
= REG_S64
;
1932 next_pc
+= sizeof(struct unary_op
);
1935 OP(BYTECODE_OP_UNARY_NOT_DOUBLE
):
1937 estack_ax_v
= !estack_ax(stack
, top
)->u
.d
;
1938 estack_ax_t
= REG_S64
;
1939 next_pc
+= sizeof(struct unary_op
);
1944 OP(BYTECODE_OP_AND
):
1946 struct logical_op
*insn
= (struct logical_op
*) pc
;
1948 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1952 /* If AX is 0, skip and evaluate to 0 */
1953 if (unlikely(estack_ax_v
== 0)) {
1954 dbg_printf("Jumping to bytecode offset %u\n",
1955 (unsigned int) insn
->skip_offset
);
1956 next_pc
= start_pc
+ insn
->skip_offset
;
1958 /* Pop 1 when jump not taken */
1959 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1960 next_pc
+= sizeof(struct logical_op
);
1966 struct logical_op
*insn
= (struct logical_op
*) pc
;
1968 if (estack_ax_t
!= REG_S64
&& estack_ax_t
!= REG_U64
) {
1972 /* If AX is nonzero, skip and evaluate to 1 */
1973 if (unlikely(estack_ax_v
!= 0)) {
1975 dbg_printf("Jumping to bytecode offset %u\n",
1976 (unsigned int) insn
->skip_offset
);
1977 next_pc
= start_pc
+ insn
->skip_offset
;
1979 /* Pop 1 when jump not taken */
1980 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1981 next_pc
+= sizeof(struct logical_op
);
1987 /* load field ref */
1988 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING
):
1990 struct load_op
*insn
= (struct load_op
*) pc
;
1991 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1993 dbg_printf("load field ref offset %u type string\n",
1995 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1996 estack_ax(stack
, top
)->u
.s
.str
=
1997 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1998 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1999 dbg_printf("Interpreter warning: loading a NULL string.\n");
2003 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2004 estack_ax(stack
, top
)->u
.s
.literal_type
=
2005 ESTACK_STRING_LITERAL_TYPE_NONE
;
2006 estack_ax_t
= REG_STRING
;
2007 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2008 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2012 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
):
2014 struct load_op
*insn
= (struct load_op
*) pc
;
2015 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2017 dbg_printf("load field ref offset %u type sequence\n",
2019 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2020 estack_ax(stack
, top
)->u
.s
.seq_len
=
2021 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
2022 estack_ax(stack
, top
)->u
.s
.str
=
2023 *(const char **) (&interpreter_stack_data
[ref
->offset
2024 + sizeof(unsigned long)]);
2025 estack_ax_t
= REG_STRING
;
2026 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2027 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2031 estack_ax(stack
, top
)->u
.s
.literal_type
=
2032 ESTACK_STRING_LITERAL_TYPE_NONE
;
2033 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2037 OP(BYTECODE_OP_LOAD_FIELD_REF_S64
):
2039 struct load_op
*insn
= (struct load_op
*) pc
;
2040 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2042 dbg_printf("load field ref offset %u type s64\n",
2044 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2046 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
2047 estack_ax_t
= REG_S64
;
2048 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
2049 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2053 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
):
2055 struct load_op
*insn
= (struct load_op
*) pc
;
2056 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2058 dbg_printf("load field ref offset %u type double\n",
2060 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2061 memcpy(&estack_ax(stack
, top
)->u
.d
, &interpreter_stack_data
[ref
->offset
],
2062 sizeof(struct literal_double
));
2063 estack_ax_t
= REG_DOUBLE
;
2064 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
2065 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2069 /* load from immediate operand */
2070 OP(BYTECODE_OP_LOAD_STRING
):
2072 struct load_op
*insn
= (struct load_op
*) pc
;
2074 dbg_printf("load string %s\n", insn
->data
);
2075 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2076 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2077 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2078 estack_ax(stack
, top
)->u
.s
.literal_type
=
2079 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
2080 estack_ax_t
= REG_STRING
;
2081 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2085 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING
):
2087 struct load_op
*insn
= (struct load_op
*) pc
;
2089 dbg_printf("load globbing pattern %s\n", insn
->data
);
2090 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2091 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
2092 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2093 estack_ax(stack
, top
)->u
.s
.literal_type
=
2094 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
2095 estack_ax_t
= REG_STAR_GLOB_STRING
;
2096 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
2100 OP(BYTECODE_OP_LOAD_S64
):
2102 struct load_op
*insn
= (struct load_op
*) pc
;
2104 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2105 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
2106 estack_ax_t
= REG_S64
;
2107 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
2108 next_pc
+= sizeof(struct load_op
)
2109 + sizeof(struct literal_numeric
);
2113 OP(BYTECODE_OP_LOAD_DOUBLE
):
2115 struct load_op
*insn
= (struct load_op
*) pc
;
2117 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2118 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
2119 sizeof(struct literal_double
));
2120 estack_ax_t
= REG_DOUBLE
;
2121 dbg_printf("load double %g\n", estack_ax(stack
, top
)->u
.d
);
2122 next_pc
+= sizeof(struct load_op
)
2123 + sizeof(struct literal_double
);
2128 OP(BYTECODE_OP_CAST_TO_S64
):
2130 /* Dynamic typing. */
2131 switch (estack_ax_t
) {
2133 JUMP_TO(BYTECODE_OP_CAST_NOP
);
2135 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64
);
2137 estack_ax_t
= REG_S64
;
2138 next_pc
+= sizeof(struct cast_op
);
2139 case REG_STRING
: /* Fall-through */
2140 case REG_STAR_GLOB_STRING
:
2144 ERR("Unknown interpreter register type (%d)",
2151 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64
):
2153 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
2154 estack_ax_t
= REG_S64
;
2155 next_pc
+= sizeof(struct cast_op
);
2159 OP(BYTECODE_OP_CAST_NOP
):
2161 next_pc
+= sizeof(struct cast_op
);
2165 /* get context ref */
2166 OP(BYTECODE_OP_GET_CONTEXT_REF
):
2168 struct load_op
*insn
= (struct load_op
*) pc
;
2169 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2170 struct lttng_ctx_field
*ctx_field
;
2171 struct lttng_ctx_value v
;
2173 dbg_printf("get context ref offset %u type dynamic\n",
2175 ctx_field
= &ctx
->fields
[ref
->offset
];
2176 ctx_field
->get_value(ctx_field
, &v
);
2177 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2179 case LTTNG_UST_DYNAMIC_TYPE_NONE
:
2182 case LTTNG_UST_DYNAMIC_TYPE_S64
:
2183 estack_ax_v
= v
.u
.s64
;
2184 estack_ax_t
= REG_S64
;
2185 dbg_printf("ref get context dynamic s64 %" PRIi64
"\n", estack_ax_v
);
2187 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE
:
2188 estack_ax(stack
, top
)->u
.d
= v
.u
.d
;
2189 estack_ax_t
= REG_DOUBLE
;
2190 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack
, top
)->u
.d
);
2192 case LTTNG_UST_DYNAMIC_TYPE_STRING
:
2193 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2194 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2195 dbg_printf("Interpreter warning: loading a NULL string.\n");
2199 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2200 estack_ax(stack
, top
)->u
.s
.literal_type
=
2201 ESTACK_STRING_LITERAL_TYPE_NONE
;
2202 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2203 estack_ax_t
= REG_STRING
;
2206 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v
.sel
);
2210 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2214 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING
):
2216 struct load_op
*insn
= (struct load_op
*) pc
;
2217 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2218 struct lttng_ctx_field
*ctx_field
;
2219 struct lttng_ctx_value v
;
2221 dbg_printf("get context ref offset %u type string\n",
2223 ctx_field
= &ctx
->fields
[ref
->offset
];
2224 ctx_field
->get_value(ctx_field
, &v
);
2225 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2226 estack_ax(stack
, top
)->u
.s
.str
= v
.u
.str
;
2227 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2228 dbg_printf("Interpreter warning: loading a NULL string.\n");
2232 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2233 estack_ax(stack
, top
)->u
.s
.literal_type
=
2234 ESTACK_STRING_LITERAL_TYPE_NONE
;
2235 estack_ax_t
= REG_STRING
;
2236 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
2237 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2241 OP(BYTECODE_OP_GET_CONTEXT_REF_S64
):
2243 struct load_op
*insn
= (struct load_op
*) pc
;
2244 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2245 struct lttng_ctx_field
*ctx_field
;
2246 struct lttng_ctx_value v
;
2248 dbg_printf("get context ref offset %u type s64\n",
2250 ctx_field
= &ctx
->fields
[ref
->offset
];
2251 ctx_field
->get_value(ctx_field
, &v
);
2252 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2253 estack_ax_v
= v
.u
.s64
;
2254 estack_ax_t
= REG_S64
;
2255 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
2256 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2260 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
):
2262 struct load_op
*insn
= (struct load_op
*) pc
;
2263 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
2264 struct lttng_ctx_field
*ctx_field
;
2265 struct lttng_ctx_value v
;
2267 dbg_printf("get context ref offset %u type double\n",
2269 ctx_field
= &ctx
->fields
[ref
->offset
];
2270 ctx_field
->get_value(ctx_field
, &v
);
2271 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2272 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.u
.d
, sizeof(struct literal_double
));
2273 estack_ax_t
= REG_DOUBLE
;
2274 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
2275 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
2279 OP(BYTECODE_OP_GET_CONTEXT_ROOT
):
2281 dbg_printf("op get context root\n");
2282 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2283 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
2284 /* "field" only needed for variants. */
2285 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2286 estack_ax_t
= REG_PTR
;
2287 next_pc
+= sizeof(struct load_op
);
2291 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT
):
2293 dbg_printf("op get app context root\n");
2294 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2295 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_APP_CONTEXT
;
2296 /* "field" only needed for variants. */
2297 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2298 estack_ax_t
= REG_PTR
;
2299 next_pc
+= sizeof(struct load_op
);
2303 OP(BYTECODE_OP_GET_PAYLOAD_ROOT
):
2305 dbg_printf("op get app payload root\n");
2306 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
2307 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
2308 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
2309 /* "field" only needed for variants. */
2310 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
2311 estack_ax_t
= REG_PTR
;
2312 next_pc
+= sizeof(struct load_op
);
2316 OP(BYTECODE_OP_GET_SYMBOL
):
2318 dbg_printf("op get symbol\n");
2319 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
2321 ERR("Nested fields not implemented yet.");
2324 case LOAD_ROOT_CONTEXT
:
2325 case LOAD_ROOT_APP_CONTEXT
:
2326 case LOAD_ROOT_PAYLOAD
:
2328 * symbol lookup is performed by
2334 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2338 OP(BYTECODE_OP_GET_SYMBOL_FIELD
):
2341 * Used for first variant encountered in a
2342 * traversal. Variants are not implemented yet.
2348 OP(BYTECODE_OP_GET_INDEX_U16
):
2350 struct load_op
*insn
= (struct load_op
*) pc
;
2351 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
2353 dbg_printf("op get index u16\n");
2354 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2357 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2358 estack_ax_t
= estack_ax(stack
, top
)->type
;
2359 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2363 OP(BYTECODE_OP_GET_INDEX_U64
):
2365 struct load_op
*insn
= (struct load_op
*) pc
;
2366 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
2368 dbg_printf("op get index u64\n");
2369 ret
= dynamic_get_index(ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
2372 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2373 estack_ax_t
= estack_ax(stack
, top
)->type
;
2374 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2378 OP(BYTECODE_OP_LOAD_FIELD
):
2380 dbg_printf("op load field\n");
2381 ret
= dynamic_load_field(estack_ax(stack
, top
));
2384 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
2385 estack_ax_t
= estack_ax(stack
, top
)->type
;
2386 next_pc
+= sizeof(struct load_op
);
2390 OP(BYTECODE_OP_LOAD_FIELD_S8
):
2392 dbg_printf("op load field s8\n");
2394 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2395 estack_ax_t
= REG_S64
;
2396 next_pc
+= sizeof(struct load_op
);
2399 OP(BYTECODE_OP_LOAD_FIELD_S16
):
2401 dbg_printf("op load field s16\n");
2403 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2404 estack_ax_t
= REG_S64
;
2405 next_pc
+= sizeof(struct load_op
);
2408 OP(BYTECODE_OP_LOAD_FIELD_S32
):
2410 dbg_printf("op load field s32\n");
2412 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2413 estack_ax_t
= REG_S64
;
2414 next_pc
+= sizeof(struct load_op
);
2417 OP(BYTECODE_OP_LOAD_FIELD_S64
):
2419 dbg_printf("op load field s64\n");
2421 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2422 estack_ax_t
= REG_S64
;
2423 next_pc
+= sizeof(struct load_op
);
2426 OP(BYTECODE_OP_LOAD_FIELD_U8
):
2428 dbg_printf("op load field u8\n");
2430 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2431 estack_ax_t
= REG_U64
;
2432 next_pc
+= sizeof(struct load_op
);
2435 OP(BYTECODE_OP_LOAD_FIELD_U16
):
2437 dbg_printf("op load field u16\n");
2439 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2440 estack_ax_t
= REG_U64
;
2441 next_pc
+= sizeof(struct load_op
);
2444 OP(BYTECODE_OP_LOAD_FIELD_U32
):
2446 dbg_printf("op load field u32\n");
2448 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2449 estack_ax_t
= REG_U64
;
2450 next_pc
+= sizeof(struct load_op
);
2453 OP(BYTECODE_OP_LOAD_FIELD_U64
):
2455 dbg_printf("op load field u64\n");
2457 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2458 estack_ax_t
= REG_U64
;
2459 next_pc
+= sizeof(struct load_op
);
2462 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE
):
2464 dbg_printf("op load field double\n");
2466 memcpy(&estack_ax(stack
, top
)->u
.d
,
2467 estack_ax(stack
, top
)->u
.ptr
.ptr
,
2468 sizeof(struct literal_double
));
2469 estack_ax(stack
, top
)->type
= REG_DOUBLE
;
2470 next_pc
+= sizeof(struct load_op
);
2474 OP(BYTECODE_OP_LOAD_FIELD_STRING
):
2478 dbg_printf("op load field string\n");
2479 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
2480 estack_ax(stack
, top
)->u
.s
.str
= str
;
2481 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2482 dbg_printf("Interpreter warning: loading a NULL string.\n");
2486 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
2487 estack_ax(stack
, top
)->u
.s
.literal_type
=
2488 ESTACK_STRING_LITERAL_TYPE_NONE
;
2489 estack_ax(stack
, top
)->type
= REG_STRING
;
2490 next_pc
+= sizeof(struct load_op
);
2494 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE
):
2498 dbg_printf("op load field string sequence\n");
2499 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
2500 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
2501 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
2502 estack_ax(stack
, top
)->type
= REG_STRING
;
2503 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
2504 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2508 estack_ax(stack
, top
)->u
.s
.literal_type
=
2509 ESTACK_STRING_LITERAL_TYPE_NONE
;
2510 next_pc
+= sizeof(struct load_op
);
2516 /* Return _DISCARD on error. */
2518 return LTTNG_INTERPRETER_DISCARD
;
2521 return lttng_bytecode_interpret_format_output(estack_ax(stack
, top
),
2528 uint64_t lttng_bytecode_filter_interpret(void *filter_data
,
2529 const char *filter_stack_data
)
2531 return bytecode_interpret(filter_data
, filter_stack_data
, NULL
);