/* SPDX-License-Identifier: MIT
 *
 * lttng-filter-interpreter.c
 *
 * LTTng modules filter interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <wrapper/uaccess.h>
11 #include <wrapper/objtool.h>
12 #include <wrapper/types.h>
13 #include <linux/swab.h>
15 #include <lttng/filter.h>
16 #include <lttng/string-utils.h>
/*
 * get_char should be called with page fault handler disabled if it is expected
 * to handle user-space read.
 */
23 char get_char(struct estack_entry
*reg
, size_t offset
)
25 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
30 /* Handle invalid access as end of string. */
31 if (unlikely(!lttng_access_ok(VERIFY_READ
,
32 reg
->u
.s
.user_str
+ offset
,
35 /* Handle fault (nonzero return value) as end of string. */
36 if (unlikely(__copy_from_user_inatomic(&c
,
37 reg
->u
.s
.user_str
+ offset
,
42 return reg
->u
.s
.str
[offset
];
/*
 * Parse the current character as a possible escape sequence.
 * Returns: 0: normal char.
 * -2: unknown escape char.
 */
/*
 * Interpret a backslash escape in a plain string literal.
 * On entry *c is the current character at *offset; on a '\\' escape the
 * offset is advanced and *c replaced by the escaped character.
 * Returns 0 for a normal or validly-escaped char, -2 for an unknown
 * escape char. Reconstructed from fragmented extraction.
 */
static
int parse_char(struct estack_entry *reg, char *c, size_t *offset)
{
	switch (*c) {
	case '\\':
		/* Skip the backslash and look at the escaped character. */
		(*offset)++;
		*c = get_char(reg, *offset);
		switch (*c) {
		case '\\':
		case '*':
			/* Only "\\" and "\*" are recognized escapes. */
			return 0;
		default:
			return -2;
		}
	default:
		return 0;
	}
}
/*
 * Adapter callback for strutils_star_glob_match_char_cb():
 * `data` is a struct estack_entry holding the string to read from.
 */
static
char get_char_at_cb(size_t at, void *data)
{
	return get_char(data, at);
}
79 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
81 bool has_user
= false;
83 struct estack_entry
*pattern_reg
;
84 struct estack_entry
*candidate_reg
;
86 /* Disable the page fault handler when reading from userspace. */
87 if (estack_bx(stack
, top
)->u
.s
.user
88 || estack_ax(stack
, top
)->u
.s
.user
) {
93 /* Find out which side is the pattern vs. the candidate. */
94 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
95 pattern_reg
= estack_ax(stack
, top
);
96 candidate_reg
= estack_bx(stack
, top
);
98 pattern_reg
= estack_bx(stack
, top
);
99 candidate_reg
= estack_ax(stack
, top
);
102 /* Perform the match operation. */
103 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
104 pattern_reg
, get_char_at_cb
, candidate_reg
);
112 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
114 size_t offset_bx
= 0, offset_ax
= 0;
115 int diff
, has_user
= 0;
117 if (estack_bx(stack
, top
)->u
.s
.user
118 || estack_ax(stack
, top
)->u
.s
.user
) {
126 char char_bx
, char_ax
;
128 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
129 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
131 if (unlikely(char_bx
== '\0')) {
132 if (char_ax
== '\0') {
136 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
137 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
138 ret
= parse_char(estack_ax(stack
, top
),
139 &char_ax
, &offset_ax
);
149 if (unlikely(char_ax
== '\0')) {
150 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
151 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
152 ret
= parse_char(estack_bx(stack
, top
),
153 &char_bx
, &offset_bx
);
162 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
163 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
164 ret
= parse_char(estack_bx(stack
, top
),
165 &char_bx
, &offset_bx
);
169 } else if (ret
== -2) {
172 /* else compare both char */
174 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
175 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
176 ret
= parse_char(estack_ax(stack
, top
),
177 &char_ax
, &offset_ax
);
181 } else if (ret
== -2) {
198 diff
= char_bx
- char_ax
;
210 uint64_t lttng_filter_interpret_bytecode_false(void *filter_data
,
211 struct lttng_probe_ctx
*lttng_probe_ctx
,
212 const char *filter_stack_data
)
214 return LTTNG_FILTER_DISCARD
;
217 #ifdef INTERPRETER_USE_SWITCH
220 * Fallback for compilers that do not support taking address of labels.
224 start_pc = &bytecode->data[0]; \
225 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
227 dbg_printk("LTTng: Executing op %s (%u)\n", \
228 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
229 (unsigned int) *(filter_opcode_t *) pc); \
230 switch (*(filter_opcode_t *) pc) {
232 #define OP(name) case name
242 * Dispatch-table based interpreter.
246 start_pc = &bytecode->code[0]; \
247 pc = next_pc = start_pc; \
248 if (unlikely(pc - start_pc >= bytecode->len)) \
250 goto *dispatch[*(filter_opcode_t *) pc];
257 goto *dispatch[*(filter_opcode_t *) pc];
/*
 * True when the estack register type holds an integer value (signed or
 * unsigned). Argument is parenthesized to keep the macro safe for
 * compound expressions.
 */
#define IS_INTEGER_REGISTER(reg_type) \
	((reg_type) == REG_S64 || (reg_type) == REG_U64)
266 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
267 struct load_ptr
*ptr
,
271 struct lttng_ctx_field
*ctx_field
;
272 struct lttng_event_field
*field
;
273 union lttng_ctx_value v
;
275 ctx_field
= <tng_static_ctx
->fields
[idx
];
276 field
= &ctx_field
->event_field
;
277 ptr
->type
= LOAD_OBJECT
;
278 /* field is only used for types nested within variants. */
281 switch (field
->type
.atype
) {
283 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
284 if (field
->type
.u
.integer
.signedness
) {
285 ptr
->object_type
= OBJECT_TYPE_S64
;
287 ptr
->ptr
= &ptr
->u
.s64
;
289 ptr
->object_type
= OBJECT_TYPE_U64
;
290 ptr
->u
.u64
= v
.s64
; /* Cast. */
291 ptr
->ptr
= &ptr
->u
.u64
;
294 case atype_enum_nestable
:
296 const struct lttng_integer_type
*itype
=
297 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
299 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
300 if (itype
->signedness
) {
301 ptr
->object_type
= OBJECT_TYPE_S64
;
303 ptr
->ptr
= &ptr
->u
.s64
;
305 ptr
->object_type
= OBJECT_TYPE_U64
;
306 ptr
->u
.u64
= v
.s64
; /* Cast. */
307 ptr
->ptr
= &ptr
->u
.u64
;
311 case atype_array_nestable
:
312 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
313 printk(KERN_WARNING
"LTTng: filter: Array nesting only supports integer types.\n");
316 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
317 printk(KERN_WARNING
"LTTng: filter: Only string arrays are supported for contexts.\n");
320 ptr
->object_type
= OBJECT_TYPE_STRING
;
321 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
324 case atype_sequence_nestable
:
325 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
326 printk(KERN_WARNING
"LTTng: filter: Sequence nesting only supports integer types.\n");
329 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
330 printk(KERN_WARNING
"LTTng: filter: Only string sequences are supported for contexts.\n");
333 ptr
->object_type
= OBJECT_TYPE_STRING
;
334 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
338 ptr
->object_type
= OBJECT_TYPE_STRING
;
339 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
342 case atype_struct_nestable
:
343 printk(KERN_WARNING
"LTTng: filter: Structure type cannot be loaded.\n");
345 case atype_variant_nestable
:
346 printk(KERN_WARNING
"LTTng: filter: Variant type cannot be loaded.\n");
349 printk(KERN_WARNING
"LTTng: filter: Unknown type: %d", (int) field
->type
.atype
);
355 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
356 struct bytecode_runtime
*runtime
,
357 uint64_t index
, struct estack_entry
*stack_top
)
360 const struct filter_get_index_data
*gid
;
362 gid
= (const struct filter_get_index_data
*) &runtime
->data
[index
];
363 switch (stack_top
->u
.ptr
.type
) {
365 switch (stack_top
->u
.ptr
.object_type
) {
366 case OBJECT_TYPE_ARRAY
:
370 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
371 /* Skip count (unsigned long) */
372 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
373 ptr
= ptr
+ gid
->offset
;
374 stack_top
->u
.ptr
.ptr
= ptr
;
375 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
376 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
377 BUG_ON(stack_top
->u
.ptr
.field
->type
.atype
!= atype_array_nestable
);
378 stack_top
->u
.ptr
.field
= NULL
;
381 case OBJECT_TYPE_SEQUENCE
:
386 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
387 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
388 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
392 ptr
= ptr
+ gid
->offset
;
393 stack_top
->u
.ptr
.ptr
= ptr
;
394 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
395 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
396 BUG_ON(stack_top
->u
.ptr
.field
->type
.atype
!= atype_sequence_nestable
);
397 stack_top
->u
.ptr
.field
= NULL
;
400 case OBJECT_TYPE_STRUCT
:
401 printk(KERN_WARNING
"LTTng: filter: Nested structures are not supported yet.\n");
404 case OBJECT_TYPE_VARIANT
:
406 printk(KERN_WARNING
"LTTng: filter: Unexpected get index type %d",
407 (int) stack_top
->u
.ptr
.object_type
);
412 case LOAD_ROOT_CONTEXT
:
413 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
415 ret
= context_get_index(lttng_probe_ctx
,
423 case LOAD_ROOT_PAYLOAD
:
424 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
425 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
426 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
427 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
428 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
429 stack_top
->u
.ptr
.field
= gid
->field
;
438 static int dynamic_load_field(struct estack_entry
*stack_top
)
442 switch (stack_top
->u
.ptr
.type
) {
445 case LOAD_ROOT_CONTEXT
:
446 case LOAD_ROOT_APP_CONTEXT
:
447 case LOAD_ROOT_PAYLOAD
:
449 dbg_printk("Filter warning: cannot load root, missing field name.\n");
453 switch (stack_top
->u
.ptr
.object_type
) {
455 dbg_printk("op load field s8\n");
456 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
457 stack_top
->type
= REG_S64
;
459 case OBJECT_TYPE_S16
:
463 dbg_printk("op load field s16\n");
464 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
465 if (stack_top
->u
.ptr
.rev_bo
)
467 stack_top
->u
.v
= tmp
;
468 stack_top
->type
= REG_S64
;
471 case OBJECT_TYPE_S32
:
475 dbg_printk("op load field s32\n");
476 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
477 if (stack_top
->u
.ptr
.rev_bo
)
479 stack_top
->u
.v
= tmp
;
480 stack_top
->type
= REG_S64
;
483 case OBJECT_TYPE_S64
:
487 dbg_printk("op load field s64\n");
488 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
489 if (stack_top
->u
.ptr
.rev_bo
)
491 stack_top
->u
.v
= tmp
;
492 stack_top
->type
= REG_S64
;
496 dbg_printk("op load field u8\n");
497 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
498 stack_top
->type
= REG_U64
;
500 case OBJECT_TYPE_U16
:
504 dbg_printk("op load field u16\n");
505 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
506 if (stack_top
->u
.ptr
.rev_bo
)
508 stack_top
->u
.v
= tmp
;
509 stack_top
->type
= REG_U64
;
512 case OBJECT_TYPE_U32
:
516 dbg_printk("op load field u32\n");
517 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
518 if (stack_top
->u
.ptr
.rev_bo
)
520 stack_top
->u
.v
= tmp
;
521 stack_top
->type
= REG_U64
;
524 case OBJECT_TYPE_U64
:
528 dbg_printk("op load field u64\n");
529 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
530 if (stack_top
->u
.ptr
.rev_bo
)
532 stack_top
->u
.v
= tmp
;
533 stack_top
->type
= REG_U64
;
536 case OBJECT_TYPE_STRING
:
540 dbg_printk("op load field string\n");
541 str
= (const char *) stack_top
->u
.ptr
.ptr
;
542 stack_top
->u
.s
.str
= str
;
543 if (unlikely(!stack_top
->u
.s
.str
)) {
544 dbg_printk("Filter warning: loading a NULL string.\n");
548 stack_top
->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
549 stack_top
->u
.s
.literal_type
=
550 ESTACK_STRING_LITERAL_TYPE_NONE
;
551 stack_top
->type
= REG_STRING
;
554 case OBJECT_TYPE_STRING_SEQUENCE
:
558 dbg_printk("op load field string sequence\n");
559 ptr
= stack_top
->u
.ptr
.ptr
;
560 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
561 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
562 if (unlikely(!stack_top
->u
.s
.str
)) {
563 dbg_printk("Filter warning: loading a NULL sequence.\n");
567 stack_top
->u
.s
.literal_type
=
568 ESTACK_STRING_LITERAL_TYPE_NONE
;
569 stack_top
->type
= REG_STRING
;
572 case OBJECT_TYPE_DYNAMIC
:
574 * Dynamic types in context are looked up
575 * by context get index.
579 case OBJECT_TYPE_DOUBLE
:
582 case OBJECT_TYPE_SEQUENCE
:
583 case OBJECT_TYPE_ARRAY
:
584 case OBJECT_TYPE_STRUCT
:
585 case OBJECT_TYPE_VARIANT
:
586 printk(KERN_WARNING
"LTTng: filter: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
597 int lttng_bytecode_interpret_format_output(struct estack_entry
*ax
,
598 struct lttng_interpreter_output
*output
)
605 output
->type
= LTTNG_INTERPRETER_TYPE_S64
;
606 output
->u
.s
= ax
->u
.v
;
609 output
->type
= LTTNG_INTERPRETER_TYPE_U64
;
610 output
->u
.u
= (uint64_t) ax
->u
.v
;
613 output
->type
= LTTNG_INTERPRETER_TYPE_STRING
;
614 output
->u
.str
.str
= ax
->u
.s
.str
;
615 output
->u
.str
.len
= ax
->u
.s
.seq_len
;
618 switch (ax
->u
.ptr
.object_type
) {
620 case OBJECT_TYPE_S16
:
621 case OBJECT_TYPE_S32
:
622 case OBJECT_TYPE_S64
:
624 case OBJECT_TYPE_U16
:
625 case OBJECT_TYPE_U32
:
626 case OBJECT_TYPE_U64
:
627 case OBJECT_TYPE_DOUBLE
:
628 case OBJECT_TYPE_STRING
:
629 case OBJECT_TYPE_STRING_SEQUENCE
:
630 ret
= dynamic_load_field(ax
);
633 /* Retry after loading ptr into stack top. */
635 case OBJECT_TYPE_SEQUENCE
:
636 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
637 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
638 output
->u
.sequence
.nr_elem
= *(unsigned long *) ax
->u
.ptr
.ptr
;
639 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.sequence_nestable
.elem_type
;
641 case OBJECT_TYPE_ARRAY
:
642 /* Skip count (unsigned long) */
643 output
->type
= LTTNG_INTERPRETER_TYPE_SEQUENCE
;
644 output
->u
.sequence
.ptr
= *(const char **) (ax
->u
.ptr
.ptr
+ sizeof(unsigned long));
645 output
->u
.sequence
.nr_elem
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.length
;
646 output
->u
.sequence
.nested_type
= ax
->u
.ptr
.field
->type
.u
.array_nestable
.elem_type
;
648 case OBJECT_TYPE_STRUCT
:
649 case OBJECT_TYPE_VARIANT
:
655 case REG_STAR_GLOB_STRING
:
656 case REG_TYPE_UNKNOWN
:
661 return LTTNG_FILTER_RECORD_FLAG
;
665 * Return 0 (discard), or raise the 0x1 flag (log event).
666 * Currently, other flags are kept for future extensions and have no
670 uint64_t bytecode_interpret(void *interpreter_data
,
671 struct lttng_probe_ctx
*lttng_probe_ctx
,
672 const char *interpreter_stack_data
,
673 struct lttng_interpreter_output
*output
)
675 struct bytecode_runtime
*bytecode
= interpreter_data
;
676 void *pc
, *next_pc
, *start_pc
;
679 struct estack _stack
;
680 struct estack
*stack
= &_stack
;
681 register int64_t ax
= 0, bx
= 0;
682 register enum entry_type ax_t
= REG_TYPE_UNKNOWN
, bx_t
= REG_TYPE_UNKNOWN
;
683 register int top
= FILTER_STACK_EMPTY
;
684 #ifndef INTERPRETER_USE_SWITCH
685 static void *dispatch
[NR_FILTER_OPS
] = {
686 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
688 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
691 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
692 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
693 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
694 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
695 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
696 [ FILTER_OP_BIT_RSHIFT
] = &&LABEL_FILTER_OP_BIT_RSHIFT
,
697 [ FILTER_OP_BIT_LSHIFT
] = &&LABEL_FILTER_OP_BIT_LSHIFT
,
698 [ FILTER_OP_BIT_AND
] = &&LABEL_FILTER_OP_BIT_AND
,
699 [ FILTER_OP_BIT_OR
] = &&LABEL_FILTER_OP_BIT_OR
,
700 [ FILTER_OP_BIT_XOR
] = &&LABEL_FILTER_OP_BIT_XOR
,
702 /* binary comparators */
703 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
704 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
705 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
706 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
707 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
708 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
710 /* string binary comparator */
711 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
712 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
713 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
714 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
715 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
716 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
718 /* globbing pattern binary comparator */
719 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
720 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
722 /* s64 binary comparator */
723 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
724 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
725 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
726 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
727 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
728 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
730 /* double binary comparator */
731 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
732 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
733 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
734 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
735 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
736 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
738 /* Mixed S64-double binary comparators */
739 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
740 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
741 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
742 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
743 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
744 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
746 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
747 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
748 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
749 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
750 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
751 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
754 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
755 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
756 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
757 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
758 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
759 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
760 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
761 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
762 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
765 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
766 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
769 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
770 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
771 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
772 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
773 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
775 /* load from immediate operand */
776 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
777 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
778 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
779 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
782 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
783 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
784 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
786 /* get context ref */
787 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
788 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
789 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
790 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
792 /* load userspace field ref */
793 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
794 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
796 /* Instructions for recursive traversal through composed types. */
797 [ FILTER_OP_GET_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT
,
798 [ FILTER_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT
,
799 [ FILTER_OP_GET_PAYLOAD_ROOT
] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT
,
801 [ FILTER_OP_GET_SYMBOL
] = &&LABEL_FILTER_OP_GET_SYMBOL
,
802 [ FILTER_OP_GET_SYMBOL_FIELD
] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD
,
803 [ FILTER_OP_GET_INDEX_U16
] = &&LABEL_FILTER_OP_GET_INDEX_U16
,
804 [ FILTER_OP_GET_INDEX_U64
] = &&LABEL_FILTER_OP_GET_INDEX_U64
,
806 [ FILTER_OP_LOAD_FIELD
] = &&LABEL_FILTER_OP_LOAD_FIELD
,
807 [ FILTER_OP_LOAD_FIELD_S8
] = &&LABEL_FILTER_OP_LOAD_FIELD_S8
,
808 [ FILTER_OP_LOAD_FIELD_S16
] = &&LABEL_FILTER_OP_LOAD_FIELD_S16
,
809 [ FILTER_OP_LOAD_FIELD_S32
] = &&LABEL_FILTER_OP_LOAD_FIELD_S32
,
810 [ FILTER_OP_LOAD_FIELD_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_S64
,
811 [ FILTER_OP_LOAD_FIELD_U8
] = &&LABEL_FILTER_OP_LOAD_FIELD_U8
,
812 [ FILTER_OP_LOAD_FIELD_U16
] = &&LABEL_FILTER_OP_LOAD_FIELD_U16
,
813 [ FILTER_OP_LOAD_FIELD_U32
] = &&LABEL_FILTER_OP_LOAD_FIELD_U32
,
814 [ FILTER_OP_LOAD_FIELD_U64
] = &&LABEL_FILTER_OP_LOAD_FIELD_U64
,
815 [ FILTER_OP_LOAD_FIELD_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING
,
816 [ FILTER_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE
,
817 [ FILTER_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE
,
819 [ FILTER_OP_UNARY_BIT_NOT
] = &&LABEL_FILTER_OP_UNARY_BIT_NOT
,
821 [ FILTER_OP_RETURN_S64
] = &&LABEL_FILTER_OP_RETURN_S64
,
823 #endif /* #ifndef INTERPRETER_USE_SWITCH */
827 OP(FILTER_OP_UNKNOWN
):
828 OP(FILTER_OP_LOAD_FIELD_REF
):
829 OP(FILTER_OP_GET_CONTEXT_REF
):
830 #ifdef INTERPRETER_USE_SWITCH
832 #endif /* INTERPRETER_USE_SWITCH */
833 printk(KERN_WARNING
"LTTng: filter: unknown bytecode op %u\n",
834 (unsigned int) *(filter_opcode_t
*) pc
);
838 OP(FILTER_OP_RETURN
):
839 OP(FILTER_OP_RETURN_S64
):
840 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
841 switch (estack_ax_t
) {
844 retval
= !!estack_ax_v
;
855 case REG_STAR_GLOB_STRING
:
856 case REG_TYPE_UNKNOWN
:
869 printk(KERN_WARNING
"LTTng: filter: unsupported bytecode op %u\n",
870 (unsigned int) *(filter_opcode_t
*) pc
);
880 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
881 (unsigned int) *(filter_opcode_t
*) pc
);
885 OP(FILTER_OP_EQ_STRING
):
889 res
= (stack_strcmp(stack
, top
, "==") == 0);
890 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
892 estack_ax_t
= REG_S64
;
893 next_pc
+= sizeof(struct binary_op
);
896 OP(FILTER_OP_NE_STRING
):
900 res
= (stack_strcmp(stack
, top
, "!=") != 0);
901 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
903 estack_ax_t
= REG_S64
;
904 next_pc
+= sizeof(struct binary_op
);
907 OP(FILTER_OP_GT_STRING
):
911 res
= (stack_strcmp(stack
, top
, ">") > 0);
912 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
914 estack_ax_t
= REG_S64
;
915 next_pc
+= sizeof(struct binary_op
);
918 OP(FILTER_OP_LT_STRING
):
922 res
= (stack_strcmp(stack
, top
, "<") < 0);
923 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
925 estack_ax_t
= REG_S64
;
926 next_pc
+= sizeof(struct binary_op
);
929 OP(FILTER_OP_GE_STRING
):
933 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
934 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
936 estack_ax_t
= REG_S64
;
937 next_pc
+= sizeof(struct binary_op
);
940 OP(FILTER_OP_LE_STRING
):
944 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
945 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
947 estack_ax_t
= REG_S64
;
948 next_pc
+= sizeof(struct binary_op
);
952 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
956 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
957 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
959 estack_ax_t
= REG_S64
;
960 next_pc
+= sizeof(struct binary_op
);
963 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
967 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
968 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
970 estack_ax_t
= REG_S64
;
971 next_pc
+= sizeof(struct binary_op
);
975 OP(FILTER_OP_EQ_S64
):
979 res
= (estack_bx_v
== estack_ax_v
);
980 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
982 estack_ax_t
= REG_S64
;
983 next_pc
+= sizeof(struct binary_op
);
986 OP(FILTER_OP_NE_S64
):
990 res
= (estack_bx_v
!= estack_ax_v
);
991 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
993 estack_ax_t
= REG_S64
;
994 next_pc
+= sizeof(struct binary_op
);
997 OP(FILTER_OP_GT_S64
):
1001 res
= (estack_bx_v
> estack_ax_v
);
1002 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1004 estack_ax_t
= REG_S64
;
1005 next_pc
+= sizeof(struct binary_op
);
1008 OP(FILTER_OP_LT_S64
):
1012 res
= (estack_bx_v
< estack_ax_v
);
1013 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1015 estack_ax_t
= REG_S64
;
1016 next_pc
+= sizeof(struct binary_op
);
1019 OP(FILTER_OP_GE_S64
):
1023 res
= (estack_bx_v
>= estack_ax_v
);
1024 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1026 estack_ax_t
= REG_S64
;
1027 next_pc
+= sizeof(struct binary_op
);
1030 OP(FILTER_OP_LE_S64
):
1034 res
= (estack_bx_v
<= estack_ax_v
);
1035 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1037 estack_ax_t
= REG_S64
;
1038 next_pc
+= sizeof(struct binary_op
);
1042 OP(FILTER_OP_EQ_DOUBLE
):
1043 OP(FILTER_OP_NE_DOUBLE
):
1044 OP(FILTER_OP_GT_DOUBLE
):
1045 OP(FILTER_OP_LT_DOUBLE
):
1046 OP(FILTER_OP_GE_DOUBLE
):
1047 OP(FILTER_OP_LE_DOUBLE
):
1053 /* Mixed S64-double binary comparators */
1054 OP(FILTER_OP_EQ_DOUBLE_S64
):
1055 OP(FILTER_OP_NE_DOUBLE_S64
):
1056 OP(FILTER_OP_GT_DOUBLE_S64
):
1057 OP(FILTER_OP_LT_DOUBLE_S64
):
1058 OP(FILTER_OP_GE_DOUBLE_S64
):
1059 OP(FILTER_OP_LE_DOUBLE_S64
):
1060 OP(FILTER_OP_EQ_S64_DOUBLE
):
1061 OP(FILTER_OP_NE_S64_DOUBLE
):
1062 OP(FILTER_OP_GT_S64_DOUBLE
):
1063 OP(FILTER_OP_LT_S64_DOUBLE
):
1064 OP(FILTER_OP_GE_S64_DOUBLE
):
1065 OP(FILTER_OP_LE_S64_DOUBLE
):
1070 OP(FILTER_OP_BIT_RSHIFT
):
1074 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1079 /* Catch undefined behavior. */
1080 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1084 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
1085 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1087 estack_ax_t
= REG_U64
;
1088 next_pc
+= sizeof(struct binary_op
);
1091 OP(FILTER_OP_BIT_LSHIFT
):
1095 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1100 /* Catch undefined behavior. */
1101 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
1105 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
1106 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1108 estack_ax_t
= REG_U64
;
1109 next_pc
+= sizeof(struct binary_op
);
1112 OP(FILTER_OP_BIT_AND
):
1116 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1121 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1122 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1124 estack_ax_t
= REG_U64
;
1125 next_pc
+= sizeof(struct binary_op
);
1128 OP(FILTER_OP_BIT_OR
):
1132 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1137 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1138 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1140 estack_ax_t
= REG_U64
;
1141 next_pc
+= sizeof(struct binary_op
);
1144 OP(FILTER_OP_BIT_XOR
):
1148 if (!IS_INTEGER_REGISTER(estack_ax_t
) || !IS_INTEGER_REGISTER(estack_bx_t
)) {
1153 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1154 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1156 estack_ax_t
= REG_U64
;
1157 next_pc
+= sizeof(struct binary_op
);
1162 OP(FILTER_OP_UNARY_PLUS
):
1163 OP(FILTER_OP_UNARY_MINUS
):
1164 OP(FILTER_OP_UNARY_NOT
):
1165 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
1166 (unsigned int) *(filter_opcode_t
*) pc
);
1171 OP(FILTER_OP_UNARY_BIT_NOT
):
1173 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1174 estack_ax_t
= REG_S64
;
1175 next_pc
+= sizeof(struct unary_op
);
1179 OP(FILTER_OP_UNARY_PLUS_S64
):
1181 next_pc
+= sizeof(struct unary_op
);
1184 OP(FILTER_OP_UNARY_MINUS_S64
):
1186 estack_ax_v
= -estack_ax_v
;
1187 estack_ax_t
= REG_S64
;
1188 next_pc
+= sizeof(struct unary_op
);
1191 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
1192 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
1197 OP(FILTER_OP_UNARY_NOT_S64
):
1199 estack_ax_v
= !estack_ax_v
;
1200 estack_ax_t
= REG_S64
;
1201 next_pc
+= sizeof(struct unary_op
);
1204 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
1213 struct logical_op
*insn
= (struct logical_op
*) pc
;
1215 /* If AX is 0, skip and evaluate to 0 */
1216 if (unlikely(estack_ax_v
== 0)) {
1217 dbg_printk("Jumping to bytecode offset %u\n",
1218 (unsigned int) insn
->skip_offset
);
1219 next_pc
= start_pc
+ insn
->skip_offset
;
1221 /* Pop 1 when jump not taken */
1222 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1223 next_pc
+= sizeof(struct logical_op
);
1229 struct logical_op
*insn
= (struct logical_op
*) pc
;
1231 /* If AX is nonzero, skip and evaluate to 1 */
1233 if (unlikely(estack_ax_v
!= 0)) {
1235 dbg_printk("Jumping to bytecode offset %u\n",
1236 (unsigned int) insn
->skip_offset
);
1237 next_pc
= start_pc
+ insn
->skip_offset
;
1239 /* Pop 1 when jump not taken */
1240 estack_pop(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1241 next_pc
+= sizeof(struct logical_op
);
1247 /* load field ref */
1248 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
1250 struct load_op
*insn
= (struct load_op
*) pc
;
1251 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1253 dbg_printk("load field ref offset %u type string\n",
1255 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1256 estack_ax(stack
, top
)->u
.s
.str
=
1257 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1258 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1259 dbg_printk("Filter warning: loading a NULL string.\n");
1263 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1264 estack_ax(stack
, top
)->u
.s
.literal_type
=
1265 ESTACK_STRING_LITERAL_TYPE_NONE
;
1266 estack_ax(stack
, top
)->u
.s
.user
= 0;
1267 estack_ax(stack
, top
)->type
= REG_STRING
;
1268 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1269 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1273 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
1275 struct load_op
*insn
= (struct load_op
*) pc
;
1276 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1278 dbg_printk("load field ref offset %u type sequence\n",
1280 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1281 estack_ax(stack
, top
)->u
.s
.seq_len
=
1282 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1283 estack_ax(stack
, top
)->u
.s
.str
=
1284 *(const char **) (&interpreter_stack_data
[ref
->offset
1285 + sizeof(unsigned long)]);
1286 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1287 dbg_printk("Filter warning: loading a NULL sequence.\n");
1291 estack_ax(stack
, top
)->u
.s
.literal_type
=
1292 ESTACK_STRING_LITERAL_TYPE_NONE
;
1293 estack_ax(stack
, top
)->u
.s
.user
= 0;
1294 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1298 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
1300 struct load_op
*insn
= (struct load_op
*) pc
;
1301 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1303 dbg_printk("load field ref offset %u type s64\n",
1305 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1307 ((struct literal_numeric
*) &interpreter_stack_data
[ref
->offset
])->v
;
1308 estack_ax_t
= REG_S64
;
1309 dbg_printk("ref load s64 %lld\n",
1310 (long long) estack_ax_v
);
1311 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1315 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
1321 /* load from immediate operand */
1322 OP(FILTER_OP_LOAD_STRING
):
1324 struct load_op
*insn
= (struct load_op
*) pc
;
1326 dbg_printk("load string %s\n", insn
->data
);
1327 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1328 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1329 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1330 estack_ax(stack
, top
)->u
.s
.literal_type
=
1331 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1332 estack_ax(stack
, top
)->u
.s
.user
= 0;
1333 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1337 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
1339 struct load_op
*insn
= (struct load_op
*) pc
;
1341 dbg_printk("load globbing pattern %s\n", insn
->data
);
1342 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1343 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1344 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1345 estack_ax(stack
, top
)->u
.s
.literal_type
=
1346 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1347 estack_ax(stack
, top
)->u
.s
.user
= 0;
1348 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1352 OP(FILTER_OP_LOAD_S64
):
1354 struct load_op
*insn
= (struct load_op
*) pc
;
1356 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1357 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1358 estack_ax_t
= REG_S64
;
1359 dbg_printk("load s64 %lld\n",
1360 (long long) estack_ax_v
);
1361 next_pc
+= sizeof(struct load_op
)
1362 + sizeof(struct literal_numeric
);
1366 OP(FILTER_OP_LOAD_DOUBLE
):
1373 OP(FILTER_OP_CAST_TO_S64
):
1374 printk(KERN_WARNING
"LTTng: filter: unsupported non-specialized bytecode op %u\n",
1375 (unsigned int) *(filter_opcode_t
*) pc
);
1379 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
1385 OP(FILTER_OP_CAST_NOP
):
1387 next_pc
+= sizeof(struct cast_op
);
1391 /* get context ref */
1392 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
1394 struct load_op
*insn
= (struct load_op
*) pc
;
1395 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1396 struct lttng_ctx_field
*ctx_field
;
1397 union lttng_ctx_value v
;
1399 dbg_printk("get context ref offset %u type string\n",
1401 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1402 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1403 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1404 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1405 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1406 dbg_printk("Filter warning: loading a NULL string.\n");
1410 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1411 estack_ax(stack
, top
)->u
.s
.literal_type
=
1412 ESTACK_STRING_LITERAL_TYPE_NONE
;
1413 estack_ax(stack
, top
)->u
.s
.user
= 0;
1414 estack_ax(stack
, top
)->type
= REG_STRING
;
1415 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1416 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1420 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
1422 struct load_op
*insn
= (struct load_op
*) pc
;
1423 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1424 struct lttng_ctx_field
*ctx_field
;
1425 union lttng_ctx_value v
;
1427 dbg_printk("get context ref offset %u type s64\n",
1429 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1430 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1431 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1432 estack_ax_v
= v
.s64
;
1433 estack_ax_t
= REG_S64
;
1434 dbg_printk("ref get context s64 %lld\n",
1435 (long long) estack_ax_v
);
1436 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1440 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
1446 /* load userspace field ref */
1447 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
1449 struct load_op
*insn
= (struct load_op
*) pc
;
1450 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1452 dbg_printk("load field ref offset %u type user string\n",
1454 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1455 estack_ax(stack
, top
)->u
.s
.user_str
=
1456 *(const char * const *) &interpreter_stack_data
[ref
->offset
];
1457 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1458 dbg_printk("Filter warning: loading a NULL string.\n");
1462 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1463 estack_ax(stack
, top
)->u
.s
.literal_type
=
1464 ESTACK_STRING_LITERAL_TYPE_NONE
;
1465 estack_ax(stack
, top
)->u
.s
.user
= 1;
1466 estack_ax(stack
, top
)->type
= REG_STRING
;
1467 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1468 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1472 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1474 struct load_op
*insn
= (struct load_op
*) pc
;
1475 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1477 dbg_printk("load field ref offset %u type user sequence\n",
1479 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1480 estack_ax(stack
, top
)->u
.s
.seq_len
=
1481 *(unsigned long *) &interpreter_stack_data
[ref
->offset
];
1482 estack_ax(stack
, top
)->u
.s
.user_str
=
1483 *(const char **) (&interpreter_stack_data
[ref
->offset
1484 + sizeof(unsigned long)]);
1485 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1486 dbg_printk("Filter warning: loading a NULL sequence.\n");
1490 estack_ax(stack
, top
)->u
.s
.literal_type
=
1491 ESTACK_STRING_LITERAL_TYPE_NONE
;
1492 estack_ax(stack
, top
)->u
.s
.user
= 1;
1493 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1497 OP(FILTER_OP_GET_CONTEXT_ROOT
):
1499 dbg_printk("op get context root\n");
1500 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1501 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1502 /* "field" only needed for variants. */
1503 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1504 estack_ax(stack
, top
)->type
= REG_PTR
;
1505 next_pc
+= sizeof(struct load_op
);
1509 OP(FILTER_OP_GET_APP_CONTEXT_ROOT
):
1515 OP(FILTER_OP_GET_PAYLOAD_ROOT
):
1517 dbg_printk("op get app payload root\n");
1518 estack_push(stack
, top
, ax
, bx
, ax_t
, bx_t
);
1519 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1520 estack_ax(stack
, top
)->u
.ptr
.ptr
= interpreter_stack_data
;
1521 /* "field" only needed for variants. */
1522 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1523 estack_ax(stack
, top
)->type
= REG_PTR
;
1524 next_pc
+= sizeof(struct load_op
);
1528 OP(FILTER_OP_GET_SYMBOL
):
1530 dbg_printk("op get symbol\n");
1531 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1533 printk(KERN_WARNING
"LTTng: filter: Nested fields not implemented yet.\n");
1536 case LOAD_ROOT_CONTEXT
:
1537 case LOAD_ROOT_APP_CONTEXT
:
1538 case LOAD_ROOT_PAYLOAD
:
1540 * symbol lookup is performed by
1546 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1550 OP(FILTER_OP_GET_SYMBOL_FIELD
):
1553 * Used for first variant encountered in a
1554 * traversal. Variants are not implemented yet.
1560 OP(FILTER_OP_GET_INDEX_U16
):
1562 struct load_op
*insn
= (struct load_op
*) pc
;
1563 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1565 dbg_printk("op get index u16\n");
1566 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1569 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1570 estack_ax_t
= estack_ax(stack
, top
)->type
;
1571 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1575 OP(FILTER_OP_GET_INDEX_U64
):
1577 struct load_op
*insn
= (struct load_op
*) pc
;
1578 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1580 dbg_printk("op get index u64\n");
1581 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1584 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1585 estack_ax_t
= estack_ax(stack
, top
)->type
;
1586 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1590 OP(FILTER_OP_LOAD_FIELD
):
1592 dbg_printk("op load field\n");
1593 ret
= dynamic_load_field(estack_ax(stack
, top
));
1596 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1597 estack_ax_t
= estack_ax(stack
, top
)->type
;
1598 next_pc
+= sizeof(struct load_op
);
1602 OP(FILTER_OP_LOAD_FIELD_S8
):
1604 dbg_printk("op load field s8\n");
1606 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1607 estack_ax_t
= REG_S64
;
1608 next_pc
+= sizeof(struct load_op
);
1611 OP(FILTER_OP_LOAD_FIELD_S16
):
1613 dbg_printk("op load field s16\n");
1615 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1616 estack_ax_t
= REG_S64
;
1617 next_pc
+= sizeof(struct load_op
);
1620 OP(FILTER_OP_LOAD_FIELD_S32
):
1622 dbg_printk("op load field s32\n");
1624 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1625 estack_ax_t
= REG_S64
;
1626 next_pc
+= sizeof(struct load_op
);
1629 OP(FILTER_OP_LOAD_FIELD_S64
):
1631 dbg_printk("op load field s64\n");
1633 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1634 estack_ax_t
= REG_S64
;
1635 next_pc
+= sizeof(struct load_op
);
1638 OP(FILTER_OP_LOAD_FIELD_U8
):
1640 dbg_printk("op load field u8\n");
1642 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1643 estack_ax_t
= REG_S64
;
1644 next_pc
+= sizeof(struct load_op
);
1647 OP(FILTER_OP_LOAD_FIELD_U16
):
1649 dbg_printk("op load field u16\n");
1651 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1652 estack_ax_t
= REG_S64
;
1653 next_pc
+= sizeof(struct load_op
);
1656 OP(FILTER_OP_LOAD_FIELD_U32
):
1658 dbg_printk("op load field u32\n");
1660 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1661 estack_ax_t
= REG_S64
;
1662 next_pc
+= sizeof(struct load_op
);
1665 OP(FILTER_OP_LOAD_FIELD_U64
):
1667 dbg_printk("op load field u64\n");
1669 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1670 estack_ax_t
= REG_S64
;
1671 next_pc
+= sizeof(struct load_op
);
1674 OP(FILTER_OP_LOAD_FIELD_DOUBLE
):
1680 OP(FILTER_OP_LOAD_FIELD_STRING
):
1684 dbg_printk("op load field string\n");
1685 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1686 estack_ax(stack
, top
)->u
.s
.str
= str
;
1687 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1688 dbg_printk("Filter warning: loading a NULL string.\n");
1692 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
1693 estack_ax(stack
, top
)->u
.s
.literal_type
=
1694 ESTACK_STRING_LITERAL_TYPE_NONE
;
1695 estack_ax(stack
, top
)->type
= REG_STRING
;
1696 next_pc
+= sizeof(struct load_op
);
1700 OP(FILTER_OP_LOAD_FIELD_SEQUENCE
):
1704 dbg_printk("op load field string sequence\n");
1705 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1706 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1707 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1708 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1709 dbg_printk("Filter warning: loading a NULL sequence.\n");
1713 estack_ax(stack
, top
)->u
.s
.literal_type
=
1714 ESTACK_STRING_LITERAL_TYPE_NONE
;
1715 estack_ax(stack
, top
)->type
= REG_STRING
;
1716 next_pc
+= sizeof(struct load_op
);
1722 /* Return _DISCARD on error. */
1724 return LTTNG_FILTER_DISCARD
;
1727 return lttng_bytecode_interpret_format_output(
1728 estack_ax(stack
, top
), output
);
/*
 * Exempt bytecode_interpret() from objtool stack-frame validation
 * (wrapper around the kernel's STACK_FRAME_NON_STANDARD annotation,
 * see wrapper/objtool.h included above). NOTE(review): presumably
 * needed because the interpreter's computed-goto dispatch produces a
 * frame layout objtool cannot follow — confirm against the wrapper.
 */
1733 LTTNG_STACK_FRAME_NON_STANDARD(bytecode_interpret
);
1735 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
1736 struct lttng_probe_ctx
*lttng_probe_ctx
,
1737 const char *filter_stack_data
)
1739 return bytecode_interpret(filter_data
, lttng_probe_ctx
,
1740 filter_stack_data
, NULL
);