/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "lttng-filter.h"
/*
 * Parse one character of a literal (immediate) string operand,
 * advancing *p past a backslash when an escape sequence is found.
 *
 * Return values:
 *  -1: wildcard found.
 *  -2: unknown escape char.
 *   0: normal char.
 *
 * NOTE(review): reconstructed from extraction-damaged source; the
 * visible fragment only shows the "-2: unknown escape char." contract
 * and the signature. The -1/-2 return codes match the checks done by
 * the caller (stack_strcmp). Verify against project history.
 */
static
int parse_char(const char **p)
{
	switch (**p) {
	case '\\':
		/* Escape char: inspect the character that follows. */
		(*p)++;
		switch (**p) {
		case '\\':
		case '*':
			/* Known escape sequence: compare the literal char. */
			return 0;
		default:
			/* Unknown escape sequence. */
			return -2;
		}
	case '*':
		/* Unescaped star: wildcard. */
		return -1;
	default:
		return 0;
	}
}
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
65 if (estack_ax(stack
, top
)->u
.s
.literal
) {
73 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
74 if (p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0') {
77 if (estack_bx(stack
, top
)->u
.s
.literal
) {
85 if (estack_bx(stack
, top
)->u
.s
.literal
) {
89 } else if (ret
== -2) {
92 /* else compare both char */
94 if (estack_ax(stack
, top
)->u
.s
.literal
) {
98 } else if (ret
== -2) {
/*
 * Stub filter: unconditionally evaluates to 0 (discard the event).
 * Installed as the interpreter function when no usable bytecode is
 * linked; parameters are accepted only to match the interpreter
 * function signature and are intentionally unused.
 */
uint64_t lttng_filter_false(void *filter_data,
		const char *filter_stack_data)
{
	return 0;
}
124 #ifdef INTERPRETER_USE_SWITCH
127 * Fallback for compilers that do not support taking address of labels.
131 start_pc = &bytecode->data[0]; \
132 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
134 dbg_printf("Executing op %s (%u)\n", \
135 print_op((unsigned int) *(filter_opcode_t *) pc), \
136 (unsigned int) *(filter_opcode_t *) pc); \
137 switch (*(filter_opcode_t *) pc) {
139 #define OP(name) case name
149 * Dispatch-table based interpreter.
153 start_pc = &bytecode->data[0]; \
154 pc = next_pc = start_pc; \
155 if (unlikely(pc - start_pc >= bytecode->len)) \
157 goto *dispatch[*(filter_opcode_t *) pc];
164 goto *dispatch[*(filter_opcode_t *) pc];
171 * Return 0 (discard), or raise the 0x1 flag (log event).
172 * Currently, other flags are kept for future extensions and have no
175 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
176 const char *filter_stack_data
)
178 struct bytecode_runtime
*bytecode
= filter_data
;
179 void *pc
, *next_pc
, *start_pc
;
182 struct estack _stack
;
183 struct estack
*stack
= &_stack
;
184 register int64_t ax
= 0, bx
= 0;
185 register int top
= FILTER_STACK_EMPTY
;
186 #ifndef INTERPRETER_USE_SWITCH
187 static void *dispatch
[NR_FILTER_OPS
] = {
188 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
190 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
193 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
194 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
195 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
196 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
197 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
198 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
199 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
200 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
201 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
202 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
204 /* binary comparators */
205 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
206 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
207 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
208 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
209 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
210 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
212 /* string binary comparator */
213 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
214 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
215 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
216 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
217 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
218 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
220 /* s64 binary comparator */
221 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
222 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
223 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
224 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
225 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
226 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
228 /* double binary comparator */
229 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
230 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
231 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
232 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
233 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
234 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
236 /* Mixed S64-double binary comparators */
237 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
238 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
239 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
240 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
241 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
242 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
244 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
245 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
246 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
247 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
248 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
249 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
252 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
253 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
254 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
255 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
256 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
257 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
258 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
259 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
260 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
263 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
264 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
267 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
268 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
269 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
270 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
271 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
273 /* load from immediate operand */
274 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
275 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
276 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
279 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
280 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
281 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
283 /* get context ref */
284 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
285 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
286 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
287 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
289 #endif /* #ifndef INTERPRETER_USE_SWITCH */
293 OP(FILTER_OP_UNKNOWN
):
294 OP(FILTER_OP_LOAD_FIELD_REF
):
295 OP(FILTER_OP_GET_CONTEXT_REF
):
296 #ifdef INTERPRETER_USE_SWITCH
298 #endif /* INTERPRETER_USE_SWITCH */
299 ERR("unknown bytecode op %u\n",
300 (unsigned int) *(filter_opcode_t
*) pc
);
304 OP(FILTER_OP_RETURN
):
305 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
306 retval
= !!estack_ax_v
;
316 OP(FILTER_OP_RSHIFT
):
317 OP(FILTER_OP_LSHIFT
):
318 OP(FILTER_OP_BIN_AND
):
319 OP(FILTER_OP_BIN_OR
):
320 OP(FILTER_OP_BIN_XOR
):
321 ERR("unsupported bytecode op %u\n",
322 (unsigned int) *(filter_opcode_t
*) pc
);
332 ERR("unsupported non-specialized bytecode op %u\n",
333 (unsigned int) *(filter_opcode_t
*) pc
);
337 OP(FILTER_OP_EQ_STRING
):
341 res
= (stack_strcmp(stack
, top
, "==") == 0);
342 estack_pop(stack
, top
, ax
, bx
);
344 next_pc
+= sizeof(struct binary_op
);
347 OP(FILTER_OP_NE_STRING
):
351 res
= (stack_strcmp(stack
, top
, "!=") != 0);
352 estack_pop(stack
, top
, ax
, bx
);
354 next_pc
+= sizeof(struct binary_op
);
357 OP(FILTER_OP_GT_STRING
):
361 res
= (stack_strcmp(stack
, top
, ">") > 0);
362 estack_pop(stack
, top
, ax
, bx
);
364 next_pc
+= sizeof(struct binary_op
);
367 OP(FILTER_OP_LT_STRING
):
371 res
= (stack_strcmp(stack
, top
, "<") < 0);
372 estack_pop(stack
, top
, ax
, bx
);
374 next_pc
+= sizeof(struct binary_op
);
377 OP(FILTER_OP_GE_STRING
):
381 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
382 estack_pop(stack
, top
, ax
, bx
);
384 next_pc
+= sizeof(struct binary_op
);
387 OP(FILTER_OP_LE_STRING
):
391 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
392 estack_pop(stack
, top
, ax
, bx
);
394 next_pc
+= sizeof(struct binary_op
);
398 OP(FILTER_OP_EQ_S64
):
402 res
= (estack_bx_v
== estack_ax_v
);
403 estack_pop(stack
, top
, ax
, bx
);
405 next_pc
+= sizeof(struct binary_op
);
408 OP(FILTER_OP_NE_S64
):
412 res
= (estack_bx_v
!= estack_ax_v
);
413 estack_pop(stack
, top
, ax
, bx
);
415 next_pc
+= sizeof(struct binary_op
);
418 OP(FILTER_OP_GT_S64
):
422 res
= (estack_bx_v
> estack_ax_v
);
423 estack_pop(stack
, top
, ax
, bx
);
425 next_pc
+= sizeof(struct binary_op
);
428 OP(FILTER_OP_LT_S64
):
432 res
= (estack_bx_v
< estack_ax_v
);
433 estack_pop(stack
, top
, ax
, bx
);
435 next_pc
+= sizeof(struct binary_op
);
438 OP(FILTER_OP_GE_S64
):
442 res
= (estack_bx_v
>= estack_ax_v
);
443 estack_pop(stack
, top
, ax
, bx
);
445 next_pc
+= sizeof(struct binary_op
);
448 OP(FILTER_OP_LE_S64
):
452 res
= (estack_bx_v
<= estack_ax_v
);
453 estack_pop(stack
, top
, ax
, bx
);
455 next_pc
+= sizeof(struct binary_op
);
459 OP(FILTER_OP_EQ_DOUBLE
):
463 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
464 estack_pop(stack
, top
, ax
, bx
);
466 next_pc
+= sizeof(struct binary_op
);
469 OP(FILTER_OP_NE_DOUBLE
):
473 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
474 estack_pop(stack
, top
, ax
, bx
);
476 next_pc
+= sizeof(struct binary_op
);
479 OP(FILTER_OP_GT_DOUBLE
):
483 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
484 estack_pop(stack
, top
, ax
, bx
);
486 next_pc
+= sizeof(struct binary_op
);
489 OP(FILTER_OP_LT_DOUBLE
):
493 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
494 estack_pop(stack
, top
, ax
, bx
);
496 next_pc
+= sizeof(struct binary_op
);
499 OP(FILTER_OP_GE_DOUBLE
):
503 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
504 estack_pop(stack
, top
, ax
, bx
);
506 next_pc
+= sizeof(struct binary_op
);
509 OP(FILTER_OP_LE_DOUBLE
):
513 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
514 estack_pop(stack
, top
, ax
, bx
);
516 next_pc
+= sizeof(struct binary_op
);
520 /* Mixed S64-double binary comparators */
521 OP(FILTER_OP_EQ_DOUBLE_S64
):
525 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
526 estack_pop(stack
, top
, ax
, bx
);
528 next_pc
+= sizeof(struct binary_op
);
531 OP(FILTER_OP_NE_DOUBLE_S64
):
535 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
536 estack_pop(stack
, top
, ax
, bx
);
538 next_pc
+= sizeof(struct binary_op
);
541 OP(FILTER_OP_GT_DOUBLE_S64
):
545 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
546 estack_pop(stack
, top
, ax
, bx
);
548 next_pc
+= sizeof(struct binary_op
);
551 OP(FILTER_OP_LT_DOUBLE_S64
):
555 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
556 estack_pop(stack
, top
, ax
, bx
);
558 next_pc
+= sizeof(struct binary_op
);
561 OP(FILTER_OP_GE_DOUBLE_S64
):
565 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
566 estack_pop(stack
, top
, ax
, bx
);
568 next_pc
+= sizeof(struct binary_op
);
571 OP(FILTER_OP_LE_DOUBLE_S64
):
575 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
576 estack_pop(stack
, top
, ax
, bx
);
578 next_pc
+= sizeof(struct binary_op
);
582 OP(FILTER_OP_EQ_S64_DOUBLE
):
586 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
587 estack_pop(stack
, top
, ax
, bx
);
589 next_pc
+= sizeof(struct binary_op
);
592 OP(FILTER_OP_NE_S64_DOUBLE
):
596 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
597 estack_pop(stack
, top
, ax
, bx
);
599 next_pc
+= sizeof(struct binary_op
);
602 OP(FILTER_OP_GT_S64_DOUBLE
):
606 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
607 estack_pop(stack
, top
, ax
, bx
);
609 next_pc
+= sizeof(struct binary_op
);
612 OP(FILTER_OP_LT_S64_DOUBLE
):
616 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
617 estack_pop(stack
, top
, ax
, bx
);
619 next_pc
+= sizeof(struct binary_op
);
622 OP(FILTER_OP_GE_S64_DOUBLE
):
626 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
627 estack_pop(stack
, top
, ax
, bx
);
629 next_pc
+= sizeof(struct binary_op
);
632 OP(FILTER_OP_LE_S64_DOUBLE
):
636 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
637 estack_pop(stack
, top
, ax
, bx
);
639 next_pc
+= sizeof(struct binary_op
);
644 OP(FILTER_OP_UNARY_PLUS
):
645 OP(FILTER_OP_UNARY_MINUS
):
646 OP(FILTER_OP_UNARY_NOT
):
647 ERR("unsupported non-specialized bytecode op %u\n",
648 (unsigned int) *(filter_opcode_t
*) pc
);
653 OP(FILTER_OP_UNARY_PLUS_S64
):
654 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
656 next_pc
+= sizeof(struct unary_op
);
659 OP(FILTER_OP_UNARY_MINUS_S64
):
661 estack_ax_v
= -estack_ax_v
;
662 next_pc
+= sizeof(struct unary_op
);
665 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
667 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
668 next_pc
+= sizeof(struct unary_op
);
671 OP(FILTER_OP_UNARY_NOT_S64
):
673 estack_ax_v
= !estack_ax_v
;
674 next_pc
+= sizeof(struct unary_op
);
677 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
679 estack_ax(stack
, top
)->u
.d
= !estack_ax(stack
, top
)->u
.d
;
680 next_pc
+= sizeof(struct unary_op
);
687 struct logical_op
*insn
= (struct logical_op
*) pc
;
689 /* If AX is 0, skip and evaluate to 0 */
690 if (unlikely(estack_ax_v
== 0)) {
691 dbg_printf("Jumping to bytecode offset %u\n",
692 (unsigned int) insn
->skip_offset
);
693 next_pc
= start_pc
+ insn
->skip_offset
;
695 /* Pop 1 when jump not taken */
696 estack_pop(stack
, top
, ax
, bx
);
697 next_pc
+= sizeof(struct logical_op
);
703 struct logical_op
*insn
= (struct logical_op
*) pc
;
705 /* If AX is nonzero, skip and evaluate to 1 */
707 if (unlikely(estack_ax_v
!= 0)) {
709 dbg_printf("Jumping to bytecode offset %u\n",
710 (unsigned int) insn
->skip_offset
);
711 next_pc
= start_pc
+ insn
->skip_offset
;
713 /* Pop 1 when jump not taken */
714 estack_pop(stack
, top
, ax
, bx
);
715 next_pc
+= sizeof(struct logical_op
);
722 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
724 struct load_op
*insn
= (struct load_op
*) pc
;
725 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
727 dbg_printf("load field ref offset %u type string\n",
729 estack_push(stack
, top
, ax
, bx
);
730 estack_ax(stack
, top
)->u
.s
.str
=
731 *(const char * const *) &filter_stack_data
[ref
->offset
];
732 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
733 dbg_printf("Filter warning: loading a NULL string.\n");
737 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
738 estack_ax(stack
, top
)->u
.s
.literal
= 0;
739 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
740 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
744 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
746 struct load_op
*insn
= (struct load_op
*) pc
;
747 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
749 dbg_printf("load field ref offset %u type sequence\n",
751 estack_push(stack
, top
, ax
, bx
);
752 estack_ax(stack
, top
)->u
.s
.seq_len
=
753 *(unsigned long *) &filter_stack_data
[ref
->offset
];
754 estack_ax(stack
, top
)->u
.s
.str
=
755 *(const char **) (&filter_stack_data
[ref
->offset
756 + sizeof(unsigned long)]);
757 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
758 dbg_printf("Filter warning: loading a NULL sequence.\n");
762 estack_ax(stack
, top
)->u
.s
.literal
= 0;
763 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
767 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
769 struct load_op
*insn
= (struct load_op
*) pc
;
770 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
772 dbg_printf("load field ref offset %u type s64\n",
774 estack_push(stack
, top
, ax
, bx
);
776 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
777 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
778 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
782 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
784 struct load_op
*insn
= (struct load_op
*) pc
;
785 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
787 dbg_printf("load field ref offset %u type double\n",
789 estack_push(stack
, top
, ax
, bx
);
790 memcpy(&estack_ax(stack
, top
)->u
.d
, &filter_stack_data
[ref
->offset
],
791 sizeof(struct literal_double
));
792 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
793 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
797 /* load from immediate operand */
798 OP(FILTER_OP_LOAD_STRING
):
800 struct load_op
*insn
= (struct load_op
*) pc
;
802 dbg_printf("load string %s\n", insn
->data
);
803 estack_push(stack
, top
, ax
, bx
);
804 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
805 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
806 estack_ax(stack
, top
)->u
.s
.literal
= 1;
807 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
811 OP(FILTER_OP_LOAD_S64
):
813 struct load_op
*insn
= (struct load_op
*) pc
;
815 estack_push(stack
, top
, ax
, bx
);
816 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
817 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
818 next_pc
+= sizeof(struct load_op
)
819 + sizeof(struct literal_numeric
);
823 OP(FILTER_OP_LOAD_DOUBLE
):
825 struct load_op
*insn
= (struct load_op
*) pc
;
827 estack_push(stack
, top
, ax
, bx
);
828 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
829 sizeof(struct literal_double
));
830 dbg_printf("load s64 %g\n", estack_ax(stack
, top
)->u
.d
);
831 next_pc
+= sizeof(struct load_op
)
832 + sizeof(struct literal_double
);
837 OP(FILTER_OP_CAST_TO_S64
):
838 ERR("unsupported non-specialized bytecode op %u\n",
839 (unsigned int) *(filter_opcode_t
*) pc
);
843 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
845 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
846 next_pc
+= sizeof(struct cast_op
);
850 OP(FILTER_OP_CAST_NOP
):
852 next_pc
+= sizeof(struct cast_op
);
856 /* get context ref */
857 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
859 struct load_op
*insn
= (struct load_op
*) pc
;
860 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
861 struct lttng_ctx_field
*ctx_field
;
862 union lttng_ctx_value v
;
864 dbg_printf("get context ref offset %u type string\n",
866 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
867 ctx_field
->get_value(ctx_field
, &v
);
868 estack_push(stack
, top
, ax
, bx
);
869 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
870 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
871 dbg_printf("Filter warning: loading a NULL string.\n");
875 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
876 estack_ax(stack
, top
)->u
.s
.literal
= 0;
877 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
878 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
882 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
884 struct load_op
*insn
= (struct load_op
*) pc
;
885 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
886 struct lttng_ctx_field
*ctx_field
;
887 union lttng_ctx_value v
;
889 dbg_printf("get context ref offset %u type s64\n",
891 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
892 ctx_field
->get_value(ctx_field
, &v
);
893 estack_push(stack
, top
, ax
, bx
);
895 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
896 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
900 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
902 struct load_op
*insn
= (struct load_op
*) pc
;
903 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
904 struct lttng_ctx_field
*ctx_field
;
905 union lttng_ctx_value v
;
907 dbg_printf("get context ref offset %u type double\n",
909 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
910 ctx_field
->get_value(ctx_field
, &v
);
911 estack_push(stack
, top
, ax
, bx
);
912 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.d
, sizeof(struct literal_double
));
913 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
914 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
920 /* return 0 (discard) on error */