1 /* SPDX-License-Identifier: MIT
3 * lttng-bytecode-validator.c
 * LTTng modules bytecode validator.
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
10 #include <linux/types.h>
11 #include <linux/jhash.h>
12 #include <linux/slab.h>
14 #include <wrapper/list.h>
15 #include <lttng/lttng-bytecode.h>
17 #define MERGE_POINT_TABLE_BITS 7
18 #define MERGE_POINT_TABLE_SIZE (1U << MERGE_POINT_TABLE_BITS)
20 /* merge point table node */
22 struct hlist_node node
;
24 /* Context at merge point */
26 unsigned long target_pc
;
30 struct hlist_head mp_head
[MERGE_POINT_TABLE_SIZE
];
34 int lttng_hash_match(struct mp_node
*mp_node
, unsigned long key_pc
)
36 if (mp_node
->target_pc
== key_pc
)
43 int merge_points_compare(const struct vstack
*stacka
,
44 const struct vstack
*stackb
)
48 if (stacka
->top
!= stackb
->top
)
50 len
= stacka
->top
+ 1;
51 WARN_ON_ONCE(len
< 0);
52 for (i
= 0; i
< len
; i
++) {
53 if (stacka
->e
[i
].type
!= stackb
->e
[i
].type
)
60 int merge_point_add_check(struct mp_table
*mp_table
, unsigned long target_pc
,
61 const struct vstack
*stack
)
63 struct mp_node
*mp_node
;
64 unsigned long hash
= jhash_1word(target_pc
, 0);
65 struct hlist_head
*head
;
66 struct mp_node
*lookup_node
;
69 dbg_printk("Bytecode: adding merge point at offset %lu, hash %lu\n",
71 mp_node
= kzalloc(sizeof(struct mp_node
), GFP_KERNEL
);
74 mp_node
->target_pc
= target_pc
;
75 memcpy(&mp_node
->stack
, stack
, sizeof(mp_node
->stack
));
77 head
= &mp_table
->mp_head
[hash
& (MERGE_POINT_TABLE_SIZE
- 1)];
78 lttng_hlist_for_each_entry(lookup_node
, head
, node
) {
79 if (lttng_hash_match(lookup_node
, target_pc
)) {
85 /* Key already present */
86 dbg_printk("Bytecode: compare merge points for offset %lu, hash %lu\n",
89 if (merge_points_compare(stack
, &lookup_node
->stack
)) {
90 printk(KERN_WARNING
"Merge points differ for offset %lu\n",
95 hlist_add_head(&mp_node
->node
, head
);
101 * Binary comparators use top of stack and top of stack -1.
104 int bin_op_compare_check(struct vstack
*stack
, const bytecode_opcode_t opcode
,
107 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
110 switch (vstack_ax(stack
)->type
) {
116 switch (vstack_bx(stack
)->type
) {
120 case REG_TYPE_UNKNOWN
:
124 case REG_STAR_GLOB_STRING
:
125 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
134 case REG_STAR_GLOB_STRING
:
135 switch (vstack_bx(stack
)->type
) {
139 case REG_TYPE_UNKNOWN
:
142 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
146 case REG_STAR_GLOB_STRING
:
154 switch (vstack_bx(stack
)->type
) {
158 case REG_TYPE_UNKNOWN
:
161 case REG_STAR_GLOB_STRING
:
168 case REG_TYPE_UNKNOWN
:
169 switch (vstack_bx(stack
)->type
) {
173 case REG_TYPE_UNKNOWN
:
175 case REG_STAR_GLOB_STRING
:
188 printk(KERN_WARNING
"empty stack for '%s' binary operator\n", str
);
192 printk(KERN_WARNING
"type mismatch for '%s' binary operator\n", str
);
196 printk(KERN_WARNING
"unknown type for '%s' binary operator\n", str
);
201 * Binary bitwise operators use top of stack and top of stack -1.
202 * Return 0 if typing is known to match, 1 if typing is dynamic
203 * (unknown), negative error value on error.
206 int bin_op_bitwise_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
209 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
212 switch (vstack_ax(stack
)->type
) {
217 case REG_TYPE_UNKNOWN
:
218 switch (vstack_bx(stack
)->type
) {
222 case REG_TYPE_UNKNOWN
:
224 case REG_STAR_GLOB_STRING
:
232 switch (vstack_bx(stack
)->type
) {
236 case REG_TYPE_UNKNOWN
:
250 printk(KERN_WARNING
"empty stack for '%s' binary operator\n", str
);
254 printk(KERN_WARNING
"unknown type for '%s' binary operator\n", str
);
259 int validate_get_symbol(struct bytecode_runtime
*bytecode
,
260 const struct get_symbol
*sym
)
262 const char *str
, *str_limit
;
265 if (sym
->offset
>= bytecode
->p
.bc
->bc
.len
- bytecode
->p
.bc
->bc
.reloc_offset
)
268 str
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ sym
->offset
;
269 str_limit
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.len
;
270 len_limit
= str_limit
- str
;
271 if (strnlen(str
, len_limit
) == len_limit
)
277 * Validate bytecode range overflow within the validation pass.
278 * Called for each instruction encountered.
281 int bytecode_validate_overflow(struct bytecode_runtime
*bytecode
,
282 char *start_pc
, char *pc
)
286 switch (*(bytecode_opcode_t
*) pc
) {
287 case BYTECODE_OP_UNKNOWN
:
290 printk(KERN_WARNING
"unknown bytecode op %u\n",
291 (unsigned int) *(bytecode_opcode_t
*) pc
);
296 case BYTECODE_OP_RETURN
:
297 case BYTECODE_OP_RETURN_S64
:
299 if (unlikely(pc
+ sizeof(struct return_op
)
300 > start_pc
+ bytecode
->len
)) {
307 case BYTECODE_OP_MUL
:
308 case BYTECODE_OP_DIV
:
309 case BYTECODE_OP_MOD
:
310 case BYTECODE_OP_PLUS
:
311 case BYTECODE_OP_MINUS
:
312 case BYTECODE_OP_EQ_DOUBLE
:
313 case BYTECODE_OP_NE_DOUBLE
:
314 case BYTECODE_OP_GT_DOUBLE
:
315 case BYTECODE_OP_LT_DOUBLE
:
316 case BYTECODE_OP_GE_DOUBLE
:
317 case BYTECODE_OP_LE_DOUBLE
:
319 case BYTECODE_OP_EQ_DOUBLE_S64
:
320 case BYTECODE_OP_NE_DOUBLE_S64
:
321 case BYTECODE_OP_GT_DOUBLE_S64
:
322 case BYTECODE_OP_LT_DOUBLE_S64
:
323 case BYTECODE_OP_GE_DOUBLE_S64
:
324 case BYTECODE_OP_LE_DOUBLE_S64
:
325 case BYTECODE_OP_EQ_S64_DOUBLE
:
326 case BYTECODE_OP_NE_S64_DOUBLE
:
327 case BYTECODE_OP_GT_S64_DOUBLE
:
328 case BYTECODE_OP_LT_S64_DOUBLE
:
329 case BYTECODE_OP_GE_S64_DOUBLE
:
330 case BYTECODE_OP_LE_S64_DOUBLE
:
331 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
332 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
333 case BYTECODE_OP_LOAD_DOUBLE
:
334 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
335 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
336 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
337 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
339 printk(KERN_WARNING
"unsupported bytecode op %u\n",
340 (unsigned int) *(bytecode_opcode_t
*) pc
);
351 case BYTECODE_OP_EQ_STRING
:
352 case BYTECODE_OP_NE_STRING
:
353 case BYTECODE_OP_GT_STRING
:
354 case BYTECODE_OP_LT_STRING
:
355 case BYTECODE_OP_GE_STRING
:
356 case BYTECODE_OP_LE_STRING
:
357 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
358 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
359 case BYTECODE_OP_EQ_S64
:
360 case BYTECODE_OP_NE_S64
:
361 case BYTECODE_OP_GT_S64
:
362 case BYTECODE_OP_LT_S64
:
363 case BYTECODE_OP_GE_S64
:
364 case BYTECODE_OP_LE_S64
:
365 case BYTECODE_OP_BIT_RSHIFT
:
366 case BYTECODE_OP_BIT_LSHIFT
:
367 case BYTECODE_OP_BIT_AND
:
368 case BYTECODE_OP_BIT_OR
:
369 case BYTECODE_OP_BIT_XOR
:
371 if (unlikely(pc
+ sizeof(struct binary_op
)
372 > start_pc
+ bytecode
->len
)) {
379 case BYTECODE_OP_UNARY_PLUS
:
380 case BYTECODE_OP_UNARY_MINUS
:
381 case BYTECODE_OP_UNARY_NOT
:
382 case BYTECODE_OP_UNARY_PLUS_S64
:
383 case BYTECODE_OP_UNARY_MINUS_S64
:
384 case BYTECODE_OP_UNARY_NOT_S64
:
385 case BYTECODE_OP_UNARY_BIT_NOT
:
387 if (unlikely(pc
+ sizeof(struct unary_op
)
388 > start_pc
+ bytecode
->len
)) {
395 case BYTECODE_OP_AND
:
398 if (unlikely(pc
+ sizeof(struct logical_op
)
399 > start_pc
+ bytecode
->len
)) {
406 case BYTECODE_OP_LOAD_FIELD_REF
:
408 printk(KERN_WARNING
"Unknown field ref type\n");
413 /* get context ref */
414 case BYTECODE_OP_GET_CONTEXT_REF
:
416 printk(KERN_WARNING
"Unknown field ref type\n");
420 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
421 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
422 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
423 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
424 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
425 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
426 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
428 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct field_ref
)
429 > start_pc
+ bytecode
->len
)) {
435 /* load from immediate operand */
436 case BYTECODE_OP_LOAD_STRING
:
437 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
439 struct load_op
*insn
= (struct load_op
*) pc
;
440 uint32_t str_len
, maxlen
;
442 if (unlikely(pc
+ sizeof(struct load_op
)
443 > start_pc
+ bytecode
->len
)) {
448 maxlen
= start_pc
+ bytecode
->len
- pc
- sizeof(struct load_op
);
449 str_len
= strnlen(insn
->data
, maxlen
);
450 if (unlikely(str_len
>= maxlen
)) {
451 /* Final '\0' not found within range */
457 case BYTECODE_OP_LOAD_S64
:
459 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_numeric
)
460 > start_pc
+ bytecode
->len
)) {
466 case BYTECODE_OP_CAST_TO_S64
:
467 case BYTECODE_OP_CAST_NOP
:
469 if (unlikely(pc
+ sizeof(struct cast_op
)
470 > start_pc
+ bytecode
->len
)) {
477 * Instructions for recursive traversal through composed types.
479 case BYTECODE_OP_GET_CONTEXT_ROOT
:
480 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
481 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
482 case BYTECODE_OP_LOAD_FIELD
:
483 case BYTECODE_OP_LOAD_FIELD_S8
:
484 case BYTECODE_OP_LOAD_FIELD_S16
:
485 case BYTECODE_OP_LOAD_FIELD_S32
:
486 case BYTECODE_OP_LOAD_FIELD_S64
:
487 case BYTECODE_OP_LOAD_FIELD_U8
:
488 case BYTECODE_OP_LOAD_FIELD_U16
:
489 case BYTECODE_OP_LOAD_FIELD_U32
:
490 case BYTECODE_OP_LOAD_FIELD_U64
:
491 case BYTECODE_OP_LOAD_FIELD_STRING
:
492 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
493 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
494 if (unlikely(pc
+ sizeof(struct load_op
)
495 > start_pc
+ bytecode
->len
)) {
500 case BYTECODE_OP_GET_SYMBOL
:
502 struct load_op
*insn
= (struct load_op
*) pc
;
503 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
505 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_symbol
)
506 > start_pc
+ bytecode
->len
)) {
510 ret
= validate_get_symbol(bytecode
, sym
);
514 case BYTECODE_OP_GET_SYMBOL_FIELD
:
515 printk(KERN_WARNING
"Unexpected get symbol field\n");
519 case BYTECODE_OP_GET_INDEX_U16
:
520 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u16
)
521 > start_pc
+ bytecode
->len
)) {
526 case BYTECODE_OP_GET_INDEX_U64
:
527 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u64
)
528 > start_pc
+ bytecode
->len
)) {
538 unsigned long delete_all_nodes(struct mp_table
*mp_table
)
540 struct mp_node
*mp_node
;
541 struct hlist_node
*tmp
;
542 unsigned long nr_nodes
= 0;
545 for (i
= 0; i
< MERGE_POINT_TABLE_SIZE
; i
++) {
546 struct hlist_head
*head
;
548 head
= &mp_table
->mp_head
[i
];
549 lttng_hlist_for_each_entry_safe(mp_node
, tmp
, head
, node
) {
563 int validate_instruction_context(struct bytecode_runtime
*bytecode
,
564 struct vstack
*stack
,
569 const bytecode_opcode_t opcode
= *(bytecode_opcode_t
*) pc
;
572 case BYTECODE_OP_UNKNOWN
:
575 printk(KERN_WARNING
"unknown bytecode op %u\n",
576 (unsigned int) *(bytecode_opcode_t
*) pc
);
581 case BYTECODE_OP_RETURN
:
582 case BYTECODE_OP_RETURN_S64
:
588 case BYTECODE_OP_MUL
:
589 case BYTECODE_OP_DIV
:
590 case BYTECODE_OP_MOD
:
591 case BYTECODE_OP_PLUS
:
592 case BYTECODE_OP_MINUS
:
594 case BYTECODE_OP_EQ_DOUBLE
:
595 case BYTECODE_OP_NE_DOUBLE
:
596 case BYTECODE_OP_GT_DOUBLE
:
597 case BYTECODE_OP_LT_DOUBLE
:
598 case BYTECODE_OP_GE_DOUBLE
:
599 case BYTECODE_OP_LE_DOUBLE
:
600 case BYTECODE_OP_EQ_DOUBLE_S64
:
601 case BYTECODE_OP_NE_DOUBLE_S64
:
602 case BYTECODE_OP_GT_DOUBLE_S64
:
603 case BYTECODE_OP_LT_DOUBLE_S64
:
604 case BYTECODE_OP_GE_DOUBLE_S64
:
605 case BYTECODE_OP_LE_DOUBLE_S64
:
606 case BYTECODE_OP_EQ_S64_DOUBLE
:
607 case BYTECODE_OP_NE_S64_DOUBLE
:
608 case BYTECODE_OP_GT_S64_DOUBLE
:
609 case BYTECODE_OP_LT_S64_DOUBLE
:
610 case BYTECODE_OP_GE_S64_DOUBLE
:
611 case BYTECODE_OP_LE_S64_DOUBLE
:
612 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
613 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
614 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
615 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
616 case BYTECODE_OP_LOAD_DOUBLE
:
617 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
618 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
620 printk(KERN_WARNING
"unsupported bytecode op %u\n",
621 (unsigned int) *(bytecode_opcode_t
*) pc
);
628 ret
= bin_op_compare_check(stack
, opcode
, "==");
635 ret
= bin_op_compare_check(stack
, opcode
, "!=");
642 ret
= bin_op_compare_check(stack
, opcode
, ">");
649 ret
= bin_op_compare_check(stack
, opcode
, "<");
656 ret
= bin_op_compare_check(stack
, opcode
, ">=");
663 ret
= bin_op_compare_check(stack
, opcode
, "<=");
669 case BYTECODE_OP_EQ_STRING
:
670 case BYTECODE_OP_NE_STRING
:
671 case BYTECODE_OP_GT_STRING
:
672 case BYTECODE_OP_LT_STRING
:
673 case BYTECODE_OP_GE_STRING
:
674 case BYTECODE_OP_LE_STRING
:
676 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
677 printk(KERN_WARNING
"Empty stack\n");
681 if (vstack_ax(stack
)->type
!= REG_STRING
682 || vstack_bx(stack
)->type
!= REG_STRING
) {
683 printk(KERN_WARNING
"Unexpected register type for string comparator\n");
691 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
692 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
694 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
695 printk(KERN_WARNING
"Empty stack\n");
699 if (vstack_ax(stack
)->type
!= REG_STAR_GLOB_STRING
700 && vstack_bx(stack
)->type
!= REG_STAR_GLOB_STRING
) {
701 printk(KERN_WARNING
"Unexpected register type for globbing pattern comparator\n");
708 case BYTECODE_OP_EQ_S64
:
709 case BYTECODE_OP_NE_S64
:
710 case BYTECODE_OP_GT_S64
:
711 case BYTECODE_OP_LT_S64
:
712 case BYTECODE_OP_GE_S64
:
713 case BYTECODE_OP_LE_S64
:
715 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
716 printk(KERN_WARNING
"Empty stack\n");
720 switch (vstack_ax(stack
)->type
) {
725 printk(KERN_WARNING
"Unexpected register type for s64 comparator\n");
729 switch (vstack_bx(stack
)->type
) {
734 printk(KERN_WARNING
"Unexpected register type for s64 comparator\n");
741 case BYTECODE_OP_BIT_RSHIFT
:
742 ret
= bin_op_bitwise_check(stack
, opcode
, ">>");
746 case BYTECODE_OP_BIT_LSHIFT
:
747 ret
= bin_op_bitwise_check(stack
, opcode
, "<<");
751 case BYTECODE_OP_BIT_AND
:
752 ret
= bin_op_bitwise_check(stack
, opcode
, "&");
756 case BYTECODE_OP_BIT_OR
:
757 ret
= bin_op_bitwise_check(stack
, opcode
, "|");
761 case BYTECODE_OP_BIT_XOR
:
762 ret
= bin_op_bitwise_check(stack
, opcode
, "^");
768 case BYTECODE_OP_UNARY_PLUS
:
769 case BYTECODE_OP_UNARY_MINUS
:
770 case BYTECODE_OP_UNARY_NOT
:
772 if (!vstack_ax(stack
)) {
773 printk(KERN_WARNING
"Empty stack\n");
777 switch (vstack_ax(stack
)->type
) {
780 printk(KERN_WARNING
"unknown register type\n");
785 case REG_STAR_GLOB_STRING
:
786 printk(KERN_WARNING
"Unary op can only be applied to numeric or floating point registers\n");
791 case REG_TYPE_UNKNOWN
:
796 case BYTECODE_OP_UNARY_BIT_NOT
:
798 if (!vstack_ax(stack
)) {
799 printk(KERN_WARNING
"Empty stack\n");
803 switch (vstack_ax(stack
)->type
) {
805 printk(KERN_WARNING
"unknown register type\n");
810 case REG_STAR_GLOB_STRING
:
812 printk(KERN_WARNING
"Unary bitwise op can only be applied to numeric registers\n");
817 case REG_TYPE_UNKNOWN
:
823 case BYTECODE_OP_UNARY_PLUS_S64
:
824 case BYTECODE_OP_UNARY_MINUS_S64
:
825 case BYTECODE_OP_UNARY_NOT_S64
:
827 if (!vstack_ax(stack
)) {
828 printk(KERN_WARNING
"Empty stack\n");
832 if (vstack_ax(stack
)->type
!= REG_S64
&&
833 vstack_ax(stack
)->type
!= REG_U64
) {
834 printk(KERN_WARNING
"Invalid register type\n");
842 case BYTECODE_OP_AND
:
845 struct logical_op
*insn
= (struct logical_op
*) pc
;
847 if (!vstack_ax(stack
)) {
848 printk(KERN_WARNING
"Empty stack\n");
852 if (vstack_ax(stack
)->type
!= REG_S64
&&
853 vstack_ax(stack
)->type
!= REG_U64
) {
854 printk(KERN_WARNING
"Logical comparator expects S64 or U64 register\n");
859 dbg_printk("Validate jumping to bytecode offset %u\n",
860 (unsigned int) insn
->skip_offset
);
861 if (unlikely(start_pc
+ insn
->skip_offset
<= pc
)) {
862 printk(KERN_WARNING
"Loops are not allowed in bytecode\n");
870 case BYTECODE_OP_LOAD_FIELD_REF
:
872 printk(KERN_WARNING
"Unknown field ref type\n");
876 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
877 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
878 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
879 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
881 struct load_op
*insn
= (struct load_op
*) pc
;
882 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
884 dbg_printk("Validate load field ref offset %u type string\n",
888 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
890 struct load_op
*insn
= (struct load_op
*) pc
;
891 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
893 dbg_printk("Validate load field ref offset %u type s64\n",
898 /* load from immediate operand */
899 case BYTECODE_OP_LOAD_STRING
:
900 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
905 case BYTECODE_OP_LOAD_S64
:
910 case BYTECODE_OP_CAST_TO_S64
:
912 struct cast_op
*insn
= (struct cast_op
*) pc
;
914 if (!vstack_ax(stack
)) {
915 printk(KERN_WARNING
"Empty stack\n");
919 switch (vstack_ax(stack
)->type
) {
922 printk(KERN_WARNING
"unknown register type\n");
927 case REG_STAR_GLOB_STRING
:
928 printk(KERN_WARNING
"Cast op can only be applied to numeric or floating point registers\n");
934 if (insn
->op
== BYTECODE_OP_CAST_DOUBLE_TO_S64
) {
935 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
936 printk(KERN_WARNING
"Cast expects double\n");
943 case BYTECODE_OP_CAST_NOP
:
948 /* get context ref */
949 case BYTECODE_OP_GET_CONTEXT_REF
:
951 printk(KERN_WARNING
"Unknown get context ref type\n");
955 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
957 struct load_op
*insn
= (struct load_op
*) pc
;
958 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
960 dbg_printk("Validate get context ref offset %u type string\n",
964 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
966 struct load_op
*insn
= (struct load_op
*) pc
;
967 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
969 dbg_printk("Validate get context ref offset %u type s64\n",
975 * Instructions for recursive traversal through composed types.
977 case BYTECODE_OP_GET_CONTEXT_ROOT
:
979 dbg_printk("Validate get context root\n");
982 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
984 dbg_printk("Validate get app context root\n");
987 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
989 dbg_printk("Validate get payload root\n");
992 case BYTECODE_OP_LOAD_FIELD
:
995 * We tolerate that field type is unknown at validation,
996 * because we are performing the load specialization in
997 * a phase after validation.
999 dbg_printk("Validate load field\n");
1002 case BYTECODE_OP_LOAD_FIELD_S8
:
1004 dbg_printk("Validate load field s8\n");
1007 case BYTECODE_OP_LOAD_FIELD_S16
:
1009 dbg_printk("Validate load field s16\n");
1012 case BYTECODE_OP_LOAD_FIELD_S32
:
1014 dbg_printk("Validate load field s32\n");
1017 case BYTECODE_OP_LOAD_FIELD_S64
:
1019 dbg_printk("Validate load field s64\n");
1022 case BYTECODE_OP_LOAD_FIELD_U8
:
1024 dbg_printk("Validate load field u8\n");
1027 case BYTECODE_OP_LOAD_FIELD_U16
:
1029 dbg_printk("Validate load field u16\n");
1032 case BYTECODE_OP_LOAD_FIELD_U32
:
1034 dbg_printk("Validate load field u32\n");
1037 case BYTECODE_OP_LOAD_FIELD_U64
:
1039 dbg_printk("Validate load field u64\n");
1042 case BYTECODE_OP_LOAD_FIELD_STRING
:
1044 dbg_printk("Validate load field string\n");
1047 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1049 dbg_printk("Validate load field sequence\n");
1052 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1054 dbg_printk("Validate load field double\n");
1058 case BYTECODE_OP_GET_SYMBOL
:
1060 struct load_op
*insn
= (struct load_op
*) pc
;
1061 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1063 dbg_printk("Validate get symbol offset %u\n", sym
->offset
);
1067 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1069 struct load_op
*insn
= (struct load_op
*) pc
;
1070 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1072 dbg_printk("Validate get symbol field offset %u\n", sym
->offset
);
1076 case BYTECODE_OP_GET_INDEX_U16
:
1078 struct load_op
*insn
= (struct load_op
*) pc
;
1079 struct get_index_u16
*get_index
= (struct get_index_u16
*) insn
->data
;
1081 dbg_printk("Validate get index u16 index %u\n", get_index
->index
);
1085 case BYTECODE_OP_GET_INDEX_U64
:
1087 struct load_op
*insn
= (struct load_op
*) pc
;
1088 struct get_index_u64
*get_index
= (struct get_index_u64
*) insn
->data
;
1090 dbg_printk("Validate get index u64 index %llu\n",
1091 (unsigned long long) get_index
->index
);
1105 int validate_instruction_all_contexts(struct bytecode_runtime
*bytecode
,
1106 struct mp_table
*mp_table
,
1107 struct vstack
*stack
,
1112 unsigned long target_pc
= pc
- start_pc
;
1114 struct hlist_head
*head
;
1115 struct mp_node
*mp_node
;
1117 /* Validate the context resulting from the previous instruction */
1118 ret
= validate_instruction_context(bytecode
, stack
, start_pc
, pc
);
1122 /* Validate merge points */
1123 hash
= jhash_1word(target_pc
, 0);
1124 head
= &mp_table
->mp_head
[hash
& (MERGE_POINT_TABLE_SIZE
- 1)];
1125 lttng_hlist_for_each_entry(mp_node
, head
, node
) {
1126 if (lttng_hash_match(mp_node
, target_pc
)) {
1132 dbg_printk("Bytecode: validate merge point at offset %lu\n",
1134 if (merge_points_compare(stack
, &mp_node
->stack
)) {
1135 printk(KERN_WARNING
"Merge points differ for offset %lu\n",
1139 /* Once validated, we can remove the merge point */
1140 dbg_printk("Bytecode: remove merge point at offset %lu\n",
1142 hlist_del(&mp_node
->node
);
1149 * >0: going to next insn.
1150 * 0: success, stop iteration.
1154 int exec_insn(struct bytecode_runtime
*bytecode
,
1155 struct mp_table
*mp_table
,
1156 struct vstack
*stack
,
1161 char *next_pc
= *_next_pc
;
1163 switch (*(bytecode_opcode_t
*) pc
) {
1164 case BYTECODE_OP_UNKNOWN
:
1167 printk(KERN_WARNING
"unknown bytecode op %u\n",
1168 (unsigned int) *(bytecode_opcode_t
*) pc
);
1173 case BYTECODE_OP_RETURN
:
1175 if (!vstack_ax(stack
)) {
1176 printk(KERN_WARNING
"Empty stack\n");
1180 switch (vstack_ax(stack
)->type
) {
1186 case REG_TYPE_UNKNOWN
:
1189 printk(KERN_WARNING
"Unexpected register type %d at end of bytecode\n",
1190 (int) vstack_ax(stack
)->type
);
1199 case BYTECODE_OP_RETURN_S64
:
1201 if (!vstack_ax(stack
)) {
1202 printk(KERN_WARNING
"Empty stack\n");
1206 switch (vstack_ax(stack
)->type
) {
1211 case REG_TYPE_UNKNOWN
:
1212 printk(KERN_WARNING
"Unexpected register type %d at end of bytecode\n",
1213 (int) vstack_ax(stack
)->type
);
1223 case BYTECODE_OP_MUL
:
1224 case BYTECODE_OP_DIV
:
1225 case BYTECODE_OP_MOD
:
1226 case BYTECODE_OP_PLUS
:
1227 case BYTECODE_OP_MINUS
:
1228 /* Floating point */
1229 case BYTECODE_OP_EQ_DOUBLE
:
1230 case BYTECODE_OP_NE_DOUBLE
:
1231 case BYTECODE_OP_GT_DOUBLE
:
1232 case BYTECODE_OP_LT_DOUBLE
:
1233 case BYTECODE_OP_GE_DOUBLE
:
1234 case BYTECODE_OP_LE_DOUBLE
:
1235 case BYTECODE_OP_EQ_DOUBLE_S64
:
1236 case BYTECODE_OP_NE_DOUBLE_S64
:
1237 case BYTECODE_OP_GT_DOUBLE_S64
:
1238 case BYTECODE_OP_LT_DOUBLE_S64
:
1239 case BYTECODE_OP_GE_DOUBLE_S64
:
1240 case BYTECODE_OP_LE_DOUBLE_S64
:
1241 case BYTECODE_OP_EQ_S64_DOUBLE
:
1242 case BYTECODE_OP_NE_S64_DOUBLE
:
1243 case BYTECODE_OP_GT_S64_DOUBLE
:
1244 case BYTECODE_OP_LT_S64_DOUBLE
:
1245 case BYTECODE_OP_GE_S64_DOUBLE
:
1246 case BYTECODE_OP_LE_S64_DOUBLE
:
1247 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1248 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1249 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1250 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1251 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1252 case BYTECODE_OP_LOAD_DOUBLE
:
1253 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1255 printk(KERN_WARNING
"unsupported bytecode op %u\n",
1256 (unsigned int) *(bytecode_opcode_t
*) pc
);
1261 case BYTECODE_OP_EQ
:
1262 case BYTECODE_OP_NE
:
1263 case BYTECODE_OP_GT
:
1264 case BYTECODE_OP_LT
:
1265 case BYTECODE_OP_GE
:
1266 case BYTECODE_OP_LE
:
1267 case BYTECODE_OP_EQ_STRING
:
1268 case BYTECODE_OP_NE_STRING
:
1269 case BYTECODE_OP_GT_STRING
:
1270 case BYTECODE_OP_LT_STRING
:
1271 case BYTECODE_OP_GE_STRING
:
1272 case BYTECODE_OP_LE_STRING
:
1273 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1274 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1275 case BYTECODE_OP_EQ_S64
:
1276 case BYTECODE_OP_NE_S64
:
1277 case BYTECODE_OP_GT_S64
:
1278 case BYTECODE_OP_LT_S64
:
1279 case BYTECODE_OP_GE_S64
:
1280 case BYTECODE_OP_LE_S64
:
1283 if (vstack_pop(stack
)) {
1287 if (!vstack_ax(stack
)) {
1288 printk(KERN_WARNING
"Empty stack\n");
1292 switch (vstack_ax(stack
)->type
) {
1297 case REG_STAR_GLOB_STRING
:
1298 case REG_TYPE_UNKNOWN
:
1301 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1302 (int) vstack_ax(stack
)->type
);
1307 vstack_ax(stack
)->type
= REG_S64
;
1308 next_pc
+= sizeof(struct binary_op
);
1311 case BYTECODE_OP_BIT_RSHIFT
:
1312 case BYTECODE_OP_BIT_LSHIFT
:
1313 case BYTECODE_OP_BIT_AND
:
1314 case BYTECODE_OP_BIT_OR
:
1315 case BYTECODE_OP_BIT_XOR
:
1318 if (vstack_pop(stack
)) {
1322 if (!vstack_ax(stack
)) {
1323 printk(KERN_WARNING
"Empty stack\n");
1327 switch (vstack_ax(stack
)->type
) {
1332 case REG_STAR_GLOB_STRING
:
1333 case REG_TYPE_UNKNOWN
:
1336 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1337 (int) vstack_ax(stack
)->type
);
1342 vstack_ax(stack
)->type
= REG_U64
;
1343 next_pc
+= sizeof(struct binary_op
);
1348 case BYTECODE_OP_UNARY_PLUS
:
1349 case BYTECODE_OP_UNARY_MINUS
:
1352 if (!vstack_ax(stack
)) {
1353 printk(KERN_WARNING
"Empty stack\n\n");
1357 switch (vstack_ax(stack
)->type
) {
1360 case REG_TYPE_UNKNOWN
:
1363 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1364 (int) vstack_ax(stack
)->type
);
1369 vstack_ax(stack
)->type
= REG_TYPE_UNKNOWN
;
1370 next_pc
+= sizeof(struct unary_op
);
1374 case BYTECODE_OP_UNARY_PLUS_S64
:
1375 case BYTECODE_OP_UNARY_MINUS_S64
:
1376 case BYTECODE_OP_UNARY_NOT_S64
:
1379 if (!vstack_ax(stack
)) {
1380 printk(KERN_WARNING
"Empty stack\n\n");
1384 switch (vstack_ax(stack
)->type
) {
1389 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1390 (int) vstack_ax(stack
)->type
);
1395 next_pc
+= sizeof(struct unary_op
);
1399 case BYTECODE_OP_UNARY_NOT
:
1402 if (!vstack_ax(stack
)) {
1403 printk(KERN_WARNING
"Empty stack\n\n");
1407 switch (vstack_ax(stack
)->type
) {
1410 case REG_TYPE_UNKNOWN
:
1413 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1414 (int) vstack_ax(stack
)->type
);
1419 next_pc
+= sizeof(struct unary_op
);
1423 case BYTECODE_OP_UNARY_BIT_NOT
:
1426 if (!vstack_ax(stack
)) {
1427 printk(KERN_WARNING
"Empty stack\n");
1431 switch (vstack_ax(stack
)->type
) {
1434 case REG_TYPE_UNKNOWN
:
1438 printk(KERN_WARNING
"Unexpected register type %d for operation\n",
1439 (int) vstack_ax(stack
)->type
);
1444 vstack_ax(stack
)->type
= REG_U64
;
1445 next_pc
+= sizeof(struct unary_op
);
1450 case BYTECODE_OP_AND
:
1451 case BYTECODE_OP_OR
:
1453 struct logical_op
*insn
= (struct logical_op
*) pc
;
1456 /* Add merge point to table */
1457 merge_ret
= merge_point_add_check(mp_table
,
1458 insn
->skip_offset
, stack
);
1464 if (!vstack_ax(stack
)) {
1465 printk(KERN_WARNING
"Empty stack\n\n");
1469 /* There is always a cast-to-s64 operation before a or/and op. */
1470 switch (vstack_ax(stack
)->type
) {
1475 printk(KERN_WARNING
"Incorrect register type %d for operation\n",
1476 (int) vstack_ax(stack
)->type
);
1481 /* Continue to next instruction */
1482 /* Pop 1 when jump not taken */
1483 if (vstack_pop(stack
)) {
1487 next_pc
+= sizeof(struct logical_op
);
1491 /* load field ref */
1492 case BYTECODE_OP_LOAD_FIELD_REF
:
1494 printk(KERN_WARNING
"Unknown field ref type\n");
1498 /* get context ref */
1499 case BYTECODE_OP_GET_CONTEXT_REF
:
1501 printk(KERN_WARNING
"Unknown get context ref type\n");
1505 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1506 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1507 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1508 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
1509 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
1511 if (vstack_push(stack
)) {
1515 vstack_ax(stack
)->type
= REG_STRING
;
1516 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1519 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1520 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1522 if (vstack_push(stack
)) {
1526 vstack_ax(stack
)->type
= REG_S64
;
1527 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1531 /* load from immediate operand */
1532 case BYTECODE_OP_LOAD_STRING
:
1534 struct load_op
*insn
= (struct load_op
*) pc
;
1536 if (vstack_push(stack
)) {
1540 vstack_ax(stack
)->type
= REG_STRING
;
1541 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1545 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1547 struct load_op
*insn
= (struct load_op
*) pc
;
1549 if (vstack_push(stack
)) {
1553 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1554 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1558 case BYTECODE_OP_LOAD_S64
:
1560 if (vstack_push(stack
)) {
1564 vstack_ax(stack
)->type
= REG_S64
;
1565 next_pc
+= sizeof(struct load_op
)
1566 + sizeof(struct literal_numeric
);
1570 case BYTECODE_OP_CAST_TO_S64
:
1573 if (!vstack_ax(stack
)) {
1574 printk(KERN_WARNING
"Empty stack\n");
1578 switch (vstack_ax(stack
)->type
) {
1582 case REG_TYPE_UNKNOWN
:
1585 printk(KERN_WARNING
"Incorrect register type %d for cast\n",
1586 (int) vstack_ax(stack
)->type
);
1590 vstack_ax(stack
)->type
= REG_S64
;
1591 next_pc
+= sizeof(struct cast_op
);
1594 case BYTECODE_OP_CAST_NOP
:
1596 next_pc
+= sizeof(struct cast_op
);
1601 * Instructions for recursive traversal through composed types.
1603 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1604 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1605 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1607 if (vstack_push(stack
)) {
1611 vstack_ax(stack
)->type
= REG_PTR
;
1612 next_pc
+= sizeof(struct load_op
);
1616 case BYTECODE_OP_LOAD_FIELD
:
1619 if (!vstack_ax(stack
)) {
1620 printk(KERN_WARNING
"Empty stack\n\n");
1624 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1625 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1629 vstack_ax(stack
)->type
= REG_TYPE_UNKNOWN
;
1630 next_pc
+= sizeof(struct load_op
);
1634 case BYTECODE_OP_LOAD_FIELD_S8
:
1635 case BYTECODE_OP_LOAD_FIELD_S16
:
1636 case BYTECODE_OP_LOAD_FIELD_S32
:
1637 case BYTECODE_OP_LOAD_FIELD_S64
:
1640 if (!vstack_ax(stack
)) {
1641 printk(KERN_WARNING
"Empty stack\n\n");
1645 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1646 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1650 vstack_ax(stack
)->type
= REG_S64
;
1651 next_pc
+= sizeof(struct load_op
);
1654 case BYTECODE_OP_LOAD_FIELD_U8
:
1655 case BYTECODE_OP_LOAD_FIELD_U16
:
1656 case BYTECODE_OP_LOAD_FIELD_U32
:
1657 case BYTECODE_OP_LOAD_FIELD_U64
:
1660 if (!vstack_ax(stack
)) {
1661 printk(KERN_WARNING
"Empty stack\n\n");
1665 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1666 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1670 vstack_ax(stack
)->type
= REG_U64
;
1671 next_pc
+= sizeof(struct load_op
);
1674 case BYTECODE_OP_LOAD_FIELD_STRING
:
1675 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1678 if (!vstack_ax(stack
)) {
1679 printk(KERN_WARNING
"Empty stack\n\n");
1683 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1684 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1688 vstack_ax(stack
)->type
= REG_STRING
;
1689 next_pc
+= sizeof(struct load_op
);
1693 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1696 if (!vstack_ax(stack
)) {
1697 printk(KERN_WARNING
"Empty stack\n\n");
1701 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1702 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1706 vstack_ax(stack
)->type
= REG_DOUBLE
;
1707 next_pc
+= sizeof(struct load_op
);
1711 case BYTECODE_OP_GET_SYMBOL
:
1712 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1715 if (!vstack_ax(stack
)) {
1716 printk(KERN_WARNING
"Empty stack\n\n");
1720 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1721 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1725 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1729 case BYTECODE_OP_GET_INDEX_U16
:
1732 if (!vstack_ax(stack
)) {
1733 printk(KERN_WARNING
"Empty stack\n\n");
1737 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1738 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1742 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1746 case BYTECODE_OP_GET_INDEX_U64
:
1749 if (!vstack_ax(stack
)) {
1750 printk(KERN_WARNING
"Empty stack\n\n");
1754 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1755 printk(KERN_WARNING
"Expecting pointer on top of stack\n\n");
1759 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1765 *_next_pc
= next_pc
;
1770 * Never called concurrently (hash seed is shared).
1772 int lttng_bytecode_validate(struct bytecode_runtime
*bytecode
)
1774 struct mp_table
*mp_table
;
1775 char *pc
, *next_pc
, *start_pc
;
1777 struct vstack stack
;
1779 vstack_init(&stack
);
1781 mp_table
= kzalloc(sizeof(*mp_table
), GFP_KERNEL
);
1783 printk(KERN_WARNING
"Error allocating hash table for bytecode validation\n");
1786 start_pc
= &bytecode
->code
[0];
1787 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
1789 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
1792 printk(KERN_WARNING
"bytecode overflow\n");
1795 dbg_printk("Validating op %s (%u)\n",
1796 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
1797 (unsigned int) *(bytecode_opcode_t
*) pc
);
1800 * For each instruction, validate the current context
1801 * (traversal of entire execution flow), and validate
1802 * all merge points targeting this instruction.
1804 ret
= validate_instruction_all_contexts(bytecode
, mp_table
,
1805 &stack
, start_pc
, pc
);
1808 ret
= exec_insn(bytecode
, mp_table
, &stack
, &next_pc
, pc
);
1813 if (delete_all_nodes(mp_table
)) {
1815 printk(KERN_WARNING
"Unexpected merge points\n");