/*
 * lttng-bytecode-specialize.c
 *
 * LTTng UST bytecode specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/* Standard headers for the libc calls used below (realloc, memcpy/memset, str*, assert, CHAR_BIT). */
#include <assert.h>
#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#include "lttng-bytecode.h"
#include <lttng/align.h>
#include "ust-events-internal.h"

/* Find last (most significant) set bit: 1-based position, 0 if no bit is set. */
static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

/* Smallest order such that (1U << order) >= count. */
static int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
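
/*
 * The helpers below manage the runtime "data" area, which holds the
 * payloads of specialized instructions (e.g. struct bytecode_get_index_data).
 * The allocation grows to the next power of two of the requested length, or
 * to twice the current allocation, whichever is larger, so repeated pushes
 * stay amortized O(1). Worked example: get_count_order(48) returns 6, so a
 * 48-byte request grows a small buffer to 1 << 6 = 64 bytes.
 */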
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = lttng_ust_offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > BYTECODE_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len),
				old_alloc_len << 1);
		newptr = realloc(runtime->data, new_alloc_len);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* We zero directly the memory from start of allocation. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}

static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
		const void *p, size_t align, size_t len)
{
	ssize_t offset;

	offset = bytecode_reserve_data(runtime, align, len);
	if (offset < 0)
		return -ENOMEM;
	memcpy(&runtime->data[offset], p, len);
	return offset;
}
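
/*
 * Specialize a generic BYTECODE_OP_LOAD_FIELD according to the object type
 * found on top of the virtual stack: the opcode is rewritten into its typed
 * variant (e.g. BYTECODE_OP_LOAD_FIELD_S8) and the stack entry is retyped to
 * the register class the interpreter will produce at runtime.
 */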
static int specialize_load_field(struct vstack_entry *stack_top,
		struct load_op *insn)
{
	int ret;

	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->load.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printf("op load field s8\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S8;
		break;
	case OBJECT_TYPE_S16:
		dbg_printf("op load field s16\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S16;
		break;
	case OBJECT_TYPE_S32:
		dbg_printf("op load field s32\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S32;
		break;
	case OBJECT_TYPE_S64:
		dbg_printf("op load field s64\n");
		stack_top->type = REG_S64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_S64;
		break;
	case OBJECT_TYPE_SIGNED_ENUM:
		dbg_printf("op load field signed enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_U8:
		dbg_printf("op load field u8\n");
		stack_top->type = REG_U64;
		insn->op = BYTECODE_OP_LOAD_FIELD_U8;
		break;
	case OBJECT_TYPE_U16:
		dbg_printf("op load field u16\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U16;
		break;
	case OBJECT_TYPE_U32:
		dbg_printf("op load field u32\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U32;
		break;
	case OBJECT_TYPE_U64:
		dbg_printf("op load field u64\n");
		stack_top->type = REG_U64;
		if (!stack_top->load.rev_bo)
			insn->op = BYTECODE_OP_LOAD_FIELD_U64;
		break;
	case OBJECT_TYPE_UNSIGNED_ENUM:
		dbg_printf("op load field unsigned enumeration\n");
		stack_top->type = REG_PTR;
		break;
	case OBJECT_TYPE_DOUBLE:
		stack_top->type = REG_DOUBLE;
		insn->op = BYTECODE_OP_LOAD_FIELD_DOUBLE;
		break;
	case OBJECT_TYPE_STRING:
		dbg_printf("op load field string\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
		break;
	case OBJECT_TYPE_STRING_SEQUENCE:
		dbg_printf("op load field string sequence\n");
		stack_top->type = REG_STRING;
		insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
		break;
	case OBJECT_TYPE_DYNAMIC:
		dbg_printf("op load field dynamic\n");
		stack_top->type = REG_UNKNOWN;
		/* Don't specialize load op. */
		break;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
static int specialize_get_index_object_type(enum object_type *otype,
		int signedness, uint32_t elem_len)
{
	switch (elem_len) {
	case 8:
		if (signedness)
			*otype = OBJECT_TYPE_S8;
		else
			*otype = OBJECT_TYPE_U8;
		break;
	case 16:
		if (signedness)
			*otype = OBJECT_TYPE_S16;
		else
			*otype = OBJECT_TYPE_U16;
		break;
	case 32:
		if (signedness)
			*otype = OBJECT_TYPE_S32;
		else
			*otype = OBJECT_TYPE_U32;
		break;
	case 64:
		if (signedness)
			*otype = OBJECT_TYPE_S64;
		else
			*otype = OBJECT_TYPE_U64;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
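
/*
 * Specialize a get_index operation on an array or sequence of integers:
 * the element type, byte offset and (for arrays) total length in bytes are
 * computed once here and stored in a struct bytecode_get_index_data pushed
 * into the runtime data area; the instruction then only carries the offset
 * of that descriptor.
 */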
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			switch (field->type.atype) {
			case atype_array:
				integer_type = &field->type.u.legacy.array.elem_type.u.basic.integer;
				num_elems = field->type.u.legacy.array.length;
				break;
			case atype_array_nestable:
				if (field->type.u.array_nestable.elem_type->atype != atype_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = &field->type.u.array_nestable.elem_type->u.integer;
				num_elems = field->type.u.array_nestable.length;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_integer_type *integer_type;
			const struct lttng_event_field *field;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			switch (field->type.atype) {
			case atype_sequence:
				integer_type = &field->type.u.legacy.sequence.elem_type.u.basic.integer;
				break;
			case atype_sequence_nestable:
				if (field->type.u.sequence_nestable.elem_type->atype != atype_integer) {
					ret = -EINVAL;
					goto end;
				}
				integer_type = &field->type.u.sequence_nestable.elem_type->u.integer;
				break;
			default:
				ret = -EINVAL;
				goto end;
			}
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:	/* Fall-through */
		default:
			ERR("Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		ERR("Index lookup for root field not implemented yet.");
		ret = -EINVAL;
		goto end;
	}
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}

	return 0;

end:
	return ret;
}
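
/*
 * Resolve the field name referenced by a get_symbol instruction against the
 * context. The name is read from the bytecode's relocation table; the return
 * value is the context field index, or a negative value if not found.
 */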
static int specialize_context_lookup_name(struct lttng_ctx *ctx,
		struct bytecode_runtime *bytecode,
		struct load_op *insn)
{
	uint16_t offset;
	const char *name;

	offset = ((struct get_symbol *) insn->data)->offset;
	name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
	return lttng_get_context_index(ctx, name);
}
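
/*
 * Translate an event field type into the vstack_load object type used by the
 * load/get_index specialization. Context lookups (is_context == true) expose
 * character arrays and sequences as strings.
 */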
static int specialize_load_object(const struct lttng_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type.atype) {
	case atype_integer:
		if (field->type.u.integer.signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case atype_enum:
	case atype_enum_nestable:
	{
		const struct lttng_integer_type *itype;

		if (field->type.atype == atype_enum) {
			itype = &field->type.u.legacy.basic.enumeration.container_type;
		} else {
			itype = &field->type.u.enum_nestable.container_type->u.integer;
		}
		if (itype->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case atype_array:
		if (field->type.u.legacy.array.elem_type.atype != atype_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.legacy.array.elem_type.u.basic.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_array_nestable:
		if (field->type.u.array_nestable.elem_type->atype != atype_integer) {
			ERR("Array nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence:
		if (field->type.u.legacy.sequence.elem_type.atype != atype_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.legacy.sequence.elem_type.u.basic.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_sequence_nestable:
		if (field->type.u.sequence_nestable.elem_type->atype != atype_integer) {
			ERR("Sequence nesting only supports integer types.");
			return -EINVAL;
		}
		if (is_context) {
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	case atype_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case atype_float:
		load->object_type = OBJECT_TYPE_DOUBLE;
		break;
	case atype_dynamic:
		load->object_type = OBJECT_TYPE_DYNAMIC;
		break;
	case atype_struct:
		ERR("Structure type cannot be loaded.");
		return -EINVAL;
	default:
		ERR("Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
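
/*
 * Specialize a get_symbol lookup on the static context ($ctx.*): the symbol
 * is resolved once at specialization time and the instruction is rewritten
 * into a BYTECODE_OP_GET_INDEX_U16 referencing the resolved context field.
 */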
static int specialize_context_lookup(struct lttng_ctx *ctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	int idx, ret;
	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	idx = specialize_context_lookup_name(ctx, runtime, insn);
	if (idx < 0) {
		return -ENOENT;
	}
	ctx_field = &ctx->fields[idx];
	field = &ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		return ret;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		return -EINVAL;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	return 0;
}
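
/*
 * Same as specialize_context_lookup(), but for application-provided contexts
 * ($app.*). The context field is added on demand if the application has not
 * registered it yet, hence the second lookup after the add.
 */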
static int specialize_app_context_lookup(struct lttng_ctx **pctx,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	uint16_t offset;
	const char *orig_name;
	char *name = NULL;
	int idx, ret;
	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	offset = ((struct get_symbol *) insn->data)->offset;
	orig_name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	name = zmalloc(strlen(orig_name) + strlen("$app.") + 1);
	if (!name) {
		ret = -ENOMEM;
		goto end;
	}
	strcpy(name, "$app.");
	strcat(name, orig_name);
	idx = lttng_get_context_index(*pctx, name);
	if (idx < 0) {
		assert(lttng_context_is_app(name));
		ret = lttng_ust_add_app_context_to_ctx_rcu(name,
				pctx);
		if (ret)
			goto end;
		idx = lttng_get_context_index(*pctx, name);
		if (idx < 0) {
			ret = -ENOENT;
			goto end;
		}
	}
	ctx_field = &(*pctx)->fields[idx];
	field = &ctx_field->event_field;
	ret = specialize_load_object(field, load, true);
	if (ret)
		goto end;
	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.ctx_index = idx;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	free(name);
	return ret;
}
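
/*
 * Specialize a get_symbol lookup on the event payload: walk the event field
 * descriptions, accumulate the interpreter stack offset of each preceding
 * field, and rewrite the instruction into a BYTECODE_OP_GET_INDEX_U16
 * carrying that precomputed offset.
 */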
static int specialize_payload_lookup(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = &event_desc->fields[i];
		if (field->u.ext.nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type.atype) {
		case atype_integer:
		case atype_enum:
		case atype_enum_nestable:
			field_offset += sizeof(int64_t);
			break;
		case atype_array:
		case atype_array_nestable:
		case atype_sequence:
		case atype_sequence_nestable:
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case atype_string:
			field_offset += sizeof(void *);
			break;
		case atype_float:
			field_offset += sizeof(double);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
		__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
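
/*
 * Specialization pass entry point: single forward walk over the bytecode,
 * tracking operand types on a virtual stack so that generic opcodes can be
 * rewritten into their typed fast-path variants. It is assumed to run after
 * the validation pass, which checks stack depth and operand sanity.
 */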
int lttng_bytecode_specialize(const struct lttng_event_desc *event_desc,
		struct bytecode_runtime *bytecode)
{
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	struct vstack _stack;
	struct vstack *stack = &_stack;
	struct lttng_ctx **pctx = bytecode->p.pctx;

	vstack_init(stack);

	start_pc = &bytecode->code[0];
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
			pc = next_pc) {
		switch (*(bytecode_opcode_t *) pc) {
		case BYTECODE_OP_UNKNOWN:
		default:
			ERR("unknown bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_RETURN:
			if (vstack_ax(stack)->type == REG_S64 ||
					vstack_ax(stack)->type == REG_U64)
				*(bytecode_opcode_t *) pc = BYTECODE_OP_RETURN_S64;
			ret = 0;
			goto end;

		case BYTECODE_OP_RETURN_S64:
			if (vstack_ax(stack)->type != REG_S64 &&
					vstack_ax(stack)->type != REG_U64) {
				ERR("Unexpected register type\n");
				ret = -EINVAL;
				goto end;
			}
			ret = 0;
			goto end;

		case BYTECODE_OP_MUL:
		case BYTECODE_OP_DIV:
		case BYTECODE_OP_MOD:
		case BYTECODE_OP_PLUS:
		case BYTECODE_OP_MINUS:
			ERR("unsupported bytecode op %u\n",
				(unsigned int) *(bytecode_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		case BYTECODE_OP_EQ:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_EQ_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_EQ_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_NE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
					insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				else
					insn->op = BYTECODE_OP_NE_STRING;
				break;
			case REG_STAR_GLOB_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_NE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_NE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for > binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LT:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for < binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LT_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LT_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LT_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_GE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for >= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_GE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_GE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_GE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_U64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_LE:
		{
			struct binary_op *insn = (struct binary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STAR_GLOB_STRING:
				ERR("invalid register type for <= binary operator\n");
				ret = -EINVAL;
				goto end;
			case REG_STRING:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				insn->op = BYTECODE_OP_LE_STRING;
				break;
			case REG_S64:
			case REG_U64:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE_S64;
				break;
			case REG_DOUBLE:
				if (vstack_bx(stack)->type == REG_UNKNOWN)
					break;
				if (vstack_bx(stack)->type == REG_S64 ||
						vstack_bx(stack)->type == REG_U64)
					insn->op = BYTECODE_OP_LE_S64_DOUBLE;
				else
					insn->op = BYTECODE_OP_LE_DOUBLE;
				break;
			case REG_UNKNOWN:
				break;	/* Dynamic typing. */
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_EQ_STRING:
		case BYTECODE_OP_NE_STRING:
		case BYTECODE_OP_GT_STRING:
		case BYTECODE_OP_LT_STRING:
		case BYTECODE_OP_GE_STRING:
		case BYTECODE_OP_LE_STRING:
		case BYTECODE_OP_EQ_STAR_GLOB_STRING:
		case BYTECODE_OP_NE_STAR_GLOB_STRING:
		case BYTECODE_OP_EQ_S64:
		case BYTECODE_OP_NE_S64:
		case BYTECODE_OP_GT_S64:
		case BYTECODE_OP_LT_S64:
		case BYTECODE_OP_GE_S64:
		case BYTECODE_OP_LE_S64:
		case BYTECODE_OP_EQ_DOUBLE:
		case BYTECODE_OP_NE_DOUBLE:
		case BYTECODE_OP_GT_DOUBLE:
		case BYTECODE_OP_LT_DOUBLE:
		case BYTECODE_OP_GE_DOUBLE:
		case BYTECODE_OP_LE_DOUBLE:
		case BYTECODE_OP_EQ_DOUBLE_S64:
		case BYTECODE_OP_NE_DOUBLE_S64:
		case BYTECODE_OP_GT_DOUBLE_S64:
		case BYTECODE_OP_LT_DOUBLE_S64:
		case BYTECODE_OP_GE_DOUBLE_S64:
		case BYTECODE_OP_LE_DOUBLE_S64:
		case BYTECODE_OP_EQ_S64_DOUBLE:
		case BYTECODE_OP_NE_S64_DOUBLE:
		case BYTECODE_OP_GT_S64_DOUBLE:
		case BYTECODE_OP_LT_S64_DOUBLE:
		case BYTECODE_OP_GE_S64_DOUBLE:
		case BYTECODE_OP_LE_S64_DOUBLE:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_BIT_RSHIFT:
		case BYTECODE_OP_BIT_LSHIFT:
		case BYTECODE_OP_BIT_AND:
		case BYTECODE_OP_BIT_OR:
		case BYTECODE_OP_BIT_XOR:
		{
			/* Pop 2, push 1 */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct binary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_PLUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_MINUS:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_MINUS_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_NOT:
		{
			struct unary_op *insn = (struct unary_op *) pc;

			switch(vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_S64:
			case REG_U64:
				insn->op = BYTECODE_OP_UNARY_NOT_S64;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
				break;
			case REG_UNKNOWN:	/* Dynamic typing. */
				break;
			}
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_BIT_NOT:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_UNARY_PLUS_S64:
		case BYTECODE_OP_UNARY_MINUS_S64:
		case BYTECODE_OP_UNARY_NOT_S64:
		case BYTECODE_OP_UNARY_PLUS_DOUBLE:
		case BYTECODE_OP_UNARY_MINUS_DOUBLE:
		case BYTECODE_OP_UNARY_NOT_DOUBLE:
		{
			/* Pop 1, push 1 */
			next_pc += sizeof(struct unary_op);
			break;
		}

		case BYTECODE_OP_AND:
		case BYTECODE_OP_OR:
		{
			/* Continue to next instruction */
			/* Pop 1 when jump not taken */
			if (vstack_pop(stack)) {
				ret = -EINVAL;
				goto end;
			}
			next_pc += sizeof(struct logical_op);
			break;
		}

		/* load field ref */
		case BYTECODE_OP_LOAD_FIELD_REF:
		{
			ERR("Unknown field ref type\n");
			ret = -EINVAL;
			goto end;
		}
		/* get context ref */
		case BYTECODE_OP_GET_CONTEXT_REF:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_UNKNOWN;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_STRING:
		case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
		case BYTECODE_OP_GET_CONTEXT_REF_STRING:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_S64:
		case BYTECODE_OP_GET_CONTEXT_REF_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}
		case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
		case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			break;
		}

		/* load from immediate operand */
		case BYTECODE_OP_LOAD_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
		{
			struct load_op *insn = (struct load_op *) pc;

			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			break;
		}

		case BYTECODE_OP_LOAD_S64:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			break;
		}

		case BYTECODE_OP_LOAD_DOUBLE:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			break;
		}

		case BYTECODE_OP_CAST_TO_S64:
		{
			struct cast_op *insn = (struct cast_op *) pc;

			switch (vstack_ax(stack)->type) {
			default:
				ERR("unknown register type\n");
				ret = -EINVAL;
				goto end;

			case REG_STRING:
			case REG_STAR_GLOB_STRING:
				ERR("Cast op can only be applied to numeric or floating point registers\n");
				ret = -EINVAL;
				goto end;
			case REG_S64:
				insn->op = BYTECODE_OP_CAST_NOP;
				break;
			case REG_DOUBLE:
				insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
				break;
			case REG_UNKNOWN:
			case REG_U64:
				break;
			}
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_DOUBLE_TO_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct cast_op);
			break;
		}
		case BYTECODE_OP_CAST_NOP:
		{
			next_pc += sizeof(struct cast_op);
			break;
		}

		/*
		 * Instructions for recursive traversal through composed types.
		 */
		case BYTECODE_OP_GET_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
			next_pc += sizeof(struct load_op);
			break;
		}
		case BYTECODE_OP_GET_PAYLOAD_ROOT:
		{
			if (vstack_push(stack)) {
				ret = -EINVAL;
				goto end;
			}
			vstack_ax(stack)->type = REG_PTR;
			vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD:
		{
			struct load_op *insn = (struct load_op *) pc;

			assert(vstack_ax(stack)->type == REG_PTR);
			/* Pop 1, push 1 */
			ret = specialize_load_field(vstack_ax(stack), insn);
			if (ret)
				goto end;

			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_S8:
		case BYTECODE_OP_LOAD_FIELD_S16:
		case BYTECODE_OP_LOAD_FIELD_S32:
		case BYTECODE_OP_LOAD_FIELD_S64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_S64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_U8:
		case BYTECODE_OP_LOAD_FIELD_U16:
		case BYTECODE_OP_LOAD_FIELD_U32:
		case BYTECODE_OP_LOAD_FIELD_U64:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_U64;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_STRING:
		case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_STRING;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_LOAD_FIELD_DOUBLE:
		{
			/* Pop 1, push 1 */
			vstack_ax(stack)->type = REG_DOUBLE;
			next_pc += sizeof(struct load_op);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL:
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printf("op get symbol\n");
			switch (vstack_ax(stack)->load.type) {
			case LOAD_OBJECT:
				ERR("Nested fields not implemented yet.");
				ret = -EINVAL;
				goto end;
			case LOAD_ROOT_CONTEXT:
				/* Lookup context field. */
				ret = specialize_context_lookup(*pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_APP_CONTEXT:
				/* Lookup app context field. */
				ret = specialize_app_context_lookup(pctx,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			case LOAD_ROOT_PAYLOAD:
				/* Lookup event payload field. */
				ret = specialize_payload_lookup(event_desc,
					bytecode, insn,
					&vstack_ax(stack)->load);
				if (ret)
					goto end;
				break;
			}
			next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
			break;
		}

		case BYTECODE_OP_GET_SYMBOL_FIELD:
		{
			/* Always generated by specialize phase. */
			ret = -EINVAL;
			goto end;
		}

		case BYTECODE_OP_GET_INDEX_U16:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

			dbg_printf("op get index u16\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
			break;
		}

		case BYTECODE_OP_GET_INDEX_U64:
		{
			struct load_op *insn = (struct load_op *) pc;
			struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

			dbg_printf("op get index u64\n");
			/* Pop 1, push 1 */
			ret = specialize_get_index(bytecode, insn, index->index,
					vstack_ax(stack), sizeof(*index));
			if (ret)
				goto end;
			next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);