1 /* GDB-specific functions for operating on agent expressions.
2
3 Copyright (C) 1998-2001, 2003, 2007-2012 Free Software Foundation,
4 Inc.
5
6 This file is part of GDB.
7
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20
21 #include "defs.h"
22 #include "symtab.h"
23 #include "symfile.h"
24 #include "gdbtypes.h"
25 #include "language.h"
26 #include "value.h"
27 #include "expression.h"
28 #include "command.h"
29 #include "gdbcmd.h"
30 #include "frame.h"
31 #include "target.h"
32 #include "ax.h"
33 #include "ax-gdb.h"
34 #include "gdb_string.h"
35 #include "block.h"
36 #include "regcache.h"
37 #include "user-regs.h"
38 #include "language.h"
39 #include "dictionary.h"
40 #include "breakpoint.h"
41 #include "tracepoint.h"
42 #include "cp-support.h"
43 #include "arch-utils.h"
44
45 #include "valprint.h"
46 #include "c-lang.h"
47
48 /* To make sense of this file, you should read doc/agentexpr.texi.
49 Then look at the types and enums in ax-gdb.h. For the code itself,
50 look at gen_expr, towards the bottom; that's the main function that
51 looks at the GDB expressions and calls everything else to generate
52 code.
53
54 I'm beginning to wonder whether it wouldn't be nicer to internally
55 generate trees, with types, and then spit out the bytecode in
56 linear form afterwards; we could generate fewer `swap', `ext', and
57 `zero_ext' bytecodes that way; it would make good constant folding
58 easier, too. But at the moment, I think we should be willing to
59 pay for the simplicity of this code with less-than-optimal bytecode
60 strings.
61
62 Remember, "GBD" stands for "Great Britain, Dammit!" So be careful. */
63 \f
64
65
66 /* Prototypes for local functions. */
67
68 /* There's a standard order to the arguments of these functions:
69 union exp_element ** --- pointer into expression
70 struct agent_expr * --- agent expression buffer to generate code into
71 struct axs_value * --- describes value left on top of stack */
72
73 static struct value *const_var_ref (struct symbol *var);
74 static struct value *const_expr (union exp_element **pc);
75 static struct value *maybe_const_expr (union exp_element **pc);
76
77 static void gen_traced_pop (struct gdbarch *, struct agent_expr *,
78 struct axs_value *);
79
80 static void gen_sign_extend (struct agent_expr *, struct type *);
81 static void gen_extend (struct agent_expr *, struct type *);
82 static void gen_fetch (struct agent_expr *, struct type *);
83 static void gen_left_shift (struct agent_expr *, int);
84
85
86 static void gen_frame_args_address (struct gdbarch *, struct agent_expr *);
87 static void gen_frame_locals_address (struct gdbarch *, struct agent_expr *);
88 static void gen_offset (struct agent_expr *ax, int offset);
89 static void gen_sym_offset (struct agent_expr *, struct symbol *);
90 static void gen_var_ref (struct gdbarch *, struct agent_expr *ax,
91 struct axs_value *value, struct symbol *var);
92
93
94 static void gen_int_literal (struct agent_expr *ax,
95 struct axs_value *value,
96 LONGEST k, struct type *type);
97
98 static void gen_usual_unary (struct expression *exp, struct agent_expr *ax,
99 struct axs_value *value);
100 static int type_wider_than (struct type *type1, struct type *type2);
101 static struct type *max_type (struct type *type1, struct type *type2);
102 static void gen_conversion (struct agent_expr *ax,
103 struct type *from, struct type *to);
104 static int is_nontrivial_conversion (struct type *from, struct type *to);
105 static void gen_usual_arithmetic (struct expression *exp,
106 struct agent_expr *ax,
107 struct axs_value *value1,
108 struct axs_value *value2);
109 static void gen_integral_promotions (struct expression *exp,
110 struct agent_expr *ax,
111 struct axs_value *value);
112 static void gen_cast (struct agent_expr *ax,
113 struct axs_value *value, struct type *type);
114 static void gen_scale (struct agent_expr *ax,
115 enum agent_op op, struct type *type);
116 static void gen_ptradd (struct agent_expr *ax, struct axs_value *value,
117 struct axs_value *value1, struct axs_value *value2);
118 static void gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
119 struct axs_value *value1, struct axs_value *value2);
120 static void gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
121 struct axs_value *value1, struct axs_value *value2,
122 struct type *result_type);
123 static void gen_binop (struct agent_expr *ax,
124 struct axs_value *value,
125 struct axs_value *value1,
126 struct axs_value *value2,
127 enum agent_op op,
128 enum agent_op op_unsigned, int may_carry, char *name);
129 static void gen_logical_not (struct agent_expr *ax, struct axs_value *value,
130 struct type *result_type);
131 static void gen_complement (struct agent_expr *ax, struct axs_value *value);
132 static void gen_deref (struct agent_expr *, struct axs_value *);
133 static void gen_address_of (struct agent_expr *, struct axs_value *);
134 static void gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
135 struct axs_value *value,
136 struct type *type, int start, int end);
137 static void gen_primitive_field (struct expression *exp,
138 struct agent_expr *ax,
139 struct axs_value *value,
140 int offset, int fieldno, struct type *type);
141 static int gen_struct_ref_recursive (struct expression *exp,
142 struct agent_expr *ax,
143 struct axs_value *value,
144 char *field, int offset,
145 struct type *type);
146 static void gen_struct_ref (struct expression *exp, struct agent_expr *ax,
147 struct axs_value *value,
148 char *field,
149 char *operator_name, char *operand_name);
150 static void gen_static_field (struct gdbarch *gdbarch,
151 struct agent_expr *ax, struct axs_value *value,
152 struct type *type, int fieldno);
153 static void gen_repeat (struct expression *exp, union exp_element **pc,
154 struct agent_expr *ax, struct axs_value *value);
155 static void gen_sizeof (struct expression *exp, union exp_element **pc,
156 struct agent_expr *ax, struct axs_value *value,
157 struct type *size_type);
158 static void gen_expr_binop_rest (struct expression *exp,
159 enum exp_opcode op, union exp_element **pc,
160 struct agent_expr *ax,
161 struct axs_value *value,
162 struct axs_value *value1,
163 struct axs_value *value2);
164
165 static void agent_command (char *exp, int from_tty);
166 \f
167
168 /* Detecting constant expressions. */
169
170 /* If the variable reference at *PC is a constant, return its value.
171 Otherwise, return zero.
172
173 Hey, Wally! How can a variable reference be a constant?
174
175 Well, Beav, this function really handles the OP_VAR_VALUE operator,
176 not specifically variable references. GDB uses OP_VAR_VALUE to
177 refer to any kind of symbolic reference: function names, enum
178 elements, and goto labels are all handled through the OP_VAR_VALUE
179 operator, even though they're constants. It makes sense given the
180 situation.
181
182 Gee, Wally, don'cha wonder sometimes if data representations that
183 subvert commonly accepted definitions of terms in favor of heavily
184 context-specific interpretations are really just a tool of the
185 programming hegemony to preserve their power and exclude the
186 proletariat? */
187
188 static struct value *
189 const_var_ref (struct symbol *var)
190 {
191 struct type *type = SYMBOL_TYPE (var);
192
193 switch (SYMBOL_CLASS (var))
194 {
195 case LOC_CONST:
196 return value_from_longest (type, (LONGEST) SYMBOL_VALUE (var));
197
198 case LOC_LABEL:
199 return value_from_pointer (type, (CORE_ADDR) SYMBOL_VALUE_ADDRESS (var));
200
201 default:
202 return 0;
203 }
204 }
205
206
207 /* If the expression starting at *PC has a constant value, return it.
208 Otherwise, return zero. If we return a value, then *PC will be
209 advanced to the end of it. If we return zero, *PC could be
210 anywhere. */
211 static struct value *
212 const_expr (union exp_element **pc)
213 {
214 enum exp_opcode op = (*pc)->opcode;
215 struct value *v1;
216
217 switch (op)
218 {
219 case OP_LONG:
220 {
221 struct type *type = (*pc)[1].type;
222 LONGEST k = (*pc)[2].longconst;
223
224 (*pc) += 4;
225 return value_from_longest (type, k);
226 }
227
228 case OP_VAR_VALUE:
229 {
230 struct value *v = const_var_ref ((*pc)[2].symbol);
231
232 (*pc) += 4;
233 return v;
234 }
235
236 /* We could add more operators in here. */
237
238 case UNOP_NEG:
239 (*pc)++;
240 v1 = const_expr (pc);
241 if (v1)
242 return value_neg (v1);
243 else
244 return 0;
245
246 default:
247 return 0;
248 }
249 }
250
251
252 /* Like const_expr, but guarantee also that *PC is undisturbed if the
253 expression is not constant. */
254 static struct value *
255 maybe_const_expr (union exp_element **pc)
256 {
257 union exp_element *tentative_pc = *pc;
258 struct value *v = const_expr (&tentative_pc);
259
260 /* If we got a value, then update the real PC. */
261 if (v)
262 *pc = tentative_pc;
263
264 return v;
265 }
266 \f
267
268 /* Generating bytecode from GDB expressions: general assumptions */
269
270 /* Here are a few general assumptions made throughout the code; if you
271 want to make a change that contradicts one of these, then you'd
272 better scan things pretty thoroughly.
273
274 - We assume that all values occupy one stack element. For example,
275 sometimes we'll swap to get at the left argument to a binary
276 operator. If we decide that void values should occupy no stack
277 elements, or that synthetic arrays (whose size is determined at
278 run time, created by the `@' operator) should occupy two stack
279 elements (address and length), then this will cause trouble.
280
281 - We assume the stack elements are infinitely wide, and that we
282 don't have to worry what happens if the user requests an
283 operation that is wider than the actual interpreter's stack.
284 That is, it's up to the interpreter to handle directly all the
285 integer widths the user has access to. (Woe betide the language
286 with bignums!)
287
288 - We don't support side effects. Thus, we don't have to worry about
289 GCC's generalized lvalues, function calls, etc.
290
291 - We don't support floating point. Many places where we switch on
292 some type don't bother to include cases for floating point; there
293 may be even more subtle ways this assumption exists. For
294 example, the arguments to % must be integers.
295
296 - We assume all subexpressions have a static, unchanging type. If
297 we tried to support convenience variables, this would be a
298 problem.
299
300 - All values on the stack should always be fully zero- or
301 sign-extended.
302
303 (I wasn't sure whether to choose this or its opposite --- that
304 only addresses are assumed extended --- but it turns out that
305 neither convention completely eliminates spurious extend
306 operations (if everything is always extended, then you have to
307 extend after add, because it could overflow; if nothing is
308 extended, then you end up producing extends whenever you change
309 sizes), and this is simpler.) */
310 \f
311
312 /* Generating bytecode from GDB expressions: the `trace' kludge */
313
314 /* The compiler in this file is a general-purpose mechanism for
315 translating GDB expressions into bytecode. One ought to be able to
316 find a million and one uses for it.
317
318 However, at the moment it is HOPELESSLY BRAIN-DAMAGED for the sake
319 of expediency. Let he who is without sin cast the first stone.
320
321 For the data tracing facility, we need to insert `trace' bytecodes
322 before each data fetch; this records all the memory that the
323 expression touches in the course of evaluation, so that memory will
324 be available when the user later tries to evaluate the expression
325 in GDB.
326
327 This should be done (I think) in a post-processing pass, that walks
328 an arbitrary agent expression and inserts `trace' operations at the
329 appropriate points. But it's much faster to just hack them
330 directly into the code. And since we're in a crunch, that's what
331 I've done.
332
333 Setting the flag trace_kludge to non-zero enables the code that
334 emits the trace bytecodes at the appropriate points. */
335 int trace_kludge;
336
337 /* Inspired by trace_kludge, this indicates that pointers to chars
338 should get an added tracenz bytecode to record nonzero bytes, up to
339 a length that is the value of trace_string_kludge. */
340 int trace_string_kludge;
341
342 /* Scan for all static fields in the given class, including any base
343 classes, and generate tracing bytecodes for each. */
344
345 static void
346 gen_trace_static_fields (struct gdbarch *gdbarch,
347 struct agent_expr *ax,
348 struct type *type)
349 {
350 int i, nbases = TYPE_N_BASECLASSES (type);
351 struct axs_value value;
352
353 CHECK_TYPEDEF (type);
354
355 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
356 {
357 if (field_is_static (&TYPE_FIELD (type, i)))
358 {
359 gen_static_field (gdbarch, ax, &value, type, i);
360 if (value.optimized_out)
361 continue;
362 switch (value.kind)
363 {
364 case axs_lvalue_memory:
365 {
366 int length = TYPE_LENGTH (check_typedef (value.type));
367
368 ax_const_l (ax, length);
369 ax_simple (ax, aop_trace);
370 }
371 break;
372
373 case axs_lvalue_register:
374 /* We don't actually need the register's value to be pushed,
375 just note that we need it to be collected. */
376 ax_reg_mask (ax, value.u.reg);
377
378 default:
379 break;
380 }
381 }
382 }
383
384 /* Now scan through base classes recursively. */
385 for (i = 0; i < nbases; i++)
386 {
387 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
388
389 gen_trace_static_fields (gdbarch, ax, basetype);
390 }
391 }
392
393 /* Trace the lvalue on the stack, if it needs it. In either case, pop
394 the value. Useful on the left side of a comma, and at the end of
395 an expression being used for tracing. */
396 static void
397 gen_traced_pop (struct gdbarch *gdbarch,
398 struct agent_expr *ax, struct axs_value *value)
399 {
400 int string_trace = 0;
401 if (trace_string_kludge
402 && TYPE_CODE (value->type) == TYPE_CODE_PTR
403 && c_textual_element_type (check_typedef (TYPE_TARGET_TYPE (value->type)),
404 's'))
405 string_trace = 1;
406
407 if (trace_kludge)
408 switch (value->kind)
409 {
410 case axs_rvalue:
411 if (string_trace)
412 {
413 ax_const_l (ax, trace_string_kludge);
414 ax_simple (ax, aop_tracenz);
415 }
416 else
417 /* We don't trace rvalues, just the lvalues necessary to
418 produce them. So just dispose of this value. */
419 ax_simple (ax, aop_pop);
420 break;
421
422 case axs_lvalue_memory:
423 {
424 int length = TYPE_LENGTH (check_typedef (value->type));
425
426 if (string_trace)
427 ax_simple (ax, aop_dup);
428
429 /* There's no point in trying to use a trace_quick bytecode
430 here, since "trace_quick SIZE pop" is three bytes, whereas
431 "const8 SIZE trace" is also three bytes, does the same
432 thing, and the simplest code which generates that will also
433 work correctly for objects with large sizes. */
434 ax_const_l (ax, length);
435 ax_simple (ax, aop_trace);
436
437 if (string_trace)
438 {
439 ax_simple (ax, aop_ref32);
440 ax_const_l (ax, trace_string_kludge);
441 ax_simple (ax, aop_tracenz);
442 }
443 }
444 break;
445
446 case axs_lvalue_register:
447 /* We don't actually need the register's value to be on the
448 stack, and the target will get heartburn if the register is
449 larger than will fit in a stack, so just mark it for
450 collection and be done with it. */
451 ax_reg_mask (ax, value->u.reg);
452
453 /* But if the register points to a string, assume the value
454 will fit on the stack and push it anyway. */
455 if (string_trace)
456 {
457 ax_reg (ax, value->u.reg);
458 ax_const_l (ax, trace_string_kludge);
459 ax_simple (ax, aop_tracenz);
460 }
461 break;
462 }
463 else
464 /* If we're not tracing, just pop the value. */
465 ax_simple (ax, aop_pop);
466
467 /* To trace C++ classes with static fields stored elsewhere. */
468 if (trace_kludge
469 && (TYPE_CODE (value->type) == TYPE_CODE_STRUCT
470 || TYPE_CODE (value->type) == TYPE_CODE_UNION))
471 gen_trace_static_fields (gdbarch, ax, value->type);
472 }
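
/* For illustration only -- a hypothetical sketch, assuming `gdbarch',
   `ax' and `value' are in scope as in the function above, trace_kludge
   is set, trace_string_kludge is zero, and VALUE describes a 4-byte
   object in memory (axs_lvalue_memory).  */
#if 0
  gen_traced_pop (gdbarch, ax, value);	/* emits: const8 4; trace */
  /* The `trace' op consumes both the length and the address, so the
     stack ends up empty.  With trace_kludge clear, the same call
     would emit just: pop.  */
#endif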
473 \f
474
475
476 /* Generating bytecode from GDB expressions: helper functions */
477
478 /* Assume that the lower bits of the top of the stack hold a value of
479 type TYPE, and the upper bits are zero. Sign-extend if necessary. */
480 static void
481 gen_sign_extend (struct agent_expr *ax, struct type *type)
482 {
483 /* Do we need to sign-extend this? */
484 if (!TYPE_UNSIGNED (type))
485 ax_ext (ax, TYPE_LENGTH (type) * TARGET_CHAR_BIT);
486 }
487
488
489 /* Assume the lower bits of the top of the stack hold a value of type
490 TYPE, and the upper bits are garbage. Sign-extend or truncate as
491 needed. */
492 static void
493 gen_extend (struct agent_expr *ax, struct type *type)
494 {
495 int bits = TYPE_LENGTH (type) * TARGET_CHAR_BIT;
496
497 /* I just had to. */
498 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, bits));
499 }
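
/* For illustration only -- a hypothetical sketch, assuming `ax' and
   `gdbarch' are in scope and the usual 32-bit int / 16-bit short.  */
#if 0
  gen_extend (ax, builtin_type (gdbarch)->builtin_int);	    /* ext 32 */
  gen_extend (ax, builtin_type (gdbarch)->builtin_unsigned_short); /* zero_ext 16 */
#endif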
500
501
502 /* Assume that the top of the stack contains a value of type "pointer
503 to TYPE"; generate code to fetch its value. Note that TYPE is the
504 target type, not the pointer type. */
505 static void
506 gen_fetch (struct agent_expr *ax, struct type *type)
507 {
508 if (trace_kludge)
509 {
510 /* Record the area of memory we're about to fetch. */
511 ax_trace_quick (ax, TYPE_LENGTH (type));
512 }
513
514 if (TYPE_CODE (type) == TYPE_CODE_RANGE)
515 type = TYPE_TARGET_TYPE (type);
516
517 switch (TYPE_CODE (type))
518 {
519 case TYPE_CODE_PTR:
520 case TYPE_CODE_REF:
521 case TYPE_CODE_ENUM:
522 case TYPE_CODE_INT:
523 case TYPE_CODE_CHAR:
524 case TYPE_CODE_BOOL:
525 /* It's a scalar value, so we know how to dereference it. How
526 many bytes long is it? */
527 switch (TYPE_LENGTH (type))
528 {
529 case 8 / TARGET_CHAR_BIT:
530 ax_simple (ax, aop_ref8);
531 break;
532 case 16 / TARGET_CHAR_BIT:
533 ax_simple (ax, aop_ref16);
534 break;
535 case 32 / TARGET_CHAR_BIT:
536 ax_simple (ax, aop_ref32);
537 break;
538 case 64 / TARGET_CHAR_BIT:
539 ax_simple (ax, aop_ref64);
540 break;
541
542 /* Either our caller shouldn't have asked us to dereference
543 that pointer (other code's fault), or we're not
544 implementing something we should be (this code's fault).
545 In any case, it's a bug the user shouldn't see. */
546 default:
547 internal_error (__FILE__, __LINE__,
548 _("gen_fetch: strange size"));
549 }
550
551 gen_sign_extend (ax, type);
552 break;
553
554 default:
555 /* Our caller requested us to dereference a pointer from an unsupported
556 type. Error out and give callers a chance to handle the failure
557 gracefully. */
558 error (_("gen_fetch: Unsupported type code `%s'."),
559 TYPE_NAME (type));
560 }
561 }
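
/* For illustration only -- a hypothetical sketch, assuming `ax' and
   `gdbarch' are in scope, a 32-bit signed int, and 8-bit bytes.  */
#if 0
  /* With trace_kludge clear this emits: ref32; ext 32.
     With trace_kludge set it is preceded by: trace_quick 4.  */
  gen_fetch (ax, builtin_type (gdbarch)->builtin_int);
#endif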
562
563
564 /* Generate code to left shift the top of the stack by DISTANCE bits, or
565 right shift it by -DISTANCE bits if DISTANCE < 0. This generates
566 unsigned (logical) right shifts. */
567 static void
568 gen_left_shift (struct agent_expr *ax, int distance)
569 {
570 if (distance > 0)
571 {
572 ax_const_l (ax, distance);
573 ax_simple (ax, aop_lsh);
574 }
575 else if (distance < 0)
576 {
577 ax_const_l (ax, -distance);
578 ax_simple (ax, aop_rsh_unsigned);
579 }
580 }
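
/* For illustration only -- a hypothetical sketch, assuming `ax' is in
   scope.  ax_const_l picks the shortest literal form, const8 here.  */
#if 0
  gen_left_shift (ax, 3);	/* emits: const8 3; lsh */
  gen_left_shift (ax, -5);	/* emits: const8 5; rsh_unsigned */
  gen_left_shift (ax, 0);	/* emits nothing */
#endif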
581 \f
582
583
584 /* Generating bytecode from GDB expressions: symbol references */
585
586 /* Generate code to push the base address of the argument portion of
587 the top stack frame. */
588 static void
589 gen_frame_args_address (struct gdbarch *gdbarch, struct agent_expr *ax)
590 {
591 int frame_reg;
592 LONGEST frame_offset;
593
594 gdbarch_virtual_frame_pointer (gdbarch,
595 ax->scope, &frame_reg, &frame_offset);
596 ax_reg (ax, frame_reg);
597 gen_offset (ax, frame_offset);
598 }
599
600
601 /* Generate code to push the base address of the locals portion of the
602 top stack frame. */
603 static void
604 gen_frame_locals_address (struct gdbarch *gdbarch, struct agent_expr *ax)
605 {
606 int frame_reg;
607 LONGEST frame_offset;
608
609 gdbarch_virtual_frame_pointer (gdbarch,
610 ax->scope, &frame_reg, &frame_offset);
611 ax_reg (ax, frame_reg);
612 gen_offset (ax, frame_offset);
613 }
614
615
616 /* Generate code to add OFFSET to the top of the stack. Try to
617 generate short and readable code. We use this for getting to
618 variables on the stack, and structure members. If we were
619 programming in ML, it would be clearer why these are the same
620 thing. */
621 static void
622 gen_offset (struct agent_expr *ax, int offset)
623 {
624 /* It would suffice to simply push the offset and add it, but this
625 makes it easier to read positive and negative offsets in the
626 bytecode. */
627 if (offset > 0)
628 {
629 ax_const_l (ax, offset);
630 ax_simple (ax, aop_add);
631 }
632 else if (offset < 0)
633 {
634 ax_const_l (ax, -offset);
635 ax_simple (ax, aop_sub);
636 }
637 }
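
/* For illustration only -- a hypothetical sketch, assuming `ax' is in
   scope.  */
#if 0
  gen_offset (ax, 16);		/* emits: const8 16; add */
  gen_offset (ax, -8);		/* emits: const8 8; sub */
  gen_offset (ax, 0);		/* emits nothing */
#endif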
638
639
640 /* In many cases, a symbol's value is the offset from some other
641 address (stack frame, base register, etc.) Generate code to add
642 VAR's value to the top of the stack. */
643 static void
644 gen_sym_offset (struct agent_expr *ax, struct symbol *var)
645 {
646 gen_offset (ax, SYMBOL_VALUE (var));
647 }
648
649
650 /* Generate code for a variable reference to AX. The variable is the
651 symbol VAR. Set VALUE to describe the result. */
652
653 static void
654 gen_var_ref (struct gdbarch *gdbarch, struct agent_expr *ax,
655 struct axs_value *value, struct symbol *var)
656 {
657 /* Dereference any typedefs. */
658 value->type = check_typedef (SYMBOL_TYPE (var));
659 value->optimized_out = 0;
660
661 /* I'm imitating the code in read_var_value. */
662 switch (SYMBOL_CLASS (var))
663 {
664 case LOC_CONST: /* A constant, like an enum value. */
665 ax_const_l (ax, (LONGEST) SYMBOL_VALUE (var));
666 value->kind = axs_rvalue;
667 break;
668
669 case LOC_LABEL: /* A goto label, being used as a value. */
670 ax_const_l (ax, (LONGEST) SYMBOL_VALUE_ADDRESS (var));
671 value->kind = axs_rvalue;
672 break;
673
674 case LOC_CONST_BYTES:
675 internal_error (__FILE__, __LINE__,
676 _("gen_var_ref: LOC_CONST_BYTES "
677 "symbols are not supported"));
678
679 /* Variable at a fixed location in memory. Easy. */
680 case LOC_STATIC:
681 /* Push the address of the variable. */
682 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (var));
683 value->kind = axs_lvalue_memory;
684 break;
685
686 case LOC_ARG: /* var lives in argument area of frame */
687 gen_frame_args_address (gdbarch, ax);
688 gen_sym_offset (ax, var);
689 value->kind = axs_lvalue_memory;
690 break;
691
692 case LOC_REF_ARG: /* As above, but the frame slot really
693 holds the address of the variable. */
694 gen_frame_args_address (gdbarch, ax);
695 gen_sym_offset (ax, var);
696 /* Don't assume any particular pointer size. */
697 gen_fetch (ax, builtin_type (gdbarch)->builtin_data_ptr);
698 value->kind = axs_lvalue_memory;
699 break;
700
701 case LOC_LOCAL: /* var lives in locals area of frame */
702 gen_frame_locals_address (gdbarch, ax);
703 gen_sym_offset (ax, var);
704 value->kind = axs_lvalue_memory;
705 break;
706
707 case LOC_TYPEDEF:
708 error (_("Cannot compute value of typedef `%s'."),
709 SYMBOL_PRINT_NAME (var));
710 break;
711
712 case LOC_BLOCK:
713 ax_const_l (ax, BLOCK_START (SYMBOL_BLOCK_VALUE (var)));
714 value->kind = axs_rvalue;
715 break;
716
717 case LOC_REGISTER:
718 /* Don't generate any code at all; in the process of treating
719 this as an lvalue or rvalue, the caller will generate the
720 right code. */
721 value->kind = axs_lvalue_register;
722 value->u.reg = SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch);
723 break;
724
725 /* A lot like LOC_REF_ARG, but the pointer lives directly in a
726 register, not on the stack. Simpler than LOC_REGISTER
727 because it's just like any other case where the thing
728 has a real address. */
729 case LOC_REGPARM_ADDR:
730 ax_reg (ax, SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch));
731 value->kind = axs_lvalue_memory;
732 break;
733
734 case LOC_UNRESOLVED:
735 {
736 struct minimal_symbol *msym
737 = lookup_minimal_symbol (SYMBOL_LINKAGE_NAME (var), NULL, NULL);
738
739 if (!msym)
740 error (_("Couldn't resolve symbol `%s'."), SYMBOL_PRINT_NAME (var));
741
742 /* Push the address of the variable. */
743 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (msym));
744 value->kind = axs_lvalue_memory;
745 }
746 break;
747
748 case LOC_COMPUTED:
749 /* FIXME: cagney/2004-01-26: It should be possible to
750 unconditionally call the SYMBOL_COMPUTED_OPS method when available.
751 Unfortunately DWARF 2 stores the frame-base (instead of the
752 function) location in a function's symbol. Oops! For the
753 moment enable this when/where applicable. */
754 SYMBOL_COMPUTED_OPS (var)->tracepoint_var_ref (var, gdbarch, ax, value);
755 break;
756
757 case LOC_OPTIMIZED_OUT:
758 /* Flag this, but don't say anything; leave it up to callers to
759 warn the user. */
760 value->optimized_out = 1;
761 break;
762
763 default:
764 error (_("Cannot find value of botched symbol `%s'."),
765 SYMBOL_PRINT_NAME (var));
766 break;
767 }
768 }
769 \f
770
771
772 /* Generating bytecode from GDB expressions: literals */
773
774 static void
775 gen_int_literal (struct agent_expr *ax, struct axs_value *value, LONGEST k,
776 struct type *type)
777 {
778 ax_const_l (ax, k);
779 value->kind = axs_rvalue;
780 value->type = check_typedef (type);
781 }
782 \f
783
784
785 /* Generating bytecode from GDB expressions: unary conversions, casts */
786
787 /* Take what's on the top of the stack (as described by VALUE), and
788 try to make an rvalue out of it. Signal an error if we can't do
789 that. */
790 void
791 require_rvalue (struct agent_expr *ax, struct axs_value *value)
792 {
793 /* Only deal with scalars; structs and such may be too large
794 to fit in a stack entry. */
795 value->type = check_typedef (value->type);
796 if (TYPE_CODE (value->type) == TYPE_CODE_ARRAY
797 || TYPE_CODE (value->type) == TYPE_CODE_STRUCT
798 || TYPE_CODE (value->type) == TYPE_CODE_UNION
799 || TYPE_CODE (value->type) == TYPE_CODE_FUNC)
800 error (_("Value not scalar: cannot be an rvalue."));
801
802 switch (value->kind)
803 {
804 case axs_rvalue:
805 /* It's already an rvalue. */
806 break;
807
808 case axs_lvalue_memory:
809 /* The top of stack is the address of the object. Dereference. */
810 gen_fetch (ax, value->type);
811 break;
812
813 case axs_lvalue_register:
814 /* There's nothing on the stack, but value->u.reg is the
815 register number containing the value.
816
817 When we add floating-point support, this is going to have to
818 change. What about SPARC register pairs, for example? */
819 ax_reg (ax, value->u.reg);
820 gen_extend (ax, value->type);
821 break;
822 }
823
824 value->kind = axs_rvalue;
825 }
826
827
828 /* Assume the top of the stack is described by VALUE, and perform the
829 usual unary conversions. This is motivated by ANSI 6.2.2, but of
830 course GDB expressions are not ANSI; they're the mishmash union of
831 a bunch of languages. Rah.
832
833 NOTE! This function promises to produce an rvalue only when the
834 incoming value is of an appropriate type. In other words, the
835 consumer of the value this function produces may assume the value
836 is an rvalue only after checking its type.
837
838 The immediate issue is that if the user tries to use a structure or
839 union as an operand of, say, the `+' operator, we don't want to try
840 to convert that structure to an rvalue; require_rvalue will bomb on
841 structs and unions. Rather, we want to simply pass the struct
842 lvalue through unchanged, and let `+' raise an error. */
843
844 static void
845 gen_usual_unary (struct expression *exp, struct agent_expr *ax,
846 struct axs_value *value)
847 {
848 /* We don't have to generate any code for the usual integral
849 conversions, since values are always represented as full-width on
850 the stack. Should we tweak the type? */
851
852 /* Some types require special handling. */
853 switch (TYPE_CODE (value->type))
854 {
855 /* Functions get converted to a pointer to the function. */
856 case TYPE_CODE_FUNC:
857 value->type = lookup_pointer_type (value->type);
858 value->kind = axs_rvalue; /* Should always be true, but just in case. */
859 break;
860
861 /* Arrays get converted to a pointer to their first element, and
862 are no longer an lvalue. */
863 case TYPE_CODE_ARRAY:
864 {
865 struct type *elements = TYPE_TARGET_TYPE (value->type);
866
867 value->type = lookup_pointer_type (elements);
868 value->kind = axs_rvalue;
869 /* We don't need to generate any code; the address of the array
870 is also the address of its first element. */
871 }
872 break;
873
874 /* Don't try to convert structures and unions to rvalues. Let the
875 consumer signal an error. */
876 case TYPE_CODE_STRUCT:
877 case TYPE_CODE_UNION:
878 return;
879 }
880
881 /* If the value is an lvalue, dereference it. */
882 require_rvalue (ax, value);
883 }
884
885
886 /* Return non-zero iff the type TYPE1 is considered "wider" than the
887 type TYPE2, according to the rules described in gen_usual_arithmetic. */
888 static int
889 type_wider_than (struct type *type1, struct type *type2)
890 {
891 return (TYPE_LENGTH (type1) > TYPE_LENGTH (type2)
892 || (TYPE_LENGTH (type1) == TYPE_LENGTH (type2)
893 && TYPE_UNSIGNED (type1)
894 && !TYPE_UNSIGNED (type2)));
895 }
896
897
898 /* Return the "wider" of the two types TYPE1 and TYPE2. */
899 static struct type *
900 max_type (struct type *type1, struct type *type2)
901 {
902 return type_wider_than (type1, type2) ? type1 : type2;
903 }
904
905
906 /* Generate code to convert a scalar value of type FROM to type TO. */
907 static void
908 gen_conversion (struct agent_expr *ax, struct type *from, struct type *to)
909 {
910 /* Perhaps there is a more graceful way to state these rules. */
911
912 /* If we're converting to a narrower type, then we need to clear out
913 the upper bits. */
914 if (TYPE_LENGTH (to) < TYPE_LENGTH (from))
915 gen_extend (ax, to);
916
917 /* If the two values have equal width, but different signednesses,
918 then we need to extend. */
919 else if (TYPE_LENGTH (to) == TYPE_LENGTH (from))
920 {
921 if (TYPE_UNSIGNED (from) != TYPE_UNSIGNED (to))
922 gen_extend (ax, to);
923 }
924
925 /* If we're converting to a wider type, and becoming unsigned, then
926 we need to zero out any possible sign bits. */
927 else if (TYPE_LENGTH (to) > TYPE_LENGTH (from))
928 {
929 if (TYPE_UNSIGNED (to))
930 gen_extend (ax, to);
931 }
932 }
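
/* For illustration only -- a hypothetical walk through the cases
   above, assuming 8-bit char, 16-bit short and 32-bit int:

     int           -> short          narrower: emits ext 16
     unsigned int  -> int            same width, signedness differs: ext 32
     unsigned char -> unsigned int   wider and unsigned: zero_ext 32
     signed char   -> int            wider and signed: no code at all

   In every case the value on the stack ends up fully extended
   according to the destination type, preserving the invariant stated
   at the top of this file.  */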
933
934
935 /* Return non-zero iff the type FROM will require any bytecodes to be
936 emitted to be converted to the type TO. */
937 static int
938 is_nontrivial_conversion (struct type *from, struct type *to)
939 {
940 struct agent_expr *ax = new_agent_expr (NULL, 0);
941 int nontrivial;
942
943 /* Actually generate the code, and see if anything came out. At the
944 moment, it would be trivial to replicate the code in
945 gen_conversion here, but in the future, when we're supporting
946 floating point and the like, it may not be. Doing things this
947 way allows this function to be independent of the logic in
948 gen_conversion. */
949 gen_conversion (ax, from, to);
950 nontrivial = ax->len > 0;
951 free_agent_expr (ax);
952 return nontrivial;
953 }
954
955
956 /* Generate code to perform the "usual arithmetic conversions" (ANSI C
957 6.2.1.5) for the two operands of an arithmetic operator. This
958 effectively finds a "least upper bound" type for the two arguments,
959 and promotes each argument to that type. *VALUE1 and *VALUE2
960 describe the values as they are passed in, and as they are left. */
961 static void
962 gen_usual_arithmetic (struct expression *exp, struct agent_expr *ax,
963 struct axs_value *value1, struct axs_value *value2)
964 {
965 /* Do the usual binary conversions. */
966 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
967 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
968 {
969 /* The ANSI integral promotions seem to work this way: Order the
970 integer types by size, and then by signedness: an n-bit
971 unsigned type is considered "wider" than an n-bit signed
972 type. Promote to the "wider" of the two types, and always
973 promote at least to int. */
974 struct type *target = max_type (builtin_type (exp->gdbarch)->builtin_int,
975 max_type (value1->type, value2->type));
976
977 /* Deal with value2, on the top of the stack. */
978 gen_conversion (ax, value2->type, target);
979
980 /* Deal with value1, not on the top of the stack. Don't
981 generate the `swap' instructions if we're not actually going
982 to do anything. */
983 if (is_nontrivial_conversion (value1->type, target))
984 {
985 ax_simple (ax, aop_swap);
986 gen_conversion (ax, value1->type, target);
987 ax_simple (ax, aop_swap);
988 }
989
990 value1->type = value2->type = check_typedef (target);
991 }
992 }
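
/* For illustration only -- a hypothetical sketch, assuming `exp' and
   `ax' are in scope, `value1' and `value2' are local axs_values,
   VALUE1 describes a 16-bit short, and VALUE2 (on top of the stack)
   a 32-bit unsigned int.  The common type is unsigned int; VALUE2
   needs no code, and VALUE1 is converted under a swap/swap pair.  */
#if 0
  gen_usual_arithmetic (exp, ax, &value1, &value2);
  /* emits: swap; zero_ext 32; swap
     and afterwards value1.type == value2.type == unsigned int.  */
#endif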
993
994
995 /* Generate code to perform the integral promotions (ANSI 6.2.1.1) on
996 the value on the top of the stack, as described by VALUE. Assume
997 the value has integral type. */
998 static void
999 gen_integral_promotions (struct expression *exp, struct agent_expr *ax,
1000 struct axs_value *value)
1001 {
1002 const struct builtin_type *builtin = builtin_type (exp->gdbarch);
1003
1004 if (!type_wider_than (value->type, builtin->builtin_int))
1005 {
1006 gen_conversion (ax, value->type, builtin->builtin_int);
1007 value->type = builtin->builtin_int;
1008 }
1009 else if (!type_wider_than (value->type, builtin->builtin_unsigned_int))
1010 {
1011 gen_conversion (ax, value->type, builtin->builtin_unsigned_int);
1012 value->type = builtin->builtin_unsigned_int;
1013 }
1014 }
1015
1016
1017 /* Generate code for a cast to TYPE. */
1018 static void
1019 gen_cast (struct agent_expr *ax, struct axs_value *value, struct type *type)
1020 {
1021 /* GCC does allow casts to yield lvalues, so this should be fixed
1022 before merging these changes into the trunk. */
1023 require_rvalue (ax, value);
1024 /* Dereference typedefs. */
1025 type = check_typedef (type);
1026
1027 switch (TYPE_CODE (type))
1028 {
1029 case TYPE_CODE_PTR:
1030 case TYPE_CODE_REF:
1031 /* It's implementation-defined, and I'll bet this is what GCC
1032 does. */
1033 break;
1034
1035 case TYPE_CODE_ARRAY:
1036 case TYPE_CODE_STRUCT:
1037 case TYPE_CODE_UNION:
1038 case TYPE_CODE_FUNC:
1039 error (_("Invalid type cast: intended type must be scalar."));
1040
1041 case TYPE_CODE_ENUM:
1042 case TYPE_CODE_BOOL:
1043 /* We don't have to worry about the size of the value, because
1044 all our integral values are fully sign-extended, and when
1045 casting pointers we can do anything we like. Is there any
1046 way for us to know what GCC actually does with a cast like
1047 this? */
1048 break;
1049
1050 case TYPE_CODE_INT:
1051 gen_conversion (ax, value->type, type);
1052 break;
1053
1054 case TYPE_CODE_VOID:
1055 /* We could pop the value, and rely on everyone else to check
1056 the type and notice that this value doesn't occupy a stack
1057 slot. But for now, leave the value on the stack, and
1058 preserve the "value == stack element" assumption. */
1059 break;
1060
1061 default:
1062 error (_("Casts to requested type are not yet implemented."));
1063 }
1064
1065 value->type = type;
1066 }
1067 \f
1068
1069
1070 /* Generating bytecode from GDB expressions: arithmetic */
1071
1072 /* Scale the integer on the top of the stack by the size of the target
1073 of the pointer type TYPE. */
1074 static void
1075 gen_scale (struct agent_expr *ax, enum agent_op op, struct type *type)
1076 {
1077 struct type *element = TYPE_TARGET_TYPE (type);
1078
1079 if (TYPE_LENGTH (element) != 1)
1080 {
1081 ax_const_l (ax, TYPE_LENGTH (element));
1082 ax_simple (ax, op);
1083 }
1084 }
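
/* For illustration only -- a hypothetical sketch, assuming `ax' is in
   scope and `int_ptr_type' is a pointer-to-int type whose target is
   4 bytes long.  */
#if 0
  gen_scale (ax, aop_mul, int_ptr_type);	/* emits: const8 4; mul */
  /* For a pointer to char (1-byte target) no code is emitted.  */
#endif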
1085
1086
1087 /* Generate code for pointer arithmetic PTR + INT. */
1088 static void
1089 gen_ptradd (struct agent_expr *ax, struct axs_value *value,
1090 struct axs_value *value1, struct axs_value *value2)
1091 {
1092 gdb_assert (pointer_type (value1->type));
1093 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1094
1095 gen_scale (ax, aop_mul, value1->type);
1096 ax_simple (ax, aop_add);
1097 gen_extend (ax, value1->type); /* Catch overflow. */
1098 value->type = value1->type;
1099 value->kind = axs_rvalue;
1100 }
1101
1102
1103 /* Generate code for pointer arithmetic PTR - INT. */
1104 static void
1105 gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
1106 struct axs_value *value1, struct axs_value *value2)
1107 {
1108 gdb_assert (pointer_type (value1->type));
1109 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1110
1111 gen_scale (ax, aop_mul, value1->type);
1112 ax_simple (ax, aop_sub);
1113 gen_extend (ax, value1->type); /* Catch overflow. */
1114 value->type = value1->type;
1115 value->kind = axs_rvalue;
1116 }
1117
1118
1119 /* Generate code for pointer arithmetic PTR - PTR. */
1120 static void
1121 gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
1122 struct axs_value *value1, struct axs_value *value2,
1123 struct type *result_type)
1124 {
1125 gdb_assert (pointer_type (value1->type));
1126 gdb_assert (pointer_type (value2->type));
1127
1128 if (TYPE_LENGTH (TYPE_TARGET_TYPE (value1->type))
1129 != TYPE_LENGTH (TYPE_TARGET_TYPE (value2->type)))
1130 error (_("\
1131 First argument of `-' is a pointer, but second argument is neither\n\
1132 an integer nor a pointer of the same type."));
1133
1134 ax_simple (ax, aop_sub);
1135 gen_scale (ax, aop_div_unsigned, value1->type);
1136 value->type = result_type;
1137 value->kind = axs_rvalue;
1138 }
1139
1140 static void
1141 gen_equal (struct agent_expr *ax, struct axs_value *value,
1142 struct axs_value *value1, struct axs_value *value2,
1143 struct type *result_type)
1144 {
1145 if (pointer_type (value1->type) || pointer_type (value2->type))
1146 ax_simple (ax, aop_equal);
1147 else
1148 gen_binop (ax, value, value1, value2,
1149 aop_equal, aop_equal, 0, "equal");
1150 value->type = result_type;
1151 value->kind = axs_rvalue;
1152 }
1153
1154 static void
1155 gen_less (struct agent_expr *ax, struct axs_value *value,
1156 struct axs_value *value1, struct axs_value *value2,
1157 struct type *result_type)
1158 {
1159 if (pointer_type (value1->type) || pointer_type (value2->type))
1160 ax_simple (ax, aop_less_unsigned);
1161 else
1162 gen_binop (ax, value, value1, value2,
1163 aop_less_signed, aop_less_unsigned, 0, "less than");
1164 value->type = result_type;
1165 value->kind = axs_rvalue;
1166 }
1167
1168 /* Generate code for a binary operator that doesn't do pointer magic.
1169 We set VALUE to describe the result value; we assume VALUE1 and
1170 VALUE2 describe the two operands, and that they've undergone the
1171 usual binary conversions. MAY_CARRY should be non-zero iff the
1172 result needs to be extended. NAME is the English name of the
1173 operator, used in error messages. */
1174 static void
1175 gen_binop (struct agent_expr *ax, struct axs_value *value,
1176 struct axs_value *value1, struct axs_value *value2,
1177 enum agent_op op, enum agent_op op_unsigned,
1178 int may_carry, char *name)
1179 {
1180 /* We only handle INT op INT. */
1181 if ((TYPE_CODE (value1->type) != TYPE_CODE_INT)
1182 || (TYPE_CODE (value2->type) != TYPE_CODE_INT))
1183 error (_("Invalid combination of types in %s."), name);
1184
1185 ax_simple (ax,
1186 TYPE_UNSIGNED (value1->type) ? op_unsigned : op);
1187 if (may_carry)
1188 gen_extend (ax, value1->type); /* catch overflow */
1189 value->type = value1->type;
1190 value->kind = axs_rvalue;
1191 }
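
/* For illustration only -- a hypothetical sketch, assuming `ax',
   `value', `value1' and `value2' are in scope and both operands are
   32-bit signed ints.  aop_add has no separate unsigned form, so the
   same opcode is passed twice; MAY_CARRY is 1 because the sum can
   overflow 32 bits and must be re-extended.  */
#if 0
  gen_binop (ax, value, value1, value2, aop_add, aop_add, 1, "addition");
  /* emits: add; ext 32 */
#endif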
1192
1193
1194 static void
1195 gen_logical_not (struct agent_expr *ax, struct axs_value *value,
1196 struct type *result_type)
1197 {
1198 if (TYPE_CODE (value->type) != TYPE_CODE_INT
1199 && TYPE_CODE (value->type) != TYPE_CODE_PTR)
1200 error (_("Invalid type of operand to `!'."));
1201
1202 ax_simple (ax, aop_log_not);
1203 value->type = result_type;
1204 }
1205
1206
1207 static void
1208 gen_complement (struct agent_expr *ax, struct axs_value *value)
1209 {
1210 if (TYPE_CODE (value->type) != TYPE_CODE_INT)
1211 error (_("Invalid type of operand to `~'."));
1212
1213 ax_simple (ax, aop_bit_not);
1214 gen_extend (ax, value->type);
1215 }
1216 \f
1217
1218
1219 /* Generating bytecode from GDB expressions: * & . -> @ sizeof */
1220
1221 /* Dereference the value on the top of the stack. */
1222 static void
1223 gen_deref (struct agent_expr *ax, struct axs_value *value)
1224 {
1225 /* The caller should check the type, because several operators use
1226 this, and we don't know what error message to generate. */
1227 if (!pointer_type (value->type))
1228 internal_error (__FILE__, __LINE__,
1229 _("gen_deref: expected a pointer"));
1230
1231 /* We've got an rvalue now, which is a pointer. We want to yield an
1232 lvalue, whose address is exactly that pointer. So we don't
1233 actually emit any code; we just change the type from "Pointer to
1234 T" to "T", and mark the value as an lvalue in memory. Leave it
1235 to the consumer to actually dereference it. */
1236 value->type = check_typedef (TYPE_TARGET_TYPE (value->type));
1237 if (TYPE_CODE (value->type) == TYPE_CODE_VOID)
1238 error (_("Attempt to dereference a generic pointer."));
1239 value->kind = ((TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1240 ? axs_rvalue : axs_lvalue_memory);
1241 }
1242
1243
1244 /* Produce the address of the lvalue on the top of the stack. */
1245 static void
1246 gen_address_of (struct agent_expr *ax, struct axs_value *value)
1247 {
1248 /* Special case for taking the address of a function. The ANSI
1249 standard describes this as a special case, too, so this
1250 arrangement is not without motivation. */
1251 if (TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1252 /* The value's already an rvalue on the stack, so we just need to
1253 change the type. */
1254 value->type = lookup_pointer_type (value->type);
1255 else
1256 switch (value->kind)
1257 {
1258 case axs_rvalue:
1259 error (_("Operand of `&' is an rvalue, which has no address."));
1260
1261 case axs_lvalue_register:
1262 error (_("Operand of `&' is in a register, and has no address."));
1263
1264 case axs_lvalue_memory:
1265 value->kind = axs_rvalue;
1266 value->type = lookup_pointer_type (value->type);
1267 break;
1268 }
1269 }
1270
1271 /* Generate code to push the value of a bitfield of a structure whose
1272 address is on the top of the stack. START and END give the
1273 starting and one-past-ending *bit* numbers of the field within the
1274 structure. */
1275 static void
1276 gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
1277 struct axs_value *value, struct type *type,
1278 int start, int end)
1279 {
1280 /* Note that ops[i] fetches 8 << i bits. */
1281 static enum agent_op ops[]
1282 = {aop_ref8, aop_ref16, aop_ref32, aop_ref64};
1283 static int num_ops = (sizeof (ops) / sizeof (ops[0]));
1284
1285 /* We don't want to touch any byte that the bitfield doesn't
1286 actually occupy; we shouldn't make any accesses we're not
1287 explicitly permitted to. We rely here on the fact that the
1288 bytecode `ref' operators work on unaligned addresses.
1289
1290 It takes some fancy footwork to get the stack to work the way
1291 we'd like. Say we're retrieving a bitfield that requires three
1292 fetches. Initially, the stack just contains the address:
1293 addr
1294 For the first fetch, we duplicate the address
1295 addr addr
1296 then add the byte offset, do the fetch, and shift and mask as
1297 needed, yielding a fragment of the value, properly aligned for
1298 the final bitwise or:
1299 addr frag1
1300 then we swap, and repeat the process:
1301 frag1 addr --- address on top
1302 frag1 addr addr --- duplicate it
1303 frag1 addr frag2 --- get second fragment
1304 frag1 frag2 addr --- swap again
1305 frag1 frag2 frag3 --- get third fragment
1306 Notice that, since the third fragment is the last one, we don't
1307 bother duplicating the address this time. Now we have all the
1308 fragments on the stack, and we can simply `or' them together,
1309 yielding the final value of the bitfield. */
1310
1311 /* The first and one-after-last bits in the field, but rounded down
1312 and up to byte boundaries. */
1313 int bound_start = (start / TARGET_CHAR_BIT) * TARGET_CHAR_BIT;
1314 int bound_end = (((end + TARGET_CHAR_BIT - 1)
1315 / TARGET_CHAR_BIT)
1316 * TARGET_CHAR_BIT);
1317
1318 /* current bit offset within the structure */
1319 int offset;
1320
1321 /* The index in ops of the opcode we're considering. */
1322 int op;
1323
1324 /* The number of fragments we generated in the process. Probably
1325 equal to the number of `one' bits in bytesize, but who cares? */
1326 int fragment_count;
1327
1328 /* Dereference any typedefs. */
1329 type = check_typedef (type);
1330
1331 /* Can we fetch the number of bits requested at all? */
1332 if ((end - start) > ((1 << num_ops) * 8))
1333 internal_error (__FILE__, __LINE__,
1334 _("gen_bitfield_ref: bitfield too wide"));
1335
1336 /* Note that we know here that we only need to try each opcode once.
1337 That may not be true on machines with weird byte sizes. */
1338 offset = bound_start;
1339 fragment_count = 0;
1340 for (op = num_ops - 1; op >= 0; op--)
1341 {
1342 /* number of bits that ops[op] would fetch */
1343 int op_size = 8 << op;
1344
1345 /* The stack at this point, from bottom to top, contains zero or
1346 more fragments, then the address. */
1347
1348 /* Does this fetch fit within the bitfield? */
1349 if (offset + op_size <= bound_end)
1350 {
1351 /* Is this the last fragment? */
1352 int last_frag = (offset + op_size == bound_end);
1353
1354 if (!last_frag)
1355 ax_simple (ax, aop_dup); /* keep a copy of the address */
1356
1357 /* Add the offset. */
1358 gen_offset (ax, offset / TARGET_CHAR_BIT);
1359
1360 if (trace_kludge)
1361 {
1362 /* Record the area of memory we're about to fetch. */
1363 ax_trace_quick (ax, op_size / TARGET_CHAR_BIT);
1364 }
1365
1366 /* Perform the fetch. */
1367 ax_simple (ax, ops[op]);
1368
1369 /* Shift the bits we have to their proper position.
1370 gen_left_shift will generate right shifts when the operand
1371 is negative.
1372
1373 A big-endian field diagram to ponder:
1374 byte 0 byte 1 byte 2 byte 3 byte 4 byte 5 byte 6 byte 7
1375 +------++------++------++------++------++------++------++------+
1376 xxxxAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCxxxxxxxxxxx
1377 ^ ^ ^ ^
1378 bit number 16 32 48 53
1379 These are bit numbers as supplied by GDB. Note that the
1380 bit numbers run from right to left once you've fetched the
1381 value!
1382
1383 A little-endian field diagram to ponder:
1384 byte 7 byte 6 byte 5 byte 4 byte 3 byte 2 byte 1 byte 0
1385 +------++------++------++------++------++------++------++------+
1386 xxxxxxxxxxxAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCxxxx
1387 ^ ^ ^ ^ ^
1388 bit number 48 32 16 4 0
1389
1390 In both cases, the most significant end is on the left
1391 (i.e. normal numeric writing order), which means that you
1392 don't go crazy thinking about `left' and `right' shifts.
1393
1394 We don't have to worry about masking yet:
1395 - If they contain garbage off the least significant end, then we
1396 must be looking at the low end of the field, and the right
1397 shift will wipe them out.
1398 - If they contain garbage off the most significant end, then we
1399 must be looking at the most significant end of the word, and
1400 the sign/zero extension will wipe them out.
1401 - If we're in the interior of the word, then there is no garbage
1402 on either end, because the ref operators zero-extend. */
1403 if (gdbarch_byte_order (exp->gdbarch) == BFD_ENDIAN_BIG)
1404 gen_left_shift (ax, end - (offset + op_size));
1405 else
1406 gen_left_shift (ax, offset - start);
1407
1408 if (!last_frag)
1409 /* Bring the copy of the address up to the top. */
1410 ax_simple (ax, aop_swap);
1411
1412 offset += op_size;
1413 fragment_count++;
1414 }
1415 }
1416
1417 /* Generate enough bitwise `or' operations to combine all the
1418 fragments we left on the stack. */
1419 while (fragment_count-- > 1)
1420 ax_simple (ax, aop_bit_or);
1421
1422 /* Sign- or zero-extend the value as appropriate. */
1423 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, end - start));
1424
1425 /* This is *not* an lvalue. Ugh. */
1426 value->kind = axs_rvalue;
1427 value->type = type;
1428 }
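
/* For illustration only -- a hypothetical worked example of the
   algorithm above: an unsigned bitfield occupying bits 8..24 of a
   little-endian structure whose address is on the stack, with
   trace_kludge clear.  bound_start is 8 and bound_end is 24, so a
   single ref16 at byte offset 1 suffices; the little-endian shift is
   by zero bits and only one fragment is fetched, so no `lsh' or
   `bit_or' is emitted:

       const8 1
       add
       ref16
       zero_ext 16  */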
1429
1430 /* Generate bytecodes for field number FIELDNO of type TYPE. OFFSET
1431 is an accumulated offset (in bytes); it will be nonzero for objects
1432 embedded in other objects, like C++ base classes. Behavior should
1433 generally follow value_primitive_field. */
1434
1435 static void
1436 gen_primitive_field (struct expression *exp,
1437 struct agent_expr *ax, struct axs_value *value,
1438 int offset, int fieldno, struct type *type)
1439 {
1440 /* Is this a bitfield? */
1441 if (TYPE_FIELD_PACKED (type, fieldno))
1442 gen_bitfield_ref (exp, ax, value, TYPE_FIELD_TYPE (type, fieldno),
1443 (offset * TARGET_CHAR_BIT
1444 + TYPE_FIELD_BITPOS (type, fieldno)),
1445 (offset * TARGET_CHAR_BIT
1446 + TYPE_FIELD_BITPOS (type, fieldno)
1447 + TYPE_FIELD_BITSIZE (type, fieldno)));
1448 else
1449 {
1450 gen_offset (ax, offset
1451 + TYPE_FIELD_BITPOS (type, fieldno) / TARGET_CHAR_BIT);
1452 value->kind = axs_lvalue_memory;
1453 value->type = TYPE_FIELD_TYPE (type, fieldno);
1454 }
1455 }
1456
1457 /* Search for the given field in either the given type or one of its
1458 base classes. Return 1 if found, 0 if not. */
1459
1460 static int
1461 gen_struct_ref_recursive (struct expression *exp, struct agent_expr *ax,
1462 struct axs_value *value,
1463 char *field, int offset, struct type *type)
1464 {
1465 int i, rslt;
1466 int nbases = TYPE_N_BASECLASSES (type);
1467
1468 CHECK_TYPEDEF (type);
1469
1470 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
1471 {
1472 const char *this_name = TYPE_FIELD_NAME (type, i);
1473
1474 if (this_name)
1475 {
1476 if (strcmp (field, this_name) == 0)
1477 {
1478 /* Note that bytecodes for the struct's base (aka
1479 "this") will have been generated already, which will
1480 be unnecessary but not harmful if the static field is
1481 being handled as a global. */
1482 if (field_is_static (&TYPE_FIELD (type, i)))
1483 {
1484 gen_static_field (exp->gdbarch, ax, value, type, i);
1485 if (value->optimized_out)
1486 error (_("static field `%s' has been "
1487 "optimized out, cannot use"),
1488 field);
1489 return 1;
1490 }
1491
1492 gen_primitive_field (exp, ax, value, offset, i, type);
1493 return 1;
1494 }
1495 #if 0 /* is this right? */
1496 if (this_name[0] == '\0')
1497 internal_error (__FILE__, __LINE__,
1498 _("find_field: anonymous unions not supported"));
1499 #endif
1500 }
1501 }
1502
1503 /* Now scan through base classes recursively. */
1504 for (i = 0; i < nbases; i++)
1505 {
1506 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
1507
1508 rslt = gen_struct_ref_recursive (exp, ax, value, field,
1509 offset + TYPE_BASECLASS_BITPOS (type, i)
1510 / TARGET_CHAR_BIT,
1511 basetype);
1512 if (rslt)
1513 return 1;
1514 }
1515
1516 /* Not found anywhere, flag so caller can complain. */
1517 return 0;
1518 }
1519
1520 /* Generate code to reference the member named FIELD of a structure or
1521 union. The top of the stack, as described by VALUE, should have
1522 type (pointer to a)* struct/union. OPERATOR_NAME is the name of
1523 the operator being compiled, and OPERAND_NAME is the kind of thing
1524 it operates on; we use them in error messages. */
1525 static void
1526 gen_struct_ref (struct expression *exp, struct agent_expr *ax,
1527 struct axs_value *value, char *field,
1528 char *operator_name, char *operand_name)
1529 {
1530 struct type *type;
1531 int found;
1532
1533 /* Follow pointers until we reach a non-pointer. These aren't the C
1534 semantics, but they're what the normal GDB evaluator does, so we
1535 should at least be consistent. */
1536 while (pointer_type (value->type))
1537 {
1538 require_rvalue (ax, value);
1539 gen_deref (ax, value);
1540 }
1541 type = check_typedef (value->type);
1542
1543 /* This must yield a structure or a union. */
1544 if (TYPE_CODE (type) != TYPE_CODE_STRUCT
1545 && TYPE_CODE (type) != TYPE_CODE_UNION)
1546 error (_("The left operand of `%s' is not a %s."),
1547 operator_name, operand_name);
1548
1549 /* And it must be in memory; we don't deal with structure rvalues,
1550 or structures living in registers. */
1551 if (value->kind != axs_lvalue_memory)
1552 error (_("Structure does not live in memory."));
1553
1554 /* Search through fields and base classes recursively. */
1555 found = gen_struct_ref_recursive (exp, ax, value, field, 0, type);
1556
1557 if (!found)
1558 error (_("Couldn't find member named `%s' in struct/union/class `%s'"),
1559 field, TYPE_TAG_NAME (type));
1560 }
1561
1562 static int
1563 gen_namespace_elt (struct expression *exp,
1564 struct agent_expr *ax, struct axs_value *value,
1565 const struct type *curtype, char *name);
1566 static int
1567 gen_maybe_namespace_elt (struct expression *exp,
1568 struct agent_expr *ax, struct axs_value *value,
1569 const struct type *curtype, char *name);
1570
1571 static void
1572 gen_static_field (struct gdbarch *gdbarch,
1573 struct agent_expr *ax, struct axs_value *value,
1574 struct type *type, int fieldno)
1575 {
1576 if (TYPE_FIELD_LOC_KIND (type, fieldno) == FIELD_LOC_KIND_PHYSADDR)
1577 {
1578 ax_const_l (ax, TYPE_FIELD_STATIC_PHYSADDR (type, fieldno));
1579 value->kind = axs_lvalue_memory;
1580 value->type = TYPE_FIELD_TYPE (type, fieldno);
1581 value->optimized_out = 0;
1582 }
1583 else
1584 {
1585 const char *phys_name = TYPE_FIELD_STATIC_PHYSNAME (type, fieldno);
1586 struct symbol *sym = lookup_symbol (phys_name, 0, VAR_DOMAIN, 0);
1587
1588 if (sym)
1589 {
1590 gen_var_ref (gdbarch, ax, value, sym);
1591
1592 /* Don't error if the value was optimized out; we may be
1593 scanning all static fields and just want to pass over this
1594 and continue with the rest. */
1595 }
1596 else
1597 {
1598 /* Silently assume this was optimized out; class printing
1599 will let the user know why the data is missing. */
1600 value->optimized_out = 1;
1601 }
1602 }
1603 }
1604
1605 static int
1606 gen_struct_elt_for_reference (struct expression *exp,
1607 struct agent_expr *ax, struct axs_value *value,
1608 struct type *type, char *fieldname)
1609 {
1610 struct type *t = type;
1611 int i;
1612
1613 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
1614 && TYPE_CODE (t) != TYPE_CODE_UNION)
1615 internal_error (__FILE__, __LINE__,
1616 _("non-aggregate type to gen_struct_elt_for_reference"));
1617
1618 for (i = TYPE_NFIELDS (t) - 1; i >= TYPE_N_BASECLASSES (t); i--)
1619 {
1620 const char *t_field_name = TYPE_FIELD_NAME (t, i);
1621
1622 if (t_field_name && strcmp (t_field_name, fieldname) == 0)
1623 {
1624 if (field_is_static (&TYPE_FIELD (t, i)))
1625 {
1626 gen_static_field (exp->gdbarch, ax, value, t, i);
1627 if (value->optimized_out)
1628 error (_("static field `%s' has been "
1629 "optimized out, cannot use"),
1630 fieldname);
1631 return 1;
1632 }
1633 if (TYPE_FIELD_PACKED (t, i))
1634 error (_("pointers to bitfield members not allowed"));
1635
1636 /* FIXME we need a way to do "want_address" equivalent */
1637
1638 error (_("Cannot reference non-static field \"%s\""), fieldname);
1639 }
1640 }
1641
1642 /* FIXME add other scoped-reference cases here */
1643
1644 /* Do a last-ditch lookup. */
1645 return gen_maybe_namespace_elt (exp, ax, value, type, fieldname);
1646 }
1647
1648 /* C++: Return the member NAME of the namespace given by the type
1649 CURTYPE. */
1650
1651 static int
1652 gen_namespace_elt (struct expression *exp,
1653 struct agent_expr *ax, struct axs_value *value,
1654 const struct type *curtype, char *name)
1655 {
1656 int found = gen_maybe_namespace_elt (exp, ax, value, curtype, name);
1657
1658 if (!found)
1659 error (_("No symbol \"%s\" in namespace \"%s\"."),
1660 name, TYPE_TAG_NAME (curtype));
1661
1662 return found;
1663 }
1664
1665 /* A helper function used by value_namespace_elt and
1666 value_struct_elt_for_reference. It looks up NAME inside the
1667 context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE
1668 is a class and NAME refers to a type in CURTYPE itself (as opposed
1669 to, say, some base class of CURTYPE). */
1670
1671 static int
1672 gen_maybe_namespace_elt (struct expression *exp,
1673 struct agent_expr *ax, struct axs_value *value,
1674 const struct type *curtype, char *name)
1675 {
1676 const char *namespace_name = TYPE_TAG_NAME (curtype);
1677 struct symbol *sym;
1678
1679 sym = cp_lookup_symbol_namespace (namespace_name, name,
1680 block_for_pc (ax->scope),
1681 VAR_DOMAIN);
1682
1683 if (sym == NULL)
1684 return 0;
1685
1686 gen_var_ref (exp->gdbarch, ax, value, sym);
1687
1688 if (value->optimized_out)
1689 error (_("`%s' has been optimized out, cannot use"),
1690 SYMBOL_PRINT_NAME (sym));
1691
1692 return 1;
1693 }
1694
1695
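/* Generate code for a reference to the member FIELD of the aggregate
   or namespace TYPE, dispatching on TYPE's code.  OPERATOR_NAME and
   OPERAND_NAME are currently unused here.  Return nonzero on
   success.  */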
1696 static int
1697 gen_aggregate_elt_ref (struct expression *exp,
1698 struct agent_expr *ax, struct axs_value *value,
1699 struct type *type, char *field,
1700 char *operator_name, char *operand_name)
1701 {
1702 switch (TYPE_CODE (type))
1703 {
1704 case TYPE_CODE_STRUCT:
1705 case TYPE_CODE_UNION:
1706 return gen_struct_elt_for_reference (exp, ax, value, type, field);
1707 break;
1708 case TYPE_CODE_NAMESPACE:
1709 return gen_namespace_elt (exp, ax, value, type, field);
1710 break;
1711 default:
1712 internal_error (__FILE__, __LINE__,
1713 _("non-aggregate type in gen_aggregate_elt_ref"));
1714 }
1715
1716 return 0;
1717 }
1718
1719 /* Generate code for GDB's magical `repeat' operator.
1720 LVALUE @ INT creates an array INT elements long, whose elements
1721 have the same type as LVALUE, located in memory so that LVALUE is
1722 its first element. For example, argv[0]@argc gives you the array
1723 of command-line arguments.
1724
1725 Unfortunately, because we have to know the types before we actually
1726 have a value for the expression, we can't implement this perfectly
1727 without changing the type system, having values that occupy two
1728 stack slots, doing weird things with sizeof, etc. So we require
1729 the right operand to be a constant expression. */
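/* So, for instance, something like `buf@32' can be translated here,
   while `argv[0]@argc' (fine in ordinary expression evaluation) is
   rejected below, because ARGC is not a constant at translation
   time.  (BUF, ARGV and ARGC are just illustrative names.)  */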
1730 static void
1731 gen_repeat (struct expression *exp, union exp_element **pc,
1732 struct agent_expr *ax, struct axs_value *value)
1733 {
1734 struct axs_value value1;
1735
1736 /* We don't want to turn this into an rvalue, so no conversions
1737 here. */
1738 gen_expr (exp, pc, ax, &value1);
1739 if (value1.kind != axs_lvalue_memory)
1740 error (_("Left operand of `@' must be an object in memory."));
1741
1742 /* Evaluate the length; it had better be a constant. */
1743 {
1744 struct value *v = const_expr (pc);
1745 int length;
1746
1747 if (!v)
1748 error (_("Right operand of `@' must be a "
1749 "constant, in agent expressions."));
1750 if (TYPE_CODE (value_type (v)) != TYPE_CODE_INT)
1751 error (_("Right operand of `@' must be an integer."));
1752 length = value_as_long (v);
1753 if (length <= 0)
1754 error (_("Right operand of `@' must be positive."));
1755
1756 /* The top of the stack is already the address of the object, so
1757 all we need to do is frob the type of the lvalue. */
1758 {
1759 /* FIXME-type-allocation: need a way to free this type when we are
1760 done with it. */
1761 struct type *array
1762 = lookup_array_range_type (value1.type, 0, length - 1);
1763
1764 value->kind = axs_lvalue_memory;
1765 value->type = array;
1766 }
1767 }
1768 }
1769
1770
1771 /* Emit code for the `sizeof' operator.
1772 *PC should point at the start of the operand expression; we advance it
1773 to the first instruction after the operand. */
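/* For example, translating `sizeof (x)' generates and then discards
   the code for X, leaving behind only a single constant push of
   TYPE_LENGTH of X's type.  (Illustrative sketch; the exact constant
   opcode is whatever ax_const_l chooses.)  */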
1774 static void
1775 gen_sizeof (struct expression *exp, union exp_element **pc,
1776 struct agent_expr *ax, struct axs_value *value,
1777 struct type *size_type)
1778 {
1779 /* We don't care about the value of the operand expression; we only
1780 care about its type. However, in the current arrangement, the
1781 only way to find an expression's type is to generate code for it.
1782 So we generate code for the operand, and then throw it away,
1783 replacing it with code that simply pushes its size. */
1784 int start = ax->len;
1785
1786 gen_expr (exp, pc, ax, value);
1787
1788 /* Throw away the code we just generated. */
1789 ax->len = start;
1790
1791 ax_const_l (ax, TYPE_LENGTH (value->type));
1792 value->kind = axs_rvalue;
1793 value->type = size_type;
1794 }
1795 \f
1796
1797 /* Generating bytecode from GDB expressions: general recursive thingy */
1798
1799 /* XXX: i18n */
1800 /* A gen_expr function written by a Gen-X'er guy.
1801 Append code for the subexpression of EXP starting at *PC to AX. */
1802 void
1803 gen_expr (struct expression *exp, union exp_element **pc,
1804 struct agent_expr *ax, struct axs_value *value)
1805 {
1806 /* Used to hold the descriptions of operand expressions. */
1807 struct axs_value value1, value2, value3;
1808 enum exp_opcode op = (*pc)[0].opcode, op2;
1809 int if1, go1, if2, go2, end;
1810 struct type *int_type = builtin_type (exp->gdbarch)->builtin_int;
1811
1812 /* If we're looking at a constant expression, just push its value. */
1813 {
1814 struct value *v = maybe_const_expr (pc);
1815
1816 if (v)
1817 {
1818 ax_const_l (ax, value_as_long (v));
1819 value->kind = axs_rvalue;
1820 value->type = check_typedef (value_type (v));
1821 return;
1822 }
1823 }
1824
1825 /* Otherwise, go ahead and generate code for it. */
1826 switch (op)
1827 {
1828 /* Binary arithmetic operators. */
1829 case BINOP_ADD:
1830 case BINOP_SUB:
1831 case BINOP_MUL:
1832 case BINOP_DIV:
1833 case BINOP_REM:
1834 case BINOP_LSH:
1835 case BINOP_RSH:
1836 case BINOP_SUBSCRIPT:
1837 case BINOP_BITWISE_AND:
1838 case BINOP_BITWISE_IOR:
1839 case BINOP_BITWISE_XOR:
1840 case BINOP_EQUAL:
1841 case BINOP_NOTEQUAL:
1842 case BINOP_LESS:
1843 case BINOP_GTR:
1844 case BINOP_LEQ:
1845 case BINOP_GEQ:
1846 (*pc)++;
1847 gen_expr (exp, pc, ax, &value1);
1848 gen_usual_unary (exp, ax, &value1);
1849 gen_expr_binop_rest (exp, op, pc, ax, value, &value1, &value2);
1850 break;
1851
1852 case BINOP_LOGICAL_AND:
1853 (*pc)++;
1854 /* Generate the obvious sequence of tests and jumps. */
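/* Roughly, the emitted shape is (labels stand for offsets patched in
   via ax_label; an illustrative sketch, not literal bytecode output):
        <A>
        if_goto L1       A nonzero: go test B
        goto Lzero       A zero: the whole && is 0
   L1:  <B>
        if_goto L2
        goto Lzero
   L2:  const 1
        goto Lend
 Lzero: const 0
  Lend:                                                              */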
1855 gen_expr (exp, pc, ax, &value1);
1856 gen_usual_unary (exp, ax, &value1);
1857 if1 = ax_goto (ax, aop_if_goto);
1858 go1 = ax_goto (ax, aop_goto);
1859 ax_label (ax, if1, ax->len);
1860 gen_expr (exp, pc, ax, &value2);
1861 gen_usual_unary (exp, ax, &value2);
1862 if2 = ax_goto (ax, aop_if_goto);
1863 go2 = ax_goto (ax, aop_goto);
1864 ax_label (ax, if2, ax->len);
1865 ax_const_l (ax, 1);
1866 end = ax_goto (ax, aop_goto);
1867 ax_label (ax, go1, ax->len);
1868 ax_label (ax, go2, ax->len);
1869 ax_const_l (ax, 0);
1870 ax_label (ax, end, ax->len);
1871 value->kind = axs_rvalue;
1872 value->type = int_type;
1873 break;
1874
1875 case BINOP_LOGICAL_OR:
1876 (*pc)++;
1877 /* Generate the obvious sequence of tests and jumps. */
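/* As for &&, but short-circuiting to 1; roughly (illustrative
   sketch only):
        <A>
        if_goto Lone     A nonzero: the whole || is 1
        <B>
        if_goto Lone
        const 0
        goto Lend
  Lone: const 1
  Lend:                                                              */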
1878 gen_expr (exp, pc, ax, &value1);
1879 gen_usual_unary (exp, ax, &value1);
1880 if1 = ax_goto (ax, aop_if_goto);
1881 gen_expr (exp, pc, ax, &value2);
1882 gen_usual_unary (exp, ax, &value2);
1883 if2 = ax_goto (ax, aop_if_goto);
1884 ax_const_l (ax, 0);
1885 end = ax_goto (ax, aop_goto);
1886 ax_label (ax, if1, ax->len);
1887 ax_label (ax, if2, ax->len);
1888 ax_const_l (ax, 1);
1889 ax_label (ax, end, ax->len);
1890 value->kind = axs_rvalue;
1891 value->type = int_type;
1892 break;
1893
1894 case TERNOP_COND:
1895 (*pc)++;
1896 gen_expr (exp, pc, ax, &value1);
1897 gen_usual_unary (exp, ax, &value1);
1898 /* For (A ? B : C), it's easiest to generate subexpression
1899 bytecodes in order, but if_goto jumps on true, so we invert
1900 the sense of A. Then we can do B by dropping through, and
1901 jump to do C. */
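/* Illustrative shape of the emitted code (sketch only):
        !<A>
        if_goto Lelse    inverted test: taken when A is false
        <B>
        goto Lend
 Lelse: <C>
  Lend:                                                              */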
1902 gen_logical_not (ax, &value1, int_type);
1903 if1 = ax_goto (ax, aop_if_goto);
1904 gen_expr (exp, pc, ax, &value2);
1905 gen_usual_unary (exp, ax, &value2);
1906 end = ax_goto (ax, aop_goto);
1907 ax_label (ax, if1, ax->len);
1908 gen_expr (exp, pc, ax, &value3);
1909 gen_usual_unary (exp, ax, &value3);
1910 ax_label (ax, end, ax->len);
1911 /* This is arbitrary - what if B and C are incompatible types? */
1912 value->type = value2.type;
1913 value->kind = value2.kind;
1914 break;
1915
1916 case BINOP_ASSIGN:
1917 (*pc)++;
1918 if ((*pc)[0].opcode == OP_INTERNALVAR)
1919 {
1920 char *name = internalvar_name ((*pc)[1].internalvar);
1921 struct trace_state_variable *tsv;
1922
1923 (*pc) += 3;
1924 gen_expr (exp, pc, ax, value);
1925 tsv = find_trace_state_variable (name);
1926 if (tsv)
1927 {
1928 ax_tsv (ax, aop_setv, tsv->number);
1929 if (trace_kludge)
1930 ax_tsv (ax, aop_tracev, tsv->number);
1931 }
1932 else
1933 error (_("$%s is not a trace state variable, "
1934 "may not assign to it"), name);
1935 }
1936 else
1937 error (_("May only assign to trace state variables"));
1938 break;
1939
1940 case BINOP_ASSIGN_MODIFY:
1941 (*pc)++;
1942 op2 = (*pc)[0].opcode;
1943 (*pc)++;
1944 (*pc)++;
1945 if ((*pc)[0].opcode == OP_INTERNALVAR)
1946 {
1947 char *name = internalvar_name ((*pc)[1].internalvar);
1948 struct trace_state_variable *tsv;
1949
1950 (*pc) += 3;
1951 tsv = find_trace_state_variable (name);
1952 if (tsv)
1953 {
1954 /* The tsv will be the left half of the binary operation. */
1955 ax_tsv (ax, aop_getv, tsv->number);
1956 if (trace_kludge)
1957 ax_tsv (ax, aop_tracev, tsv->number);
1958 /* Trace state variables are always 64-bit integers. */
1959 value1.kind = axs_rvalue;
1960 value1.type = builtin_type (exp->gdbarch)->builtin_long_long;
1961 /* Now do right half of expression. */
1962 gen_expr_binop_rest (exp, op2, pc, ax, value, &value1, &value2);
1963 /* We have a result of the binary op, set the tsv. */
1964 ax_tsv (ax, aop_setv, tsv->number);
1965 if (trace_kludge)
1966 ax_tsv (ax, aop_tracev, tsv->number);
1967 }
1968 else
1969 error (_("$%s is not a trace state variable, "
1970 "may not assign to it"), name);
1971 }
1972 else
1973 error (_("May only assign to trace state variables"));
1974 break;
1975
1976 /* Note that we need to be a little subtle about generating code
1977 for comma. In C, we can do some optimizations here because
1978 we know the left operand is only being evaluated for effect.
1979 However, if the tracing kludge is in effect, then we always
1980 need to evaluate the left hand side fully, so that all the
1981 variables it mentions get traced. */
1982 case BINOP_COMMA:
1983 (*pc)++;
1984 gen_expr (exp, pc, ax, &value1);
1985 /* Don't just dispose of the left operand. We might be tracing,
1986 in which case we want to emit code to trace it if it's an
1987 lvalue. */
1988 gen_traced_pop (exp->gdbarch, ax, &value1);
1989 gen_expr (exp, pc, ax, value);
1990 /* It's the consumer's responsibility to trace the right operand. */
1991 break;
1992
1993 case OP_LONG: /* some integer constant */
1994 {
1995 struct type *type = (*pc)[1].type;
1996 LONGEST k = (*pc)[2].longconst;
1997
1998 (*pc) += 4;
1999 gen_int_literal (ax, value, k, type);
2000 }
2001 break;
2002
2003 case OP_VAR_VALUE:
2004 gen_var_ref (exp->gdbarch, ax, value, (*pc)[2].symbol);
2005
2006 if (value->optimized_out)
2007 error (_("`%s' has been optimized out, cannot use"),
2008 SYMBOL_PRINT_NAME ((*pc)[2].symbol));
2009
2010 (*pc) += 4;
2011 break;
2012
2013 case OP_REGISTER:
2014 {
2015 const char *name = &(*pc)[2].string;
2016 int reg;
2017
2018 (*pc) += 4 + BYTES_TO_EXP_ELEM ((*pc)[1].longconst + 1);
2019 reg = user_reg_map_name_to_regnum (exp->gdbarch, name, strlen (name));
2020 if (reg == -1)
2021 internal_error (__FILE__, __LINE__,
2022 _("Register $%s not available"), name);
2023 /* No support for tracing user registers yet. */
2024 if (reg >= gdbarch_num_regs (exp->gdbarch)
2025 + gdbarch_num_pseudo_regs (exp->gdbarch))
2026 error (_("'%s' is a user-register; "
2027 "GDB cannot yet trace user-register contents."),
2028 name);
2029 value->kind = axs_lvalue_register;
2030 value->u.reg = reg;
2031 value->type = register_type (exp->gdbarch, reg);
2032 }
2033 break;
2034
2035 case OP_INTERNALVAR:
2036 {
2037 struct internalvar *var = (*pc)[1].internalvar;
2038 const char *name = internalvar_name (var);
2039 struct trace_state_variable *tsv;
2040
2041 (*pc) += 3;
2042 tsv = find_trace_state_variable (name);
2043 if (tsv)
2044 {
2045 ax_tsv (ax, aop_getv, tsv->number);
2046 if (trace_kludge)
2047 ax_tsv (ax, aop_tracev, tsv->number);
2048 /* Trace state variables are always 64-bit integers. */
2049 value->kind = axs_rvalue;
2050 value->type = builtin_type (exp->gdbarch)->builtin_long_long;
2051 }
2052 else if (! compile_internalvar_to_ax (var, ax, value))
2053 error (_("$%s is not a trace state variable; GDB agent "
2054 "expressions cannot use convenience variables."), name);
2055 }
2056 break;
2057
2058 /* Weirdo operator: see comments for gen_repeat for details. */
2059 case BINOP_REPEAT:
2060 /* Note that gen_repeat handles its own argument evaluation. */
2061 (*pc)++;
2062 gen_repeat (exp, pc, ax, value);
2063 break;
2064
2065 case UNOP_CAST:
2066 {
2067 struct type *type = (*pc)[1].type;
2068
2069 (*pc) += 3;
2070 gen_expr (exp, pc, ax, value);
2071 gen_cast (ax, value, type);
2072 }
2073 break;
2074
2075 case UNOP_MEMVAL:
2076 {
2077 struct type *type = check_typedef ((*pc)[1].type);
2078
2079 (*pc) += 3;
2080 gen_expr (exp, pc, ax, value);
2081
2082 /* If we have an axs_rvalue or an axs_lvalue_memory, then we
2083 already have the right value on the stack. For
2084 axs_lvalue_register, we must convert. */
2085 if (value->kind == axs_lvalue_register)
2086 require_rvalue (ax, value);
2087
2088 value->type = type;
2089 value->kind = axs_lvalue_memory;
2090 }
2091 break;
2092
2093 case UNOP_PLUS:
2094 (*pc)++;
2095 /* + FOO is equivalent to 0 + FOO, which can be optimized. */
2096 gen_expr (exp, pc, ax, value);
2097 gen_usual_unary (exp, ax, value);
2098 break;
2099
2100 case UNOP_NEG:
2101 (*pc)++;
2102 /* -FOO is equivalent to 0 - FOO. */
2103 gen_int_literal (ax, &value1, 0,
2104 builtin_type (exp->gdbarch)->builtin_int);
2105 gen_usual_unary (exp, ax, &value1); /* shouldn't do much */
2106 gen_expr (exp, pc, ax, &value2);
2107 gen_usual_unary (exp, ax, &value2);
2108 gen_usual_arithmetic (exp, ax, &value1, &value2);
2109 gen_binop (ax, value, &value1, &value2, aop_sub, aop_sub, 1, "negation");
2110 break;
2111
2112 case UNOP_LOGICAL_NOT:
2113 (*pc)++;
2114 gen_expr (exp, pc, ax, value);
2115 gen_usual_unary (exp, ax, value);
2116 gen_logical_not (ax, value, int_type);
2117 break;
2118
2119 case UNOP_COMPLEMENT:
2120 (*pc)++;
2121 gen_expr (exp, pc, ax, value);
2122 gen_usual_unary (exp, ax, value);
2123 gen_integral_promotions (exp, ax, value);
2124 gen_complement (ax, value);
2125 break;
2126
2127 case UNOP_IND:
2128 (*pc)++;
2129 gen_expr (exp, pc, ax, value);
2130 gen_usual_unary (exp, ax, value);
2131 if (!pointer_type (value->type))
2132 error (_("Argument of unary `*' is not a pointer."));
2133 gen_deref (ax, value);
2134 break;
2135
2136 case UNOP_ADDR:
2137 (*pc)++;
2138 gen_expr (exp, pc, ax, value);
2139 gen_address_of (ax, value);
2140 break;
2141
2142 case UNOP_SIZEOF:
2143 (*pc)++;
2144 /* Notice that gen_sizeof handles its own operand, unlike most
2145 of the other unary operator functions. This is because we
2146 have to throw away the code we generate. */
2147 gen_sizeof (exp, pc, ax, value,
2148 builtin_type (exp->gdbarch)->builtin_int);
2149 break;
2150
2151 case STRUCTOP_STRUCT:
2152 case STRUCTOP_PTR:
2153 {
2154 int length = (*pc)[1].longconst;
2155 char *name = &(*pc)[2].string;
2156
2157 (*pc) += 4 + BYTES_TO_EXP_ELEM (length + 1);
2158 gen_expr (exp, pc, ax, value);
2159 if (op == STRUCTOP_STRUCT)
2160 gen_struct_ref (exp, ax, value, name, ".", "structure or union");
2161 else if (op == STRUCTOP_PTR)
2162 gen_struct_ref (exp, ax, value, name, "->",
2163 "pointer to a structure or union");
2164 else
2165 /* If this `if' chain doesn't handle it, then the case list
2166 shouldn't mention it, and we shouldn't be here. */
2167 internal_error (__FILE__, __LINE__,
2168 _("gen_expr: unhandled struct case"));
2169 }
2170 break;
2171
2172 case OP_THIS:
2173 {
2174 struct symbol *sym, *func;
2175 struct block *b;
2176 const struct language_defn *lang;
2177
2178 b = block_for_pc (ax->scope);
2179 func = block_linkage_function (b);
2180 lang = language_def (SYMBOL_LANGUAGE (func));
2181
2182 sym = lookup_language_this (lang, b);
2183 if (!sym)
2184 error (_("no `%s' found"), lang->la_name_of_this);
2185
2186 gen_var_ref (exp->gdbarch, ax, value, sym);
2187
2188 if (value->optimized_out)
2189 error (_("`%s' has been optimized out, cannot use"),
2190 SYMBOL_PRINT_NAME (sym));
2191
2192 (*pc) += 2;
2193 }
2194 break;
2195
2196 case OP_SCOPE:
2197 {
2198 struct type *type = (*pc)[1].type;
2199 int length = longest_to_int ((*pc)[2].longconst);
2200 char *name = &(*pc)[3].string;
2201 int found;
2202
2203 found = gen_aggregate_elt_ref (exp, ax, value, type, name,
2204 "?", "??");
2205 if (!found)
2206 error (_("There is no field named %s"), name);
2207 (*pc) += 5 + BYTES_TO_EXP_ELEM (length + 1);
2208 }
2209 break;
2210
2211 case OP_TYPE:
2212 error (_("Attempt to use a type name as an expression."));
2213
2214 default:
2215 error (_("Unsupported operator %s (%d) in expression."),
2216 op_name (exp, op), op);
2217 }
2218 }
2219
2220 /* This handles the middle-to-right-side of code generation for binary
2221 expressions, which is shared between regular binary operations and
2222 assign-modify (+= and friends) expressions. */
2223
2224 static void
2225 gen_expr_binop_rest (struct expression *exp,
2226 enum exp_opcode op, union exp_element **pc,
2227 struct agent_expr *ax, struct axs_value *value,
2228 struct axs_value *value1, struct axs_value *value2)
2229 {
2230 struct type *int_type = builtin_type (exp->gdbarch)->builtin_int;
2231
2232 gen_expr (exp, pc, ax, value2);
2233 gen_usual_unary (exp, ax, value2);
2234 gen_usual_arithmetic (exp, ax, value1, value2);
2235 switch (op)
2236 {
2237 case BINOP_ADD:
2238 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
2239 && pointer_type (value2->type))
2240 {
2241 /* Swap the values and proceed normally. */
2242 ax_simple (ax, aop_swap);
2243 gen_ptradd (ax, value, value2, value1);
2244 }
2245 else if (pointer_type (value1->type)
2246 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2247 gen_ptradd (ax, value, value1, value2);
2248 else
2249 gen_binop (ax, value, value1, value2,
2250 aop_add, aop_add, 1, "addition");
2251 break;
2252 case BINOP_SUB:
2253 if (pointer_type (value1->type)
2254 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2255 gen_ptrsub (ax, value, value1, value2);
2256 else if (pointer_type (value1->type)
2257 && pointer_type (value2->type))
2258 /* FIXME --- result type should be ptrdiff_t */
2259 gen_ptrdiff (ax, value, value1, value2,
2260 builtin_type (exp->gdbarch)->builtin_long);
2261 else
2262 gen_binop (ax, value, value1, value2,
2263 aop_sub, aop_sub, 1, "subtraction");
2264 break;
2265 case BINOP_MUL:
2266 gen_binop (ax, value, value1, value2,
2267 aop_mul, aop_mul, 1, "multiplication");
2268 break;
2269 case BINOP_DIV:
2270 gen_binop (ax, value, value1, value2,
2271 aop_div_signed, aop_div_unsigned, 1, "division");
2272 break;
2273 case BINOP_REM:
2274 gen_binop (ax, value, value1, value2,
2275 aop_rem_signed, aop_rem_unsigned, 1, "remainder");
2276 break;
2277 case BINOP_LSH:
2278 gen_binop (ax, value, value1, value2,
2279 aop_lsh, aop_lsh, 1, "left shift");
2280 break;
2281 case BINOP_RSH:
2282 gen_binop (ax, value, value1, value2,
2283 aop_rsh_signed, aop_rsh_unsigned, 1, "right shift");
2284 break;
2285 case BINOP_SUBSCRIPT:
2286 {
2287 struct type *type;
2288
2289 if (binop_types_user_defined_p (op, value1->type, value2->type))
2290 {
2291 error (_("cannot subscript requested type: "
2292 "cannot call user defined functions"));
2293 }
2294 else
2295 {
2296 /* If the user attempts to subscript something that is not
2297 an array or pointer type (like a plain int variable for
2298 example), then report this as an error. */
2299 type = check_typedef (value1->type);
2300 if (TYPE_CODE (type) != TYPE_CODE_ARRAY
2301 && TYPE_CODE (type) != TYPE_CODE_PTR)
2302 {
2303 if (TYPE_NAME (type))
2304 error (_("cannot subscript something of type `%s'"),
2305 TYPE_NAME (type));
2306 else
2307 error (_("cannot subscript requested type"));
2308 }
2309 }
2310
2311 if (!is_integral_type (value2->type))
2312 error (_("Argument to arithmetic operation "
2313 "not a number or boolean."));
2314
2315 gen_ptradd (ax, value, value1, value2);
2316 gen_deref (ax, value);
2317 break;
2318 }
2319 case BINOP_BITWISE_AND:
2320 gen_binop (ax, value, value1, value2,
2321 aop_bit_and, aop_bit_and, 0, "bitwise and");
2322 break;
2323
2324 case BINOP_BITWISE_IOR:
2325 gen_binop (ax, value, value1, value2,
2326 aop_bit_or, aop_bit_or, 0, "bitwise or");
2327 break;
2328
2329 case BINOP_BITWISE_XOR:
2330 gen_binop (ax, value, value1, value2,
2331 aop_bit_xor, aop_bit_xor, 0, "bitwise exclusive-or");
2332 break;
2333
2334 case BINOP_EQUAL:
2335 gen_equal (ax, value, value1, value2, int_type);
2336 break;
2337
2338 case BINOP_NOTEQUAL:
2339 gen_equal (ax, value, value1, value2, int_type);
2340 gen_logical_not (ax, value, int_type);
2341 break;
2342
2343 case BINOP_LESS:
2344 gen_less (ax, value, value1, value2, int_type);
2345 break;
2346
2347 case BINOP_GTR:
2348 ax_simple (ax, aop_swap);
2349 gen_less (ax, value, value1, value2, int_type);
2350 break;
2351
2352 case BINOP_LEQ:
2353 ax_simple (ax, aop_swap);
2354 gen_less (ax, value, value1, value2, int_type);
2355 gen_logical_not (ax, value, int_type);
2356 break;
2357
2358 case BINOP_GEQ:
2359 gen_less (ax, value, value1, value2, int_type);
2360 gen_logical_not (ax, value, int_type);
2361 break;
2362
2363 default:
2364 /* We should only list operators in the outer case statement
2365 that we actually handle in the inner case statement. */
2366 internal_error (__FILE__, __LINE__,
2367 _("gen_expr: op case sets don't match"));
2368 }
2369 }
2370 \f
2371
2372 /* Given a single variable and a scope, generate bytecodes to trace
2373 its value. This is for use in situations where we have only a
2374 variable's name, and no parsed expression; for instance, when the
2375 name comes from a list of local variables of a function. */
2376
2377 struct agent_expr *
2378 gen_trace_for_var (CORE_ADDR scope, struct gdbarch *gdbarch,
2379 struct symbol *var)
2380 {
2381 struct cleanup *old_chain = 0;
2382 struct agent_expr *ax = new_agent_expr (gdbarch, scope);
2383 struct axs_value value;
2384
2385 old_chain = make_cleanup_free_agent_expr (ax);
2386
2387 trace_kludge = 1;
2388 gen_var_ref (gdbarch, ax, &value, var);
2389
2390 /* If there is no actual variable to trace, flag it by returning
2391 a null pointer instead of an agent expression. */
2392 if (value.optimized_out)
2393 {
2394 do_cleanups (old_chain);
2395 return NULL;
2396 }
2397
2398 /* Make sure we record the final object, and get rid of it. */
2399 gen_traced_pop (gdbarch, ax, &value);
2400
2401 /* Oh, and terminate. */
2402 ax_simple (ax, aop_end);
2403
2404 /* We have successfully built the agent expr, so cancel the cleanup
2405 request. If we add more cleanups that we always want done, this
2406 will have to get more complicated. */
2407 discard_cleanups (old_chain);
2408 return ax;
2409 }
2410
2411 /* Generating bytecode from GDB expressions: driver */
2412
2413 /* Given a GDB expression EXPR, return bytecode to trace its value.
2414 The result will use the `trace' and `trace_quick' bytecodes to
2415 record the value of all memory touched by the expression. The
2416 caller can then use the ax_reqs function to discover which
2417 registers it relies upon. */
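/* A minimal sketch of how this is meant to be used (mirroring
   agent_command below; ADDR and the expression string are just
   placeholders):

       struct expression *e = parse_expression ("x + y");
       struct agent_expr *ax = gen_trace_for_expr (addr, e);
       ax_reqs (ax);	(gather register/stack requirements)
       ax_print (gdb_stdout, ax);
*/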
2418 struct agent_expr *
2419 gen_trace_for_expr (CORE_ADDR scope, struct expression *expr)
2420 {
2421 struct cleanup *old_chain = 0;
2422 struct agent_expr *ax = new_agent_expr (expr->gdbarch, scope);
2423 union exp_element *pc;
2424 struct axs_value value;
2425
2426 old_chain = make_cleanup_free_agent_expr (ax);
2427
2428 pc = expr->elts;
2429 trace_kludge = 1;
2430 value.optimized_out = 0;
2431 gen_expr (expr, &pc, ax, &value);
2432
2433 /* Make sure we record the final object, and get rid of it. */
2434 gen_traced_pop (expr->gdbarch, ax, &value);
2435
2436 /* Oh, and terminate. */
2437 ax_simple (ax, aop_end);
2438
2439 /* We have successfully built the agent expr, so cancel the cleanup
2440 request. If we add more cleanups that we always want done, this
2441 will have to get more complicated. */
2442 discard_cleanups (old_chain);
2443 return ax;
2444 }
2445
2446 /* Given a GDB expression EXPR, return a bytecode sequence that will
2447 evaluate and return a result. The bytecodes will do a direct
2448 evaluation, using the current data on the target, rather than
2449 recording blocks of memory and registers for later use, as
2450 gen_trace_for_expr does. The generated bytecode sequence leaves
2451 the result of expression evaluation on the top of the stack. */
2452
2453 struct agent_expr *
2454 gen_eval_for_expr (CORE_ADDR scope, struct expression *expr)
2455 {
2456 struct cleanup *old_chain = 0;
2457 struct agent_expr *ax = new_agent_expr (expr->gdbarch, scope);
2458 union exp_element *pc;
2459 struct axs_value value;
2460
2461 old_chain = make_cleanup_free_agent_expr (ax);
2462
2463 pc = expr->elts;
2464 trace_kludge = 0;
2465 value.optimized_out = 0;
2466 gen_expr (expr, &pc, ax, &value);
2467
2468 require_rvalue (ax, &value);
2469
2470 /* Oh, and terminate. */
2471 ax_simple (ax, aop_end);
2472
2473 /* We have successfully built the agent expr, so cancel the cleanup
2474 request. If we add more cleanups that we always want done, this
2475 will have to get more complicated. */
2476 discard_cleanups (old_chain);
2477 return ax;
2478 }
2479
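/* Return bytecode that collects the return address for the frame
   whose code address is SCOPE, using the architecture's
   gdbarch_gen_return_address hook.  As with gen_trace_for_var, the
   result is wrapped with a traced pop and terminated with an `end'
   bytecode.  */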
2480 struct agent_expr *
2481 gen_trace_for_return_address (CORE_ADDR scope, struct gdbarch *gdbarch)
2482 {
2483 struct cleanup *old_chain = 0;
2484 struct agent_expr *ax = new_agent_expr (gdbarch, scope);
2485 struct axs_value value;
2486
2487 old_chain = make_cleanup_free_agent_expr (ax);
2488
2489 trace_kludge = 1;
2490
2491 gdbarch_gen_return_address (gdbarch, ax, &value, scope);
2492
2493 /* Make sure we record the final object, and get rid of it. */
2494 gen_traced_pop (gdbarch, ax, &value);
2495
2496 /* Oh, and terminate. */
2497 ax_simple (ax, aop_end);
2498
2499 /* We have successfully built the agent expr, so cancel the cleanup
2500 request. If we add more cleanups that we always want done, this
2501 will have to get more complicated. */
2502 discard_cleanups (old_chain);
2503 return ax;
2504 }
2505
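/* Implement the "maint agent" command: translate EXP into tracing
   bytecode for the current frame's PC and print it.  A leading "/"
   introduces collection options (handled by decode_agent_options);
   the special argument "$_ret" requests the return-address
   collection expression rather than a parsed expression.  */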
2506 static void
2507 agent_command (char *exp, int from_tty)
2508 {
2509 struct cleanup *old_chain = 0;
2510 struct expression *expr;
2511 struct agent_expr *agent;
2512 struct frame_info *fi = get_current_frame (); /* need current scope */
2513
2514 /* We don't deal with overlay debugging at the moment. We need to
2515 think more carefully about this. If you copy this code into
2516 another command, change the error message; the user shouldn't
2517 have to know anything about agent expressions. */
2518 if (overlay_debugging)
2519 error (_("GDB can't do agent expression translation with overlays."));
2520
2521 if (exp == 0)
2522 error_no_arg (_("expression to translate"));
2523
2524 trace_string_kludge = 0;
2525 if (*exp == '/')
2526 exp = decode_agent_options (exp);
2527
2528 /* Recognize the return address collection directive specially. Note
2529 that it is not really an expression of any sort. */
2530 if (strcmp (exp, "$_ret") == 0)
2531 {
2532 agent = gen_trace_for_return_address (get_frame_pc (fi),
2533 get_current_arch ());
2534 old_chain = make_cleanup_free_agent_expr (agent);
2535 }
2536 else
2537 {
2538 expr = parse_expression (exp);
2539 old_chain = make_cleanup (free_current_contents, &expr);
2540 agent = gen_trace_for_expr (get_frame_pc (fi), expr);
2541 make_cleanup_free_agent_expr (agent);
2542 }
2543
2544 ax_reqs (agent);
2545 ax_print (gdb_stdout, agent);
2546
2547 /* ax_reqs was called above to gather some general info about the
2548 expression before printing out the result. */
2549
2550 do_cleanups (old_chain);
2551 dont_repeat ();
2552 }
2553
2554 /* Parse the given expression, compile it into an agent expression
2555 that does direct evaluation, and display the resulting
2556 expression. */
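/* For instance (the variable name is purely illustrative, and the
   bytecode listing is elided):

     (gdb) maint agent-eval counter + 1

   prints the bytecode sequence that would compute the expression's
   value directly on the target.  */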
2557
2558 static void
2559 agent_eval_command (char *exp, int from_tty)
2560 {
2561 struct cleanup *old_chain = 0;
2562 struct expression *expr;
2563 struct agent_expr *agent;
2564 struct frame_info *fi = get_current_frame (); /* need current scope */
2565
2566 /* We don't deal with overlay debugging at the moment. We need to
2567 think more carefully about this. If you copy this code into
2568 another command, change the error message; the user shouldn't
2569 have to know anything about agent expressions. */
2570 if (overlay_debugging)
2571 error (_("GDB can't do agent expression translation with overlays."));
2572
2573 if (exp == 0)
2574 error_no_arg (_("expression to translate"));
2575
2576 expr = parse_expression (exp);
2577 old_chain = make_cleanup (free_current_contents, &expr);
2578 agent = gen_eval_for_expr (get_frame_pc (fi), expr);
2579 make_cleanup_free_agent_expr (agent);
2580 ax_reqs (agent);
2581 ax_print (gdb_stdout, agent);
2582
2583 /* ax_reqs was called above to gather some general info about the
2584 expression before printing out the result. */
2585
2586 do_cleanups (old_chain);
2587 dont_repeat ();
2588 }
2589 \f
2590
2591 /* Initialization code. */
2592
2593 void _initialize_ax_gdb (void);
2594 void
2595 _initialize_ax_gdb (void)
2596 {
2597 add_cmd ("agent", class_maintenance, agent_command,
2598 _("Translate an expression into "
2599 "remote agent bytecode for tracing."),
2600 &maintenancelist);
2601
2602 add_cmd ("agent-eval", class_maintenance, agent_eval_command,
2603 _("Translate an expression into remote "
2604 "agent bytecode for evaluation."),
2605 &maintenancelist);
2606 }