1 /* GDB-specific functions for operating on agent expressions.
2
3 Copyright (C) 1998-2001, 2003, 2007-2012 Free Software Foundation,
4 Inc.
5
6 This file is part of GDB.
7
8 This program is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
12
13 This program is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20
21 #include "defs.h"
22 #include "symtab.h"
23 #include "symfile.h"
24 #include "gdbtypes.h"
25 #include "language.h"
26 #include "value.h"
27 #include "expression.h"
28 #include "command.h"
29 #include "gdbcmd.h"
30 #include "frame.h"
31 #include "target.h"
32 #include "ax.h"
33 #include "ax-gdb.h"
34 #include "gdb_string.h"
35 #include "block.h"
36 #include "regcache.h"
37 #include "user-regs.h"
38 #include "language.h"
39 #include "dictionary.h"
40 #include "breakpoint.h"
41 #include "tracepoint.h"
42 #include "cp-support.h"
43 #include "arch-utils.h"
44
45 #include "valprint.h"
46 #include "c-lang.h"
47
48 /* To make sense of this file, you should read doc/agentexpr.texi.
49 Then look at the types and enums in ax-gdb.h. For the code itself,
50 look at gen_expr, towards the bottom; that's the main function that
51 looks at the GDB expressions and calls everything else to generate
52 code.
53
54 I'm beginning to wonder whether it wouldn't be nicer to internally
55 generate trees, with types, and then spit out the bytecode in
56 linear form afterwards; we could generate fewer `swap', `ext', and
57 `zero_ext' bytecodes that way; it would make good constant folding
58 easier, too. But at the moment, I think we should be willing to
59 pay for the simplicity of this code with less-than-optimal bytecode
60 strings.
61
62 Remember, "GBD" stands for "Great Britain, Dammit!" So be careful. */
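/* A rough sketch of what the output looks like (illustrative only;
   the exact opcodes depend on the architecture, where the symbol
   lives, and the trace flags below): for a 4-byte signed int local
   `x', the expression `x + 1' compiles to something like

       reg FP              -- push the virtual frame pointer
       const8 OFS, add     -- add x's frame offset (gen_var_ref)
       ref32, ext 32       -- fetch x and sign-extend it (gen_fetch)
       const8 1            -- the literal (gen_int_literal)
       add, ext 32         -- gen_binop, re-extending to catch overflow

   with a `trace_quick 4' slipped in before the fetch when the trace
   kludge below is enabled.  FP and OFS stand for whatever register
   and offset the target actually uses.  */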
63 \f
64
65
66 /* Prototypes for local functions. */
67
68 /* There's a standard order to the arguments of these functions:
69 union exp_element ** --- pointer into expression
70 struct agent_expr * --- agent expression buffer to generate code into
71 struct axs_value * --- describes value left on top of stack */
72
73 static struct value *const_var_ref (struct symbol *var);
74 static struct value *const_expr (union exp_element **pc);
75 static struct value *maybe_const_expr (union exp_element **pc);
76
77 static void gen_traced_pop (struct gdbarch *, struct agent_expr *,
78 struct axs_value *);
79
80 static void gen_sign_extend (struct agent_expr *, struct type *);
81 static void gen_extend (struct agent_expr *, struct type *);
82 static void gen_fetch (struct agent_expr *, struct type *);
83 static void gen_left_shift (struct agent_expr *, int);
84
85
86 static void gen_frame_args_address (struct gdbarch *, struct agent_expr *);
87 static void gen_frame_locals_address (struct gdbarch *, struct agent_expr *);
88 static void gen_offset (struct agent_expr *ax, int offset);
89 static void gen_sym_offset (struct agent_expr *, struct symbol *);
90 static void gen_var_ref (struct gdbarch *, struct agent_expr *ax,
91 struct axs_value *value, struct symbol *var);
92
93
94 static void gen_int_literal (struct agent_expr *ax,
95 struct axs_value *value,
96 LONGEST k, struct type *type);
97
98 static void gen_usual_unary (struct expression *exp, struct agent_expr *ax,
99 struct axs_value *value);
100 static int type_wider_than (struct type *type1, struct type *type2);
101 static struct type *max_type (struct type *type1, struct type *type2);
102 static void gen_conversion (struct agent_expr *ax,
103 struct type *from, struct type *to);
104 static int is_nontrivial_conversion (struct type *from, struct type *to);
105 static void gen_usual_arithmetic (struct expression *exp,
106 struct agent_expr *ax,
107 struct axs_value *value1,
108 struct axs_value *value2);
109 static void gen_integral_promotions (struct expression *exp,
110 struct agent_expr *ax,
111 struct axs_value *value);
112 static void gen_cast (struct agent_expr *ax,
113 struct axs_value *value, struct type *type);
114 static void gen_scale (struct agent_expr *ax,
115 enum agent_op op, struct type *type);
116 static void gen_ptradd (struct agent_expr *ax, struct axs_value *value,
117 struct axs_value *value1, struct axs_value *value2);
118 static void gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
119 struct axs_value *value1, struct axs_value *value2);
120 static void gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
121 struct axs_value *value1, struct axs_value *value2,
122 struct type *result_type);
123 static void gen_binop (struct agent_expr *ax,
124 struct axs_value *value,
125 struct axs_value *value1,
126 struct axs_value *value2,
127 enum agent_op op,
128 enum agent_op op_unsigned, int may_carry, char *name);
129 static void gen_logical_not (struct agent_expr *ax, struct axs_value *value,
130 struct type *result_type);
131 static void gen_complement (struct agent_expr *ax, struct axs_value *value);
132 static void gen_deref (struct agent_expr *, struct axs_value *);
133 static void gen_address_of (struct agent_expr *, struct axs_value *);
134 static void gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
135 struct axs_value *value,
136 struct type *type, int start, int end);
137 static void gen_primitive_field (struct expression *exp,
138 struct agent_expr *ax,
139 struct axs_value *value,
140 int offset, int fieldno, struct type *type);
141 static int gen_struct_ref_recursive (struct expression *exp,
142 struct agent_expr *ax,
143 struct axs_value *value,
144 char *field, int offset,
145 struct type *type);
146 static void gen_struct_ref (struct expression *exp, struct agent_expr *ax,
147 struct axs_value *value,
148 char *field,
149 char *operator_name, char *operand_name);
150 static void gen_static_field (struct gdbarch *gdbarch,
151 struct agent_expr *ax, struct axs_value *value,
152 struct type *type, int fieldno);
153 static void gen_repeat (struct expression *exp, union exp_element **pc,
154 struct agent_expr *ax, struct axs_value *value);
155 static void gen_sizeof (struct expression *exp, union exp_element **pc,
156 struct agent_expr *ax, struct axs_value *value,
157 struct type *size_type);
158 static void gen_expr_binop_rest (struct expression *exp,
159 enum exp_opcode op, union exp_element **pc,
160 struct agent_expr *ax,
161 struct axs_value *value,
162 struct axs_value *value1,
163 struct axs_value *value2);
164
165 static void agent_command (char *exp, int from_tty);
166 \f
167
168 /* Detecting constant expressions. */
169
170 /* If the variable reference at *PC is a constant, return its value.
171 Otherwise, return zero.
172
173 Hey, Wally! How can a variable reference be a constant?
174
175 Well, Beav, this function really handles the OP_VAR_VALUE operator,
176 not specifically variable references. GDB uses OP_VAR_VALUE to
177 refer to any kind of symbolic reference: function names, enum
178 elements, and goto labels are all handled through the OP_VAR_VALUE
179 operator, even though they're constants. It makes sense given the
180 situation.
181
182 Gee, Wally, don'cha wonder sometimes if data representations that
183 subvert commonly accepted definitions of terms in favor of heavily
184 context-specific interpretations are really just a tool of the
185 programming hegemony to preserve their power and exclude the
186 proletariat? */
187
188 static struct value *
189 const_var_ref (struct symbol *var)
190 {
191 struct type *type = SYMBOL_TYPE (var);
192
193 switch (SYMBOL_CLASS (var))
194 {
195 case LOC_CONST:
196 return value_from_longest (type, (LONGEST) SYMBOL_VALUE (var));
197
198 case LOC_LABEL:
199 return value_from_pointer (type, (CORE_ADDR) SYMBOL_VALUE_ADDRESS (var));
200
201 default:
202 return 0;
203 }
204 }
205
206
207 /* If the expression starting at *PC has a constant value, return it.
208 Otherwise, return zero. If we return a value, then *PC will be
209 advanced to the end of it. If we return zero, *PC could be
210 anywhere. */
211 static struct value *
212 const_expr (union exp_element **pc)
213 {
214 enum exp_opcode op = (*pc)->opcode;
215 struct value *v1;
216
217 switch (op)
218 {
219 case OP_LONG:
220 {
221 struct type *type = (*pc)[1].type;
222 LONGEST k = (*pc)[2].longconst;
223
224 (*pc) += 4;
225 return value_from_longest (type, k);
226 }
227
228 case OP_VAR_VALUE:
229 {
230 struct value *v = const_var_ref ((*pc)[2].symbol);
231
232 (*pc) += 4;
233 return v;
234 }
235
236 /* We could add more operators in here. */
237
238 case UNOP_NEG:
239 (*pc)++;
240 v1 = const_expr (pc);
241 if (v1)
242 return value_neg (v1);
243 else
244 return 0;
245
246 default:
247 return 0;
248 }
249 }
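/* A note on the magic `4's above (a sketch of the exp_element layout;
   see expression.h): the parser writes each operand group bracketed
   by its opcode, so an OP_LONG occupies four elements,

       { OP_LONG, <type>, <value>, OP_LONG }

   and an OP_VAR_VALUE is { OP_VAR_VALUE, <block>, <symbol>,
   OP_VAR_VALUE }; that is why the code reads (*pc)[2].symbol and then
   advances *PC by four.  */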
250
251
252 /* Like const_expr, but guarantee also that *PC is undisturbed if the
253 expression is not constant. */
254 static struct value *
255 maybe_const_expr (union exp_element **pc)
256 {
257 union exp_element *tentative_pc = *pc;
258 struct value *v = const_expr (&tentative_pc);
259
260 /* If we got a value, then update the real PC. */
261 if (v)
262 *pc = tentative_pc;
263
264 return v;
265 }
266 \f
267
268 /* Generating bytecode from GDB expressions: general assumptions */
269
270 /* Here are a few general assumptions made throughout the code; if you
271 want to make a change that contradicts one of these, then you'd
272 better scan things pretty thoroughly.
273
274 - We assume that all values occupy one stack element. For example,
275 sometimes we'll swap to get at the left argument to a binary
276 operator. If we decide that void values should occupy no stack
277 elements, or that synthetic arrays (whose size is determined at
278 run time, created by the `@' operator) should occupy two stack
279 elements (address and length), then this will cause trouble.
280
281 - We assume the stack elements are infinitely wide, and that we
282 don't have to worry what happens if the user requests an
283 operation that is wider than the actual interpreter's stack.
284 That is, it's up to the interpreter to handle directly all the
285 integer widths the user has access to. (Woe betide the language
286 with bignums!)
287
288 - We don't support side effects. Thus, we don't have to worry about
289 GCC's generalized lvalues, function calls, etc.
290
291 - We don't support floating point. Many places where we switch on
292 some type don't bother to include cases for floating point; there
293 may be even more subtle ways this assumption exists. For
294 example, the arguments to % must be integers.
295
296 - We assume all subexpressions have a static, unchanging type. If
297 we tried to support convenience variables, this would be a
298 problem.
299
300 - All values on the stack should always be fully zero- or
301 sign-extended.
302
303 (I wasn't sure whether to choose this or its opposite --- that
304 only addresses are assumed extended --- but it turns out that
305 neither convention completely eliminates spurious extend
306 operations (if everything is always extended, then you have to
307 extend after add, because it could overflow; if nothing is
308 extended, then you end up producing extends whenever you change
309 sizes), and this is simpler.) */
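/* An example of the last assumption at work (a sketch, not an extra
   rule): adding two 16-bit signed values can overflow 16 bits on the
   arbitrarily wide stack, so the generated code follows the `add'
   with an `ext 16', preserving the invariant that the stack always
   holds the correctly sign- or zero-extended value of its C type.
   That is what the MAY_CARRY argument of gen_binop and the
   gen_extend calls after pointer arithmetic are about.  */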
310 \f
311
312 /* Generating bytecode from GDB expressions: the `trace' kludge */
313
314 /* The compiler in this file is a general-purpose mechanism for
315 translating GDB expressions into bytecode. One ought to be able to
316 find a million and one uses for it.
317
318 However, at the moment it is HOPELESSLY BRAIN-DAMAGED for the sake
319 of expediency. Let he who is without sin cast the first stone.
320
321 For the data tracing facility, we need to insert `trace' bytecodes
322 before each data fetch; this records all the memory that the
323 expression touches in the course of evaluation, so that memory will
324 be available when the user later tries to evaluate the expression
325 in GDB.
326
327 This should be done (I think) in a post-processing pass, that walks
328 an arbitrary agent expression and inserts `trace' operations at the
329 appropriate points. But it's much faster to just hack them
330 directly into the code. And since we're in a crunch, that's what
331 I've done.
332
333 Setting the flag trace_kludge to non-zero enables the code that
334 emits the trace bytecodes at the appropriate points. */
335 int trace_kludge;
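/* Roughly what the flag changes (an illustration; see gen_fetch and
   gen_traced_pop below): with trace_kludge set, fetching a 4-byte
   object becomes

       trace_quick 4
       ref32

   instead of a bare `ref32', so the four bytes being read get
   recorded for later use in GDB.  */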
336
337 /* Inspired by trace_kludge, this indicates that pointers to chars
338 should get an added tracenz bytecode to record nonzero bytes, up to
339 a length that is the value of trace_string_kludge. */
340 int trace_string_kludge;
341
342 /* Scan for all static fields in the given class, including any base
343 classes, and generate tracing bytecodes for each. */
344
345 static void
346 gen_trace_static_fields (struct gdbarch *gdbarch,
347 struct agent_expr *ax,
348 struct type *type)
349 {
350 int i, nbases = TYPE_N_BASECLASSES (type);
351 struct axs_value value;
352
353 CHECK_TYPEDEF (type);
354
355 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
356 {
357 if (field_is_static (&TYPE_FIELD (type, i)))
358 {
359 gen_static_field (gdbarch, ax, &value, type, i);
360 if (value.optimized_out)
361 continue;
362 switch (value.kind)
363 {
364 case axs_lvalue_memory:
365 {
366 int length = TYPE_LENGTH (check_typedef (value.type));
367
368 ax_const_l (ax, length);
369 ax_simple (ax, aop_trace);
370 }
371 break;
372
373 case axs_lvalue_register:
374 /* We don't actually need the register's value to be pushed,
375 just note that we need it to be collected. */
376 ax_reg_mask (ax, value.u.reg);
377
378 default:
379 break;
380 }
381 }
382 }
383
384 /* Now scan through base classes recursively. */
385 for (i = 0; i < nbases; i++)
386 {
387 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
388
389 gen_trace_static_fields (gdbarch, ax, basetype);
390 }
391 }
392
393 /* Trace the lvalue on the stack, if it needs it. In either case, pop
394 the value. Useful on the left side of a comma, and at the end of
395 an expression being used for tracing. */
396 static void
397 gen_traced_pop (struct gdbarch *gdbarch,
398 struct agent_expr *ax, struct axs_value *value)
399 {
400 int string_trace = 0;
401 if (trace_string_kludge
402 && TYPE_CODE (value->type) == TYPE_CODE_PTR
403 && c_textual_element_type (check_typedef (TYPE_TARGET_TYPE (value->type)),
404 's'))
405 string_trace = 1;
406
407 if (trace_kludge)
408 switch (value->kind)
409 {
410 case axs_rvalue:
411 if (string_trace)
412 {
413 ax_const_l (ax, trace_string_kludge);
414 ax_simple (ax, aop_tracenz);
415 }
416 else
417 /* We don't trace rvalues, just the lvalues necessary to
418 produce them. So just dispose of this value. */
419 ax_simple (ax, aop_pop);
420 break;
421
422 case axs_lvalue_memory:
423 {
424 int length = TYPE_LENGTH (check_typedef (value->type));
425
426 if (string_trace)
427 ax_simple (ax, aop_dup);
428
429 /* There's no point in trying to use a trace_quick bytecode
430 here, since "trace_quick SIZE pop" is three bytes, whereas
431 "const8 SIZE trace" is also three bytes, does the same
432 thing, and the simplest code which generates that will also
433 work correctly for objects with large sizes. */
434 ax_const_l (ax, length);
435 ax_simple (ax, aop_trace);
436
437 if (string_trace)
438 {
439 ax_simple (ax, aop_ref32);
440 ax_const_l (ax, trace_string_kludge);
441 ax_simple (ax, aop_tracenz);
442 }
443 }
444 break;
445
446 case axs_lvalue_register:
447 /* We don't actually need the register's value to be on the
448 stack, and the target will get heartburn if the register is
449 larger than will fit in a stack, so just mark it for
450 collection and be done with it. */
451 ax_reg_mask (ax, value->u.reg);
452
453 /* But if the register points to a string, assume the value
454 will fit on the stack and push it anyway. */
455 if (string_trace)
456 {
457 ax_reg (ax, value->u.reg);
458 ax_const_l (ax, trace_string_kludge);
459 ax_simple (ax, aop_tracenz);
460 }
461 break;
462 }
463 else
464 /* If we're not tracing, just pop the value. */
465 ax_simple (ax, aop_pop);
466
467 /* To trace C++ classes with static fields stored elsewhere. */
468 if (trace_kludge
469 && (TYPE_CODE (value->type) == TYPE_CODE_STRUCT
470 || TYPE_CODE (value->type) == TYPE_CODE_UNION))
471 gen_trace_static_fields (gdbarch, ax, value->type);
472 }
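/* A sketch of what the function above emits for a 4-byte lvalue in
   memory whose address is on the stack: with trace_kludge set,

       const8 4
       trace          -- records those 4 bytes, consuming the address

   and with tracing off, simply `pop'.  The register case reduces to
   updating the collection mask, and plain rvalues are just popped.  */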
473 \f
474
475
476 /* Generating bytecode from GDB expressions: helper functions */
477
478 /* Assume that the lower bits of the top of the stack hold a value of
479 type TYPE, and the upper bits are zero. Sign-extend if necessary. */
480 static void
481 gen_sign_extend (struct agent_expr *ax, struct type *type)
482 {
483 /* Do we need to sign-extend this? */
484 if (!TYPE_UNSIGNED (type))
485 ax_ext (ax, TYPE_LENGTH (type) * TARGET_CHAR_BIT);
486 }
487
488
489 /* Assume the lower bits of the top of the stack hold a value of type
490 TYPE, and the upper bits are garbage. Sign-extend or truncate as
491 needed. */
492 static void
493 gen_extend (struct agent_expr *ax, struct type *type)
494 {
495 int bits = TYPE_LENGTH (type) * TARGET_CHAR_BIT;
496
497 /* I just had to. */
498 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, bits));
499 }
500
501
502 /* Assume that the top of the stack contains a value of type "pointer
503 to TYPE"; generate code to fetch its value. Note that TYPE is the
504 target type, not the pointer type. */
505 static void
506 gen_fetch (struct agent_expr *ax, struct type *type)
507 {
508 if (trace_kludge)
509 {
510 /* Record the area of memory we're about to fetch. */
511 ax_trace_quick (ax, TYPE_LENGTH (type));
512 }
513
514 if (TYPE_CODE (type) == TYPE_CODE_RANGE)
515 type = TYPE_TARGET_TYPE (type);
516
517 switch (TYPE_CODE (type))
518 {
519 case TYPE_CODE_PTR:
520 case TYPE_CODE_REF:
521 case TYPE_CODE_ENUM:
522 case TYPE_CODE_INT:
523 case TYPE_CODE_CHAR:
524 case TYPE_CODE_BOOL:
525 /* It's a scalar value, so we know how to dereference it. How
526 many bytes long is it? */
527 switch (TYPE_LENGTH (type))
528 {
529 case 8 / TARGET_CHAR_BIT:
530 ax_simple (ax, aop_ref8);
531 break;
532 case 16 / TARGET_CHAR_BIT:
533 ax_simple (ax, aop_ref16);
534 break;
535 case 32 / TARGET_CHAR_BIT:
536 ax_simple (ax, aop_ref32);
537 break;
538 case 64 / TARGET_CHAR_BIT:
539 ax_simple (ax, aop_ref64);
540 break;
541
542 /* Either our caller shouldn't have asked us to dereference
543 that pointer (other code's fault), or we're not
544 implementing something we should be (this code's fault).
545 In any case, it's a bug the user shouldn't see. */
546 default:
547 internal_error (__FILE__, __LINE__,
548 _("gen_fetch: strange size"));
549 }
550
551 gen_sign_extend (ax, type);
552 break;
553
554 default:
555 /* Our caller requested us to dereference a pointer from an unsupported
556 type. Error out and give callers a chance to handle the failure
557 gracefully. */
558 error (_("gen_fetch: Unsupported type code `%s'."),
559 TYPE_NAME (type));
560 }
561 }
562
563
564 /* Generate code to left shift the top of the stack by DISTANCE bits, or
565 right shift it by -DISTANCE bits if DISTANCE < 0. This generates
566 unsigned (logical) right shifts. */
567 static void
568 gen_left_shift (struct agent_expr *ax, int distance)
569 {
570 if (distance > 0)
571 {
572 ax_const_l (ax, distance);
573 ax_simple (ax, aop_lsh);
574 }
575 else if (distance < 0)
576 {
577 ax_const_l (ax, -distance);
578 ax_simple (ax, aop_rsh_unsigned);
579 }
580 }
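/* For instance, gen_left_shift (ax, -3) emits

       const8 3
       rsh_unsigned

   i.e. a logical right shift by three bits, and a distance of zero
   emits nothing at all.  */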
581 \f
582
583
584 /* Generating bytecode from GDB expressions: symbol references */
585
586 /* Generate code to push the base address of the argument portion of
587 the top stack frame. */
588 static void
589 gen_frame_args_address (struct gdbarch *gdbarch, struct agent_expr *ax)
590 {
591 int frame_reg;
592 LONGEST frame_offset;
593
594 gdbarch_virtual_frame_pointer (gdbarch,
595 ax->scope, &frame_reg, &frame_offset);
596 ax_reg (ax, frame_reg);
597 gen_offset (ax, frame_offset);
598 }
599
600
601 /* Generate code to push the base address of the locals portion of the
602 top stack frame. */
603 static void
604 gen_frame_locals_address (struct gdbarch *gdbarch, struct agent_expr *ax)
605 {
606 int frame_reg;
607 LONGEST frame_offset;
608
609 gdbarch_virtual_frame_pointer (gdbarch,
610 ax->scope, &frame_reg, &frame_offset);
611 ax_reg (ax, frame_reg);
612 gen_offset (ax, frame_offset);
613 }
614
615
616 /* Generate code to add OFFSET to the top of the stack. Try to
617 generate short and readable code. We use this for getting to
618 variables on the stack, and structure members. If we were
619 programming in ML, it would be clearer why these are the same
620 thing. */
621 static void
622 gen_offset (struct agent_expr *ax, int offset)
623 {
624 /* It would suffice to simply push the offset and add it, but this
625 makes it easier to read positive and negative offsets in the
626 bytecode. */
627 if (offset > 0)
628 {
629 ax_const_l (ax, offset);
630 ax_simple (ax, aop_add);
631 }
632 else if (offset < 0)
633 {
634 ax_const_l (ax, -offset);
635 ax_simple (ax, aop_sub);
636 }
637 }
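/* For instance, gen_offset (ax, -8) emits

       const8 8
       sub

   rather than pushing -8 and adding, purely so that positive and
   negative offsets read naturally in the bytecode; an offset of zero
   emits nothing.  */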
638
639
640 /* In many cases, a symbol's value is the offset from some other
641 address (stack frame, base register, etc.).  Generate code to add
642 VAR's value to the top of the stack. */
643 static void
644 gen_sym_offset (struct agent_expr *ax, struct symbol *var)
645 {
646 gen_offset (ax, SYMBOL_VALUE (var));
647 }
648
649
650 /* Generate code for a variable reference to AX. The variable is the
651 symbol VAR. Set VALUE to describe the result. */
652
653 static void
654 gen_var_ref (struct gdbarch *gdbarch, struct agent_expr *ax,
655 struct axs_value *value, struct symbol *var)
656 {
657 /* Dereference any typedefs. */
658 value->type = check_typedef (SYMBOL_TYPE (var));
659 value->optimized_out = 0;
660
661 /* I'm imitating the code in read_var_value. */
662 switch (SYMBOL_CLASS (var))
663 {
664 case LOC_CONST: /* A constant, like an enum value. */
665 ax_const_l (ax, (LONGEST) SYMBOL_VALUE (var));
666 value->kind = axs_rvalue;
667 break;
668
669 case LOC_LABEL: /* A goto label, being used as a value. */
670 ax_const_l (ax, (LONGEST) SYMBOL_VALUE_ADDRESS (var));
671 value->kind = axs_rvalue;
672 break;
673
674 case LOC_CONST_BYTES:
675 internal_error (__FILE__, __LINE__,
676 _("gen_var_ref: LOC_CONST_BYTES "
677 "symbols are not supported"));
678
679 /* Variable at a fixed location in memory. Easy. */
680 case LOC_STATIC:
681 /* Push the address of the variable. */
682 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (var));
683 value->kind = axs_lvalue_memory;
684 break;
685
686 case LOC_ARG: /* var lives in argument area of frame */
687 gen_frame_args_address (gdbarch, ax);
688 gen_sym_offset (ax, var);
689 value->kind = axs_lvalue_memory;
690 break;
691
692 case LOC_REF_ARG: /* As above, but the frame slot really
693 holds the address of the variable. */
694 gen_frame_args_address (gdbarch, ax);
695 gen_sym_offset (ax, var);
696 /* Don't assume any particular pointer size. */
697 gen_fetch (ax, builtin_type (gdbarch)->builtin_data_ptr);
698 value->kind = axs_lvalue_memory;
699 break;
700
701 case LOC_LOCAL: /* var lives in locals area of frame */
702 gen_frame_locals_address (gdbarch, ax);
703 gen_sym_offset (ax, var);
704 value->kind = axs_lvalue_memory;
705 break;
706
707 case LOC_TYPEDEF:
708 error (_("Cannot compute value of typedef `%s'."),
709 SYMBOL_PRINT_NAME (var));
710 break;
711
712 case LOC_BLOCK:
713 ax_const_l (ax, BLOCK_START (SYMBOL_BLOCK_VALUE (var)));
714 value->kind = axs_rvalue;
715 break;
716
717 case LOC_REGISTER:
718 /* Don't generate any code at all; in the process of treating
719 this as an lvalue or rvalue, the caller will generate the
720 right code. */
721 value->kind = axs_lvalue_register;
722 value->u.reg = SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch);
723 break;
724
725 /* A lot like LOC_REF_ARG, but the pointer lives directly in a
726 register, not on the stack. Simpler than LOC_REGISTER
727 because it's just like any other case where the thing
728 has a real address. */
729 case LOC_REGPARM_ADDR:
730 ax_reg (ax, SYMBOL_REGISTER_OPS (var)->register_number (var, gdbarch));
731 value->kind = axs_lvalue_memory;
732 break;
733
734 case LOC_UNRESOLVED:
735 {
736 struct minimal_symbol *msym
737 = lookup_minimal_symbol (SYMBOL_LINKAGE_NAME (var), NULL, NULL);
738
739 if (!msym)
740 error (_("Couldn't resolve symbol `%s'."), SYMBOL_PRINT_NAME (var));
741
742 /* Push the address of the variable. */
743 ax_const_l (ax, SYMBOL_VALUE_ADDRESS (msym));
744 value->kind = axs_lvalue_memory;
745 }
746 break;
747
748 case LOC_COMPUTED:
749 /* FIXME: cagney/2004-01-26: It should be possible to
750 unconditionally call the SYMBOL_COMPUTED_OPS method when available.
751 Unfortunately DWARF 2 stores the frame-base (instead of the
752 function) location in a function's symbol. Oops! For the
753 moment enable this when/where applicable. */
754 SYMBOL_COMPUTED_OPS (var)->tracepoint_var_ref (var, gdbarch, ax, value);
755 break;
756
757 case LOC_OPTIMIZED_OUT:
758 /* Flag this, but don't say anything; leave it up to callers to
759 warn the user. */
760 value->optimized_out = 1;
761 break;
762
763 default:
764 error (_("Cannot find value of botched symbol `%s'."),
765 SYMBOL_PRINT_NAME (var));
766 break;
767 }
768 }
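/* Putting the pieces together, a LOC_LOCAL variable reference comes
   out as roughly (assuming the virtual frame pointer is register FP
   at offset FOFF, and the symbol's frame offset is OFS):

       reg FP
       const8 FOFF, add     -- gen_frame_locals_address
       const8 OFS, add      -- gen_sym_offset

   leaving only the variable's address on the stack, as an
   axs_lvalue_memory; nothing is fetched until some consumer calls
   require_rvalue / gen_fetch.  */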
769 \f
770
771
772 /* Generating bytecode from GDB expressions: literals */
773
774 static void
775 gen_int_literal (struct agent_expr *ax, struct axs_value *value, LONGEST k,
776 struct type *type)
777 {
778 ax_const_l (ax, k);
779 value->kind = axs_rvalue;
780 value->type = check_typedef (type);
781 }
782 \f
783
784
785 /* Generating bytecode from GDB expressions: unary conversions, casts */
786
787 /* Take what's on the top of the stack (as described by VALUE), and
788 try to make an rvalue out of it. Signal an error if we can't do
789 that. */
790 void
791 require_rvalue (struct agent_expr *ax, struct axs_value *value)
792 {
793 /* Only deal with scalars, structs and such may be too large
794 to fit in a stack entry. */
795 value->type = check_typedef (value->type);
796 if (TYPE_CODE (value->type) == TYPE_CODE_ARRAY
797 || TYPE_CODE (value->type) == TYPE_CODE_STRUCT
798 || TYPE_CODE (value->type) == TYPE_CODE_UNION
799 || TYPE_CODE (value->type) == TYPE_CODE_FUNC)
800 error (_("Value not scalar: cannot be an rvalue."));
801
802 switch (value->kind)
803 {
804 case axs_rvalue:
805 /* It's already an rvalue. */
806 break;
807
808 case axs_lvalue_memory:
809 /* The top of stack is the address of the object. Dereference. */
810 gen_fetch (ax, value->type);
811 break;
812
813 case axs_lvalue_register:
814 /* There's nothing on the stack, but value->u.reg is the
815 register number containing the value.
816
817 When we add floating-point support, this is going to have to
818 change. What about SPARC register pairs, for example? */
819 ax_reg (ax, value->u.reg);
820 gen_extend (ax, value->type);
821 break;
822 }
823
824 value->kind = axs_rvalue;
825 }
826
827
828 /* Assume the top of the stack is described by VALUE, and perform the
829 usual unary conversions. This is motivated by ANSI 6.2.2, but of
830 course GDB expressions are not ANSI; they're the mishmash union of
831 a bunch of languages. Rah.
832
833 NOTE! This function promises to produce an rvalue only when the
834 incoming value is of an appropriate type. In other words, the
835 consumer of the value this function produces may assume the value
836 is an rvalue only after checking its type.
837
838 The immediate issue is that if the user tries to use a structure or
839 union as an operand of, say, the `+' operator, we don't want to try
840 to convert that structure to an rvalue; require_rvalue will bomb on
841 structs and unions. Rather, we want to simply pass the struct
842 lvalue through unchanged, and let `+' raise an error. */
843
844 static void
845 gen_usual_unary (struct expression *exp, struct agent_expr *ax,
846 struct axs_value *value)
847 {
848 /* We don't have to generate any code for the usual integral
849 conversions, since values are always represented as full-width on
850 the stack. Should we tweak the type? */
851
852 /* Some types require special handling. */
853 switch (TYPE_CODE (value->type))
854 {
855 /* Functions get converted to a pointer to the function. */
856 case TYPE_CODE_FUNC:
857 value->type = lookup_pointer_type (value->type);
858 value->kind = axs_rvalue; /* Should always be true, but just in case. */
859 break;
860
861 /* Arrays get converted to a pointer to their first element, and
862 are no longer an lvalue. */
863 case TYPE_CODE_ARRAY:
864 {
865 struct type *elements = TYPE_TARGET_TYPE (value->type);
866
867 value->type = lookup_pointer_type (elements);
868 value->kind = axs_rvalue;
869 /* We don't need to generate any code; the address of the array
870 is also the address of its first element. */
871 }
872 break;
873
874 /* Don't try to convert structures and unions to rvalues. Let the
875 consumer signal an error. */
876 case TYPE_CODE_STRUCT:
877 case TYPE_CODE_UNION:
878 return;
879 }
880
881 /* If the value is an lvalue, dereference it. */
882 require_rvalue (ax, value);
883 }
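/* Examples of the conversions above (a sketch mirroring what the
   regular evaluator does): given `int a[10]', using `a' as an operand
   emits no code but turns the value into an `int *' rvalue; a bare
   function name likewise just becomes a pointer-to-function rvalue;
   an ordinary `int' lvalue in memory is fetched via require_rvalue /
   gen_fetch; and structs and unions are passed through untouched so
   the caller can complain.  */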
884
885
886 /* Return non-zero iff the type TYPE1 is considered "wider" than the
887 type TYPE2, according to the rules described in gen_usual_arithmetic. */
888 static int
889 type_wider_than (struct type *type1, struct type *type2)
890 {
891 return (TYPE_LENGTH (type1) > TYPE_LENGTH (type2)
892 || (TYPE_LENGTH (type1) == TYPE_LENGTH (type2)
893 && TYPE_UNSIGNED (type1)
894 && !TYPE_UNSIGNED (type2)));
895 }
896
897
898 /* Return the "wider" of the two types TYPE1 and TYPE2. */
899 static struct type *
900 max_type (struct type *type1, struct type *type2)
901 {
902 return type_wider_than (type1, type2) ? type1 : type2;
903 }
904
905
906 /* Generate code to convert a scalar value of type FROM to type TO. */
907 static void
908 gen_conversion (struct agent_expr *ax, struct type *from, struct type *to)
909 {
910 /* Perhaps there is a more graceful way to state these rules. */
911
912 /* If we're converting to a narrower type, then we need to clear out
913 the upper bits. */
914 if (TYPE_LENGTH (to) < TYPE_LENGTH (from))
915 gen_extend (ax, from);
916
917 /* If the two values have equal width, but different signednesses,
918 then we need to extend. */
919 else if (TYPE_LENGTH (to) == TYPE_LENGTH (from))
920 {
921 if (TYPE_UNSIGNED (from) != TYPE_UNSIGNED (to))
922 gen_extend (ax, to);
923 }
924
925 /* If we're converting to a wider type, and becoming unsigned, then
926 we need to zero out any possible sign bits. */
927 else if (TYPE_LENGTH (to) > TYPE_LENGTH (from))
928 {
929 if (TYPE_UNSIGNED (to))
930 gen_extend (ax, to);
931 }
932 }
933
934
935 /* Return non-zero iff the type FROM will require any bytecodes to be
936 emitted to be converted to the type TO. */
937 static int
938 is_nontrivial_conversion (struct type *from, struct type *to)
939 {
940 struct agent_expr *ax = new_agent_expr (NULL, 0);
941 int nontrivial;
942
943 /* Actually generate the code, and see if anything came out. At the
944 moment, it would be trivial to replicate the code in
945 gen_conversion here, but in the future, when we're supporting
946 floating point and the like, it may not be. Doing things this
947 way allows this function to be independent of the logic in
948 gen_conversion. */
949 gen_conversion (ax, from, to);
950 nontrivial = ax->len > 0;
951 free_agent_expr (ax);
952 return nontrivial;
953 }
954
955
956 /* Generate code to perform the "usual arithmetic conversions" (ANSI C
957 6.2.1.5) for the two operands of an arithmetic operator. This
958 effectively finds a "least upper bound" type for the two arguments,
959 and promotes each argument to that type. *VALUE1 and *VALUE2
960 describe the values as they are passed in, and as they are left. */
961 static void
962 gen_usual_arithmetic (struct expression *exp, struct agent_expr *ax,
963 struct axs_value *value1, struct axs_value *value2)
964 {
965 /* Do the usual binary conversions. */
966 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
967 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
968 {
969 /* The ANSI integral promotions seem to work this way: Order the
970 integer types by size, and then by signedness: an n-bit
971 unsigned type is considered "wider" than an n-bit signed
972 type. Promote to the "wider" of the two types, and always
973 promote at least to int. */
974 struct type *target = max_type (builtin_type (exp->gdbarch)->builtin_int,
975 max_type (value1->type, value2->type));
976
977 /* Deal with value2, on the top of the stack. */
978 gen_conversion (ax, value2->type, target);
979
980 /* Deal with value1, not on the top of the stack. Don't
981 generate the `swap' instructions if we're not actually going
982 to do anything. */
983 if (is_nontrivial_conversion (value1->type, target))
984 {
985 ax_simple (ax, aop_swap);
986 gen_conversion (ax, value1->type, target);
987 ax_simple (ax, aop_swap);
988 }
989
990 value1->type = value2->type = check_typedef (target);
991 }
992 }
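/* A worked example of the rule above (assuming a 32-bit int): for
   `s + u' with `s' a `short' and `u' an `unsigned short', both types
   are narrower than int, so both operands are treated as `int' and
   the arithmetic is signed.  For `i + u' with `i' an `int' and `u'
   an `unsigned int', the unsigned type counts as wider, so `i' is
   converted (swap, extend as needed, swap, since it is not on top)
   and the result type is `unsigned int'.  */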
993
994
995 /* Generate code to perform the integral promotions (ANSI 6.2.1.1) on
996 the value on the top of the stack, as described by VALUE. Assume
997 the value has integral type. */
998 static void
999 gen_integral_promotions (struct expression *exp, struct agent_expr *ax,
1000 struct axs_value *value)
1001 {
1002 const struct builtin_type *builtin = builtin_type (exp->gdbarch);
1003
1004 if (!type_wider_than (value->type, builtin->builtin_int))
1005 {
1006 gen_conversion (ax, value->type, builtin->builtin_int);
1007 value->type = builtin->builtin_int;
1008 }
1009 else if (!type_wider_than (value->type, builtin->builtin_unsigned_int))
1010 {
1011 gen_conversion (ax, value->type, builtin->builtin_unsigned_int);
1012 value->type = builtin->builtin_unsigned_int;
1013 }
1014 }
1015
1016
1017 /* Generate code for a cast to TYPE. */
1018 static void
1019 gen_cast (struct agent_expr *ax, struct axs_value *value, struct type *type)
1020 {
1021 /* GCC does allow casts to yield lvalues, so this should be fixed
1022 before merging these changes into the trunk. */
1023 require_rvalue (ax, value);
1024 /* Dereference typedefs. */
1025 type = check_typedef (type);
1026
1027 switch (TYPE_CODE (type))
1028 {
1029 case TYPE_CODE_PTR:
1030 case TYPE_CODE_REF:
1031 /* It's implementation-defined, and I'll bet this is what GCC
1032 does. */
1033 break;
1034
1035 case TYPE_CODE_ARRAY:
1036 case TYPE_CODE_STRUCT:
1037 case TYPE_CODE_UNION:
1038 case TYPE_CODE_FUNC:
1039 error (_("Invalid type cast: intended type must be scalar."));
1040
1041 case TYPE_CODE_ENUM:
1042 case TYPE_CODE_BOOL:
1043 /* We don't have to worry about the size of the value, because
1044 all our integral values are fully sign-extended, and when
1045 casting pointers we can do anything we like. Is there any
1046 way for us to know what GCC actually does with a cast like
1047 this? */
1048 break;
1049
1050 case TYPE_CODE_INT:
1051 gen_conversion (ax, value->type, type);
1052 break;
1053
1054 case TYPE_CODE_VOID:
1055 /* We could pop the value, and rely on everyone else to check
1056 the type and notice that this value doesn't occupy a stack
1057 slot. But for now, leave the value on the stack, and
1058 preserve the "value == stack element" assumption. */
1059 break;
1060
1061 default:
1062 error (_("Casts to requested type are not yet implemented."));
1063 }
1064
1065 value->type = type;
1066 }
1067 \f
1068
1069
1070 /* Generating bytecode from GDB expressions: arithmetic */
1071
1072 /* Scale the integer on the top of the stack by the size of the target
1073 of the pointer type TYPE. */
1074 static void
1075 gen_scale (struct agent_expr *ax, enum agent_op op, struct type *type)
1076 {
1077 struct type *element = TYPE_TARGET_TYPE (type);
1078
1079 if (TYPE_LENGTH (element) != 1)
1080 {
1081 ax_const_l (ax, TYPE_LENGTH (element));
1082 ax_simple (ax, op);
1083 }
1084 }
1085
1086
1087 /* Generate code for pointer arithmetic PTR + INT. */
1088 static void
1089 gen_ptradd (struct agent_expr *ax, struct axs_value *value,
1090 struct axs_value *value1, struct axs_value *value2)
1091 {
1092 gdb_assert (pointer_type (value1->type));
1093 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1094
1095 gen_scale (ax, aop_mul, value1->type);
1096 ax_simple (ax, aop_add);
1097 gen_extend (ax, value1->type); /* Catch overflow. */
1098 value->type = value1->type;
1099 value->kind = axs_rvalue;
1100 }
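/* E.g. for `p + n' where `p' is an `int *' and ints are 4 bytes, the
   integer on top of the stack gets scaled first, giving roughly

       const8 4, mul       -- gen_scale
       add
       zero_ext/ext N      -- re-extend to the pointer's width

   For a `char *', gen_scale emits nothing at all, since the element
   size is 1.  N here is just the pointer's size in bits.  */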
1101
1102
1103 /* Generate code for pointer arithmetic PTR - INT. */
1104 static void
1105 gen_ptrsub (struct agent_expr *ax, struct axs_value *value,
1106 struct axs_value *value1, struct axs_value *value2)
1107 {
1108 gdb_assert (pointer_type (value1->type));
1109 gdb_assert (TYPE_CODE (value2->type) == TYPE_CODE_INT);
1110
1111 gen_scale (ax, aop_mul, value1->type);
1112 ax_simple (ax, aop_sub);
1113 gen_extend (ax, value1->type); /* Catch overflow. */
1114 value->type = value1->type;
1115 value->kind = axs_rvalue;
1116 }
1117
1118
1119 /* Generate code for pointer arithmetic PTR - PTR. */
1120 static void
1121 gen_ptrdiff (struct agent_expr *ax, struct axs_value *value,
1122 struct axs_value *value1, struct axs_value *value2,
1123 struct type *result_type)
1124 {
1125 gdb_assert (pointer_type (value1->type));
1126 gdb_assert (pointer_type (value2->type));
1127
1128 if (TYPE_LENGTH (TYPE_TARGET_TYPE (value1->type))
1129 != TYPE_LENGTH (TYPE_TARGET_TYPE (value2->type)))
1130 error (_("\
1131 First argument of `-' is a pointer, but second argument is neither\n\
1132 an integer nor a pointer of the same type."));
1133
1134 ax_simple (ax, aop_sub);
1135 gen_scale (ax, aop_div_unsigned, value1->type);
1136 value->type = result_type;
1137 value->kind = axs_rvalue;
1138 }
1139
1140 static void
1141 gen_equal (struct agent_expr *ax, struct axs_value *value,
1142 struct axs_value *value1, struct axs_value *value2,
1143 struct type *result_type)
1144 {
1145 if (pointer_type (value1->type) || pointer_type (value2->type))
1146 ax_simple (ax, aop_equal);
1147 else
1148 gen_binop (ax, value, value1, value2,
1149 aop_equal, aop_equal, 0, "equal");
1150 value->type = result_type;
1151 value->kind = axs_rvalue;
1152 }
1153
1154 static void
1155 gen_less (struct agent_expr *ax, struct axs_value *value,
1156 struct axs_value *value1, struct axs_value *value2,
1157 struct type *result_type)
1158 {
1159 if (pointer_type (value1->type) || pointer_type (value2->type))
1160 ax_simple (ax, aop_less_unsigned);
1161 else
1162 gen_binop (ax, value, value1, value2,
1163 aop_less_signed, aop_less_unsigned, 0, "less than");
1164 value->type = result_type;
1165 value->kind = axs_rvalue;
1166 }
1167
1168 /* Generate code for a binary operator that doesn't do pointer magic.
1169 We set VALUE to describe the result value; we assume VALUE1 and
1170 VALUE2 describe the two operands, and that they've undergone the
1171 usual binary conversions. MAY_CARRY should be non-zero iff the
1172 result needs to be extended. NAME is the English name of the
1173 operator, used in error messages. */
1174 static void
1175 gen_binop (struct agent_expr *ax, struct axs_value *value,
1176 struct axs_value *value1, struct axs_value *value2,
1177 enum agent_op op, enum agent_op op_unsigned,
1178 int may_carry, char *name)
1179 {
1180 /* We only handle INT op INT. */
1181 if ((TYPE_CODE (value1->type) != TYPE_CODE_INT)
1182 || (TYPE_CODE (value2->type) != TYPE_CODE_INT))
1183 error (_("Invalid combination of types in %s."), name);
1184
1185 ax_simple (ax,
1186 TYPE_UNSIGNED (value1->type) ? op_unsigned : op);
1187 if (may_carry)
1188 gen_extend (ax, value1->type); /* catch overflow */
1189 value->type = value1->type;
1190 value->kind = axs_rvalue;
1191 }
1192
1193
1194 static void
1195 gen_logical_not (struct agent_expr *ax, struct axs_value *value,
1196 struct type *result_type)
1197 {
1198 if (TYPE_CODE (value->type) != TYPE_CODE_INT
1199 && TYPE_CODE (value->type) != TYPE_CODE_PTR)
1200 error (_("Invalid type of operand to `!'."));
1201
1202 ax_simple (ax, aop_log_not);
1203 value->type = result_type;
1204 }
1205
1206
1207 static void
1208 gen_complement (struct agent_expr *ax, struct axs_value *value)
1209 {
1210 if (TYPE_CODE (value->type) != TYPE_CODE_INT)
1211 error (_("Invalid type of operand to `~'."));
1212
1213 ax_simple (ax, aop_bit_not);
1214 gen_extend (ax, value->type);
1215 }
1216 \f
1217
1218
1219 /* Generating bytecode from GDB expressions: * & . -> @ sizeof */
1220
1221 /* Dereference the value on the top of the stack. */
1222 static void
1223 gen_deref (struct agent_expr *ax, struct axs_value *value)
1224 {
1225 /* The caller should check the type, because several operators use
1226 this, and we don't know what error message to generate. */
1227 if (!pointer_type (value->type))
1228 internal_error (__FILE__, __LINE__,
1229 _("gen_deref: expected a pointer"));
1230
1231 /* We've got an rvalue now, which is a pointer. We want to yield an
1232 lvalue, whose address is exactly that pointer. So we don't
1233 actually emit any code; we just change the type from "Pointer to
1234 T" to "T", and mark the value as an lvalue in memory. Leave it
1235 to the consumer to actually dereference it. */
1236 value->type = check_typedef (TYPE_TARGET_TYPE (value->type));
1237 if (TYPE_CODE (value->type) == TYPE_CODE_VOID)
1238 error (_("Attempt to dereference a generic pointer."));
1239 value->kind = ((TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1240 ? axs_rvalue : axs_lvalue_memory);
1241 }
1242
1243
1244 /* Produce the address of the lvalue on the top of the stack. */
1245 static void
1246 gen_address_of (struct agent_expr *ax, struct axs_value *value)
1247 {
1248 /* Special case for taking the address of a function. The ANSI
1249 standard describes this as a special case, too, so this
1250 arrangement is not without motivation. */
1251 if (TYPE_CODE (value->type) == TYPE_CODE_FUNC)
1252 /* The value's already an rvalue on the stack, so we just need to
1253 change the type. */
1254 value->type = lookup_pointer_type (value->type);
1255 else
1256 switch (value->kind)
1257 {
1258 case axs_rvalue:
1259 error (_("Operand of `&' is an rvalue, which has no address."));
1260
1261 case axs_lvalue_register:
1262 error (_("Operand of `&' is in a register, and has no address."));
1263
1264 case axs_lvalue_memory:
1265 value->kind = axs_rvalue;
1266 value->type = lookup_pointer_type (value->type);
1267 break;
1268 }
1269 }
1270
1271 /* Generate code to push the value of a bitfield of a structure whose
1272 address is on the top of the stack. START and END give the
1273 starting and one-past-ending *bit* numbers of the field within the
1274 structure. */
1275 static void
1276 gen_bitfield_ref (struct expression *exp, struct agent_expr *ax,
1277 struct axs_value *value, struct type *type,
1278 int start, int end)
1279 {
1280 /* Note that ops[i] fetches 8 << i bits. */
1281 static enum agent_op ops[]
1282 = {aop_ref8, aop_ref16, aop_ref32, aop_ref64};
1283 static int num_ops = (sizeof (ops) / sizeof (ops[0]));
1284
1285 /* We don't want to touch any byte that the bitfield doesn't
1286 actually occupy; we shouldn't make any accesses we're not
1287 explicitly permitted to. We rely here on the fact that the
1288 bytecode `ref' operators work on unaligned addresses.
1289
1290 It takes some fancy footwork to get the stack to work the way
1291 we'd like. Say we're retrieving a bitfield that requires three
1292 fetches. Initially, the stack just contains the address:
1293 addr
1294 For the first fetch, we duplicate the address
1295 addr addr
1296 then add the byte offset, do the fetch, and shift and mask as
1297 needed, yielding a fragment of the value, properly aligned for
1298 the final bitwise or:
1299 addr frag1
1300 then we swap, and repeat the process:
1301 frag1 addr --- address on top
1302 frag1 addr addr --- duplicate it
1303 frag1 addr frag2 --- get second fragment
1304 frag1 frag2 addr --- swap again
1305 frag1 frag2 frag3 --- get third fragment
1306 Notice that, since the third fragment is the last one, we don't
1307 bother duplicating the address this time. Now we have all the
1308 fragments on the stack, and we can simply `or' them together,
1309 yielding the final value of the bitfield. */
1310
1311 /* The first and one-after-last bits in the field, but rounded down
1312 and up to byte boundaries. */
1313 int bound_start = (start / TARGET_CHAR_BIT) * TARGET_CHAR_BIT;
1314 int bound_end = (((end + TARGET_CHAR_BIT - 1)
1315 / TARGET_CHAR_BIT)
1316 * TARGET_CHAR_BIT);
1317
1318 /* current bit offset within the structure */
1319 int offset;
1320
1321 /* The index in ops of the opcode we're considering. */
1322 int op;
1323
1324 /* The number of fragments we generated in the process. Probably
1325 equal to the number of `one' bits in bytesize, but who cares? */
1326 int fragment_count;
1327
1328 /* Dereference any typedefs. */
1329 type = check_typedef (type);
1330
1331 /* Can we fetch the number of bits requested at all? */
1332 if ((end - start) > ((1 << num_ops) * 8))
1333 internal_error (__FILE__, __LINE__,
1334 _("gen_bitfield_ref: bitfield too wide"));
1335
1336 /* Note that we know here that we only need to try each opcode once.
1337 That may not be true on machines with weird byte sizes. */
1338 offset = bound_start;
1339 fragment_count = 0;
1340 for (op = num_ops - 1; op >= 0; op--)
1341 {
1342 /* number of bits that ops[op] would fetch */
1343 int op_size = 8 << op;
1344
1345 /* The stack at this point, from bottom to top, contains zero or
1346 more fragments, then the address. */
1347
1348 /* Does this fetch fit within the bitfield? */
1349 if (offset + op_size <= bound_end)
1350 {
1351 /* Is this the last fragment? */
1352 int last_frag = (offset + op_size == bound_end);
1353
1354 if (!last_frag)
1355 ax_simple (ax, aop_dup); /* keep a copy of the address */
1356
1357 /* Add the offset. */
1358 gen_offset (ax, offset / TARGET_CHAR_BIT);
1359
1360 if (trace_kludge)
1361 {
1362 /* Record the area of memory we're about to fetch. */
1363 ax_trace_quick (ax, op_size / TARGET_CHAR_BIT);
1364 }
1365
1366 /* Perform the fetch. */
1367 ax_simple (ax, ops[op]);
1368
1369 /* Shift the bits we have to their proper position.
1370 gen_left_shift will generate right shifts when the operand
1371 is negative.
1372
1373 A big-endian field diagram to ponder:
1374 byte 0 byte 1 byte 2 byte 3 byte 4 byte 5 byte 6 byte 7
1375 +------++------++------++------++------++------++------++------+
1376 xxxxAAAAAAAAAAAAAAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCxxxxxxxxxxx
1377 ^ ^ ^ ^
1378 bit number 16 32 48 53
1379 These are bit numbers as supplied by GDB. Note that the
1380 bit numbers run from right to left once you've fetched the
1381 value!
1382
1383 A little-endian field diagram to ponder:
1384 byte 7 byte 6 byte 5 byte 4 byte 3 byte 2 byte 1 byte 0
1385 +------++------++------++------++------++------++------++------+
1386 xxxxxxxxxxxAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCCCCCCCCCCCCCCxxxx
1387 ^ ^ ^ ^ ^
1388 bit number 48 32 16 4 0
1389
1390 In both cases, the most significant end is on the left
1391 (i.e. normal numeric writing order), which means that you
1392 don't go crazy thinking about `left' and `right' shifts.
1393
1394 We don't have to worry about masking yet:
1395 - If they contain garbage off the least significant end, then we
1396 must be looking at the low end of the field, and the right
1397 shift will wipe them out.
1398 - If they contain garbage off the most significant end, then we
1399 must be looking at the most significant end of the word, and
1400 the sign/zero extension will wipe them out.
1401 - If we're in the interior of the word, then there is no garbage
1402 on either end, because the ref operators zero-extend. */
1403 if (gdbarch_byte_order (exp->gdbarch) == BFD_ENDIAN_BIG)
1404 gen_left_shift (ax, end - (offset + op_size));
1405 else
1406 gen_left_shift (ax, offset - start);
1407
1408 if (!last_frag)
1409 /* Bring the copy of the address up to the top. */
1410 ax_simple (ax, aop_swap);
1411
1412 offset += op_size;
1413 fragment_count++;
1414 }
1415 }
1416
1417 /* Generate enough bitwise `or' operations to combine all the
1418 fragments we left on the stack. */
1419 while (fragment_count-- > 1)
1420 ax_simple (ax, aop_bit_or);
1421
1422 /* Sign- or zero-extend the value as appropriate. */
1423 ((TYPE_UNSIGNED (type) ? ax_zero_ext : ax_ext) (ax, end - start));
1424
1425 /* This is *not* an lvalue. Ugh. */
1426 value->kind = axs_rvalue;
1427 value->type = type;
1428 }
1429
1430 /* Generate bytecodes for field number FIELDNO of type TYPE. OFFSET
1431 is an accumulated offset (in bytes); it will be nonzero for objects
1432 embedded in other objects, like C++ base classes. Behavior should
1433 generally follow value_primitive_field. */
1434
1435 static void
1436 gen_primitive_field (struct expression *exp,
1437 struct agent_expr *ax, struct axs_value *value,
1438 int offset, int fieldno, struct type *type)
1439 {
1440 /* Is this a bitfield? */
1441 if (TYPE_FIELD_PACKED (type, fieldno))
1442 gen_bitfield_ref (exp, ax, value, TYPE_FIELD_TYPE (type, fieldno),
1443 (offset * TARGET_CHAR_BIT
1444 + TYPE_FIELD_BITPOS (type, fieldno)),
1445 (offset * TARGET_CHAR_BIT
1446 + TYPE_FIELD_BITPOS (type, fieldno)
1447 + TYPE_FIELD_BITSIZE (type, fieldno)));
1448 else
1449 {
1450 gen_offset (ax, offset
1451 + TYPE_FIELD_BITPOS (type, fieldno) / TARGET_CHAR_BIT);
1452 value->kind = axs_lvalue_memory;
1453 value->type = TYPE_FIELD_TYPE (type, fieldno);
1454 }
1455 }
1456
1457 /* Search for the given field in either the given type or one of its
1458 base classes. Return 1 if found, 0 if not. */
1459
1460 static int
1461 gen_struct_ref_recursive (struct expression *exp, struct agent_expr *ax,
1462 struct axs_value *value,
1463 char *field, int offset, struct type *type)
1464 {
1465 int i, rslt;
1466 int nbases = TYPE_N_BASECLASSES (type);
1467
1468 CHECK_TYPEDEF (type);
1469
1470 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
1471 {
1472 const char *this_name = TYPE_FIELD_NAME (type, i);
1473
1474 if (this_name)
1475 {
1476 if (strcmp (field, this_name) == 0)
1477 {
1478 /* Note that bytecodes for the struct's base (aka
1479 "this") will have been generated already, which will
1480 be unnecessary but not harmful if the static field is
1481 being handled as a global. */
1482 if (field_is_static (&TYPE_FIELD (type, i)))
1483 {
1484 gen_static_field (exp->gdbarch, ax, value, type, i);
1485 if (value->optimized_out)
1486 error (_("static field `%s' has been "
1487 "optimized out, cannot use"),
1488 field);
1489 return 1;
1490 }
1491
1492 gen_primitive_field (exp, ax, value, offset, i, type);
1493 return 1;
1494 }
1495 #if 0 /* is this right? */
1496 if (this_name[0] == '\0')
1497 internal_error (__FILE__, __LINE__,
1498 _("find_field: anonymous unions not supported"));
1499 #endif
1500 }
1501 }
1502
1503 /* Now scan through base classes recursively. */
1504 for (i = 0; i < nbases; i++)
1505 {
1506 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
1507
1508 rslt = gen_struct_ref_recursive (exp, ax, value, field,
1509 offset + TYPE_BASECLASS_BITPOS (type, i)
1510 / TARGET_CHAR_BIT,
1511 basetype);
1512 if (rslt)
1513 return 1;
1514 }
1515
1516 /* Not found anywhere, flag so caller can complain. */
1517 return 0;
1518 }
1519
1520 /* Generate code to reference the member named FIELD of a structure or
1521 union. The top of the stack, as described by VALUE, should have
1522 type (pointer to a)* struct/union. OPERATOR_NAME is the name of
1523 the operator being compiled, and OPERAND_NAME is the kind of thing
1524 it operates on; we use them in error messages. */
1525 static void
1526 gen_struct_ref (struct expression *exp, struct agent_expr *ax,
1527 struct axs_value *value, char *field,
1528 char *operator_name, char *operand_name)
1529 {
1530 struct type *type;
1531 int found;
1532
1533 /* Follow pointers until we reach a non-pointer. These aren't the C
1534 semantics, but they're what the normal GDB evaluator does, so we
1535 should at least be consistent. */
1536 while (pointer_type (value->type))
1537 {
1538 require_rvalue (ax, value);
1539 gen_deref (ax, value);
1540 }
1541 type = check_typedef (value->type);
1542
1543 /* This must yield a structure or a union. */
1544 if (TYPE_CODE (type) != TYPE_CODE_STRUCT
1545 && TYPE_CODE (type) != TYPE_CODE_UNION)
1546 error (_("The left operand of `%s' is not a %s."),
1547 operator_name, operand_name);
1548
1549 /* And it must be in memory; we don't deal with structure rvalues,
1550 or structures living in registers. */
1551 if (value->kind != axs_lvalue_memory)
1552 error (_("Structure does not live in memory."));
1553
1554 /* Search through fields and base classes recursively. */
1555 found = gen_struct_ref_recursive (exp, ax, value, field, 0, type);
1556
1557 if (!found)
1558 error (_("Couldn't find member named `%s' in struct/union/class `%s'"),
1559 field, TYPE_TAG_NAME (type));
1560 }
1561
1562 static int
1563 gen_namespace_elt (struct expression *exp,
1564 struct agent_expr *ax, struct axs_value *value,
1565 const struct type *curtype, char *name);
1566 static int
1567 gen_maybe_namespace_elt (struct expression *exp,
1568 struct agent_expr *ax, struct axs_value *value,
1569 const struct type *curtype, char *name);
1570
1571 static void
1572 gen_static_field (struct gdbarch *gdbarch,
1573 struct agent_expr *ax, struct axs_value *value,
1574 struct type *type, int fieldno)
1575 {
1576 if (TYPE_FIELD_LOC_KIND (type, fieldno) == FIELD_LOC_KIND_PHYSADDR)
1577 {
1578 ax_const_l (ax, TYPE_FIELD_STATIC_PHYSADDR (type, fieldno));
1579 value->kind = axs_lvalue_memory;
1580 value->type = TYPE_FIELD_TYPE (type, fieldno);
1581 value->optimized_out = 0;
1582 }
1583 else
1584 {
1585 const char *phys_name = TYPE_FIELD_STATIC_PHYSNAME (type, fieldno);
1586 struct symbol *sym = lookup_symbol (phys_name, 0, VAR_DOMAIN, 0);
1587
1588 if (sym)
1589 {
1590 gen_var_ref (gdbarch, ax, value, sym);
1591
1592 /* Don't error if the value was optimized out, we may be
1593 scanning all static fields and just want to pass over this
1594 and continue with the rest. */
1595 }
1596 else
1597 {
1598 /* Silently assume this was optimized out; class printing
1599 will let the user know why the data is missing. */
1600 value->optimized_out = 1;
1601 }
1602 }
1603 }
1604
1605 static int
1606 gen_struct_elt_for_reference (struct expression *exp,
1607 struct agent_expr *ax, struct axs_value *value,
1608 struct type *type, char *fieldname)
1609 {
1610 struct type *t = type;
1611 int i;
1612
1613 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
1614 && TYPE_CODE (t) != TYPE_CODE_UNION)
1615 internal_error (__FILE__, __LINE__,
1616 _("non-aggregate type to gen_struct_elt_for_reference"));
1617
1618 for (i = TYPE_NFIELDS (t) - 1; i >= TYPE_N_BASECLASSES (t); i--)
1619 {
1620 const char *t_field_name = TYPE_FIELD_NAME (t, i);
1621
1622 if (t_field_name && strcmp (t_field_name, fieldname) == 0)
1623 {
1624 if (field_is_static (&TYPE_FIELD (t, i)))
1625 {
1626 gen_static_field (exp->gdbarch, ax, value, t, i);
1627 if (value->optimized_out)
1628 error (_("static field `%s' has been "
1629 "optimized out, cannot use"),
1630 fieldname);
1631 return 1;
1632 }
1633 if (TYPE_FIELD_PACKED (t, i))
1634 error (_("pointers to bitfield members not allowed"));
1635
1636 /* FIXME we need a way to do "want_address" equivalent */
1637
1638 error (_("Cannot reference non-static field \"%s\""), fieldname);
1639 }
1640 }
1641
1642 /* FIXME add other scoped-reference cases here */
1643
1644 /* Do a last-ditch lookup. */
1645 return gen_maybe_namespace_elt (exp, ax, value, type, fieldname);
1646 }
1647
1648 /* C++: Return the member NAME of the namespace given by the type
1649 CURTYPE. */
1650
1651 static int
1652 gen_namespace_elt (struct expression *exp,
1653 struct agent_expr *ax, struct axs_value *value,
1654 const struct type *curtype, char *name)
1655 {
1656 int found = gen_maybe_namespace_elt (exp, ax, value, curtype, name);
1657
1658 if (!found)
1659 error (_("No symbol \"%s\" in namespace \"%s\"."),
1660 name, TYPE_TAG_NAME (curtype));
1661
1662 return found;
1663 }
1664
1665 /* A helper function used by value_namespace_elt and
1666 value_struct_elt_for_reference. It looks up NAME inside the
1667 context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE
1668 is a class and NAME refers to a type in CURTYPE itself (as opposed
1669 to, say, some base class of CURTYPE). */
1670
1671 static int
1672 gen_maybe_namespace_elt (struct expression *exp,
1673 struct agent_expr *ax, struct axs_value *value,
1674 const struct type *curtype, char *name)
1675 {
1676 const char *namespace_name = TYPE_TAG_NAME (curtype);
1677 struct symbol *sym;
1678
1679 sym = cp_lookup_symbol_namespace (namespace_name, name,
1680 block_for_pc (ax->scope),
1681 VAR_DOMAIN);
1682
1683 if (sym == NULL)
1684 return 0;
1685
1686 gen_var_ref (exp->gdbarch, ax, value, sym);
1687
1688 if (value->optimized_out)
1689 error (_("`%s' has been optimized out, cannot use"),
1690 SYMBOL_PRINT_NAME (sym));
1691
1692 return 1;
1693 }
1694
1695
1696 static int
1697 gen_aggregate_elt_ref (struct expression *exp,
1698 struct agent_expr *ax, struct axs_value *value,
1699 struct type *type, char *field,
1700 char *operator_name, char *operand_name)
1701 {
1702 switch (TYPE_CODE (type))
1703 {
1704 case TYPE_CODE_STRUCT:
1705 case TYPE_CODE_UNION:
1706 return gen_struct_elt_for_reference (exp, ax, value, type, field);
1707 break;
1708 case TYPE_CODE_NAMESPACE:
1709 return gen_namespace_elt (exp, ax, value, type, field);
1710 break;
1711 default:
1712 internal_error (__FILE__, __LINE__,
1713 _("non-aggregate type in gen_aggregate_elt_ref"));
1714 }
1715
1716 return 0;
1717 }
1718
1719 /* Generate code for GDB's magical `repeat' operator.
1720 LVALUE @ INT creates an array INT elements long whose elements
1721 have the same type as LVALUE, located in memory so that LVALUE is
1722 its first element. For example, argv[0]@argc gives you the array
1723 of command-line arguments.
1724
1725 Unfortunately, because we have to know the types before we actually
1726 have a value for the expression, we can't implement this perfectly
1727 without changing the type system, having values that occupy two
1728 stack slots, doing weird things with sizeof, etc. So we require
1729 the right operand to be a constant expression. */
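/* For example (an illustrative note, not from the code below): with
   `char buf[32];' in the program, `buf[0]@16' is accepted here, since
   16 is a constant, and yields an lvalue of type char[16] at buf's
   address.  `buf[0]@len', where `len' is a variable, is rejected with
   an error, even though ordinary GDB expression evaluation would
   accept it.  */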
1730 static void
1731 gen_repeat (struct expression *exp, union exp_element **pc,
1732 struct agent_expr *ax, struct axs_value *value)
1733 {
1734 struct axs_value value1;
1735
1736 /* We don't want to turn this into an rvalue, so no conversions
1737 here. */
1738 gen_expr (exp, pc, ax, &value1);
1739 if (value1.kind != axs_lvalue_memory)
1740 error (_("Left operand of `@' must be an object in memory."));
1741
1742 /* Evaluate the length; it had better be a constant. */
1743 {
1744 struct value *v = const_expr (pc);
1745 int length;
1746
1747 if (!v)
1748 error (_("Right operand of `@' must be a "
1749 "constant, in agent expressions."));
1750 if (TYPE_CODE (value_type (v)) != TYPE_CODE_INT)
1751 error (_("Right operand of `@' must be an integer."));
1752 length = value_as_long (v);
1753 if (length <= 0)
1754 error (_("Right operand of `@' must be positive."));
1755
1756 /* The top of the stack is already the address of the object, so
1757 all we need to do is frob the type of the lvalue. */
1758 {
1759 /* FIXME-type-allocation: need a way to free this type when we are
1760 done with it. */
1761 struct type *array
1762 = lookup_array_range_type (value1.type, 0, length - 1);
1763
1764 value->kind = axs_lvalue_memory;
1765 value->type = array;
1766 }
1767 }
1768 }
1769
1770
1771 /* Emit code for the `sizeof' operator.
1772 *PC should point at the start of the operand expression; we advance it
1773 to the first instruction after the operand. */
1774 static void
1775 gen_sizeof (struct expression *exp, union exp_element **pc,
1776 struct agent_expr *ax, struct axs_value *value,
1777 struct type *size_type)
1778 {
1779 /* We don't care about the value of the operand expression; we only
1780 care about its type. However, in the current arrangement, the
1781 only way to find an expression's type is to generate code for it.
1782 So we generate code for the operand, and then throw it away,
1783 replacing it with code that simply pushes its size. */
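/* For example (a sketch, assuming a 4-byte int): for `sizeof (x)'
   where `x' is an int variable, the code generated for `x' is
   discarded, and all that remains is a single push of the constant 4;
   VALUE then describes an rvalue of type SIZE_TYPE.  */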
1784 int start = ax->len;
1785
1786 gen_expr (exp, pc, ax, value);
1787
1788 /* Throw away the code we just generated. */
1789 ax->len = start;
1790
1791 ax_const_l (ax, TYPE_LENGTH (value->type));
1792 value->kind = axs_rvalue;
1793 value->type = size_type;
1794 }
1795 \f
1796
1797 /* Generating bytecode from GDB expressions: general recursive thingy */
1798
1799 /* XXX: i18n */
1800 /* A gen_expr function written by a Gen-X'er guy.
1801 Append code for the subexpression of EXP starting at *PC to AX. */
1802 void
1803 gen_expr (struct expression *exp, union exp_element **pc,
1804 struct agent_expr *ax, struct axs_value *value)
1805 {
1806 /* Used to hold the descriptions of operand expressions. */
1807 struct axs_value value1, value2, value3;
1808 enum exp_opcode op = (*pc)[0].opcode, op2;
1809 int if1, go1, if2, go2, end;
1810 struct type *int_type = builtin_type (exp->gdbarch)->builtin_int;
1811
1812 /* If we're looking at a constant expression, just push its value. */
1813 {
1814 struct value *v = maybe_const_expr (pc);
1815
1816 if (v)
1817 {
1818 ax_const_l (ax, value_as_long (v));
1819 value->kind = axs_rvalue;
1820 value->type = check_typedef (value_type (v));
1821 return;
1822 }
1823 }
1824
1825 /* Otherwise, go ahead and generate code for it. */
1826 switch (op)
1827 {
1828 /* Binary arithmetic operators. */
1829 case BINOP_ADD:
1830 case BINOP_SUB:
1831 case BINOP_MUL:
1832 case BINOP_DIV:
1833 case BINOP_REM:
1834 case BINOP_LSH:
1835 case BINOP_RSH:
1836 case BINOP_SUBSCRIPT:
1837 case BINOP_BITWISE_AND:
1838 case BINOP_BITWISE_IOR:
1839 case BINOP_BITWISE_XOR:
1840 case BINOP_EQUAL:
1841 case BINOP_NOTEQUAL:
1842 case BINOP_LESS:
1843 case BINOP_GTR:
1844 case BINOP_LEQ:
1845 case BINOP_GEQ:
1846 (*pc)++;
1847 gen_expr (exp, pc, ax, &value1);
1848 gen_usual_unary (exp, ax, &value1);
1849 gen_expr_binop_rest (exp, op, pc, ax, value, &value1, &value2);
1850 break;
1851
1852 case BINOP_LOGICAL_AND:
1853 (*pc)++;
1854 /* Generate the obvious sequence of tests and jumps. */
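/* A sketch of the layout generated below (illustrative only, not a
   literal bytecode encoding).  <A> and <B> stand for the code of the
   two operands:

        <A>
        if_goto L1              A was nonzero: go test B
        goto Lfalse
   L1:  <B>
        if_goto L2              B was nonzero: result is 1
        goto Lfalse
   L2:  const 1
        goto Lend
   Lfalse:
        const 0
   Lend:  */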
1855 gen_expr (exp, pc, ax, &value1);
1856 gen_usual_unary (exp, ax, &value1);
1857 if1 = ax_goto (ax, aop_if_goto);
1858 go1 = ax_goto (ax, aop_goto);
1859 ax_label (ax, if1, ax->len);
1860 gen_expr (exp, pc, ax, &value2);
1861 gen_usual_unary (exp, ax, &value2);
1862 if2 = ax_goto (ax, aop_if_goto);
1863 go2 = ax_goto (ax, aop_goto);
1864 ax_label (ax, if2, ax->len);
1865 ax_const_l (ax, 1);
1866 end = ax_goto (ax, aop_goto);
1867 ax_label (ax, go1, ax->len);
1868 ax_label (ax, go2, ax->len);
1869 ax_const_l (ax, 0);
1870 ax_label (ax, end, ax->len);
1871 value->kind = axs_rvalue;
1872 value->type = int_type;
1873 break;
1874
1875 case BINOP_LOGICAL_OR:
1876 (*pc)++;
1877 /* Generate the obvious sequence of tests and jumps. */
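/* A sketch of the layout generated below (illustrative only): if
   either operand is nonzero we jump straight to pushing 1, otherwise
   we fall through to pushing 0:

        <A>
        if_goto Ltrue
        <B>
        if_goto Ltrue
        const 0
        goto Lend
   Ltrue:
        const 1
   Lend:  */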
1878 gen_expr (exp, pc, ax, &value1);
1879 gen_usual_unary (exp, ax, &value1);
1880 if1 = ax_goto (ax, aop_if_goto);
1881 gen_expr (exp, pc, ax, &value2);
1882 gen_usual_unary (exp, ax, &value2);
1883 if2 = ax_goto (ax, aop_if_goto);
1884 ax_const_l (ax, 0);
1885 end = ax_goto (ax, aop_goto);
1886 ax_label (ax, if1, ax->len);
1887 ax_label (ax, if2, ax->len);
1888 ax_const_l (ax, 1);
1889 ax_label (ax, end, ax->len);
1890 value->kind = axs_rvalue;
1891 value->type = int_type;
1892 break;
1893
1894 case TERNOP_COND:
1895 (*pc)++;
1896 gen_expr (exp, pc, ax, &value1);
1897 gen_usual_unary (exp, ax, &value1);
1898 /* For (A ? B : C), it's easiest to generate subexpression
1899 bytecodes in order, but if_goto jumps on true, so we invert
1900 the sense of A. Then we can do B by dropping through, and
1901 jump to do C. */
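/* A sketch of the result (illustrative only):

        <A>
        (logical not of A)
        if_goto Lelse           taken when A was false
        <B>
        goto Lend
   Lelse:
        <C>
   Lend:  */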
1902 gen_logical_not (ax, &value1, int_type);
1903 if1 = ax_goto (ax, aop_if_goto);
1904 gen_expr (exp, pc, ax, &value2);
1905 gen_usual_unary (exp, ax, &value2);
1906 end = ax_goto (ax, aop_goto);
1907 ax_label (ax, if1, ax->len);
1908 gen_expr (exp, pc, ax, &value3);
1909 gen_usual_unary (exp, ax, &value3);
1910 ax_label (ax, end, ax->len);
1911 /* This is arbitrary; what if B and C are incompatible types? */
1912 value->type = value2.type;
1913 value->kind = value2.kind;
1914 break;
1915
1916 case BINOP_ASSIGN:
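/* Agent expressions only support assignment when the left-hand side
   is a trace state variable, e.g. `$tsv = x + 1'; any other
   left-hand side falls through to the errors below.  */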
1917 (*pc)++;
1918 if ((*pc)[0].opcode == OP_INTERNALVAR)
1919 {
1920 char *name = internalvar_name ((*pc)[1].internalvar);
1921 struct trace_state_variable *tsv;
1922
1923 (*pc) += 3;
1924 gen_expr (exp, pc, ax, value);
1925 tsv = find_trace_state_variable (name);
1926 if (tsv)
1927 {
1928 ax_tsv (ax, aop_setv, tsv->number);
1929 if (trace_kludge)
1930 ax_tsv (ax, aop_tracev, tsv->number);
1931 }
1932 else
1933 error (_("$%s is not a trace state variable, "
1934 "may not assign to it"), name);
1935 }
1936 else
1937 error (_("May only assign to trace state variables"));
1938 break;
1939
1940 case BINOP_ASSIGN_MODIFY:
1941 (*pc)++;
1942 op2 = (*pc)[0].opcode;
1943 (*pc)++;
1944 (*pc)++;
1945 if ((*pc)[0].opcode == OP_INTERNALVAR)
1946 {
1947 char *name = internalvar_name ((*pc)[1].internalvar);
1948 struct trace_state_variable *tsv;
1949
1950 (*pc) += 3;
1951 tsv = find_trace_state_variable (name);
1952 if (tsv)
1953 {
1954 /* The tsv will be the left half of the binary operation. */
1955 ax_tsv (ax, aop_getv, tsv->number);
1956 if (trace_kludge)
1957 ax_tsv (ax, aop_tracev, tsv->number);
1958 /* Trace state variables are always 64-bit integers. */
1959 value1.kind = axs_rvalue;
1960 value1.type = builtin_type (exp->gdbarch)->builtin_long_long;
1961 /* Now do right half of expression. */
1962 gen_expr_binop_rest (exp, op2, pc, ax, value, &value1, &value2);
1963 /* We have a result of the binary op, set the tsv. */
1964 ax_tsv (ax, aop_setv, tsv->number);
1965 if (trace_kludge)
1966 ax_tsv (ax, aop_tracev, tsv->number);
1967 }
1968 else
1969 error (_("$%s is not a trace state variable, "
1970 "may not assign to it"), name);
1971 }
1972 else
1973 error (_("May only assign to trace state variables"));
1974 break;
1975
1976 /* Note that we need to be a little subtle about generating code
1977 for comma. In C, we can do some optimizations here because
1978 we know the left operand is only being evaluated for effect.
1979 However, if the tracing kludge is in effect, then we always
1980 need to evaluate the left hand side fully, so that all the
1981 variables it mentions get traced. */
1982 case BINOP_COMMA:
1983 (*pc)++;
1984 gen_expr (exp, pc, ax, &value1);
1985 /* Don't just dispose of the left operand. We might be tracing,
1986 in which case we want to emit code to trace it if it's an
1987 lvalue. */
1988 gen_traced_pop (exp->gdbarch, ax, &value1);
1989 gen_expr (exp, pc, ax, value);
1990 /* It's the consumer's responsibility to trace the right operand. */
1991 break;
1992
1993 case OP_LONG: /* some integer constant */
1994 {
1995 struct type *type = (*pc)[1].type;
1996 LONGEST k = (*pc)[2].longconst;
1997
1998 (*pc) += 4;
1999 gen_int_literal (ax, value, k, type);
2000 }
2001 break;
2002
2003 case OP_VAR_VALUE:
2004 gen_var_ref (exp->gdbarch, ax, value, (*pc)[2].symbol);
2005
2006 if (value->optimized_out)
2007 error (_("`%s' has been optimized out, cannot use"),
2008 SYMBOL_PRINT_NAME ((*pc)[2].symbol));
2009
2010 (*pc) += 4;
2011 break;
2012
2013 case OP_REGISTER:
2014 {
2015 const char *name = &(*pc)[2].string;
2016 int reg;
2017
2018 (*pc) += 4 + BYTES_TO_EXP_ELEM ((*pc)[1].longconst + 1);
2019 reg = user_reg_map_name_to_regnum (exp->gdbarch, name, strlen (name));
2020 if (reg == -1)
2021 internal_error (__FILE__, __LINE__,
2022 _("Register $%s not available"), name);
2023 /* No support for tracing user registers yet. */
2024 if (reg >= gdbarch_num_regs (exp->gdbarch)
2025 + gdbarch_num_pseudo_regs (exp->gdbarch))
2026 error (_("'%s' is a user-register; "
2027 "GDB cannot yet trace user-register contents."),
2028 name);
2029 value->kind = axs_lvalue_register;
2030 value->u.reg = reg;
2031 value->type = register_type (exp->gdbarch, reg);
2032 }
2033 break;
2034
2035 case OP_INTERNALVAR:
2036 {
2037 struct internalvar *var = (*pc)[1].internalvar;
2038 const char *name = internalvar_name (var);
2039 struct trace_state_variable *tsv;
2040
2041 (*pc) += 3;
2042 tsv = find_trace_state_variable (name);
2043 if (tsv)
2044 {
2045 ax_tsv (ax, aop_getv, tsv->number);
2046 if (trace_kludge)
2047 ax_tsv (ax, aop_tracev, tsv->number);
2048 /* Trace state variables are always 64-bit integers. */
2049 value->kind = axs_rvalue;
2050 value->type = builtin_type (exp->gdbarch)->builtin_long_long;
2051 }
2052 else if (! compile_internalvar_to_ax (var, ax, value))
2053 error (_("$%s is not a trace state variable; GDB agent "
2054 "expressions cannot use convenience variables."), name);
2055 }
2056 break;
2057
2058 /* Weirdo operator: see comments for gen_repeat for details. */
2059 case BINOP_REPEAT:
2060 /* Note that gen_repeat handles its own argument evaluation. */
2061 (*pc)++;
2062 gen_repeat (exp, pc, ax, value);
2063 break;
2064
2065 case UNOP_CAST:
2066 {
2067 struct type *type = (*pc)[1].type;
2068
2069 (*pc) += 3;
2070 gen_expr (exp, pc, ax, value);
2071 gen_cast (ax, value, type);
2072 }
2073 break;
2074
2075 case UNOP_MEMVAL:
2076 {
2077 struct type *type = check_typedef ((*pc)[1].type);
2078
2079 (*pc) += 3;
2080 gen_expr (exp, pc, ax, value);
2081
2082 /* If we have an axs_rvalue or an axs_lvalue_memory, then we
2083 already have the right value on the stack. For
2084 axs_lvalue_register, we must convert. */
2085 if (value->kind == axs_lvalue_register)
2086 require_rvalue (ax, value);
2087
2088 value->type = type;
2089 value->kind = axs_lvalue_memory;
2090 }
2091 break;
2092
2093 case UNOP_PLUS:
2094 (*pc)++;
2095 /* + FOO is equivalent to 0 + FOO; we can simply generate FOO and apply the usual unary conversions. */
2096 gen_expr (exp, pc, ax, value);
2097 gen_usual_unary (exp, ax, value);
2098 break;
2099
2100 case UNOP_NEG:
2101 (*pc)++;
2102 /* -FOO is equivalent to 0 - FOO. */
2103 gen_int_literal (ax, &value1, 0,
2104 builtin_type (exp->gdbarch)->builtin_int);
2105 gen_usual_unary (exp, ax, &value1); /* shouldn't do much */
2106 gen_expr (exp, pc, ax, &value2);
2107 gen_usual_unary (exp, ax, &value2);
2108 gen_usual_arithmetic (exp, ax, &value1, &value2);
2109 gen_binop (ax, value, &value1, &value2, aop_sub, aop_sub, 1, "negation");
2110 break;
2111
2112 case UNOP_LOGICAL_NOT:
2113 (*pc)++;
2114 gen_expr (exp, pc, ax, value);
2115 gen_usual_unary (exp, ax, value);
2116 gen_logical_not (ax, value, int_type);
2117 break;
2118
2119 case UNOP_COMPLEMENT:
2120 (*pc)++;
2121 gen_expr (exp, pc, ax, value);
2122 gen_usual_unary (exp, ax, value);
2123 gen_integral_promotions (exp, ax, value);
2124 gen_complement (ax, value);
2125 break;
2126
2127 case UNOP_IND:
2128 (*pc)++;
2129 gen_expr (exp, pc, ax, value);
2130 gen_usual_unary (exp, ax, value);
2131 if (!pointer_type (value->type))
2132 error (_("Argument of unary `*' is not a pointer."));
2133 gen_deref (ax, value);
2134 break;
2135
2136 case UNOP_ADDR:
2137 (*pc)++;
2138 gen_expr (exp, pc, ax, value);
2139 gen_address_of (ax, value);
2140 break;
2141
2142 case UNOP_SIZEOF:
2143 (*pc)++;
2144 /* Notice that gen_sizeof handles its own operand, unlike most
2145 of the other unary operator functions. This is because we
2146 have to throw away the code we generate. */
2147 gen_sizeof (exp, pc, ax, value,
2148 builtin_type (exp->gdbarch)->builtin_int);
2149 break;
2150
2151 case STRUCTOP_STRUCT:
2152 case STRUCTOP_PTR:
2153 {
2154 int length = (*pc)[1].longconst;
2155 char *name = &(*pc)[2].string;
2156
2157 (*pc) += 4 + BYTES_TO_EXP_ELEM (length + 1);
2158 gen_expr (exp, pc, ax, value);
2159 if (op == STRUCTOP_STRUCT)
2160 gen_struct_ref (exp, ax, value, name, ".", "structure or union");
2161 else if (op == STRUCTOP_PTR)
2162 gen_struct_ref (exp, ax, value, name, "->",
2163 "pointer to a structure or union");
2164 else
2165 /* If this `if' chain doesn't handle it, then the case list
2166 shouldn't mention it, and we shouldn't be here. */
2167 internal_error (__FILE__, __LINE__,
2168 _("gen_expr: unhandled struct case"));
2169 }
2170 break;
2171
2172 case OP_THIS:
2173 {
2175 struct symbol *sym, *func;
2176 struct block *b;
2177 const struct language_defn *lang;
2178
2179 b = block_for_pc (ax->scope);
2180 func = block_linkage_function (b);
2181 lang = language_def (SYMBOL_LANGUAGE (func));
2182
2183 sym = lookup_language_this (lang, b);
2184 if (!sym)
2185 error (_("no `%s' found"), lang->la_name_of_this);
2186
2187 gen_var_ref (exp->gdbarch, ax, value, sym);
2188
2189 if (value->optimized_out)
2190 error (_("`%s' has been optimized out, cannot use"),
2191 SYMBOL_PRINT_NAME (sym));
2192
2193 (*pc) += 2;
2194 }
2195 break;
2196
2197 case OP_SCOPE:
2198 {
2199 struct type *type = (*pc)[1].type;
2200 int length = longest_to_int ((*pc)[2].longconst);
2201 char *name = &(*pc)[3].string;
2202 int found;
2203
2204 found = gen_aggregate_elt_ref (exp, ax, value, type, name,
2205 "?", "??");
2206 if (!found)
2207 error (_("There is no field named %s"), name);
2208 (*pc) += 5 + BYTES_TO_EXP_ELEM (length + 1);
2209 }
2210 break;
2211
2212 case OP_TYPE:
2213 error (_("Attempt to use a type name as an expression."));
2214
2215 default:
2216 error (_("Unsupported operator %s (%d) in expression."),
2217 op_name (exp, op), op);
2218 }
2219 }
2220
2221 /* This handles the middle-to-right-side of code generation for binary
2222 expressions, which is shared between regular binary operations and
2223 assign-modify (+= and friends) expressions. */
2224
2225 static void
2226 gen_expr_binop_rest (struct expression *exp,
2227 enum exp_opcode op, union exp_element **pc,
2228 struct agent_expr *ax, struct axs_value *value,
2229 struct axs_value *value1, struct axs_value *value2)
2230 {
2231 struct type *int_type = builtin_type (exp->gdbarch)->builtin_int;
2232
2233 gen_expr (exp, pc, ax, value2);
2234 gen_usual_unary (exp, ax, value2);
2235 gen_usual_arithmetic (exp, ax, value1, value2);
2236 switch (op)
2237 {
2238 case BINOP_ADD:
2239 if (TYPE_CODE (value1->type) == TYPE_CODE_INT
2240 && pointer_type (value2->type))
2241 {
2242 /* Swap the values and proceed normally. */
2243 ax_simple (ax, aop_swap);
2244 gen_ptradd (ax, value, value2, value1);
2245 }
2246 else if (pointer_type (value1->type)
2247 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2248 gen_ptradd (ax, value, value1, value2);
2249 else
2250 gen_binop (ax, value, value1, value2,
2251 aop_add, aop_add, 1, "addition");
2252 break;
2253 case BINOP_SUB:
2254 if (pointer_type (value1->type)
2255 && TYPE_CODE (value2->type) == TYPE_CODE_INT)
2256 gen_ptrsub (ax, value, value1, value2);
2257 else if (pointer_type (value1->type)
2258 && pointer_type (value2->type))
2259 /* FIXME --- result type should be ptrdiff_t */
2260 gen_ptrdiff (ax, value, value1, value2,
2261 builtin_type (exp->gdbarch)->builtin_long);
2262 else
2263 gen_binop (ax, value, value1, value2,
2264 aop_sub, aop_sub, 1, "subtraction");
2265 break;
2266 case BINOP_MUL:
2267 gen_binop (ax, value, value1, value2,
2268 aop_mul, aop_mul, 1, "multiplication");
2269 break;
2270 case BINOP_DIV:
2271 gen_binop (ax, value, value1, value2,
2272 aop_div_signed, aop_div_unsigned, 1, "division");
2273 break;
2274 case BINOP_REM:
2275 gen_binop (ax, value, value1, value2,
2276 aop_rem_signed, aop_rem_unsigned, 1, "remainder");
2277 break;
2278 case BINOP_LSH:
2279 gen_binop (ax, value, value1, value2,
2280 aop_lsh, aop_lsh, 1, "left shift");
2281 break;
2282 case BINOP_RSH:
2283 gen_binop (ax, value, value1, value2,
2284 aop_rsh_signed, aop_rsh_unsigned, 1, "right shift");
2285 break;
2286 case BINOP_SUBSCRIPT:
2287 {
2288 struct type *type;
2289
2290 if (binop_types_user_defined_p (op, value1->type, value2->type))
2291 {
2292 error (_("cannot subscript requested type: "
2293 "cannot call user defined functions"));
2294 }
2295 else
2296 {
2297 /* If the user attempts to subscript something that is not
2298 an array or pointer type (like a plain int variable for
2299 example), then report this as an error. */
2300 type = check_typedef (value1->type);
2301 if (TYPE_CODE (type) != TYPE_CODE_ARRAY
2302 && TYPE_CODE (type) != TYPE_CODE_PTR)
2303 {
2304 if (TYPE_NAME (type))
2305 error (_("cannot subscript something of type `%s'"),
2306 TYPE_NAME (type));
2307 else
2308 error (_("cannot subscript requested type"));
2309 }
2310 }
2311
2312 if (!is_integral_type (value2->type))
2313 error (_("Argument to arithmetic operation "
2314 "not a number or boolean."));
2315
2316 gen_ptradd (ax, value, value1, value2);
2317 gen_deref (ax, value);
2318 break;
2319 }
2320 case BINOP_BITWISE_AND:
2321 gen_binop (ax, value, value1, value2,
2322 aop_bit_and, aop_bit_and, 0, "bitwise and");
2323 break;
2324
2325 case BINOP_BITWISE_IOR:
2326 gen_binop (ax, value, value1, value2,
2327 aop_bit_or, aop_bit_or, 0, "bitwise or");
2328 break;
2329
2330 case BINOP_BITWISE_XOR:
2331 gen_binop (ax, value, value1, value2,
2332 aop_bit_xor, aop_bit_xor, 0, "bitwise exclusive-or");
2333 break;
2334
2335 case BINOP_EQUAL:
2336 gen_equal (ax, value, value1, value2, int_type);
2337 break;
2338
2339 case BINOP_NOTEQUAL:
2340 gen_equal (ax, value, value1, value2, int_type);
2341 gen_logical_not (ax, value, int_type);
2342 break;
2343
2344 case BINOP_LESS:
2345 gen_less (ax, value, value1, value2, int_type);
2346 break;
2347
2348 case BINOP_GTR:
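/* a > b is computed as b < a: swap the operands, then use gen_less.  */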
2349 ax_simple (ax, aop_swap);
2350 gen_less (ax, value, value1, value2, int_type);
2351 break;
2352
2353 case BINOP_LEQ:
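/* a <= b is !(b < a): swap the operands, use gen_less, then negate.  */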
2354 ax_simple (ax, aop_swap);
2355 gen_less (ax, value, value1, value2, int_type);
2356 gen_logical_not (ax, value, int_type);
2357 break;
2358
2359 case BINOP_GEQ:
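/* a >= b is !(a < b): use gen_less, then negate the result.  */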
2360 gen_less (ax, value, value1, value2, int_type);
2361 gen_logical_not (ax, value, int_type);
2362 break;
2363
2364 default:
2365 /* We should only list operators in the outer case statement
2366 that we actually handle in the inner case statement. */
2367 internal_error (__FILE__, __LINE__,
2368 _("gen_expr: op case sets don't match"));
2369 }
2370 }
2371 \f
2372
2373 /* Given a single variable and a scope, generate bytecodes to trace
2374 its value. This is for use in situations where we have only a
2375 variable's name, and no parsed expression; for instance, when the
2376 name comes from a list of local variables of a function. */
2377
2378 struct agent_expr *
2379 gen_trace_for_var (CORE_ADDR scope, struct gdbarch *gdbarch,
2380 struct symbol *var)
2381 {
2382 struct cleanup *old_chain = 0;
2383 struct agent_expr *ax = new_agent_expr (gdbarch, scope);
2384 struct axs_value value;
2385
2386 old_chain = make_cleanup_free_agent_expr (ax);
2387
2388 trace_kludge = 1;
2389 gen_var_ref (gdbarch, ax, &value, var);
2390
2391 /* If there is no actual variable to trace, flag it by returning
2392 a null pointer instead of an agent expression. */
2393 if (value.optimized_out)
2394 {
2395 do_cleanups (old_chain);
2396 return NULL;
2397 }
2398
2399 /* Make sure we record the final object, and get rid of it. */
2400 gen_traced_pop (gdbarch, ax, &value);
2401
2402 /* Oh, and terminate. */
2403 ax_simple (ax, aop_end);
2404
2405 /* We have successfully built the agent expr, so cancel the cleanup
2406 request. If we add more cleanups that we always want done, this
2407 will have to get more complicated. */
2408 discard_cleanups (old_chain);
2409 return ax;
2410 }
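/* An illustrative caller of gen_trace_for_var (a hypothetical sketch,
   not code taken from elsewhere in GDB): SYM is the symbol for a
   local variable, PC the address a tracepoint is set at, and GDBARCH
   the current architecture:

     struct agent_expr *axp = gen_trace_for_var (pc, gdbarch, sym);

     if (axp != NULL)
       {
         struct cleanup *back_to = make_cleanup_free_agent_expr (axp);

         ax_reqs (axp);
         ... encode AXP into the tracepoint's action list ...
         do_cleanups (back_to);
       }

   A NULL result means the variable was optimized out and there is
   nothing to collect.  */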
2411
2412 /* Generating bytecode from GDB expressions: driver */
2413
2414 /* Given a GDB expression EXPR, return bytecode to trace its value.
2415 The result will use the `trace' and `trace_quick' bytecodes to
2416 record the value of all memory touched by the expression. The
2417 caller can then use the ax_reqs function to discover which
2418 registers it relies upon. */
2419 struct agent_expr *
2420 gen_trace_for_expr (CORE_ADDR scope, struct expression *expr)
2421 {
2422 struct cleanup *old_chain = 0;
2423 struct agent_expr *ax = new_agent_expr (expr->gdbarch, scope);
2424 union exp_element *pc;
2425 struct axs_value value;
2426
2427 old_chain = make_cleanup_free_agent_expr (ax);
2428
2429 pc = expr->elts;
2430 trace_kludge = 1;
2431 value.optimized_out = 0;
2432 gen_expr (expr, &pc, ax, &value);
2433
2434 /* Make sure we record the final object, and get rid of it. */
2435 gen_traced_pop (expr->gdbarch, ax, &value);
2436
2437 /* Oh, and terminate. */
2438 ax_simple (ax, aop_end);
2439
2440 /* We have successfully built the agent expr, so cancel the cleanup
2441 request. If we add more cleanups that we always want done, this
2442 will have to get more complicated. */
2443 discard_cleanups (old_chain);
2444 return ax;
2445 }
2446
2447 /* Given a GDB expression EXPR, return a bytecode sequence that will
2448 evaluate and return a result. The bytecodes will do a direct
2449 evaluation, using the current data on the target, rather than
2450 recording blocks of memory and registers for later use, as
2451 gen_trace_for_expr does. The generated bytecode sequence leaves
2452 the result of expression evaluation on the top of the stack. */
2453
2454 struct agent_expr *
2455 gen_eval_for_expr (CORE_ADDR scope, struct expression *expr)
2456 {
2457 struct cleanup *old_chain = 0;
2458 struct agent_expr *ax = new_agent_expr (expr->gdbarch, scope);
2459 union exp_element *pc;
2460 struct axs_value value;
2461
2462 old_chain = make_cleanup_free_agent_expr (ax);
2463
2464 pc = expr->elts;
2465 trace_kludge = 0;
2466 value.optimized_out = 0;
2467 gen_expr (expr, &pc, ax, &value);
2468
2469 require_rvalue (ax, &value);
2470
2471 /* Oh, and terminate. */
2472 ax_simple (ax, aop_end);
2473
2474 /* We have successfully built the agent expr, so cancel the cleanup
2475 request. If we add more cleanups that we always want done, this
2476 will have to get more complicated. */
2477 discard_cleanups (old_chain);
2478 return ax;
2479 }
2480
2481 struct agent_expr *
2482 gen_trace_for_return_address (CORE_ADDR scope, struct gdbarch *gdbarch)
2483 {
2484 struct cleanup *old_chain = 0;
2485 struct agent_expr *ax = new_agent_expr (gdbarch, scope);
2486 struct axs_value value;
2487
2488 old_chain = make_cleanup_free_agent_expr (ax);
2489
2490 trace_kludge = 1;
2491
2492 gdbarch_gen_return_address (gdbarch, ax, &value, scope);
2493
2494 /* Make sure we record the final object, and get rid of it. */
2495 gen_traced_pop (gdbarch, ax, &value);
2496
2497 /* Oh, and terminate. */
2498 ax_simple (ax, aop_end);
2499
2500 /* We have successfully built the agent expr, so cancel the cleanup
2501 request. If we add more cleanups that we always want done, this
2502 will have to get more complicated. */
2503 discard_cleanups (old_chain);
2504 return ax;
2505 }
2506
2507 static void
2508 agent_command (char *exp, int from_tty)
2509 {
2510 struct cleanup *old_chain = 0;
2511 struct expression *expr;
2512 struct agent_expr *agent;
2513 struct frame_info *fi = get_current_frame (); /* need current scope */
2514
2515 /* We don't deal with overlay debugging at the moment. We need to
2516 think more carefully about this. If you copy this code into
2517 another command, change the error message; the user shouldn't
2518 have to know anything about agent expressions. */
2519 if (overlay_debugging)
2520 error (_("GDB can't do agent expression translation with overlays."));
2521
2522 if (exp == 0)
2523 error_no_arg (_("expression to translate"));
2524
2525 trace_string_kludge = 0;
2526 if (*exp == '/')
2527 exp = decode_agent_options (exp);
2528
2529 /* Recognize the return address collection directive specially. Note
2530 that it is not really an expression of any sort. */
2531 if (strcmp (exp, "$_ret") == 0)
2532 {
2533 agent = gen_trace_for_return_address (get_frame_pc (fi),
2534 get_current_arch ());
2535 old_chain = make_cleanup_free_agent_expr (agent);
2536 }
2537 else
2538 {
2539 expr = parse_expression (exp);
2540 old_chain = make_cleanup (free_current_contents, &expr);
2541 agent = gen_trace_for_expr (get_frame_pc (fi), expr);
2542 make_cleanup_free_agent_expr (agent);
2543 }
2544
2545 ax_reqs (agent);
2546 ax_print (gdb_stdout, agent);
2547
2548 /* The ax_reqs call above also gathers general information about
2549 the expression, such as which registers it relies upon. */
2550
2551 do_cleanups (old_chain);
2552 dont_repeat ();
2553 }
2554
2555 /* Parse the given expression, compile it into an agent expression
2556 that does direct evaluation, and display the resulting
2557 expression. */
2558
2559 static void
2560 agent_eval_command (char *exp, int from_tty)
2561 {
2562 struct cleanup *old_chain = 0;
2563 struct expression *expr;
2564 struct agent_expr *agent;
2565 struct frame_info *fi = get_current_frame (); /* need current scope */
2566
2567 /* We don't deal with overlay debugging at the moment. We need to
2568 think more carefully about this. If you copy this code into
2569 another command, change the error message; the user shouldn't
2570 have to know anything about agent expressions. */
2571 if (overlay_debugging)
2572 error (_("GDB can't do agent expression translation with overlays."));
2573
2574 if (exp == 0)
2575 error_no_arg (_("expression to translate"));
2576
2577 expr = parse_expression (exp);
2578 old_chain = make_cleanup (free_current_contents, &expr);
2579 agent = gen_eval_for_expr (get_frame_pc (fi), expr);
2580 make_cleanup_free_agent_expr (agent);
2581 ax_reqs (agent);
2582 ax_print (gdb_stdout, agent);
2583
2584 /* The ax_reqs call above also gathers general information about
2585 the expression, such as which registers it relies upon. */
2586
2587 do_cleanups (old_chain);
2588 dont_repeat ();
2589 }
2590 \f
2591
2592 /* Initialization code. */
2593
2594 void _initialize_ax_gdb (void);
2595 void
2596 _initialize_ax_gdb (void)
2597 {
2598 add_cmd ("agent", class_maintenance, agent_command,
2599 _("Translate an expression into "
2600 "remote agent bytecode for tracing."),
2601 &maintenancelist);
2602
2603 add_cmd ("agent-eval", class_maintenance, agent_eval_command,
2604 _("Translate an expression into remote "
2605 "agent bytecode for evaluation."),
2606 &maintenancelist);
2607 }