gdb
[deliverable/binutils-gdb.git] / gdb / dwarf2expr.c
1 /* DWARF 2 Expression Evaluator.
2
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 Contributed by Daniel Berlin (dan@dberlin.org)
7
8 This file is part of GDB.
9
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
14
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22
23 #include "defs.h"
24 #include "symtab.h"
25 #include "gdbtypes.h"
26 #include "value.h"
27 #include "gdbcore.h"
28 #include "dwarf2.h"
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
31
32 /* Local prototypes. */
33
34 static void execute_stack_op (struct dwarf_expr_context *,
35 gdb_byte *, gdb_byte *);
36 static struct type *unsigned_address_type (struct gdbarch *, int);
37
38 /* Create a new context for the expression evaluator. */
39
40 struct dwarf_expr_context *
41 new_dwarf_expr_context (void)
42 {
43 struct dwarf_expr_context *retval;
44
45 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
46 retval->stack_len = 0;
47 retval->stack_allocated = 10;
48 retval->stack = xmalloc (retval->stack_allocated
49 * sizeof (struct dwarf_stack_value));
50 retval->num_pieces = 0;
51 retval->pieces = 0;
52 retval->max_recursion_depth = 0x100;
53 return retval;
54 }
55
56 /* Release the memory allocated to CTX. */
57
58 void
59 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
60 {
61 xfree (ctx->stack);
62 xfree (ctx->pieces);
63 xfree (ctx);
64 }
65
66 /* Helper for make_cleanup_free_dwarf_expr_context. */
67
static void
free_dwarf_expr_context_cleanup (void *arg)
{
  /* ARG is really a dwarf_expr_context; the void * signature is
     imposed by the cleanup machinery.  */
  struct dwarf_expr_context *ctx = arg;

  free_dwarf_expr_context (ctx);
}
73
74 /* Return a cleanup that calls free_dwarf_expr_context. */
75
76 struct cleanup *
77 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
78 {
79 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
80 }
81
82 /* Expand the memory allocated to CTX's stack to contain at least
83 NEED more elements than are currently used. */
84
85 static void
86 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
87 {
88 if (ctx->stack_len + need > ctx->stack_allocated)
89 {
90 size_t newlen = ctx->stack_len + need + 10;
91
92 ctx->stack = xrealloc (ctx->stack,
93 newlen * sizeof (struct dwarf_stack_value));
94 ctx->stack_allocated = newlen;
95 }
96 }
97
98 /* Push VALUE onto CTX's stack. */
99
100 void
101 dwarf_expr_push (struct dwarf_expr_context *ctx, CORE_ADDR value,
102 int in_stack_memory)
103 {
104 struct dwarf_stack_value *v;
105
106 dwarf_expr_grow_stack (ctx, 1);
107 v = &ctx->stack[ctx->stack_len++];
108 v->value = value;
109 v->in_stack_memory = in_stack_memory;
110 }
111
112 /* Pop the top item off of CTX's stack. */
113
114 void
115 dwarf_expr_pop (struct dwarf_expr_context *ctx)
116 {
117 if (ctx->stack_len <= 0)
118 error (_("dwarf expression stack underflow"));
119 ctx->stack_len--;
120 }
121
122 /* Retrieve the N'th item on CTX's stack. */
123
124 CORE_ADDR
125 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
126 {
127 if (ctx->stack_len <= n)
128 error (_("Asked for position %d of stack, stack only has %d elements on it."),
129 n, ctx->stack_len);
130 return ctx->stack[ctx->stack_len - (1 + n)].value;
131
132 }
133
134 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
135
136 int
137 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
138 {
139 if (ctx->stack_len <= n)
140 error (_("Asked for position %d of stack, stack only has %d elements on it."),
141 n, ctx->stack_len);
142 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
143
144 }
145
146 /* Return true if the expression stack is empty. */
147
148 static int
149 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
150 {
151 return ctx->stack_len == 0;
152 }
153
154 /* Add a new piece to CTX's piece list. */
155 static void
156 add_piece (struct dwarf_expr_context *ctx, ULONGEST size)
157 {
158 struct dwarf_expr_piece *p;
159
160 ctx->num_pieces++;
161
162 if (ctx->pieces)
163 ctx->pieces = xrealloc (ctx->pieces,
164 (ctx->num_pieces
165 * sizeof (struct dwarf_expr_piece)));
166 else
167 ctx->pieces = xmalloc (ctx->num_pieces
168 * sizeof (struct dwarf_expr_piece));
169
170 p = &ctx->pieces[ctx->num_pieces - 1];
171 p->location = ctx->location;
172 p->size = size;
173 if (p->location == DWARF_VALUE_LITERAL)
174 {
175 p->v.literal.data = ctx->data;
176 p->v.literal.length = ctx->len;
177 }
178 else if (dwarf_expr_stack_empty_p (ctx))
179 {
180 p->location = DWARF_VALUE_OPTIMIZED_OUT;
181 /* Also reset the context's location, for our callers. This is
182 a somewhat strange approach, but this lets us avoid setting
183 the location to DWARF_VALUE_MEMORY in all the individual
184 cases in the evaluator. */
185 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
186 }
187 else
188 {
189 p->v.expr.value = dwarf_expr_fetch (ctx, 0);
190 p->v.expr.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
191 }
192 }
193
194 /* Evaluate the expression at ADDR (LEN bytes long) using the context
195 CTX. */
196
197 void
198 dwarf_expr_eval (struct dwarf_expr_context *ctx, gdb_byte *addr, size_t len)
199 {
200 int old_recursion_depth = ctx->recursion_depth;
201
202 execute_stack_op (ctx, addr, addr + len);
203
204 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
205
206 gdb_assert (ctx->recursion_depth == old_recursion_depth);
207 }
208
209 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
210 by R, and return the new value of BUF. Verify that it doesn't extend
211 past BUF_END. */
212
213 gdb_byte *
214 read_uleb128 (gdb_byte *buf, gdb_byte *buf_end, ULONGEST * r)
215 {
216 unsigned shift = 0;
217 ULONGEST result = 0;
218 gdb_byte byte;
219
220 while (1)
221 {
222 if (buf >= buf_end)
223 error (_("read_uleb128: Corrupted DWARF expression."));
224
225 byte = *buf++;
226 result |= (byte & 0x7f) << shift;
227 if ((byte & 0x80) == 0)
228 break;
229 shift += 7;
230 }
231 *r = result;
232 return buf;
233 }
234
235 /* Decode the signed LEB128 constant at BUF into the variable pointed to
236 by R, and return the new value of BUF. Verify that it doesn't extend
237 past BUF_END. */
238
239 gdb_byte *
240 read_sleb128 (gdb_byte *buf, gdb_byte *buf_end, LONGEST * r)
241 {
242 unsigned shift = 0;
243 LONGEST result = 0;
244 gdb_byte byte;
245
246 while (1)
247 {
248 if (buf >= buf_end)
249 error (_("read_sleb128: Corrupted DWARF expression."));
250
251 byte = *buf++;
252 result |= (byte & 0x7f) << shift;
253 shift += 7;
254 if ((byte & 0x80) == 0)
255 break;
256 }
257 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
258 result |= -(1 << shift);
259
260 *r = result;
261 return buf;
262 }
263
264 /* Read an address of size ADDR_SIZE from BUF, and verify that it
265 doesn't extend past BUF_END. */
266
267 CORE_ADDR
268 dwarf2_read_address (struct gdbarch *gdbarch, gdb_byte *buf,
269 gdb_byte *buf_end, int addr_size)
270 {
271 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
272
273 if (buf_end - buf < addr_size)
274 error (_("dwarf2_read_address: Corrupted DWARF expression."));
275
276 /* For most architectures, calling extract_unsigned_integer() alone
277 is sufficient for extracting an address. However, some
278 architectures (e.g. MIPS) use signed addresses and using
279 extract_unsigned_integer() will not produce a correct
280 result. Make sure we invoke gdbarch_integer_to_address()
281 for those architectures which require it.
282
283 The use of `unsigned_address_type' in the code below refers to
284 the type of buf and has no bearing on the signedness of the
285 address being returned. */
286
287 if (gdbarch_integer_to_address_p (gdbarch))
288 return gdbarch_integer_to_address
289 (gdbarch, unsigned_address_type (gdbarch, addr_size), buf);
290
291 return extract_unsigned_integer (buf, addr_size, byte_order);
292 }
293
294 /* Return the type of an address of size ADDR_SIZE,
295 for unsigned arithmetic. */
296
297 static struct type *
298 unsigned_address_type (struct gdbarch *gdbarch, int addr_size)
299 {
300 switch (addr_size)
301 {
302 case 2:
303 return builtin_type (gdbarch)->builtin_uint16;
304 case 4:
305 return builtin_type (gdbarch)->builtin_uint32;
306 case 8:
307 return builtin_type (gdbarch)->builtin_uint64;
308 default:
309 internal_error (__FILE__, __LINE__,
310 _("Unsupported address size.\n"));
311 }
312 }
313
314 /* Return the type of an address of size ADDR_SIZE,
315 for signed arithmetic. */
316
317 static struct type *
318 signed_address_type (struct gdbarch *gdbarch, int addr_size)
319 {
320 switch (addr_size)
321 {
322 case 2:
323 return builtin_type (gdbarch)->builtin_int16;
324 case 4:
325 return builtin_type (gdbarch)->builtin_int32;
326 case 8:
327 return builtin_type (gdbarch)->builtin_int64;
328 default:
329 internal_error (__FILE__, __LINE__,
330 _("Unsupported address size.\n"));
331 }
332 }
333 \f
334
335 /* Check that the current operator is either at the end of an
336 expression, or that it is followed by a composition operator. */
337
338 static void
339 require_composition (gdb_byte *op_ptr, gdb_byte *op_end, const char *op_name)
340 {
341 /* It seems like DW_OP_GNU_uninit should be handled here. However,
342 it doesn't seem to make sense for DW_OP_*_value, and it was not
343 checked at the other place that this function is called. */
344 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
345 error (_("DWARF-2 expression error: `%s' operations must be "
346 "used either alone or in conjuction with DW_OP_piece "
347 "or DW_OP_bit_piece."),
348 op_name);
349 }
350
351 /* The engine for the expression evaluator. Using the context in CTX,
352 evaluate the expression between OP_PTR and OP_END. */
353
static void
execute_stack_op (struct dwarf_expr_context *ctx,
		  gdb_byte *op_ptr, gdb_byte *op_end)
{
  enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);

  /* Start from a clean location state; individual operators below
     override these as needed.  */
  ctx->location = DWARF_VALUE_MEMORY;
  ctx->initialized = 1;		/* Default is initialized.  */

  /* Guard against runaway recursion, e.g. via DW_OP_fbreg's nested
     evaluation of the frame-base expression.  */
  if (ctx->recursion_depth > ctx->max_recursion_depth)
    error (_("DWARF-2 expression error: Loop detected (%d)."),
	   ctx->recursion_depth);
  ctx->recursion_depth++;

  while (op_ptr < op_end)
    {
      enum dwarf_location_atom op = *op_ptr++;
      CORE_ADDR result;
      /* Assume the value is not in stack memory.
	 Code that knows otherwise sets this to 1.
	 Some arithmetic on stack addresses can probably be assumed to still
	 be a stack address, but we skip this complication for now.
	 This is just an optimization, so it's always ok to punt
	 and leave this as 0.  */
      int in_stack_memory = 0;
      ULONGEST uoffset, reg;
      LONGEST offset;

      switch (op)
	{
	case DW_OP_lit0:
	case DW_OP_lit1:
	case DW_OP_lit2:
	case DW_OP_lit3:
	case DW_OP_lit4:
	case DW_OP_lit5:
	case DW_OP_lit6:
	case DW_OP_lit7:
	case DW_OP_lit8:
	case DW_OP_lit9:
	case DW_OP_lit10:
	case DW_OP_lit11:
	case DW_OP_lit12:
	case DW_OP_lit13:
	case DW_OP_lit14:
	case DW_OP_lit15:
	case DW_OP_lit16:
	case DW_OP_lit17:
	case DW_OP_lit18:
	case DW_OP_lit19:
	case DW_OP_lit20:
	case DW_OP_lit21:
	case DW_OP_lit22:
	case DW_OP_lit23:
	case DW_OP_lit24:
	case DW_OP_lit25:
	case DW_OP_lit26:
	case DW_OP_lit27:
	case DW_OP_lit28:
	case DW_OP_lit29:
	case DW_OP_lit30:
	case DW_OP_lit31:
	  /* Literal operators push the constant encoded in the opcode
	     itself.  */
	  result = op - DW_OP_lit0;
	  break;

	case DW_OP_addr:
	  /* An address-sized constant follows the opcode in the
	     expression stream.  */
	  result = dwarf2_read_address (ctx->gdbarch,
					op_ptr, op_end, ctx->addr_size);
	  op_ptr += ctx->addr_size;
	  break;

	/* Fixed-size constant operands of 1, 2, 4 or 8 bytes,
	   unsigned or signed.  */
	case DW_OP_const1u:
	  result = extract_unsigned_integer (op_ptr, 1, byte_order);
	  op_ptr += 1;
	  break;
	case DW_OP_const1s:
	  result = extract_signed_integer (op_ptr, 1, byte_order);
	  op_ptr += 1;
	  break;
	case DW_OP_const2u:
	  result = extract_unsigned_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  break;
	case DW_OP_const2s:
	  result = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  break;
	case DW_OP_const4u:
	  result = extract_unsigned_integer (op_ptr, 4, byte_order);
	  op_ptr += 4;
	  break;
	case DW_OP_const4s:
	  result = extract_signed_integer (op_ptr, 4, byte_order);
	  op_ptr += 4;
	  break;
	case DW_OP_const8u:
	  result = extract_unsigned_integer (op_ptr, 8, byte_order);
	  op_ptr += 8;
	  break;
	case DW_OP_const8s:
	  result = extract_signed_integer (op_ptr, 8, byte_order);
	  op_ptr += 8;
	  break;
	case DW_OP_constu:
	  op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
	  result = uoffset;
	  break;
	case DW_OP_consts:
	  op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	  result = offset;
	  break;

	/* The DW_OP_reg operations are required to occur alone in
	   location expressions.  */
	case DW_OP_reg0:
	case DW_OP_reg1:
	case DW_OP_reg2:
	case DW_OP_reg3:
	case DW_OP_reg4:
	case DW_OP_reg5:
	case DW_OP_reg6:
	case DW_OP_reg7:
	case DW_OP_reg8:
	case DW_OP_reg9:
	case DW_OP_reg10:
	case DW_OP_reg11:
	case DW_OP_reg12:
	case DW_OP_reg13:
	case DW_OP_reg14:
	case DW_OP_reg15:
	case DW_OP_reg16:
	case DW_OP_reg17:
	case DW_OP_reg18:
	case DW_OP_reg19:
	case DW_OP_reg20:
	case DW_OP_reg21:
	case DW_OP_reg22:
	case DW_OP_reg23:
	case DW_OP_reg24:
	case DW_OP_reg25:
	case DW_OP_reg26:
	case DW_OP_reg27:
	case DW_OP_reg28:
	case DW_OP_reg29:
	case DW_OP_reg30:
	case DW_OP_reg31:
	  if (op_ptr != op_end
	      && *op_ptr != DW_OP_piece
	      && *op_ptr != DW_OP_GNU_uninit)
	    error (_("DWARF-2 expression error: DW_OP_reg operations must be "
		     "used either alone or in conjuction with DW_OP_piece."));

	  /* The "value" pushed for a register location is the register
	     number, not its contents.  */
	  result = op - DW_OP_reg0;
	  ctx->location = DWARF_VALUE_REGISTER;
	  break;

	case DW_OP_regx:
	  /* Like DW_OP_regN but the register number is a ULEB128
	     operand.  */
	  op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	  require_composition (op_ptr, op_end, "DW_OP_regx");

	  result = reg;
	  ctx->location = DWARF_VALUE_REGISTER;
	  break;

	case DW_OP_implicit_value:
	  {
	    ULONGEST len;

	    /* The value is the LEN bytes following the length operand;
	       record a pointer into the expression stream rather than
	       copying.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &len);
	    if (op_ptr + len > op_end)
	      error (_("DW_OP_implicit_value: too few bytes available."));
	    ctx->len = len;
	    ctx->data = op_ptr;
	    ctx->location = DWARF_VALUE_LITERAL;
	    op_ptr += len;
	    require_composition (op_ptr, op_end, "DW_OP_implicit_value");
	  }
	  goto no_push;

	case DW_OP_stack_value:
	  /* The previously-computed stack top is the value itself, not
	     an address.  */
	  ctx->location = DWARF_VALUE_STACK;
	  require_composition (op_ptr, op_end, "DW_OP_stack_value");
	  goto no_push;

	/* DW_OP_bregN: push the contents of register N plus a SLEB128
	   offset.  */
	case DW_OP_breg0:
	case DW_OP_breg1:
	case DW_OP_breg2:
	case DW_OP_breg3:
	case DW_OP_breg4:
	case DW_OP_breg5:
	case DW_OP_breg6:
	case DW_OP_breg7:
	case DW_OP_breg8:
	case DW_OP_breg9:
	case DW_OP_breg10:
	case DW_OP_breg11:
	case DW_OP_breg12:
	case DW_OP_breg13:
	case DW_OP_breg14:
	case DW_OP_breg15:
	case DW_OP_breg16:
	case DW_OP_breg17:
	case DW_OP_breg18:
	case DW_OP_breg19:
	case DW_OP_breg20:
	case DW_OP_breg21:
	case DW_OP_breg22:
	case DW_OP_breg23:
	case DW_OP_breg24:
	case DW_OP_breg25:
	case DW_OP_breg26:
	case DW_OP_breg27:
	case DW_OP_breg28:
	case DW_OP_breg29:
	case DW_OP_breg30:
	case DW_OP_breg31:
	  {
	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
	    result += offset;
	  }
	  break;
	case DW_OP_bregx:
	  {
	    /* Like DW_OP_bregN but the register number is a ULEB128
	       operand.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    result = (ctx->read_reg) (ctx->baton, reg);
	    result += offset;
	  }
	  break;
	case DW_OP_fbreg:
	  {
	    gdb_byte *datastart;
	    size_t datalen;
	    unsigned int before_stack_len;

	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    /* Rather than create a whole new context, we simply
	       record the stack length before execution, then reset it
	       afterwards, effectively erasing whatever the recursive
	       call put there.  */
	    before_stack_len = ctx->stack_len;
	    /* FIXME: cagney/2003-03-26: This code should be using
	       get_frame_base_address(), and then implement a dwarf2
	       specific this_base method.  */
	    (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
	    dwarf_expr_eval (ctx, datastart, datalen);
	    if (ctx->location == DWARF_VALUE_LITERAL
		|| ctx->location == DWARF_VALUE_STACK)
	      error (_("Not implemented: computing frame base using explicit value operator"));
	    result = dwarf_expr_fetch (ctx, 0);
	    if (ctx->location == DWARF_VALUE_REGISTER)
	      result = (ctx->read_reg) (ctx->baton, result);
	    result = result + offset;
	    in_stack_memory = 1;
	    ctx->stack_len = before_stack_len;
	    ctx->location = DWARF_VALUE_MEMORY;
	  }
	  break;

	case DW_OP_dup:
	  /* Duplicate the top of stack, propagating its
	     in_stack_memory flag.  */
	  result = dwarf_expr_fetch (ctx, 0);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
	  break;

	case DW_OP_drop:
	  dwarf_expr_pop (ctx);
	  goto no_push;

	case DW_OP_pick:
	  /* The 1-byte operand is the stack depth to copy from.  */
	  offset = *op_ptr++;
	  result = dwarf_expr_fetch (ctx, offset);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
	  break;

	case DW_OP_swap:
	  {
	    struct dwarf_stack_value t1, t2;

	    if (ctx->stack_len < 2)
	       error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
		      ctx->stack_len);
	    t1 = ctx->stack[ctx->stack_len - 1];
	    t2 = ctx->stack[ctx->stack_len - 2];
	    ctx->stack[ctx->stack_len - 1] = t2;
	    ctx->stack[ctx->stack_len - 2] = t1;
	    goto no_push;
	  }

	case DW_OP_over:
	  /* Push a copy of the second-from-top entry.  */
	  result = dwarf_expr_fetch (ctx, 1);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
	  break;

	case DW_OP_rot:
	  {
	    /* Rotate the top three entries: old top moves to third
	       position.  */
	    struct dwarf_stack_value t1, t2, t3;

	    if (ctx->stack_len < 3)
	       error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
		      ctx->stack_len);
	    t1 = ctx->stack[ctx->stack_len - 1];
	    t2 = ctx->stack[ctx->stack_len - 2];
	    t3 = ctx->stack[ctx->stack_len - 3];
	    ctx->stack[ctx->stack_len - 1] = t2;
	    ctx->stack[ctx->stack_len - 2] = t3;
	    ctx->stack[ctx->stack_len - 3] = t1;
	    goto no_push;
	  }

	case DW_OP_deref:
	case DW_OP_deref_size:
	case DW_OP_abs:
	case DW_OP_neg:
	case DW_OP_not:
	case DW_OP_plus_uconst:
	  /* Unary operations.  */
	  result = dwarf_expr_fetch (ctx, 0);
	  dwarf_expr_pop (ctx);

	  switch (op)
	    {
	    case DW_OP_deref:
	      {
		gdb_byte *buf = alloca (ctx->addr_size);

		/* Read an address-sized value from target memory at
		   RESULT.  */
		(ctx->read_mem) (ctx->baton, buf, result, ctx->addr_size);
		result = dwarf2_read_address (ctx->gdbarch,
					      buf, buf + ctx->addr_size,
					      ctx->addr_size);
	      }
	      break;

	    case DW_OP_deref_size:
	      {
		/* Like DW_OP_deref, but the read size is a 1-byte
		   operand.  */
		int addr_size = *op_ptr++;
		gdb_byte *buf = alloca (addr_size);

		(ctx->read_mem) (ctx->baton, buf, result, addr_size);
		result = dwarf2_read_address (ctx->gdbarch,
					      buf, buf + addr_size,
					      addr_size);
	      }
	      break;

	    case DW_OP_abs:
	      /* NOTE(review): the cast truncates RESULT to 32 bits
		 before the sign test, so a negative 64-bit value whose
		 low half looks non-negative is not negated -- confirm
		 against CORE_ADDR width.  */
	      if ((signed int) result < 0)
		result = -result;
	      break;
	    case DW_OP_neg:
	      result = -result;
	      break;
	    case DW_OP_not:
	      result = ~result;
	      break;
	    case DW_OP_plus_uconst:
	      /* Add a ULEB128 constant operand to the popped value.  */
	      op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	      result += reg;
	      break;
	    }
	  break;

	case DW_OP_and:
	case DW_OP_div:
	case DW_OP_minus:
	case DW_OP_mod:
	case DW_OP_mul:
	case DW_OP_or:
	case DW_OP_plus:
	case DW_OP_shl:
	case DW_OP_shr:
	case DW_OP_shra:
	case DW_OP_xor:
	case DW_OP_le:
	case DW_OP_ge:
	case DW_OP_eq:
	case DW_OP_lt:
	case DW_OP_gt:
	case DW_OP_ne:
	  {
	    /* Binary operations.  Use the value engine to do computations in
	       the right width.  */
	    CORE_ADDR first, second;
	    enum exp_opcode binop;
	    struct value *val1 = NULL, *val2 = NULL;
	    struct type *stype, *utype;

	    /* Pop the two operands; SECOND is the top of stack, FIRST
	       is beneath it.  */
	    second = dwarf_expr_fetch (ctx, 0);
	    dwarf_expr_pop (ctx);

	    first = dwarf_expr_fetch (ctx, 0);
	    dwarf_expr_pop (ctx);

	    utype = unsigned_address_type (ctx->gdbarch, ctx->addr_size);
	    stype = signed_address_type (ctx->gdbarch, ctx->addr_size);

	    /* Map the opcode to a value-engine operator.  Operators
	       that need signed semantics (division, comparisons,
	       arithmetic shift right) pre-build VAL1/VAL2 with the
	       signed type; the rest default to unsigned below.  */
	    switch (op)
	      {
	      case DW_OP_and:
		binop = BINOP_BITWISE_AND;
		break;
	      case DW_OP_div:
		binop = BINOP_DIV;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_minus:
		binop = BINOP_SUB;
		break;
	      case DW_OP_mod:
		binop = BINOP_MOD;
		break;
	      case DW_OP_mul:
		binop = BINOP_MUL;
		break;
	      case DW_OP_or:
		binop = BINOP_BITWISE_IOR;
		break;
	      case DW_OP_plus:
		binop = BINOP_ADD;
		break;
	      case DW_OP_shl:
		binop = BINOP_LSH;
		break;
	      case DW_OP_shr:
		binop = BINOP_RSH;
		break;
	      case DW_OP_shra:
		/* Arithmetic shift: signed left operand selects a
		   sign-propagating right shift.  */
		binop = BINOP_RSH;
		val1 = value_from_longest (stype, first);
		break;
	      case DW_OP_xor:
		binop = BINOP_BITWISE_XOR;
		break;
	      case DW_OP_le:
		binop = BINOP_LEQ;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_ge:
		binop = BINOP_GEQ;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_eq:
		binop = BINOP_EQUAL;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_lt:
		binop = BINOP_LESS;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_gt:
		binop = BINOP_GTR;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      case DW_OP_ne:
		binop = BINOP_NOTEQUAL;
		val1 = value_from_longest (stype, first);
		val2 = value_from_longest (stype, second);
		break;
	      default:
		internal_error (__FILE__, __LINE__,
				_("Can't be reached."));
	      }

	    /* We use unsigned operands by default.  */
	    if (val1 == NULL)
	      val1 = value_from_longest (utype, first);
	    if (val2 == NULL)
	      val2 = value_from_longest (utype, second);

	    result = value_as_long (value_binop (val1, val2, binop));
	  }
	  break;

	case DW_OP_call_frame_cfa:
	  /* The CFA is by definition an address into the caller's
	     stack frame.  */
	  result = (ctx->get_frame_cfa) (ctx->baton);
	  in_stack_memory = 1;
	  break;

	case DW_OP_GNU_push_tls_address:
	  /* Variable is at a constant offset in the thread-local
	     storage block into the objfile for the current thread and
	     the dynamic linker module containing this expression.  Here
	     we return returns the offset from that base.  The top of the
	     stack has the offset from the beginning of the thread
	     control block at which the variable is located.  Nothing
	     should follow this operator, so the top of stack would be
	     returned.  */
	  result = dwarf_expr_fetch (ctx, 0);
	  dwarf_expr_pop (ctx);
	  result = (ctx->get_tls_address) (ctx->baton, result);
	  break;

	case DW_OP_skip:
	  /* Unconditional branch: a signed 2-byte offset relative to
	     the byte after the operand.  */
	  offset = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  op_ptr += offset;
	  goto no_push;

	case DW_OP_bra:
	  /* Conditional branch: taken when the popped top of stack is
	     non-zero.  */
	  offset = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  if (dwarf_expr_fetch (ctx, 0) != 0)
	    op_ptr += offset;
	  dwarf_expr_pop (ctx);
	  goto no_push;

	case DW_OP_nop:
	  goto no_push;

	case DW_OP_piece:
	  {
	    ULONGEST size;

	    /* Record the piece.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &size);
	    add_piece (ctx, size);

	    /* Pop off the address/regnum, and reset the location
	       type.  */
	    if (ctx->location != DWARF_VALUE_LITERAL
		&& ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
	      dwarf_expr_pop (ctx);
	    ctx->location = DWARF_VALUE_MEMORY;
	  }
	  goto no_push;

	case DW_OP_GNU_uninit:
	  if (op_ptr != op_end)
	    error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
		     "be the very last op."));

	  ctx->initialized = 0;
	  goto no_push;

	default:
	  error (_("Unhandled dwarf expression opcode 0x%x"), op);
	}

      /* Most things push a result value.  Operators that manage the
	 stack themselves jump here to skip the push.  */
      dwarf_expr_push (ctx, result, in_stack_memory);
    no_push:;
    }

  ctx->recursion_depth--;
  gdb_assert (ctx->recursion_depth >= 0);
}
This page took 0.078084 seconds and 5 git commands to generate.