2010-12-31 Michael Snyder <msnyder@vmware.com>
[deliverable/binutils-gdb.git] / gdb / dwarf2expr.c
1 /* DWARF 2 Expression Evaluator.
2
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 Contributed by Daniel Berlin (dan@dberlin.org)
7
8 This file is part of GDB.
9
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
14
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22
23 #include "defs.h"
24 #include "symtab.h"
25 #include "gdbtypes.h"
26 #include "value.h"
27 #include "gdbcore.h"
28 #include "dwarf2.h"
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
31
32 /* Local prototypes. */
33
34 static void execute_stack_op (struct dwarf_expr_context *,
35 const gdb_byte *, const gdb_byte *);
36
37 /* Create a new context for the expression evaluator. */
38
39 struct dwarf_expr_context *
40 new_dwarf_expr_context (void)
41 {
42 struct dwarf_expr_context *retval;
43
44 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
45 retval->stack_len = 0;
46 retval->stack_allocated = 10;
47 retval->stack = xmalloc (retval->stack_allocated
48 * sizeof (struct dwarf_stack_value));
49 retval->num_pieces = 0;
50 retval->pieces = 0;
51 retval->max_recursion_depth = 0x100;
52 return retval;
53 }
54
55 /* Release the memory allocated to CTX. */
56
57 void
58 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
59 {
60 xfree (ctx->stack);
61 xfree (ctx->pieces);
62 xfree (ctx);
63 }
64
65 /* Helper for make_cleanup_free_dwarf_expr_context. */
66
static void
free_dwarf_expr_context_cleanup (void *arg)
{
  /* ARG is really a struct dwarf_expr_context *, registered by
     make_cleanup_free_dwarf_expr_context below.  */
  struct dwarf_expr_context *ctx = arg;

  free_dwarf_expr_context (ctx);
}
72
73 /* Return a cleanup that calls free_dwarf_expr_context. */
74
75 struct cleanup *
76 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
77 {
78 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
79 }
80
81 /* Expand the memory allocated to CTX's stack to contain at least
82 NEED more elements than are currently used. */
83
84 static void
85 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
86 {
87 if (ctx->stack_len + need > ctx->stack_allocated)
88 {
89 size_t newlen = ctx->stack_len + need + 10;
90
91 ctx->stack = xrealloc (ctx->stack,
92 newlen * sizeof (struct dwarf_stack_value));
93 ctx->stack_allocated = newlen;
94 }
95 }
96
97 /* Push VALUE onto CTX's stack. */
98
99 void
100 dwarf_expr_push (struct dwarf_expr_context *ctx, ULONGEST value,
101 int in_stack_memory)
102 {
103 struct dwarf_stack_value *v;
104
105 /* We keep all stack elements within the range defined by the
106 DWARF address size. */
107 if (ctx->addr_size < sizeof (ULONGEST))
108 value &= ((ULONGEST) 1 << (ctx->addr_size * HOST_CHAR_BIT)) - 1;
109
110 dwarf_expr_grow_stack (ctx, 1);
111 v = &ctx->stack[ctx->stack_len++];
112 v->value = value;
113 v->in_stack_memory = in_stack_memory;
114 }
115
116 /* Pop the top item off of CTX's stack. */
117
118 void
119 dwarf_expr_pop (struct dwarf_expr_context *ctx)
120 {
121 if (ctx->stack_len <= 0)
122 error (_("dwarf expression stack underflow"));
123 ctx->stack_len--;
124 }
125
126 /* Retrieve the N'th item on CTX's stack. */
127
128 ULONGEST
129 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
130 {
131 if (ctx->stack_len <= n)
132 error (_("Asked for position %d of stack, stack only has %d elements on it."),
133 n, ctx->stack_len);
134 return ctx->stack[ctx->stack_len - (1 + n)].value;
135
136 }
137
138 /* Retrieve the N'th item on CTX's stack, converted to an address. */
139
140 CORE_ADDR
141 dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
142 {
143 ULONGEST result = dwarf_expr_fetch (ctx, n);
144
145 /* For most architectures, calling extract_unsigned_integer() alone
146 is sufficient for extracting an address. However, some
147 architectures (e.g. MIPS) use signed addresses and using
148 extract_unsigned_integer() will not produce a correct
149 result. Make sure we invoke gdbarch_integer_to_address()
150 for those architectures which require it. */
151 if (gdbarch_integer_to_address_p (ctx->gdbarch))
152 {
153 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
154 gdb_byte *buf = alloca (ctx->addr_size);
155 struct type *int_type;
156
157 switch (ctx->addr_size)
158 {
159 case 2:
160 int_type = builtin_type (ctx->gdbarch)->builtin_uint16;
161 break;
162 case 4:
163 int_type = builtin_type (ctx->gdbarch)->builtin_uint32;
164 break;
165 case 8:
166 int_type = builtin_type (ctx->gdbarch)->builtin_uint64;
167 break;
168 default:
169 internal_error (__FILE__, __LINE__,
170 _("Unsupported address size.\n"));
171 }
172
173 store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
174 return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
175 }
176
177 return (CORE_ADDR) result;
178 }
179
180 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
181
182 int
183 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
184 {
185 if (ctx->stack_len <= n)
186 error (_("Asked for position %d of stack, stack only has %d elements on it."),
187 n, ctx->stack_len);
188 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
189
190 }
191
192 /* Return true if the expression stack is empty. */
193
194 static int
195 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
196 {
197 return ctx->stack_len == 0;
198 }
199
200 /* Add a new piece to CTX's piece list. */
static void
add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
{
  struct dwarf_expr_piece *p;

  /* Grow the piece array by one element and point P at the slot.  */
  ctx->num_pieces++;

  ctx->pieces = xrealloc (ctx->pieces,
			  (ctx->num_pieces
			   * sizeof (struct dwarf_expr_piece)));

  p = &ctx->pieces[ctx->num_pieces - 1];
  /* SIZE and OFFSET are in bits (DW_OP_piece callers pass 8 * bytes).
     Which member of the P->v union is valid depends on P->location,
     captured from the evaluator's current location kind.  */
  p->location = ctx->location;
  p->size = size;
  p->offset = offset;

  if (p->location == DWARF_VALUE_LITERAL)
    {
      /* Literal pieces point into the expression bytes; CTX->data and
	 CTX->len were set by the DW_OP_implicit_value handler.  */
      p->v.literal.data = ctx->data;
      p->v.literal.length = ctx->len;
    }
  else if (dwarf_expr_stack_empty_p (ctx))
    {
      /* An empty stack means this piece described no location.  */
      p->location = DWARF_VALUE_OPTIMIZED_OUT;
      /* Also reset the context's location, for our callers.  This is
	 a somewhat strange approach, but this lets us avoid setting
	 the location to DWARF_VALUE_MEMORY in all the individual
	 cases in the evaluator.  */
      ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
    }
  else if (p->location == DWARF_VALUE_MEMORY)
    {
      p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
      p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
    }
  else if (p->location == DWARF_VALUE_IMPLICIT_POINTER)
    {
      /* The DW_OP_GNU_implicit_pointer handler stashed the referenced
	 DIE offset in CTX->len; the stack top holds the byte offset
	 within the referenced value.  */
      p->v.ptr.die = ctx->len;
      p->v.ptr.offset = (LONGEST) dwarf_expr_fetch (ctx, 0);
    }
  else
    {
      /* Register and stack-value pieces: the stack top is the register
	 number or the value itself.  */
      p->v.value = dwarf_expr_fetch (ctx, 0);
    }
}
246
247 /* Evaluate the expression at ADDR (LEN bytes long) using the context
248 CTX. */
249
250 void
251 dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
252 size_t len)
253 {
254 int old_recursion_depth = ctx->recursion_depth;
255
256 execute_stack_op (ctx, addr, addr + len);
257
258 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
259
260 gdb_assert (ctx->recursion_depth == old_recursion_depth);
261 }
262
263 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
264 by R, and return the new value of BUF. Verify that it doesn't extend
265 past BUF_END. */
266
267 const gdb_byte *
268 read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r)
269 {
270 unsigned shift = 0;
271 ULONGEST result = 0;
272 gdb_byte byte;
273
274 while (1)
275 {
276 if (buf >= buf_end)
277 error (_("read_uleb128: Corrupted DWARF expression."));
278
279 byte = *buf++;
280 result |= (byte & 0x7f) << shift;
281 if ((byte & 0x80) == 0)
282 break;
283 shift += 7;
284 }
285 *r = result;
286 return buf;
287 }
288
289 /* Decode the signed LEB128 constant at BUF into the variable pointed to
290 by R, and return the new value of BUF. Verify that it doesn't extend
291 past BUF_END. */
292
293 const gdb_byte *
294 read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r)
295 {
296 unsigned shift = 0;
297 LONGEST result = 0;
298 gdb_byte byte;
299
300 while (1)
301 {
302 if (buf >= buf_end)
303 error (_("read_sleb128: Corrupted DWARF expression."));
304
305 byte = *buf++;
306 result |= (byte & 0x7f) << shift;
307 shift += 7;
308 if ((byte & 0x80) == 0)
309 break;
310 }
311 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
312 result |= -(1 << shift);
313
314 *r = result;
315 return buf;
316 }
317 \f
318
319 /* Check that the current operator is either at the end of an
320 expression, or that it is followed by a composition operator. */
321
322 void
323 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
324 const char *op_name)
325 {
326 /* It seems like DW_OP_GNU_uninit should be handled here. However,
327 it doesn't seem to make sense for DW_OP_*_value, and it was not
328 checked at the other place that this function is called. */
329 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
330 error (_("DWARF-2 expression error: `%s' operations must be "
331 "used either alone or in conjuction with DW_OP_piece "
332 "or DW_OP_bit_piece."),
333 op_name);
334 }
335
336 /* The engine for the expression evaluator. Using the context in CTX,
337 evaluate the expression between OP_PTR and OP_END. */
338
static void
execute_stack_op (struct dwarf_expr_context *ctx,
		  const gdb_byte *op_ptr, const gdb_byte *op_end)
{
/* Sign-extend X from the DWARF address size to LONGEST, using the
   precomputed SIGN_BIT below (the classic (x ^ s) - s trick).  */
#define sign_ext(x) ((LONGEST) (((x) ^ sign_bit) - sign_bit))
  ULONGEST sign_bit = (ctx->addr_size >= sizeof (ULONGEST) ? 0
		       : ((ULONGEST) 1) << (ctx->addr_size * 8 - 1));
  enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);

  ctx->location = DWARF_VALUE_MEMORY;
  ctx->initialized = 1;  /* Default is initialized.  */

  /* Guard against runaway recursion (DW_OP_fbreg / DW_OP_call* cause
     nested evaluations through dwarf_expr_eval).  */
  if (ctx->recursion_depth > ctx->max_recursion_depth)
    error (_("DWARF-2 expression error: Loop detected (%d)."),
	   ctx->recursion_depth);
  ctx->recursion_depth++;

  while (op_ptr < op_end)
    {
      enum dwarf_location_atom op = *op_ptr++;
      ULONGEST result;
      /* Assume the value is not in stack memory.
	 Code that knows otherwise sets this to 1.
	 Some arithmetic on stack addresses can probably be assumed to still
	 be a stack address, but we skip this complication for now.
	 This is just an optimization, so it's always ok to punt
	 and leave this as 0.  */
      int in_stack_memory = 0;
      ULONGEST uoffset, reg;
      LONGEST offset;

      switch (op)
	{
	/* Small literal constants: the value is encoded in the opcode
	   itself.  */
	case DW_OP_lit0:
	case DW_OP_lit1:
	case DW_OP_lit2:
	case DW_OP_lit3:
	case DW_OP_lit4:
	case DW_OP_lit5:
	case DW_OP_lit6:
	case DW_OP_lit7:
	case DW_OP_lit8:
	case DW_OP_lit9:
	case DW_OP_lit10:
	case DW_OP_lit11:
	case DW_OP_lit12:
	case DW_OP_lit13:
	case DW_OP_lit14:
	case DW_OP_lit15:
	case DW_OP_lit16:
	case DW_OP_lit17:
	case DW_OP_lit18:
	case DW_OP_lit19:
	case DW_OP_lit20:
	case DW_OP_lit21:
	case DW_OP_lit22:
	case DW_OP_lit23:
	case DW_OP_lit24:
	case DW_OP_lit25:
	case DW_OP_lit26:
	case DW_OP_lit27:
	case DW_OP_lit28:
	case DW_OP_lit29:
	case DW_OP_lit30:
	case DW_OP_lit31:
	  result = op - DW_OP_lit0;
	  break;

	case DW_OP_addr:
	  /* An address-sized constant, relocated by CTX->offset.  */
	  result = extract_unsigned_integer (op_ptr,
					     ctx->addr_size, byte_order);
	  op_ptr += ctx->addr_size;
	  /* Some versions of GCC emit DW_OP_addr before
	     DW_OP_GNU_push_tls_address.  In this case the value is an
	     index, not an address.  We don't support things like
	     branching between the address and the TLS op.  */
	  if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address)
	    result += ctx->offset;
	  break;

	/* Fixed-size constants of various widths and signedness.  */
	case DW_OP_const1u:
	  result = extract_unsigned_integer (op_ptr, 1, byte_order);
	  op_ptr += 1;
	  break;
	case DW_OP_const1s:
	  result = extract_signed_integer (op_ptr, 1, byte_order);
	  op_ptr += 1;
	  break;
	case DW_OP_const2u:
	  result = extract_unsigned_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  break;
	case DW_OP_const2s:
	  result = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  break;
	case DW_OP_const4u:
	  result = extract_unsigned_integer (op_ptr, 4, byte_order);
	  op_ptr += 4;
	  break;
	case DW_OP_const4s:
	  result = extract_signed_integer (op_ptr, 4, byte_order);
	  op_ptr += 4;
	  break;
	case DW_OP_const8u:
	  result = extract_unsigned_integer (op_ptr, 8, byte_order);
	  op_ptr += 8;
	  break;
	case DW_OP_const8s:
	  result = extract_signed_integer (op_ptr, 8, byte_order);
	  op_ptr += 8;
	  break;
	case DW_OP_constu:
	  op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
	  result = uoffset;
	  break;
	case DW_OP_consts:
	  op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	  result = offset;
	  break;

	/* The DW_OP_reg operations are required to occur alone in
	   location expressions.  */
	case DW_OP_reg0:
	case DW_OP_reg1:
	case DW_OP_reg2:
	case DW_OP_reg3:
	case DW_OP_reg4:
	case DW_OP_reg5:
	case DW_OP_reg6:
	case DW_OP_reg7:
	case DW_OP_reg8:
	case DW_OP_reg9:
	case DW_OP_reg10:
	case DW_OP_reg11:
	case DW_OP_reg12:
	case DW_OP_reg13:
	case DW_OP_reg14:
	case DW_OP_reg15:
	case DW_OP_reg16:
	case DW_OP_reg17:
	case DW_OP_reg18:
	case DW_OP_reg19:
	case DW_OP_reg20:
	case DW_OP_reg21:
	case DW_OP_reg22:
	case DW_OP_reg23:
	case DW_OP_reg24:
	case DW_OP_reg25:
	case DW_OP_reg26:
	case DW_OP_reg27:
	case DW_OP_reg28:
	case DW_OP_reg29:
	case DW_OP_reg30:
	case DW_OP_reg31:
	  if (op_ptr != op_end
	      && *op_ptr != DW_OP_piece
	      && *op_ptr != DW_OP_bit_piece
	      && *op_ptr != DW_OP_GNU_uninit)
	    error (_("DWARF-2 expression error: DW_OP_reg operations must be "
		     "used either alone or in conjuction with DW_OP_piece "
		     "or DW_OP_bit_piece."));

	  result = op - DW_OP_reg0;
	  ctx->location = DWARF_VALUE_REGISTER;
	  break;

	case DW_OP_regx:
	  /* Like DW_OP_regN but with a ULEB128 register number.  */
	  op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	  dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");

	  result = reg;
	  ctx->location = DWARF_VALUE_REGISTER;
	  break;

	case DW_OP_implicit_value:
	  {
	    ULONGEST len;

	    /* The value is the following LEN bytes of the expression
	       itself; record them via CTX->data/CTX->len for
	       add_piece.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &len);
	    if (op_ptr + len > op_end)
	      error (_("DW_OP_implicit_value: too few bytes available."));
	    ctx->len = len;
	    ctx->data = op_ptr;
	    ctx->location = DWARF_VALUE_LITERAL;
	    op_ptr += len;
	    dwarf_expr_require_composition (op_ptr, op_end,
					    "DW_OP_implicit_value");
	  }
	  goto no_push;

	case DW_OP_stack_value:
	  /* The object's value is the current top of stack, not a
	     location.  */
	  ctx->location = DWARF_VALUE_STACK;
	  dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
	  goto no_push;

	case DW_OP_GNU_implicit_pointer:
	  {
	    /* NOTE(review): DIE below is declared but never used; the
	       DIE offset is stashed in CTX->len instead, which
	       add_piece reads back for p->v.ptr.die.  */
	    ULONGEST die;
	    LONGEST len;

	    /* The referred-to DIE.  */
	    ctx->len = extract_unsigned_integer (op_ptr, ctx->addr_size,
						 byte_order);
	    op_ptr += ctx->addr_size;

	    /* The byte offset into the data.  */
	    op_ptr = read_sleb128 (op_ptr, op_end, &len);
	    result = (ULONGEST) len;

	    ctx->location = DWARF_VALUE_IMPLICIT_POINTER;
	    dwarf_expr_require_composition (op_ptr, op_end,
					    "DW_OP_GNU_implicit_pointer");
	  }
	  break;

	/* Register-relative addressing: push REG's contents plus a
	   SLEB128 offset.  */
	case DW_OP_breg0:
	case DW_OP_breg1:
	case DW_OP_breg2:
	case DW_OP_breg3:
	case DW_OP_breg4:
	case DW_OP_breg5:
	case DW_OP_breg6:
	case DW_OP_breg7:
	case DW_OP_breg8:
	case DW_OP_breg9:
	case DW_OP_breg10:
	case DW_OP_breg11:
	case DW_OP_breg12:
	case DW_OP_breg13:
	case DW_OP_breg14:
	case DW_OP_breg15:
	case DW_OP_breg16:
	case DW_OP_breg17:
	case DW_OP_breg18:
	case DW_OP_breg19:
	case DW_OP_breg20:
	case DW_OP_breg21:
	case DW_OP_breg22:
	case DW_OP_breg23:
	case DW_OP_breg24:
	case DW_OP_breg25:
	case DW_OP_breg26:
	case DW_OP_breg27:
	case DW_OP_breg28:
	case DW_OP_breg29:
	case DW_OP_breg30:
	case DW_OP_breg31:
	  {
	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
	    result += offset;
	  }
	  break;
	case DW_OP_bregx:
	  {
	    /* Same as DW_OP_bregN with a ULEB128 register number.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    result = (ctx->read_reg) (ctx->baton, reg);
	    result += offset;
	  }
	  break;
	case DW_OP_fbreg:
	  {
	    const gdb_byte *datastart;
	    size_t datalen;
	    unsigned int before_stack_len;

	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
	    /* Rather than create a whole new context, we simply
	       record the stack length before execution, then reset it
	       afterwards, effectively erasing whatever the recursive
	       call put there.  */
	    before_stack_len = ctx->stack_len;
	    /* FIXME: cagney/2003-03-26: This code should be using
	       get_frame_base_address(), and then implement a dwarf2
	       specific this_base method.  */
	    (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
	    /* Recursively evaluate the frame-base expression.  */
	    dwarf_expr_eval (ctx, datastart, datalen);
	    if (ctx->location == DWARF_VALUE_MEMORY)
	      result = dwarf_expr_fetch_address (ctx, 0);
	    else if (ctx->location == DWARF_VALUE_REGISTER)
	      result = (ctx->read_reg) (ctx->baton, dwarf_expr_fetch (ctx, 0));
	    else
	      error (_("Not implemented: computing frame base using explicit value operator"));
	    result = result + offset;
	    /* Frame-base-relative objects live in stack memory.  */
	    in_stack_memory = 1;
	    ctx->stack_len = before_stack_len;
	    ctx->location = DWARF_VALUE_MEMORY;
	  }
	  break;

	/* Stack-manipulation operators.  */
	case DW_OP_dup:
	  result = dwarf_expr_fetch (ctx, 0);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
	  break;

	case DW_OP_drop:
	  dwarf_expr_pop (ctx);
	  goto no_push;

	case DW_OP_pick:
	  /* Copy the stack entry at 1-byte operand depth to the top.  */
	  offset = *op_ptr++;
	  result = dwarf_expr_fetch (ctx, offset);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
	  break;

	case DW_OP_swap:
	  {
	    struct dwarf_stack_value t1, t2;

	    if (ctx->stack_len < 2)
	       error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
		      ctx->stack_len);
	    t1 = ctx->stack[ctx->stack_len - 1];
	    t2 = ctx->stack[ctx->stack_len - 2];
	    ctx->stack[ctx->stack_len - 1] = t2;
	    ctx->stack[ctx->stack_len - 2] = t1;
	    goto no_push;
	  }

	case DW_OP_over:
	  /* Duplicate the second entry onto the top.  */
	  result = dwarf_expr_fetch (ctx, 1);
	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
	  break;

	case DW_OP_rot:
	  {
	    /* Rotate the top three entries: top goes to third place.  */
	    struct dwarf_stack_value t1, t2, t3;

	    if (ctx->stack_len < 3)
	       error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
		      ctx->stack_len);
	    t1 = ctx->stack[ctx->stack_len - 1];
	    t2 = ctx->stack[ctx->stack_len - 2];
	    t3 = ctx->stack[ctx->stack_len - 3];
	    ctx->stack[ctx->stack_len - 1] = t2;
	    ctx->stack[ctx->stack_len - 2] = t3;
	    ctx->stack[ctx->stack_len - 3] = t1;
	    goto no_push;
	  }

	case DW_OP_deref:
	case DW_OP_deref_size:
	  {
	    /* DW_OP_deref reads a full address; DW_OP_deref_size takes
	       the byte count from a 1-byte operand.  */
	    int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++);
	    gdb_byte *buf = alloca (addr_size);
	    CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0);
	    dwarf_expr_pop (ctx);

	    (ctx->read_mem) (ctx->baton, buf, addr, addr_size);
	    result = extract_unsigned_integer (buf, addr_size, byte_order);
	    break;
	  }

	case DW_OP_abs:
	case DW_OP_neg:
	case DW_OP_not:
	case DW_OP_plus_uconst:
	  /* Unary operations.  */
	  result = dwarf_expr_fetch (ctx, 0);
	  dwarf_expr_pop (ctx);

	  switch (op)
	    {
	    case DW_OP_abs:
	      /* Interpret the value as signed in the DWARF address
		 size before testing the sign.  */
	      if (sign_ext (result) < 0)
		result = -result;
	      break;
	    case DW_OP_neg:
	      result = -result;
	      break;
	    case DW_OP_not:
	      result = ~result;
	      break;
	    case DW_OP_plus_uconst:
	      op_ptr = read_uleb128 (op_ptr, op_end, &reg);
	      result += reg;
	      break;
	    }
	  break;

	case DW_OP_and:
	case DW_OP_div:
	case DW_OP_minus:
	case DW_OP_mod:
	case DW_OP_mul:
	case DW_OP_or:
	case DW_OP_plus:
	case DW_OP_shl:
	case DW_OP_shr:
	case DW_OP_shra:
	case DW_OP_xor:
	case DW_OP_le:
	case DW_OP_ge:
	case DW_OP_eq:
	case DW_OP_lt:
	case DW_OP_gt:
	case DW_OP_ne:
	  {
	    /* Binary operations.  The top of stack is the second
	       (right-hand) operand.  */
	    ULONGEST first, second;

	    second = dwarf_expr_fetch (ctx, 0);
	    dwarf_expr_pop (ctx);

	    first = dwarf_expr_fetch (ctx, 0);
	    dwarf_expr_pop (ctx);

	    switch (op)
	      {
	      case DW_OP_and:
		result = first & second;
		break;
	      case DW_OP_div:
		/* Signed division; comparisons and shra below likewise
		   sign-extend from the DWARF address size.  */
		if (!second)
		  error (_("Division by zero"));
		result = sign_ext (first) / sign_ext (second);
		break;
	      case DW_OP_minus:
		result = first - second;
		break;
	      case DW_OP_mod:
		if (!second)
		  error (_("Division by zero"));
		result = first % second;
		break;
	      case DW_OP_mul:
		result = first * second;
		break;
	      case DW_OP_or:
		result = first | second;
		break;
	      case DW_OP_plus:
		result = first + second;
		break;
	      case DW_OP_shl:
		result = first << second;
		break;
	      case DW_OP_shr:
		result = first >> second;
		break;
	      case DW_OP_shra:
		result = sign_ext (first) >> second;
		break;
	      case DW_OP_xor:
		result = first ^ second;
		break;
	      case DW_OP_le:
		result = sign_ext (first) <= sign_ext (second);
		break;
	      case DW_OP_ge:
		result = sign_ext (first) >= sign_ext (second);
		break;
	      case DW_OP_eq:
		result = sign_ext (first) == sign_ext (second);
		break;
	      case DW_OP_lt:
		result = sign_ext (first) < sign_ext (second);
		break;
	      case DW_OP_gt:
		result = sign_ext (first) > sign_ext (second);
		break;
	      case DW_OP_ne:
		result = sign_ext (first) != sign_ext (second);
		break;
	      default:
		internal_error (__FILE__, __LINE__,
				_("Can't be reached."));
	      }
	  }
	  break;

	case DW_OP_call_frame_cfa:
	  result = (ctx->get_frame_cfa) (ctx->baton);
	  in_stack_memory = 1;
	  break;

	case DW_OP_GNU_push_tls_address:
	  /* Variable is at a constant offset in the thread-local
	     storage block into the objfile for the current thread and
	     the dynamic linker module containing this expression.  Here
	     we return returns the offset from that base.  The top of the
	     stack has the offset from the beginning of the thread
	     control block at which the variable is located.  Nothing
	     should follow this operator, so the top of stack would be
	     returned.  */
	  result = dwarf_expr_fetch (ctx, 0);
	  dwarf_expr_pop (ctx);
	  result = (ctx->get_tls_address) (ctx->baton, result);
	  break;

	case DW_OP_skip:
	  /* Unconditional branch by a signed 2-byte offset.  */
	  offset = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  op_ptr += offset;
	  goto no_push;

	case DW_OP_bra:
	  /* Conditional branch: taken if the popped top of stack is
	     nonzero.  */
	  offset = extract_signed_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  if (dwarf_expr_fetch (ctx, 0) != 0)
	    op_ptr += offset;
	  dwarf_expr_pop (ctx);
	  goto no_push;

	case DW_OP_nop:
	  goto no_push;

	case DW_OP_piece:
	  {
	    ULONGEST size;

	    /* Record the piece.  Sizes are stored in bits, hence the
	       8 * SIZE.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &size);
	    add_piece (ctx, 8 * size, 0);

	    /* Pop off the address/regnum, and reset the location
	       type.  */
	    if (ctx->location != DWARF_VALUE_LITERAL
		&& ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
	      dwarf_expr_pop (ctx);
	    ctx->location = DWARF_VALUE_MEMORY;
	  }
	  goto no_push;

	case DW_OP_bit_piece:
	  {
	    ULONGEST size, offset;

	    /* Record the piece.  */
	    op_ptr = read_uleb128 (op_ptr, op_end, &size);
	    op_ptr = read_uleb128 (op_ptr, op_end, &offset);
	    add_piece (ctx, size, offset);

	    /* Pop off the address/regnum, and reset the location
	       type.  */
	    if (ctx->location != DWARF_VALUE_LITERAL
		&& ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
	      dwarf_expr_pop (ctx);
	    ctx->location = DWARF_VALUE_MEMORY;
	  }
	  goto no_push;

	case DW_OP_GNU_uninit:
	  if (op_ptr != op_end)
	    error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
		   "be the very last op."));

	  ctx->initialized = 0;
	  goto no_push;

	/* DWARF procedure calls: the callback recursively evaluates
	   the location expression of the DIE at the given offset.  */
	case DW_OP_call2:
	  result = extract_unsigned_integer (op_ptr, 2, byte_order);
	  op_ptr += 2;
	  ctx->dwarf_call (ctx, result);
	  goto no_push;

	case DW_OP_call4:
	  result = extract_unsigned_integer (op_ptr, 4, byte_order);
	  op_ptr += 4;
	  ctx->dwarf_call (ctx, result);
	  goto no_push;

	default:
	  error (_("Unhandled dwarf expression opcode 0x%x"), op);
	}

      /* Most things push a result value.  */
      dwarf_expr_push (ctx, result, in_stack_memory);
    no_push:;
    }

  /* To simplify our main caller, if the result is an implicit
     pointer, then make a pieced value.  This is ok because we can't
     have implicit pointers in contexts where pieces are invalid.  */
  if (ctx->location == DWARF_VALUE_IMPLICIT_POINTER)
    add_piece (ctx, 8 * ctx->addr_size, 0);

  ctx->recursion_depth--;
  gdb_assert (ctx->recursion_depth >= 0);
#undef sign_ext
}
This page took 0.048634 seconds and 4 git commands to generate.