Commit | Line | Data |
---|---|---|
852483bc MK |
1 | /* DWARF 2 Expression Evaluator. |
2 | ||
7b6bb8da | 3 | Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011 |
9b254dd1 | 4 | Free Software Foundation, Inc. |
852483bc | 5 | |
4c2df51b DJ |
6 | Contributed by Daniel Berlin (dan@dberlin.org) |
7 | ||
8 | This file is part of GDB. | |
9 | ||
10 | This program is free software; you can redistribute it and/or modify | |
11 | it under the terms of the GNU General Public License as published by | |
a9762ec7 | 12 | the Free Software Foundation; either version 3 of the License, or |
4c2df51b DJ |
13 | (at your option) any later version. |
14 | ||
15 | This program is distributed in the hope that it will be useful, | |
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
18 | GNU General Public License for more details. | |
19 | ||
20 | You should have received a copy of the GNU General Public License | |
a9762ec7 | 21 | along with this program. If not, see <http://www.gnu.org/licenses/>. */ |
4c2df51b DJ |
22 | |
23 | #include "defs.h" | |
24 | #include "symtab.h" | |
25 | #include "gdbtypes.h" | |
26 | #include "value.h" | |
27 | #include "gdbcore.h" | |
fa8f86ff | 28 | #include "dwarf2.h" |
4c2df51b | 29 | #include "dwarf2expr.h" |
1e3a102a | 30 | #include "gdb_assert.h" |
4c2df51b DJ |
31 | |
32 | /* Local prototypes. */ | |
33 | ||
34 | static void execute_stack_op (struct dwarf_expr_context *, | |
0d45f56e | 35 | const gdb_byte *, const gdb_byte *); |
4c2df51b | 36 | |
8a9b8146 TT |
/* Cookie for gdbarch data.  Registered once with gdbarch so the
   per-architecture dwarf_gdbarch_types below can be looked up via
   gdbarch_data.  */

static struct gdbarch_data *dwarf_arch_cookie;

/* This holds gdbarch-specific types used by the DWARF expression
   evaluator.  See comments in execute_stack_op.  */

struct dwarf_gdbarch_types
{
  /* Lazily-created signed address types, indexed by address size:
     [0] = 2 bytes, [1] = 4 bytes, [2] = 8 bytes (see
     dwarf_expr_address_type).  */
  struct type *dw_types[3];
};
48 | ||
49 | /* Allocate and fill in dwarf_gdbarch_types for an arch. */ | |
50 | ||
51 | static void * | |
52 | dwarf_gdbarch_types_init (struct gdbarch *gdbarch) | |
53 | { | |
54 | struct dwarf_gdbarch_types *types | |
55 | = GDBARCH_OBSTACK_ZALLOC (gdbarch, struct dwarf_gdbarch_types); | |
56 | ||
57 | /* The types themselves are lazily initialized. */ | |
58 | ||
59 | return types; | |
60 | } | |
61 | ||
62 | /* Return the type used for DWARF operations where the type is | |
63 | unspecified in the DWARF spec. Only certain sizes are | |
64 | supported. */ | |
65 | ||
66 | static struct type * | |
67 | dwarf_expr_address_type (struct dwarf_expr_context *ctx) | |
68 | { | |
69 | struct dwarf_gdbarch_types *types = gdbarch_data (ctx->gdbarch, | |
70 | dwarf_arch_cookie); | |
71 | int ndx; | |
72 | ||
73 | if (ctx->addr_size == 2) | |
74 | ndx = 0; | |
75 | else if (ctx->addr_size == 4) | |
76 | ndx = 1; | |
77 | else if (ctx->addr_size == 8) | |
78 | ndx = 2; | |
79 | else | |
80 | error (_("Unsupported address size in DWARF expressions: %d bits"), | |
81 | 8 * ctx->addr_size); | |
82 | ||
83 | if (types->dw_types[ndx] == NULL) | |
84 | types->dw_types[ndx] | |
85 | = arch_integer_type (ctx->gdbarch, | |
86 | 8 * ctx->addr_size, | |
87 | 0, "<signed DWARF address type>"); | |
88 | ||
89 | return types->dw_types[ndx]; | |
90 | } | |
91 | ||
4c2df51b DJ |
92 | /* Create a new context for the expression evaluator. */ |
93 | ||
94 | struct dwarf_expr_context * | |
e4adbba9 | 95 | new_dwarf_expr_context (void) |
4c2df51b DJ |
96 | { |
97 | struct dwarf_expr_context *retval; | |
9a619af0 | 98 | |
4c2df51b | 99 | retval = xcalloc (1, sizeof (struct dwarf_expr_context)); |
18ec9831 KB |
100 | retval->stack_len = 0; |
101 | retval->stack_allocated = 10; | |
b966cb8a TT |
102 | retval->stack = xmalloc (retval->stack_allocated |
103 | * sizeof (struct dwarf_stack_value)); | |
87808bd6 JB |
104 | retval->num_pieces = 0; |
105 | retval->pieces = 0; | |
1e3a102a | 106 | retval->max_recursion_depth = 0x100; |
4c2df51b DJ |
107 | return retval; |
108 | } | |
109 | ||
110 | /* Release the memory allocated to CTX. */ | |
111 | ||
112 | void | |
113 | free_dwarf_expr_context (struct dwarf_expr_context *ctx) | |
114 | { | |
115 | xfree (ctx->stack); | |
87808bd6 | 116 | xfree (ctx->pieces); |
4c2df51b DJ |
117 | xfree (ctx); |
118 | } | |
119 | ||
4a227398 TT |
/* Helper for make_cleanup_free_dwarf_expr_context.  ARG is the
   struct dwarf_expr_context registered with the cleanup.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
127 | ||
/* Return a cleanup that calls free_dwarf_expr_context.  Running the
   cleanup frees CTX.  */

struct cleanup *
make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
{
  return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
}
135 | ||
4c2df51b DJ |
136 | /* Expand the memory allocated to CTX's stack to contain at least |
137 | NEED more elements than are currently used. */ | |
138 | ||
139 | static void | |
140 | dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need) | |
141 | { | |
142 | if (ctx->stack_len + need > ctx->stack_allocated) | |
143 | { | |
18ec9831 | 144 | size_t newlen = ctx->stack_len + need + 10; |
9a619af0 | 145 | |
4c2df51b | 146 | ctx->stack = xrealloc (ctx->stack, |
44353522 | 147 | newlen * sizeof (struct dwarf_stack_value)); |
18ec9831 | 148 | ctx->stack_allocated = newlen; |
4c2df51b DJ |
149 | } |
150 | } | |
151 | ||
152 | /* Push VALUE onto CTX's stack. */ | |
153 | ||
8a9b8146 TT |
154 | static void |
155 | dwarf_expr_push (struct dwarf_expr_context *ctx, struct value *value, | |
44353522 | 156 | int in_stack_memory) |
4c2df51b | 157 | { |
44353522 DE |
158 | struct dwarf_stack_value *v; |
159 | ||
4c2df51b | 160 | dwarf_expr_grow_stack (ctx, 1); |
44353522 DE |
161 | v = &ctx->stack[ctx->stack_len++]; |
162 | v->value = value; | |
163 | v->in_stack_memory = in_stack_memory; | |
4c2df51b DJ |
164 | } |
165 | ||
8a9b8146 | 166 | /* Push VALUE onto CTX's stack. */ |
4c2df51b DJ |
167 | |
168 | void | |
8a9b8146 TT |
169 | dwarf_expr_push_address (struct dwarf_expr_context *ctx, CORE_ADDR value, |
170 | int in_stack_memory) | |
171 | { | |
172 | dwarf_expr_push (ctx, | |
173 | value_from_ulongest (dwarf_expr_address_type (ctx), value), | |
174 | in_stack_memory); | |
175 | } | |
176 | ||
177 | /* Pop the top item off of CTX's stack. */ | |
178 | ||
179 | static void | |
4c2df51b DJ |
180 | dwarf_expr_pop (struct dwarf_expr_context *ctx) |
181 | { | |
182 | if (ctx->stack_len <= 0) | |
8a3fe4f8 | 183 | error (_("dwarf expression stack underflow")); |
4c2df51b DJ |
184 | ctx->stack_len--; |
185 | } | |
186 | ||
187 | /* Retrieve the N'th item on CTX's stack. */ | |
188 | ||
8a9b8146 | 189 | struct value * |
4c2df51b DJ |
190 | dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n) |
191 | { | |
ef0fdf07 | 192 | if (ctx->stack_len <= n) |
3e43a32a MS |
193 | error (_("Asked for position %d of stack, " |
194 | "stack only has %d elements on it."), | |
4c2df51b | 195 | n, ctx->stack_len); |
44353522 | 196 | return ctx->stack[ctx->stack_len - (1 + n)].value; |
8a9b8146 TT |
197 | } |
198 | ||
199 | /* Require that TYPE be an integral type; throw an exception if not. */ | |
44353522 | 200 | |
8a9b8146 TT |
201 | static void |
202 | dwarf_require_integral (struct type *type) | |
203 | { | |
204 | if (TYPE_CODE (type) != TYPE_CODE_INT | |
205 | && TYPE_CODE (type) != TYPE_CODE_CHAR | |
206 | && TYPE_CODE (type) != TYPE_CODE_BOOL) | |
207 | error (_("integral type expected in DWARF expression")); | |
208 | } | |
209 | ||
210 | /* Return the unsigned form of TYPE. TYPE is necessarily an integral | |
211 | type. */ | |
212 | ||
213 | static struct type * | |
214 | get_unsigned_type (struct gdbarch *gdbarch, struct type *type) | |
215 | { | |
216 | switch (TYPE_LENGTH (type)) | |
217 | { | |
218 | case 1: | |
219 | return builtin_type (gdbarch)->builtin_uint8; | |
220 | case 2: | |
221 | return builtin_type (gdbarch)->builtin_uint16; | |
222 | case 4: | |
223 | return builtin_type (gdbarch)->builtin_uint32; | |
224 | case 8: | |
225 | return builtin_type (gdbarch)->builtin_uint64; | |
226 | default: | |
227 | error (_("no unsigned variant found for type, while evaluating " | |
228 | "DWARF expression")); | |
229 | } | |
44353522 DE |
230 | } |
231 | ||
f2c7657e UW |
/* Retrieve the N'th item on CTX's stack, converted to an address.  */

CORE_ADDR
dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
{
  struct value *result_val = dwarf_expr_fetch (ctx, n);
  enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
  ULONGEST result;

  /* Only integral values can be reinterpreted as addresses.  */
  dwarf_require_integral (value_type (result_val));
  result = extract_unsigned_integer (value_contents (result_val),
				     TYPE_LENGTH (value_type (result_val)),
				     byte_order);

  /* For most architectures, calling extract_unsigned_integer() alone
     is sufficient for extracting an address.  However, some
     architectures (e.g. MIPS) use signed addresses and using
     extract_unsigned_integer() will not produce a correct
     result.  Make sure we invoke gdbarch_integer_to_address()
     for those architectures which require it.  */
  if (gdbarch_integer_to_address_p (ctx->gdbarch))
    {
      gdb_byte *buf = alloca (ctx->addr_size);
      struct type *int_type = get_unsigned_type (ctx->gdbarch,
						 value_type (result_val));

      /* Re-serialize the value at the DWARF address size before
	 handing it to the gdbarch conversion hook.  */
      store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
      return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
    }

  return (CORE_ADDR) result;
}
264 | ||
44353522 DE |
265 | /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */ |
266 | ||
267 | int | |
268 | dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n) | |
269 | { | |
270 | if (ctx->stack_len <= n) | |
3e43a32a MS |
271 | error (_("Asked for position %d of stack, " |
272 | "stack only has %d elements on it."), | |
44353522 DE |
273 | n, ctx->stack_len); |
274 | return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory; | |
4c2df51b DJ |
275 | } |
276 | ||
cb826367 TT |
/* Return true if the expression stack is empty.  Used by add_piece to
   recognize pieces with no backing value (optimized out).  */

static int
dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
{
  return ctx->stack_len == 0;
}
284 | ||
87808bd6 JB |
/* Add a new piece to CTX's piece list.  SIZE and OFFSET are stored
   verbatim in the new piece; their units depend on the caller
   (presumably DW_OP_piece vs. DW_OP_bit_piece -- the call sites are
   elsewhere).  The piece's payload is taken from the context's
   current location kind and, for most kinds, the top of the stack.  */
static void
add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
{
  struct dwarf_expr_piece *p;

  ctx->num_pieces++;

  /* Grow the piece array by one element.  */
  ctx->pieces = xrealloc (ctx->pieces,
			  (ctx->num_pieces
			   * sizeof (struct dwarf_expr_piece)));

  p = &ctx->pieces[ctx->num_pieces - 1];
  p->location = ctx->location;
  p->size = size;
  p->offset = offset;

  if (p->location == DWARF_VALUE_LITERAL)
    {
      /* Literal pieces point directly into the expression bytes
	 recorded by DW_OP_implicit_value.  */
      p->v.literal.data = ctx->data;
      p->v.literal.length = ctx->len;
    }
  else if (dwarf_expr_stack_empty_p (ctx))
    {
      /* Any other location kind needs a value from the stack; an
	 empty stack means this piece was optimized out.  This check
	 must come before the kind-specific cases below, which all
	 fetch from the stack.  */
      p->location = DWARF_VALUE_OPTIMIZED_OUT;
      /* Also reset the context's location, for our callers.  This is
	 a somewhat strange approach, but this lets us avoid setting
	 the location to DWARF_VALUE_MEMORY in all the individual
	 cases in the evaluator.  */
      ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
    }
  else if (p->location == DWARF_VALUE_MEMORY)
    {
      p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
      p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
    }
  else if (p->location == DWARF_VALUE_IMPLICIT_POINTER)
    {
      /* CTX->len holds the referenced DIE (stashed there by
	 DW_OP_GNU_implicit_pointer); the stack top is the offset.  */
      p->v.ptr.die = ctx->len;
      p->v.ptr.offset = value_as_long (dwarf_expr_fetch (ctx, 0));
    }
  else if (p->location == DWARF_VALUE_REGISTER)
    p->v.regno = value_as_long (dwarf_expr_fetch (ctx, 0));
  else
    {
      /* DWARF_VALUE_STACK: keep the computed value itself.  */
      p->v.value = dwarf_expr_fetch (ctx, 0);
    }
}
333 | ||
4c2df51b DJ |
/* Evaluate the expression at ADDR (LEN bytes long) using the context
   CTX.  On return the result is on CTX's stack and CTX's location and
   piece fields describe its kind.  */

void
dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
		 size_t len)
{
  int old_recursion_depth = ctx->recursion_depth;

  execute_stack_op (ctx, addr, addr + len);

  /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here.  */

  gdb_assert (ctx->recursion_depth == old_recursion_depth);
}
349 | ||
350 | /* Decode the unsigned LEB128 constant at BUF into the variable pointed to | |
351 | by R, and return the new value of BUF. Verify that it doesn't extend | |
352 | past BUF_END. */ | |
353 | ||
0d45f56e TT |
354 | const gdb_byte * |
355 | read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r) | |
4c2df51b DJ |
356 | { |
357 | unsigned shift = 0; | |
358 | ULONGEST result = 0; | |
852483bc | 359 | gdb_byte byte; |
4c2df51b DJ |
360 | |
361 | while (1) | |
362 | { | |
363 | if (buf >= buf_end) | |
8a3fe4f8 | 364 | error (_("read_uleb128: Corrupted DWARF expression.")); |
4c2df51b DJ |
365 | |
366 | byte = *buf++; | |
9930639c | 367 | result |= ((ULONGEST) (byte & 0x7f)) << shift; |
4c2df51b DJ |
368 | if ((byte & 0x80) == 0) |
369 | break; | |
370 | shift += 7; | |
371 | } | |
372 | *r = result; | |
373 | return buf; | |
374 | } | |
375 | ||
376 | /* Decode the signed LEB128 constant at BUF into the variable pointed to | |
377 | by R, and return the new value of BUF. Verify that it doesn't extend | |
378 | past BUF_END. */ | |
379 | ||
0d45f56e TT |
380 | const gdb_byte * |
381 | read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r) | |
4c2df51b DJ |
382 | { |
383 | unsigned shift = 0; | |
384 | LONGEST result = 0; | |
852483bc | 385 | gdb_byte byte; |
4c2df51b DJ |
386 | |
387 | while (1) | |
388 | { | |
389 | if (buf >= buf_end) | |
8a3fe4f8 | 390 | error (_("read_sleb128: Corrupted DWARF expression.")); |
4c2df51b DJ |
391 | |
392 | byte = *buf++; | |
9930639c | 393 | result |= ((ULONGEST) (byte & 0x7f)) << shift; |
4c2df51b DJ |
394 | shift += 7; |
395 | if ((byte & 0x80) == 0) | |
396 | break; | |
397 | } | |
398 | if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0) | |
399 | result |= -(1 << shift); | |
400 | ||
401 | *r = result; | |
402 | return buf; | |
403 | } | |
4c2df51b | 404 | \f |
cec03d70 TT |
405 | |
406 | /* Check that the current operator is either at the end of an | |
407 | expression, or that it is followed by a composition operator. */ | |
408 | ||
3cf03773 TT |
409 | void |
410 | dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end, | |
411 | const char *op_name) | |
cec03d70 TT |
412 | { |
413 | /* It seems like DW_OP_GNU_uninit should be handled here. However, | |
414 | it doesn't seem to make sense for DW_OP_*_value, and it was not | |
415 | checked at the other place that this function is called. */ | |
416 | if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece) | |
417 | error (_("DWARF-2 expression error: `%s' operations must be " | |
418 | "used either alone or in conjuction with DW_OP_piece " | |
419 | "or DW_OP_bit_piece."), | |
420 | op_name); | |
421 | } | |
422 | ||
8a9b8146 TT |
/* Return true iff the types T1 and T2 are "the same".  This only does
   checks that might reasonably be needed to compare DWARF base
   types.  */

static int
base_types_equal_p (struct type *t1, struct type *t2)
{
  /* Code, signedness and length must all agree.  */
  return (TYPE_CODE (t1) == TYPE_CODE (t2)
	  && TYPE_UNSIGNED (t1) == TYPE_UNSIGNED (t2)
	  && TYPE_LENGTH (t1) == TYPE_LENGTH (t2));
}
436 | ||
437 | /* A convenience function to call get_base_type on CTX and return the | |
438 | result. DIE is the DIE whose type we need. SIZE is non-zero if | |
439 | this function should verify that the resulting type has the correct | |
440 | size. */ | |
441 | ||
442 | static struct type * | |
443 | dwarf_get_base_type (struct dwarf_expr_context *ctx, ULONGEST die, int size) | |
444 | { | |
445 | struct type *result; | |
446 | ||
447 | if (ctx->get_base_type) | |
448 | { | |
449 | result = ctx->get_base_type (ctx, die); | |
9ff3b74f TT |
450 | if (result == NULL) |
451 | error (_("Could not find type for DW_OP_GNU_const_type")); | |
8a9b8146 TT |
452 | if (size != 0 && TYPE_LENGTH (result) != size) |
453 | error (_("DW_OP_GNU_const_type has different sizes for type and data")); | |
454 | } | |
455 | else | |
456 | /* Anything will do. */ | |
457 | result = builtin_type (ctx->gdbarch)->builtin_int; | |
458 | ||
459 | return result; | |
460 | } | |
461 | ||
4c2df51b DJ |
462 | /* The engine for the expression evaluator. Using the context in CTX, |
463 | evaluate the expression between OP_PTR and OP_END. */ | |
464 | ||
465 | static void | |
852483bc | 466 | execute_stack_op (struct dwarf_expr_context *ctx, |
0d45f56e | 467 | const gdb_byte *op_ptr, const gdb_byte *op_end) |
4c2df51b | 468 | { |
e17a4113 | 469 | enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch); |
8a9b8146 TT |
470 | /* Old-style "untyped" DWARF values need special treatment in a |
471 | couple of places, specifically DW_OP_mod and DW_OP_shr. We need | |
472 | a special type for these values so we can distinguish them from | |
473 | values that have an explicit type, because explicitly-typed | |
474 | values do not need special treatment. This special type must be | |
475 | different (in the `==' sense) from any base type coming from the | |
476 | CU. */ | |
477 | struct type *address_type = dwarf_expr_address_type (ctx); | |
9a619af0 | 478 | |
cec03d70 | 479 | ctx->location = DWARF_VALUE_MEMORY; |
42be36b3 | 480 | ctx->initialized = 1; /* Default is initialized. */ |
18ec9831 | 481 | |
1e3a102a JK |
482 | if (ctx->recursion_depth > ctx->max_recursion_depth) |
483 | error (_("DWARF-2 expression error: Loop detected (%d)."), | |
484 | ctx->recursion_depth); | |
485 | ctx->recursion_depth++; | |
486 | ||
4c2df51b DJ |
487 | while (op_ptr < op_end) |
488 | { | |
489 | enum dwarf_location_atom op = *op_ptr++; | |
f2c7657e | 490 | ULONGEST result; |
44353522 DE |
491 | /* Assume the value is not in stack memory. |
492 | Code that knows otherwise sets this to 1. | |
493 | Some arithmetic on stack addresses can probably be assumed to still | |
494 | be a stack address, but we skip this complication for now. | |
495 | This is just an optimization, so it's always ok to punt | |
496 | and leave this as 0. */ | |
497 | int in_stack_memory = 0; | |
4c2df51b DJ |
498 | ULONGEST uoffset, reg; |
499 | LONGEST offset; | |
8a9b8146 | 500 | struct value *result_val = NULL; |
4c2df51b | 501 | |
4c2df51b DJ |
502 | switch (op) |
503 | { | |
504 | case DW_OP_lit0: | |
505 | case DW_OP_lit1: | |
506 | case DW_OP_lit2: | |
507 | case DW_OP_lit3: | |
508 | case DW_OP_lit4: | |
509 | case DW_OP_lit5: | |
510 | case DW_OP_lit6: | |
511 | case DW_OP_lit7: | |
512 | case DW_OP_lit8: | |
513 | case DW_OP_lit9: | |
514 | case DW_OP_lit10: | |
515 | case DW_OP_lit11: | |
516 | case DW_OP_lit12: | |
517 | case DW_OP_lit13: | |
518 | case DW_OP_lit14: | |
519 | case DW_OP_lit15: | |
520 | case DW_OP_lit16: | |
521 | case DW_OP_lit17: | |
522 | case DW_OP_lit18: | |
523 | case DW_OP_lit19: | |
524 | case DW_OP_lit20: | |
525 | case DW_OP_lit21: | |
526 | case DW_OP_lit22: | |
527 | case DW_OP_lit23: | |
528 | case DW_OP_lit24: | |
529 | case DW_OP_lit25: | |
530 | case DW_OP_lit26: | |
531 | case DW_OP_lit27: | |
532 | case DW_OP_lit28: | |
533 | case DW_OP_lit29: | |
534 | case DW_OP_lit30: | |
535 | case DW_OP_lit31: | |
536 | result = op - DW_OP_lit0; | |
8a9b8146 | 537 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
538 | break; |
539 | ||
540 | case DW_OP_addr: | |
f2c7657e UW |
541 | result = extract_unsigned_integer (op_ptr, |
542 | ctx->addr_size, byte_order); | |
ae0d2f24 | 543 | op_ptr += ctx->addr_size; |
ac56253d TT |
544 | /* Some versions of GCC emit DW_OP_addr before |
545 | DW_OP_GNU_push_tls_address. In this case the value is an | |
546 | index, not an address. We don't support things like | |
547 | branching between the address and the TLS op. */ | |
548 | if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address) | |
549 | result += ctx->offset; | |
8a9b8146 | 550 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
551 | break; |
552 | ||
553 | case DW_OP_const1u: | |
e17a4113 | 554 | result = extract_unsigned_integer (op_ptr, 1, byte_order); |
8a9b8146 | 555 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
556 | op_ptr += 1; |
557 | break; | |
558 | case DW_OP_const1s: | |
e17a4113 | 559 | result = extract_signed_integer (op_ptr, 1, byte_order); |
8a9b8146 | 560 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
561 | op_ptr += 1; |
562 | break; | |
563 | case DW_OP_const2u: | |
e17a4113 | 564 | result = extract_unsigned_integer (op_ptr, 2, byte_order); |
8a9b8146 | 565 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
566 | op_ptr += 2; |
567 | break; | |
568 | case DW_OP_const2s: | |
e17a4113 | 569 | result = extract_signed_integer (op_ptr, 2, byte_order); |
8a9b8146 | 570 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
571 | op_ptr += 2; |
572 | break; | |
573 | case DW_OP_const4u: | |
e17a4113 | 574 | result = extract_unsigned_integer (op_ptr, 4, byte_order); |
8a9b8146 | 575 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
576 | op_ptr += 4; |
577 | break; | |
578 | case DW_OP_const4s: | |
e17a4113 | 579 | result = extract_signed_integer (op_ptr, 4, byte_order); |
8a9b8146 | 580 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
581 | op_ptr += 4; |
582 | break; | |
583 | case DW_OP_const8u: | |
e17a4113 | 584 | result = extract_unsigned_integer (op_ptr, 8, byte_order); |
8a9b8146 | 585 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
586 | op_ptr += 8; |
587 | break; | |
588 | case DW_OP_const8s: | |
e17a4113 | 589 | result = extract_signed_integer (op_ptr, 8, byte_order); |
8a9b8146 | 590 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
591 | op_ptr += 8; |
592 | break; | |
593 | case DW_OP_constu: | |
594 | op_ptr = read_uleb128 (op_ptr, op_end, &uoffset); | |
595 | result = uoffset; | |
8a9b8146 | 596 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
597 | break; |
598 | case DW_OP_consts: | |
599 | op_ptr = read_sleb128 (op_ptr, op_end, &offset); | |
600 | result = offset; | |
8a9b8146 | 601 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
602 | break; |
603 | ||
604 | /* The DW_OP_reg operations are required to occur alone in | |
605 | location expressions. */ | |
606 | case DW_OP_reg0: | |
607 | case DW_OP_reg1: | |
608 | case DW_OP_reg2: | |
609 | case DW_OP_reg3: | |
610 | case DW_OP_reg4: | |
611 | case DW_OP_reg5: | |
612 | case DW_OP_reg6: | |
613 | case DW_OP_reg7: | |
614 | case DW_OP_reg8: | |
615 | case DW_OP_reg9: | |
616 | case DW_OP_reg10: | |
617 | case DW_OP_reg11: | |
618 | case DW_OP_reg12: | |
619 | case DW_OP_reg13: | |
620 | case DW_OP_reg14: | |
621 | case DW_OP_reg15: | |
622 | case DW_OP_reg16: | |
623 | case DW_OP_reg17: | |
624 | case DW_OP_reg18: | |
625 | case DW_OP_reg19: | |
626 | case DW_OP_reg20: | |
627 | case DW_OP_reg21: | |
628 | case DW_OP_reg22: | |
629 | case DW_OP_reg23: | |
630 | case DW_OP_reg24: | |
631 | case DW_OP_reg25: | |
632 | case DW_OP_reg26: | |
633 | case DW_OP_reg27: | |
634 | case DW_OP_reg28: | |
635 | case DW_OP_reg29: | |
636 | case DW_OP_reg30: | |
637 | case DW_OP_reg31: | |
42be36b3 CT |
638 | if (op_ptr != op_end |
639 | && *op_ptr != DW_OP_piece | |
d3b1e874 | 640 | && *op_ptr != DW_OP_bit_piece |
42be36b3 | 641 | && *op_ptr != DW_OP_GNU_uninit) |
8a3fe4f8 | 642 | error (_("DWARF-2 expression error: DW_OP_reg operations must be " |
d3b1e874 TT |
643 | "used either alone or in conjuction with DW_OP_piece " |
644 | "or DW_OP_bit_piece.")); | |
4c2df51b | 645 | |
61fbb938 | 646 | result = op - DW_OP_reg0; |
8a9b8146 | 647 | result_val = value_from_ulongest (address_type, result); |
cec03d70 | 648 | ctx->location = DWARF_VALUE_REGISTER; |
4c2df51b DJ |
649 | break; |
650 | ||
651 | case DW_OP_regx: | |
652 | op_ptr = read_uleb128 (op_ptr, op_end, ®); | |
3cf03773 | 653 | dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx"); |
4c2df51b | 654 | |
61fbb938 | 655 | result = reg; |
8a9b8146 | 656 | result_val = value_from_ulongest (address_type, result); |
cec03d70 | 657 | ctx->location = DWARF_VALUE_REGISTER; |
4c2df51b DJ |
658 | break; |
659 | ||
cec03d70 TT |
660 | case DW_OP_implicit_value: |
661 | { | |
662 | ULONGEST len; | |
9a619af0 | 663 | |
cec03d70 TT |
664 | op_ptr = read_uleb128 (op_ptr, op_end, &len); |
665 | if (op_ptr + len > op_end) | |
666 | error (_("DW_OP_implicit_value: too few bytes available.")); | |
667 | ctx->len = len; | |
668 | ctx->data = op_ptr; | |
669 | ctx->location = DWARF_VALUE_LITERAL; | |
670 | op_ptr += len; | |
3cf03773 TT |
671 | dwarf_expr_require_composition (op_ptr, op_end, |
672 | "DW_OP_implicit_value"); | |
cec03d70 TT |
673 | } |
674 | goto no_push; | |
675 | ||
676 | case DW_OP_stack_value: | |
677 | ctx->location = DWARF_VALUE_STACK; | |
3cf03773 | 678 | dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value"); |
cec03d70 TT |
679 | goto no_push; |
680 | ||
8cf6f0b1 TT |
681 | case DW_OP_GNU_implicit_pointer: |
682 | { | |
683 | ULONGEST die; | |
684 | LONGEST len; | |
685 | ||
686 | /* The referred-to DIE. */ | |
687 | ctx->len = extract_unsigned_integer (op_ptr, ctx->addr_size, | |
688 | byte_order); | |
689 | op_ptr += ctx->addr_size; | |
690 | ||
691 | /* The byte offset into the data. */ | |
692 | op_ptr = read_sleb128 (op_ptr, op_end, &len); | |
693 | result = (ULONGEST) len; | |
8a9b8146 | 694 | result_val = value_from_ulongest (address_type, result); |
8cf6f0b1 TT |
695 | |
696 | ctx->location = DWARF_VALUE_IMPLICIT_POINTER; | |
697 | dwarf_expr_require_composition (op_ptr, op_end, | |
698 | "DW_OP_GNU_implicit_pointer"); | |
699 | } | |
700 | break; | |
701 | ||
4c2df51b DJ |
702 | case DW_OP_breg0: |
703 | case DW_OP_breg1: | |
704 | case DW_OP_breg2: | |
705 | case DW_OP_breg3: | |
706 | case DW_OP_breg4: | |
707 | case DW_OP_breg5: | |
708 | case DW_OP_breg6: | |
709 | case DW_OP_breg7: | |
710 | case DW_OP_breg8: | |
711 | case DW_OP_breg9: | |
712 | case DW_OP_breg10: | |
713 | case DW_OP_breg11: | |
714 | case DW_OP_breg12: | |
715 | case DW_OP_breg13: | |
716 | case DW_OP_breg14: | |
717 | case DW_OP_breg15: | |
718 | case DW_OP_breg16: | |
719 | case DW_OP_breg17: | |
720 | case DW_OP_breg18: | |
721 | case DW_OP_breg19: | |
722 | case DW_OP_breg20: | |
723 | case DW_OP_breg21: | |
724 | case DW_OP_breg22: | |
725 | case DW_OP_breg23: | |
726 | case DW_OP_breg24: | |
727 | case DW_OP_breg25: | |
728 | case DW_OP_breg26: | |
729 | case DW_OP_breg27: | |
730 | case DW_OP_breg28: | |
731 | case DW_OP_breg29: | |
732 | case DW_OP_breg30: | |
733 | case DW_OP_breg31: | |
734 | { | |
735 | op_ptr = read_sleb128 (op_ptr, op_end, &offset); | |
61fbb938 | 736 | result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0); |
4c2df51b | 737 | result += offset; |
8a9b8146 | 738 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
739 | } |
740 | break; | |
741 | case DW_OP_bregx: | |
742 | { | |
743 | op_ptr = read_uleb128 (op_ptr, op_end, ®); | |
744 | op_ptr = read_sleb128 (op_ptr, op_end, &offset); | |
61fbb938 | 745 | result = (ctx->read_reg) (ctx->baton, reg); |
4c2df51b | 746 | result += offset; |
8a9b8146 | 747 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
748 | } |
749 | break; | |
750 | case DW_OP_fbreg: | |
751 | { | |
0d45f56e | 752 | const gdb_byte *datastart; |
4c2df51b DJ |
753 | size_t datalen; |
754 | unsigned int before_stack_len; | |
755 | ||
756 | op_ptr = read_sleb128 (op_ptr, op_end, &offset); | |
757 | /* Rather than create a whole new context, we simply | |
758 | record the stack length before execution, then reset it | |
759 | afterwards, effectively erasing whatever the recursive | |
760 | call put there. */ | |
761 | before_stack_len = ctx->stack_len; | |
da62e633 AC |
762 | /* FIXME: cagney/2003-03-26: This code should be using |
763 | get_frame_base_address(), and then implement a dwarf2 | |
764 | specific this_base method. */ | |
4c2df51b DJ |
765 | (ctx->get_frame_base) (ctx->baton, &datastart, &datalen); |
766 | dwarf_expr_eval (ctx, datastart, datalen); | |
f2c7657e UW |
767 | if (ctx->location == DWARF_VALUE_MEMORY) |
768 | result = dwarf_expr_fetch_address (ctx, 0); | |
769 | else if (ctx->location == DWARF_VALUE_REGISTER) | |
8a9b8146 TT |
770 | result |
771 | = (ctx->read_reg) (ctx->baton, | |
772 | value_as_long (dwarf_expr_fetch (ctx, 0))); | |
f2c7657e | 773 | else |
3e43a32a MS |
774 | error (_("Not implemented: computing frame " |
775 | "base using explicit value operator")); | |
4c2df51b | 776 | result = result + offset; |
8a9b8146 | 777 | result_val = value_from_ulongest (address_type, result); |
44353522 | 778 | in_stack_memory = 1; |
4c2df51b | 779 | ctx->stack_len = before_stack_len; |
cec03d70 | 780 | ctx->location = DWARF_VALUE_MEMORY; |
4c2df51b DJ |
781 | } |
782 | break; | |
44353522 | 783 | |
4c2df51b | 784 | case DW_OP_dup: |
8a9b8146 | 785 | result_val = dwarf_expr_fetch (ctx, 0); |
44353522 | 786 | in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0); |
4c2df51b DJ |
787 | break; |
788 | ||
789 | case DW_OP_drop: | |
790 | dwarf_expr_pop (ctx); | |
791 | goto no_push; | |
792 | ||
793 | case DW_OP_pick: | |
794 | offset = *op_ptr++; | |
8a9b8146 | 795 | result_val = dwarf_expr_fetch (ctx, offset); |
44353522 | 796 | in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset); |
4c2df51b | 797 | break; |
9f3fe11c TG |
798 | |
799 | case DW_OP_swap: | |
800 | { | |
44353522 | 801 | struct dwarf_stack_value t1, t2; |
9f3fe11c TG |
802 | |
803 | if (ctx->stack_len < 2) | |
3e43a32a | 804 | error (_("Not enough elements for " |
0963b4bd | 805 | "DW_OP_swap. Need 2, have %d."), |
9f3fe11c TG |
806 | ctx->stack_len); |
807 | t1 = ctx->stack[ctx->stack_len - 1]; | |
808 | t2 = ctx->stack[ctx->stack_len - 2]; | |
809 | ctx->stack[ctx->stack_len - 1] = t2; | |
810 | ctx->stack[ctx->stack_len - 2] = t1; | |
811 | goto no_push; | |
812 | } | |
4c2df51b DJ |
813 | |
814 | case DW_OP_over: | |
8a9b8146 | 815 | result_val = dwarf_expr_fetch (ctx, 1); |
44353522 | 816 | in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1); |
4c2df51b DJ |
817 | break; |
818 | ||
819 | case DW_OP_rot: | |
820 | { | |
44353522 | 821 | struct dwarf_stack_value t1, t2, t3; |
4c2df51b DJ |
822 | |
823 | if (ctx->stack_len < 3) | |
0963b4bd MS |
824 | error (_("Not enough elements for " |
825 | "DW_OP_rot. Need 3, have %d."), | |
4c2df51b DJ |
826 | ctx->stack_len); |
827 | t1 = ctx->stack[ctx->stack_len - 1]; | |
828 | t2 = ctx->stack[ctx->stack_len - 2]; | |
829 | t3 = ctx->stack[ctx->stack_len - 3]; | |
830 | ctx->stack[ctx->stack_len - 1] = t2; | |
831 | ctx->stack[ctx->stack_len - 2] = t3; | |
832 | ctx->stack[ctx->stack_len - 3] = t1; | |
833 | goto no_push; | |
834 | } | |
835 | ||
836 | case DW_OP_deref: | |
837 | case DW_OP_deref_size: | |
8a9b8146 | 838 | case DW_OP_GNU_deref_type: |
f2c7657e UW |
839 | { |
840 | int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++); | |
841 | gdb_byte *buf = alloca (addr_size); | |
842 | CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0); | |
8a9b8146 TT |
843 | struct type *type; |
844 | ||
f2c7657e UW |
845 | dwarf_expr_pop (ctx); |
846 | ||
8a9b8146 TT |
847 | if (op == DW_OP_GNU_deref_type) |
848 | { | |
849 | ULONGEST type_die; | |
850 | ||
851 | op_ptr = read_uleb128 (op_ptr, op_end, &type_die); | |
852 | type = dwarf_get_base_type (ctx, type_die, 0); | |
853 | } | |
854 | else | |
855 | type = address_type; | |
856 | ||
f2c7657e | 857 | (ctx->read_mem) (ctx->baton, buf, addr, addr_size); |
8a9b8146 | 858 | result_val = value_from_contents_and_address (type, buf, addr); |
f2c7657e UW |
859 | break; |
860 | } | |
861 | ||
4c2df51b DJ |
862 | case DW_OP_abs: |
863 | case DW_OP_neg: | |
864 | case DW_OP_not: | |
865 | case DW_OP_plus_uconst: | |
8a9b8146 TT |
866 | { |
867 | /* Unary operations. */ | |
868 | result_val = dwarf_expr_fetch (ctx, 0); | |
869 | dwarf_expr_pop (ctx); | |
4c2df51b | 870 | |
8a9b8146 TT |
871 | switch (op) |
872 | { | |
873 | case DW_OP_abs: | |
874 | if (value_less (result_val, | |
875 | value_zero (value_type (result_val), not_lval))) | |
876 | result_val = value_neg (result_val); | |
877 | break; | |
878 | case DW_OP_neg: | |
879 | result_val = value_neg (result_val); | |
880 | break; | |
881 | case DW_OP_not: | |
882 | dwarf_require_integral (value_type (result_val)); | |
883 | result_val = value_complement (result_val); | |
884 | break; | |
885 | case DW_OP_plus_uconst: | |
886 | dwarf_require_integral (value_type (result_val)); | |
887 | result = value_as_long (result_val); | |
888 | op_ptr = read_uleb128 (op_ptr, op_end, ®); | |
889 | result += reg; | |
890 | result_val = value_from_ulongest (address_type, result); | |
891 | break; | |
892 | } | |
893 | } | |
4c2df51b DJ |
894 | break; |
895 | ||
896 | case DW_OP_and: | |
897 | case DW_OP_div: | |
898 | case DW_OP_minus: | |
899 | case DW_OP_mod: | |
900 | case DW_OP_mul: | |
901 | case DW_OP_or: | |
902 | case DW_OP_plus: | |
903 | case DW_OP_shl: | |
904 | case DW_OP_shr: | |
905 | case DW_OP_shra: | |
906 | case DW_OP_xor: | |
907 | case DW_OP_le: | |
908 | case DW_OP_ge: | |
909 | case DW_OP_eq: | |
910 | case DW_OP_lt: | |
911 | case DW_OP_gt: | |
912 | case DW_OP_ne: | |
913 | { | |
f2c7657e | 914 | /* Binary operations. */ |
8a9b8146 | 915 | struct value *first, *second; |
4c2df51b DJ |
916 | |
917 | second = dwarf_expr_fetch (ctx, 0); | |
918 | dwarf_expr_pop (ctx); | |
919 | ||
b263358a | 920 | first = dwarf_expr_fetch (ctx, 0); |
4c2df51b DJ |
921 | dwarf_expr_pop (ctx); |
922 | ||
8a9b8146 TT |
923 | if (! base_types_equal_p (value_type (first), value_type (second))) |
924 | error (_("Incompatible types on DWARF stack")); | |
925 | ||
4c2df51b DJ |
926 | switch (op) |
927 | { | |
928 | case DW_OP_and: | |
8a9b8146 TT |
929 | dwarf_require_integral (value_type (first)); |
930 | dwarf_require_integral (value_type (second)); | |
931 | result_val = value_binop (first, second, BINOP_BITWISE_AND); | |
4c2df51b DJ |
932 | break; |
933 | case DW_OP_div: | |
8a9b8146 | 934 | result_val = value_binop (first, second, BINOP_DIV); |
99c87dab | 935 | break; |
4c2df51b | 936 | case DW_OP_minus: |
8a9b8146 | 937 | result_val = value_binop (first, second, BINOP_SUB); |
4c2df51b DJ |
938 | break; |
939 | case DW_OP_mod: | |
8a9b8146 TT |
940 | { |
941 | int cast_back = 0; | |
942 | struct type *orig_type = value_type (first); | |
943 | ||
944 | /* We have to special-case "old-style" untyped values | |
945 | -- these must have mod computed using unsigned | |
946 | math. */ | |
947 | if (orig_type == address_type) | |
948 | { | |
949 | struct type *utype | |
950 | = get_unsigned_type (ctx->gdbarch, orig_type); | |
951 | ||
952 | cast_back = 1; | |
953 | first = value_cast (utype, first); | |
954 | second = value_cast (utype, second); | |
955 | } | |
956 | /* Note that value_binop doesn't handle float or | |
957 | decimal float here. This seems unimportant. */ | |
958 | result_val = value_binop (first, second, BINOP_MOD); | |
959 | if (cast_back) | |
960 | result_val = value_cast (orig_type, result_val); | |
961 | } | |
4c2df51b DJ |
962 | break; |
963 | case DW_OP_mul: | |
8a9b8146 | 964 | result_val = value_binop (first, second, BINOP_MUL); |
4c2df51b DJ |
965 | break; |
966 | case DW_OP_or: | |
8a9b8146 TT |
967 | dwarf_require_integral (value_type (first)); |
968 | dwarf_require_integral (value_type (second)); | |
969 | result_val = value_binop (first, second, BINOP_BITWISE_IOR); | |
4c2df51b DJ |
970 | break; |
971 | case DW_OP_plus: | |
8a9b8146 | 972 | result_val = value_binop (first, second, BINOP_ADD); |
4c2df51b DJ |
973 | break; |
974 | case DW_OP_shl: | |
8a9b8146 TT |
975 | dwarf_require_integral (value_type (first)); |
976 | dwarf_require_integral (value_type (second)); | |
977 | result_val = value_binop (first, second, BINOP_LSH); | |
4c2df51b DJ |
978 | break; |
979 | case DW_OP_shr: | |
8a9b8146 TT |
980 | dwarf_require_integral (value_type (first)); |
981 | dwarf_require_integral (value_type (second)); | |
b087e0ed | 982 | if (!TYPE_UNSIGNED (value_type (first))) |
8a9b8146 TT |
983 | { |
984 | struct type *utype | |
985 | = get_unsigned_type (ctx->gdbarch, value_type (first)); | |
986 | ||
987 | first = value_cast (utype, first); | |
988 | } | |
989 | ||
990 | result_val = value_binop (first, second, BINOP_RSH); | |
991 | /* Make sure we wind up with the same type we started | |
992 | with. */ | |
993 | if (value_type (result_val) != value_type (second)) | |
994 | result_val = value_cast (value_type (second), result_val); | |
99c87dab | 995 | break; |
4c2df51b | 996 | case DW_OP_shra: |
8a9b8146 TT |
997 | dwarf_require_integral (value_type (first)); |
998 | dwarf_require_integral (value_type (second)); | |
999 | result_val = value_binop (first, second, BINOP_RSH); | |
4c2df51b DJ |
1000 | break; |
1001 | case DW_OP_xor: | |
8a9b8146 TT |
1002 | dwarf_require_integral (value_type (first)); |
1003 | dwarf_require_integral (value_type (second)); | |
1004 | result_val = value_binop (first, second, BINOP_BITWISE_XOR); | |
4c2df51b DJ |
1005 | break; |
1006 | case DW_OP_le: | |
8a9b8146 TT |
1007 | /* A <= B is !(B < A). */ |
1008 | result = ! value_less (second, first); | |
1009 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1010 | break; |
1011 | case DW_OP_ge: | |
8a9b8146 TT |
1012 | /* A >= B is !(A < B). */ |
1013 | result = ! value_less (first, second); | |
1014 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1015 | break; |
1016 | case DW_OP_eq: | |
8a9b8146 TT |
1017 | result = value_equal (first, second); |
1018 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1019 | break; |
1020 | case DW_OP_lt: | |
8a9b8146 TT |
1021 | result = value_less (first, second); |
1022 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1023 | break; |
1024 | case DW_OP_gt: | |
8a9b8146 TT |
1025 | /* A > B is B < A. */ |
1026 | result = value_less (second, first); | |
1027 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1028 | break; |
1029 | case DW_OP_ne: | |
8a9b8146 TT |
1030 | result = ! value_equal (first, second); |
1031 | result_val = value_from_ulongest (address_type, result); | |
4c2df51b DJ |
1032 | break; |
1033 | default: | |
1034 | internal_error (__FILE__, __LINE__, | |
e2e0b3e5 | 1035 | _("Can't be reached.")); |
4c2df51b | 1036 | } |
4c2df51b DJ |
1037 | } |
1038 | break; | |
1039 | ||
e7802207 TT |
1040 | case DW_OP_call_frame_cfa: |
1041 | result = (ctx->get_frame_cfa) (ctx->baton); | |
8a9b8146 | 1042 | result_val = value_from_ulongest (address_type, result); |
44353522 | 1043 | in_stack_memory = 1; |
e7802207 TT |
1044 | break; |
1045 | ||
4c2df51b | 1046 | case DW_OP_GNU_push_tls_address: |
c3228f12 EZ |
1047 | /* Variable is at a constant offset in the thread-local |
1048 | storage block into the objfile for the current thread and | |
0963b4bd | 1049 | the dynamic linker module containing this expression. Here |
c3228f12 EZ |
1050 | we return the offset from that base. The top of the |
1051 | stack has the offset from the beginning of the thread | |
1052 | control block at which the variable is located. Nothing | |
1053 | should follow this operator, so the top of stack would be | |
1054 | returned. */ | |
8a9b8146 | 1055 | result = value_as_long (dwarf_expr_fetch (ctx, 0)); |
4c2df51b DJ |
1056 | dwarf_expr_pop (ctx); |
1057 | result = (ctx->get_tls_address) (ctx->baton, result); | |
8a9b8146 | 1058 | result_val = value_from_ulongest (address_type, result); |
4c2df51b DJ |
1059 | break; |
1060 | ||
1061 | case DW_OP_skip: | |
e17a4113 | 1062 | offset = extract_signed_integer (op_ptr, 2, byte_order); |
4c2df51b DJ |
1063 | op_ptr += 2; |
1064 | op_ptr += offset; | |
1065 | goto no_push; | |
1066 | ||
1067 | case DW_OP_bra: | |
8a9b8146 TT |
1068 | { |
1069 | struct value *val; | |
1070 | ||
1071 | offset = extract_signed_integer (op_ptr, 2, byte_order); | |
1072 | op_ptr += 2; | |
1073 | val = dwarf_expr_fetch (ctx, 0); | |
1074 | dwarf_require_integral (value_type (val)); | |
1075 | if (value_as_long (val) != 0) | |
1076 | op_ptr += offset; | |
1077 | dwarf_expr_pop (ctx); | |
1078 | } | |
4c2df51b DJ |
1079 | goto no_push; |
1080 | ||
1081 | case DW_OP_nop: | |
1082 | goto no_push; | |
1083 | ||
87808bd6 JB |
1084 | case DW_OP_piece: |
1085 | { | |
1086 | ULONGEST size; | |
87808bd6 JB |
1087 | |
1088 | /* Record the piece. */ | |
1089 | op_ptr = read_uleb128 (op_ptr, op_end, &size); | |
d3b1e874 | 1090 | add_piece (ctx, 8 * size, 0); |
87808bd6 | 1091 | |
cec03d70 TT |
1092 | /* Pop off the address/regnum, and reset the location |
1093 | type. */ | |
cb826367 TT |
1094 | if (ctx->location != DWARF_VALUE_LITERAL |
1095 | && ctx->location != DWARF_VALUE_OPTIMIZED_OUT) | |
cec03d70 TT |
1096 | dwarf_expr_pop (ctx); |
1097 | ctx->location = DWARF_VALUE_MEMORY; | |
87808bd6 JB |
1098 | } |
1099 | goto no_push; | |
1100 | ||
d3b1e874 TT |
1101 | case DW_OP_bit_piece: |
1102 | { | |
1103 | ULONGEST size, offset; | |
1104 | ||
1105 | /* Record the piece. */ | |
1106 | op_ptr = read_uleb128 (op_ptr, op_end, &size); | |
1107 | op_ptr = read_uleb128 (op_ptr, op_end, &offset); | |
1108 | add_piece (ctx, size, offset); | |
1109 | ||
1110 | /* Pop off the address/regnum, and reset the location | |
1111 | type. */ | |
1112 | if (ctx->location != DWARF_VALUE_LITERAL | |
1113 | && ctx->location != DWARF_VALUE_OPTIMIZED_OUT) | |
1114 | dwarf_expr_pop (ctx); | |
1115 | ctx->location = DWARF_VALUE_MEMORY; | |
1116 | } | |
1117 | goto no_push; | |
1118 | ||
42be36b3 CT |
1119 | case DW_OP_GNU_uninit: |
1120 | if (op_ptr != op_end) | |
9c482037 | 1121 | error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always " |
42be36b3 CT |
1122 | "be the very last op.")); |
1123 | ||
1124 | ctx->initialized = 0; | |
1125 | goto no_push; | |
1126 | ||
5c631832 JK |
1127 | case DW_OP_call2: |
1128 | result = extract_unsigned_integer (op_ptr, 2, byte_order); | |
1129 | op_ptr += 2; | |
1130 | ctx->dwarf_call (ctx, result); | |
1131 | goto no_push; | |
1132 | ||
1133 | case DW_OP_call4: | |
1134 | result = extract_unsigned_integer (op_ptr, 4, byte_order); | |
1135 | op_ptr += 4; | |
1136 | ctx->dwarf_call (ctx, result); | |
1137 | goto no_push; | |
dd90784c JK |
1138 | |
1139 | case DW_OP_GNU_entry_value: | |
1140 | /* This operation is not yet supported by GDB. */ | |
1141 | ctx->location = DWARF_VALUE_OPTIMIZED_OUT; | |
1142 | ctx->stack_len = 0; | |
1143 | ctx->num_pieces = 0; | |
1144 | goto abort_expression; | |
5c631832 | 1145 | |
8a9b8146 TT |
1146 | case DW_OP_GNU_const_type: |
1147 | { | |
1148 | ULONGEST type_die; | |
1149 | int n; | |
1150 | const gdb_byte *data; | |
1151 | struct type *type; | |
1152 | ||
1153 | op_ptr = read_uleb128 (op_ptr, op_end, &type_die); | |
1154 | n = *op_ptr++; | |
1155 | data = op_ptr; | |
1156 | op_ptr += n; | |
1157 | ||
1158 | type = dwarf_get_base_type (ctx, type_die, n); | |
1159 | result_val = value_from_contents (type, data); | |
1160 | } | |
1161 | break; | |
1162 | ||
1163 | case DW_OP_GNU_regval_type: | |
1164 | { | |
1165 | ULONGEST type_die; | |
1166 | struct type *type; | |
1167 | ||
1168 | op_ptr = read_uleb128 (op_ptr, op_end, ®); | |
1169 | op_ptr = read_uleb128 (op_ptr, op_end, &type_die); | |
1170 | ||
1171 | type = dwarf_get_base_type (ctx, type_die, 0); | |
1172 | result = (ctx->read_reg) (ctx->baton, reg); | |
1173 | result_val = value_from_ulongest (type, result); | |
1174 | } | |
1175 | break; | |
1176 | ||
1177 | case DW_OP_GNU_convert: | |
1178 | case DW_OP_GNU_reinterpret: | |
1179 | { | |
1180 | ULONGEST type_die; | |
1181 | struct type *type; | |
1182 | ||
1183 | op_ptr = read_uleb128 (op_ptr, op_end, &type_die); | |
1184 | ||
1185 | type = dwarf_get_base_type (ctx, type_die, 0); | |
1186 | ||
1187 | result_val = dwarf_expr_fetch (ctx, 0); | |
1188 | dwarf_expr_pop (ctx); | |
1189 | ||
1190 | if (op == DW_OP_GNU_convert) | |
1191 | result_val = value_cast (type, result_val); | |
1192 | else if (type == value_type (result_val)) | |
1193 | { | |
1194 | /* Nothing. */ | |
1195 | } | |
1196 | else if (TYPE_LENGTH (type) | |
1197 | != TYPE_LENGTH (value_type (result_val))) | |
1198 | error (_("DW_OP_GNU_reinterpret has wrong size")); | |
1199 | else | |
1200 | result_val | |
1201 | = value_from_contents (type, | |
1202 | value_contents_all (result_val)); | |
1203 | } | |
1204 | break; | |
1205 | ||
4c2df51b | 1206 | default: |
8a3fe4f8 | 1207 | error (_("Unhandled dwarf expression opcode 0x%x"), op); |
4c2df51b DJ |
1208 | } |
1209 | ||
1210 | /* Most things push a result value. */ | |
8a9b8146 TT |
1211 | gdb_assert (result_val != NULL); |
1212 | dwarf_expr_push (ctx, result_val, in_stack_memory); | |
82ae4854 | 1213 | no_push: |
b27cf2b3 | 1214 | ; |
4c2df51b | 1215 | } |
1e3a102a | 1216 | |
8cf6f0b1 TT |
1217 | /* To simplify our main caller, if the result is an implicit |
1218 | pointer, then make a pieced value. This is ok because we can't | |
1219 | have implicit pointers in contexts where pieces are invalid. */ | |
1220 | if (ctx->location == DWARF_VALUE_IMPLICIT_POINTER) | |
1221 | add_piece (ctx, 8 * ctx->addr_size, 0); | |
1222 | ||
dd90784c | 1223 | abort_expression: |
1e3a102a JK |
1224 | ctx->recursion_depth--; |
1225 | gdb_assert (ctx->recursion_depth >= 0); | |
8a9b8146 TT |
1226 | } |
1227 | ||
1228 | void | |
1229 | _initialize_dwarf2expr (void) | |
1230 | { | |
1231 | dwarf_arch_cookie | |
1232 | = gdbarch_data_register_post_init (dwarf_gdbarch_types_init); | |
4c2df51b | 1233 | } |