gdb/valops.c
1 /* Perform non-arithmetic operations on values, for GDB.
2
3 Copyright (C) 1986-2014 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21 #include "symtab.h"
22 #include "gdbtypes.h"
23 #include "value.h"
24 #include "frame.h"
25 #include "inferior.h"
26 #include "gdbcore.h"
27 #include "target.h"
28 #include "demangle.h"
29 #include "language.h"
30 #include "gdbcmd.h"
31 #include "regcache.h"
32 #include "cp-abi.h"
33 #include "block.h"
34 #include "infcall.h"
35 #include "dictionary.h"
36 #include "cp-support.h"
37 #include "dfp.h"
38 #include "tracepoint.h"
39 #include <errno.h>
40 #include <string.h>
41 #include "gdb_assert.h"
42 #include "observer.h"
43 #include "objfiles.h"
44 #include "exceptions.h"
45 #include "extension.h"
46
47 extern unsigned int overload_debug;
48 /* Local functions. */
49
50 static int typecmp (int staticp, int varargs, int nargs,
51 struct field t1[], struct value *t2[]);
52
53 static struct value *search_struct_field (const char *, struct value *,
54 int, struct type *, int);
55
56 static struct value *search_struct_method (const char *, struct value **,
57 struct value **,
58 int, int *, struct type *);
59
60 static int find_oload_champ_namespace (struct value **, int,
61 const char *, const char *,
62 struct symbol ***,
63 struct badness_vector **,
64 const int no_adl);
65
66 static
67 int find_oload_champ_namespace_loop (struct value **, int,
68 const char *, const char *,
69 int, struct symbol ***,
70 struct badness_vector **, int *,
71 const int no_adl);
72
73 static int find_oload_champ (struct value **, int, int,
74 struct fn_field *, VEC (xmethod_worker_ptr) *,
75 struct symbol **, struct badness_vector **);
76
77 static int oload_method_static_p (struct fn_field *, int);
78
79 enum oload_classification { STANDARD, NON_STANDARD, INCOMPATIBLE };
80
81 static enum
82 oload_classification classify_oload_match (struct badness_vector *,
83 int, int);
84
85 static struct value *value_struct_elt_for_reference (struct type *,
86 int, struct type *,
87 const char *,
88 struct type *,
89 int, enum noside);
90
91 static struct value *value_namespace_elt (const struct type *,
92 const char *, int , enum noside);
93
94 static struct value *value_maybe_namespace_elt (const struct type *,
95 const char *, int,
96 enum noside);
97
98 static CORE_ADDR allocate_space_in_inferior (int);
99
100 static struct value *cast_into_complex (struct type *, struct value *);
101
102 static void find_method_list (struct value **, const char *,
103 int, struct type *, struct fn_field **, int *,
104 VEC (xmethod_worker_ptr) **,
105 struct type **, int *);
106
107 void _initialize_valops (void);
108
109 #if 0
110 /* Flag for whether we want to abandon failed expression evals by
111 default. */
112
113 static int auto_abandon = 0;
114 #endif
115
116 int overload_resolution = 0;
117 static void
118 show_overload_resolution (struct ui_file *file, int from_tty,
119 struct cmd_list_element *c,
120 const char *value)
121 {
122 fprintf_filtered (file, _("Overload resolution in evaluating "
123 "C++ functions is %s.\n"),
124 value);
125 }
126
127 /* Find the address of function name NAME in the inferior. If OBJF_P
128 is non-NULL, *OBJF_P will be set to the OBJFILE where the function
129 is defined. */
130
131 struct value *
132 find_function_in_inferior (const char *name, struct objfile **objf_p)
133 {
134 struct symbol *sym;
135
136 sym = lookup_symbol (name, 0, VAR_DOMAIN, 0);
137 if (sym != NULL)
138 {
139 if (SYMBOL_CLASS (sym) != LOC_BLOCK)
140 {
141 error (_("\"%s\" exists in this program but is not a function."),
142 name);
143 }
144
145 if (objf_p)
146 *objf_p = SYMBOL_SYMTAB (sym)->objfile;
147
148 return value_of_variable (sym, NULL);
149 }
150 else
151 {
152 struct bound_minimal_symbol msymbol =
153 lookup_bound_minimal_symbol (name);
154
155 if (msymbol.minsym != NULL)
156 {
157 struct objfile *objfile = msymbol.objfile;
158 struct gdbarch *gdbarch = get_objfile_arch (objfile);
159
160 struct type *type;
161 CORE_ADDR maddr;
162 type = lookup_pointer_type (builtin_type (gdbarch)->builtin_char);
163 type = lookup_function_type (type);
164 type = lookup_pointer_type (type);
165 maddr = BMSYMBOL_VALUE_ADDRESS (msymbol);
166
167 if (objf_p)
168 *objf_p = objfile;
169
170 return value_from_pointer (type, maddr);
171 }
172 else
173 {
174 if (!target_has_execution)
175 error (_("evaluation of this expression "
176 "requires the target program to be active"));
177 else
178 error (_("evaluation of this expression requires the "
179 "program to have a function \"%s\"."),
180 name);
181 }
182 }
183 }
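/* Illustrative sketch (hypothetical usage, not compiled): look up a
   function in the inferior and call it with no arguments.  Assumes a
   live inferior that provides "getpid".  */
#if 0
  struct value *fn = find_function_in_inferior ("getpid", NULL);
  struct value *result = call_function_by_hand (fn, 0, NULL);
#endif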
184
185 /* Allocate NBYTES of space in the inferior using the inferior's
186 malloc and return a value that is a pointer to the allocated
187 space. */
188
189 struct value *
190 value_allocate_space_in_inferior (int len)
191 {
192 struct objfile *objf;
193 struct value *val = find_function_in_inferior ("malloc", &objf);
194 struct gdbarch *gdbarch = get_objfile_arch (objf);
195 struct value *blocklen;
196
197 blocklen = value_from_longest (builtin_type (gdbarch)->builtin_int, len);
198 val = call_function_by_hand (val, 1, &blocklen);
199 if (value_logical_not (val))
200 {
201 if (!target_has_execution)
202 error (_("No memory available to program now: "
203 "you need to start the target first"));
204 else
205 error (_("No memory available to program: call to malloc failed"));
206 }
207 return val;
208 }
209
210 static CORE_ADDR
211 allocate_space_in_inferior (int len)
212 {
213 return value_as_long (value_allocate_space_in_inferior (len));
214 }
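/* Illustrative sketch (hypothetical usage, not compiled): reserve 64
   bytes in the inferior via its malloc and obtain the raw address.
   Assumes a live inferior with a callable "malloc".  */
#if 0
  CORE_ADDR buf = allocate_space_in_inferior (64);
#endif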
215
216 /* Cast struct value VAL to type TYPE and return as a value.
217 Both type and val must be of TYPE_CODE_STRUCT or TYPE_CODE_UNION
218 for this to work. Typedef to one of the codes is permitted.
219 Returns NULL if the cast is neither an upcast nor a downcast. */
220
221 static struct value *
222 value_cast_structs (struct type *type, struct value *v2)
223 {
224 struct type *t1;
225 struct type *t2;
226 struct value *v;
227
228 gdb_assert (type != NULL && v2 != NULL);
229
230 t1 = check_typedef (type);
231 t2 = check_typedef (value_type (v2));
232
233 /* Check preconditions. */
234 gdb_assert ((TYPE_CODE (t1) == TYPE_CODE_STRUCT
235 || TYPE_CODE (t1) == TYPE_CODE_UNION)
236 && !!"Precondition is that type is of STRUCT or UNION kind.");
237 gdb_assert ((TYPE_CODE (t2) == TYPE_CODE_STRUCT
238 || TYPE_CODE (t2) == TYPE_CODE_UNION)
239 && !!"Precondition is that value is of STRUCT or UNION kind");
240
241 if (TYPE_NAME (t1) != NULL
242 && TYPE_NAME (t2) != NULL
243 && !strcmp (TYPE_NAME (t1), TYPE_NAME (t2)))
244 return NULL;
245
246 /* Upcasting: look in the type of the source to see if it contains the
247 type of the target as a superclass. If so, we'll need to
248 offset the pointer rather than just change its type. */
249 if (TYPE_NAME (t1) != NULL)
250 {
251 v = search_struct_field (type_name_no_tag (t1),
252 v2, 0, t2, 1);
253 if (v)
254 return v;
255 }
256
257 /* Downcasting: look in the type of the target to see if it contains the
258 type of the source as a superclass. If so, we'll need to
259 offset the pointer rather than just change its type. */
260 if (TYPE_NAME (t2) != NULL)
261 {
262 /* Try downcasting using the run-time type of the value. */
263 int full, top, using_enc;
264 struct type *real_type;
265
266 real_type = value_rtti_type (v2, &full, &top, &using_enc);
267 if (real_type)
268 {
269 v = value_full_object (v2, real_type, full, top, using_enc);
270 v = value_at_lazy (real_type, value_address (v));
271 real_type = value_type (v);
272
273 /* We might be trying to cast to the outermost enclosing
274 type, in which case search_struct_field won't work. */
275 if (TYPE_NAME (real_type) != NULL
276 && !strcmp (TYPE_NAME (real_type), TYPE_NAME (t1)))
277 return v;
278
279 v = search_struct_field (type_name_no_tag (t2), v, 0, real_type, 1);
280 if (v)
281 return v;
282 }
283
284 /* Try downcasting using information from the destination type
285 T2. This wouldn't work properly for classes with virtual
286 bases, but those were handled above. */
287 v = search_struct_field (type_name_no_tag (t2),
288 value_zero (t1, not_lval), 0, t1, 1);
289 if (v)
290 {
291 /* Downcasting is possible (t1 is superclass of v2). */
292 CORE_ADDR addr2 = value_address (v2);
293
294 addr2 -= value_address (v) + value_embedded_offset (v);
295 return value_at (type, addr2);
296 }
297 }
298
299 return NULL;
300 }
301
302 /* Cast one pointer or reference type to another. Both TYPE and
303 the type of ARG2 should be pointer types, or else both should be
304 reference types. If SUBCLASS_CHECK is non-zero, this will force a
305 check to see whether TYPE is a superclass of ARG2's type. If
306 SUBCLASS_CHECK is zero, then the subclass check is done only when
307 ARG2 is itself non-zero. Returns the new pointer or reference. */
308
309 struct value *
310 value_cast_pointers (struct type *type, struct value *arg2,
311 int subclass_check)
312 {
313 struct type *type1 = check_typedef (type);
314 struct type *type2 = check_typedef (value_type (arg2));
315 struct type *t1 = check_typedef (TYPE_TARGET_TYPE (type1));
316 struct type *t2 = check_typedef (TYPE_TARGET_TYPE (type2));
317
318 if (TYPE_CODE (t1) == TYPE_CODE_STRUCT
319 && TYPE_CODE (t2) == TYPE_CODE_STRUCT
320 && (subclass_check || !value_logical_not (arg2)))
321 {
322 struct value *v2;
323
324 if (TYPE_CODE (type2) == TYPE_CODE_REF)
325 v2 = coerce_ref (arg2);
326 else
327 v2 = value_ind (arg2);
328 gdb_assert (TYPE_CODE (check_typedef (value_type (v2)))
329 == TYPE_CODE_STRUCT && !!"Why did coercion fail?");
330 v2 = value_cast_structs (t1, v2);
331 /* At this point we have what we can have; un-dereference if needed. */
332 if (v2)
333 {
334 struct value *v = value_addr (v2);
335
336 deprecated_set_value_type (v, type);
337 return v;
338 }
339 }
340
341 /* No superclass found, just change the pointer type. */
342 arg2 = value_copy (arg2);
343 deprecated_set_value_type (arg2, type);
344 set_value_enclosing_type (arg2, type);
345 set_value_pointed_to_offset (arg2, 0); /* pai: chk_val */
346 return arg2;
347 }
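/* Illustrative sketch (hypothetical usage, not compiled): casting a
   "Derived *" value to a "Base *" type adjusts the pointer by the base
   subobject's offset rather than merely relabelling it.  BASE_PTR_TYPE
   and DERIVED_PTR are hypothetical names.  */
#if 0
  struct value *base_ptr = value_cast_pointers (base_ptr_type, derived_ptr, 0);
#endif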
348
349 /* Cast value ARG2 to type TYPE and return as a value.
350 More general than a C cast: accepts any two types of the same length,
351 and if ARG2 is an lvalue it can be cast into anything at all. */
352 /* In C++, casts may change pointer or object representations. */
353
354 struct value *
355 value_cast (struct type *type, struct value *arg2)
356 {
357 enum type_code code1;
358 enum type_code code2;
359 int scalar;
360 struct type *type2;
361
362 int convert_to_boolean = 0;
363
364 if (value_type (arg2) == type)
365 return arg2;
366
367 code1 = TYPE_CODE (check_typedef (type));
368
369 /* Check if we are casting struct reference to struct reference. */
370 if (code1 == TYPE_CODE_REF)
371 {
372 /* We dereference type; then we recurse and finally
373 we generate value of the given reference. Nothing wrong with
374 that. */
375 struct type *t1 = check_typedef (type);
376 struct type *dereftype = check_typedef (TYPE_TARGET_TYPE (t1));
377 struct value *val = value_cast (dereftype, arg2);
378
379 return value_ref (val);
380 }
381
382 code2 = TYPE_CODE (check_typedef (value_type (arg2)));
383
384 if (code2 == TYPE_CODE_REF)
385 /* We deref the value and then do the cast. */
386 return value_cast (type, coerce_ref (arg2));
387
388 CHECK_TYPEDEF (type);
389 code1 = TYPE_CODE (type);
390 arg2 = coerce_ref (arg2);
391 type2 = check_typedef (value_type (arg2));
392
393 /* You can't cast to a reference type. See value_cast_pointers
394 instead. */
395 gdb_assert (code1 != TYPE_CODE_REF);
396
397 /* A cast to an undetermined-length array_type, such as
398 (TYPE [])OBJECT, is treated like a cast to (TYPE [N])OBJECT,
399 where N is sizeof(OBJECT)/sizeof(TYPE). */
400 if (code1 == TYPE_CODE_ARRAY)
401 {
402 struct type *element_type = TYPE_TARGET_TYPE (type);
403 unsigned element_length = TYPE_LENGTH (check_typedef (element_type));
404
405 if (element_length > 0 && TYPE_ARRAY_UPPER_BOUND_IS_UNDEFINED (type))
406 {
407 struct type *range_type = TYPE_INDEX_TYPE (type);
408 int val_length = TYPE_LENGTH (type2);
409 LONGEST low_bound, high_bound, new_length;
410
411 if (get_discrete_bounds (range_type, &low_bound, &high_bound) < 0)
412 low_bound = 0, high_bound = 0;
413 new_length = val_length / element_length;
414 if (val_length % element_length != 0)
415 warning (_("array element type size does not "
416 "divide object size in cast"));
417 /* FIXME-type-allocation: need a way to free this type when
418 we are done with it. */
419 range_type = create_static_range_type ((struct type *) NULL,
420 TYPE_TARGET_TYPE (range_type),
421 low_bound,
422 new_length + low_bound - 1);
423 deprecated_set_value_type (arg2,
424 create_array_type ((struct type *) NULL,
425 element_type,
426 range_type));
427 return arg2;
428 }
429 }
430
431 if (current_language->c_style_arrays
432 && TYPE_CODE (type2) == TYPE_CODE_ARRAY
433 && !TYPE_VECTOR (type2))
434 arg2 = value_coerce_array (arg2);
435
436 if (TYPE_CODE (type2) == TYPE_CODE_FUNC)
437 arg2 = value_coerce_function (arg2);
438
439 type2 = check_typedef (value_type (arg2));
440 code2 = TYPE_CODE (type2);
441
442 if (code1 == TYPE_CODE_COMPLEX)
443 return cast_into_complex (type, arg2);
444 if (code1 == TYPE_CODE_BOOL)
445 {
446 code1 = TYPE_CODE_INT;
447 convert_to_boolean = 1;
448 }
449 if (code1 == TYPE_CODE_CHAR)
450 code1 = TYPE_CODE_INT;
451 if (code2 == TYPE_CODE_BOOL || code2 == TYPE_CODE_CHAR)
452 code2 = TYPE_CODE_INT;
453
454 scalar = (code2 == TYPE_CODE_INT || code2 == TYPE_CODE_FLT
455 || code2 == TYPE_CODE_DECFLOAT || code2 == TYPE_CODE_ENUM
456 || code2 == TYPE_CODE_RANGE);
457
458 if ((code1 == TYPE_CODE_STRUCT || code1 == TYPE_CODE_UNION)
459 && (code2 == TYPE_CODE_STRUCT || code2 == TYPE_CODE_UNION)
460 && TYPE_NAME (type) != 0)
461 {
462 struct value *v = value_cast_structs (type, arg2);
463
464 if (v)
465 return v;
466 }
467
468 if (code1 == TYPE_CODE_FLT && scalar)
469 return value_from_double (type, value_as_double (arg2));
470 else if (code1 == TYPE_CODE_DECFLOAT && scalar)
471 {
472 enum bfd_endian byte_order = gdbarch_byte_order (get_type_arch (type));
473 int dec_len = TYPE_LENGTH (type);
474 gdb_byte dec[16];
475
476 if (code2 == TYPE_CODE_FLT)
477 decimal_from_floating (arg2, dec, dec_len, byte_order);
478 else if (code2 == TYPE_CODE_DECFLOAT)
479 decimal_convert (value_contents (arg2), TYPE_LENGTH (type2),
480 byte_order, dec, dec_len, byte_order);
481 else
482 /* The only option left is an integral type. */
483 decimal_from_integral (arg2, dec, dec_len, byte_order);
484
485 return value_from_decfloat (type, dec);
486 }
487 else if ((code1 == TYPE_CODE_INT || code1 == TYPE_CODE_ENUM
488 || code1 == TYPE_CODE_RANGE)
489 && (scalar || code2 == TYPE_CODE_PTR
490 || code2 == TYPE_CODE_MEMBERPTR))
491 {
492 LONGEST longest;
493
494 /* When we cast pointers to integers, we mustn't use
495 gdbarch_pointer_to_address to find the address the pointer
496 represents, as value_as_long would. GDB should evaluate
497 expressions just as the compiler would --- and the compiler
498 sees a cast as a simple reinterpretation of the pointer's
499 bits. */
500 if (code2 == TYPE_CODE_PTR)
501 longest = extract_unsigned_integer
502 (value_contents (arg2), TYPE_LENGTH (type2),
503 gdbarch_byte_order (get_type_arch (type2)));
504 else
505 longest = value_as_long (arg2);
506 return value_from_longest (type, convert_to_boolean ?
507 (LONGEST) (longest ? 1 : 0) : longest);
508 }
509 else if (code1 == TYPE_CODE_PTR && (code2 == TYPE_CODE_INT
510 || code2 == TYPE_CODE_ENUM
511 || code2 == TYPE_CODE_RANGE))
512 {
513 /* TYPE_LENGTH (type) is the length of a pointer, but we really
514 want the length of an address! -- we are really dealing with
515 addresses (i.e., gdb representations) not pointers (i.e.,
516 target representations) here.
517
518 This allows things like "print *(int *)0x01000234" to work
519 without printing a misleading message -- which would
520 otherwise occur when dealing with a target having two byte
521 pointers and four byte addresses. */
522
523 int addr_bit = gdbarch_addr_bit (get_type_arch (type2));
524 LONGEST longest = value_as_long (arg2);
525
526 if (addr_bit < sizeof (LONGEST) * HOST_CHAR_BIT)
527 {
528 if (longest >= ((LONGEST) 1 << addr_bit)
529 || longest <= -((LONGEST) 1 << addr_bit))
530 warning (_("value truncated"));
531 }
532 return value_from_longest (type, longest);
533 }
534 else if (code1 == TYPE_CODE_METHODPTR && code2 == TYPE_CODE_INT
535 && value_as_long (arg2) == 0)
536 {
537 struct value *result = allocate_value (type);
538
539 cplus_make_method_ptr (type, value_contents_writeable (result), 0, 0);
540 return result;
541 }
542 else if (code1 == TYPE_CODE_MEMBERPTR && code2 == TYPE_CODE_INT
543 && value_as_long (arg2) == 0)
544 {
545 /* The Itanium C++ ABI represents NULL pointers to members as
546 minus one, instead of biasing the normal case. */
547 return value_from_longest (type, -1);
548 }
549 else if (code1 == TYPE_CODE_ARRAY && TYPE_VECTOR (type)
550 && code2 == TYPE_CODE_ARRAY && TYPE_VECTOR (type2)
551 && TYPE_LENGTH (type) != TYPE_LENGTH (type2))
552 error (_("Cannot convert between vector values of different sizes"));
553 else if (code1 == TYPE_CODE_ARRAY && TYPE_VECTOR (type) && scalar
554 && TYPE_LENGTH (type) != TYPE_LENGTH (type2))
555 error (_("can only cast scalar to vector of same size"));
556 else if (code1 == TYPE_CODE_VOID)
557 {
558 return value_zero (type, not_lval);
559 }
560 else if (TYPE_LENGTH (type) == TYPE_LENGTH (type2))
561 {
562 if (code1 == TYPE_CODE_PTR && code2 == TYPE_CODE_PTR)
563 return value_cast_pointers (type, arg2, 0);
564
565 arg2 = value_copy (arg2);
566 deprecated_set_value_type (arg2, type);
567 set_value_enclosing_type (arg2, type);
568 set_value_pointed_to_offset (arg2, 0); /* pai: chk_val */
569 return arg2;
570 }
571 else if (VALUE_LVAL (arg2) == lval_memory)
572 return value_at_lazy (type, value_address (arg2));
573 else
574 {
575 error (_("Invalid cast."));
576 return 0;
577 }
578 }
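/* Illustrative sketch (hypothetical usage, not compiled): cast a
   host-constructed integer value to the target's char type.  GDBARCH is
   assumed to be a valid architecture.  */
#if 0
  struct value *v = value_from_longest (builtin_type (gdbarch)->builtin_int, 42);
  struct value *c = value_cast (builtin_type (gdbarch)->builtin_char, v);
#endif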
579
580 /* The C++ reinterpret_cast operator. */
581
582 struct value *
583 value_reinterpret_cast (struct type *type, struct value *arg)
584 {
585 struct value *result;
586 struct type *real_type = check_typedef (type);
587 struct type *arg_type, *dest_type;
588 int is_ref = 0;
589 enum type_code dest_code, arg_code;
590
591 /* Do reference, function, and array conversion. */
592 arg = coerce_array (arg);
593
594 /* Attempt to preserve the type the user asked for. */
595 dest_type = type;
596
597 /* If we are casting to a reference type, transform
598 reinterpret_cast<T&>(V) to *reinterpret_cast<T*>(&V). */
599 if (TYPE_CODE (real_type) == TYPE_CODE_REF)
600 {
601 is_ref = 1;
602 arg = value_addr (arg);
603 dest_type = lookup_pointer_type (TYPE_TARGET_TYPE (dest_type));
604 real_type = lookup_pointer_type (real_type);
605 }
606
607 arg_type = value_type (arg);
608
609 dest_code = TYPE_CODE (real_type);
610 arg_code = TYPE_CODE (arg_type);
611
612 /* We can convert pointer types, or any pointer type to int, or int
613 type to pointer. */
614 if ((dest_code == TYPE_CODE_PTR && arg_code == TYPE_CODE_INT)
615 || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_PTR)
616 || (dest_code == TYPE_CODE_METHODPTR && arg_code == TYPE_CODE_INT)
617 || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_METHODPTR)
618 || (dest_code == TYPE_CODE_MEMBERPTR && arg_code == TYPE_CODE_INT)
619 || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_MEMBERPTR)
620 || (dest_code == arg_code
621 && (dest_code == TYPE_CODE_PTR
622 || dest_code == TYPE_CODE_METHODPTR
623 || dest_code == TYPE_CODE_MEMBERPTR)))
624 result = value_cast (dest_type, arg);
625 else
626 error (_("Invalid reinterpret_cast"));
627
628 if (is_ref)
629 result = value_cast (type, value_ref (value_ind (result)));
630
631 return result;
632 }
633
634 /* A helper for value_dynamic_cast. This implements the first of two
635 runtime checks: we iterate over all the base classes of the value's
636 class which are equal to the desired class; if only one of these
637 holds the value, then it is the answer. */
638
639 static int
640 dynamic_cast_check_1 (struct type *desired_type,
641 const gdb_byte *valaddr,
642 int embedded_offset,
643 CORE_ADDR address,
644 struct value *val,
645 struct type *search_type,
646 CORE_ADDR arg_addr,
647 struct type *arg_type,
648 struct value **result)
649 {
650 int i, result_count = 0;
651
652 for (i = 0; i < TYPE_N_BASECLASSES (search_type) && result_count < 2; ++i)
653 {
654 int offset = baseclass_offset (search_type, i, valaddr, embedded_offset,
655 address, val);
656
657 if (class_types_same_p (desired_type, TYPE_BASECLASS (search_type, i)))
658 {
659 if (address + embedded_offset + offset >= arg_addr
660 && address + embedded_offset + offset < arg_addr + TYPE_LENGTH (arg_type))
661 {
662 ++result_count;
663 if (!*result)
664 *result = value_at_lazy (TYPE_BASECLASS (search_type, i),
665 address + embedded_offset + offset);
666 }
667 }
668 else
669 result_count += dynamic_cast_check_1 (desired_type,
670 valaddr,
671 embedded_offset + offset,
672 address, val,
673 TYPE_BASECLASS (search_type, i),
674 arg_addr,
675 arg_type,
676 result);
677 }
678
679 return result_count;
680 }
681
682 /* A helper for value_dynamic_cast. This implements the second of two
683 runtime checks: we look for a unique public sibling class of the
684 argument's declared class. */
685
686 static int
687 dynamic_cast_check_2 (struct type *desired_type,
688 const gdb_byte *valaddr,
689 int embedded_offset,
690 CORE_ADDR address,
691 struct value *val,
692 struct type *search_type,
693 struct value **result)
694 {
695 int i, result_count = 0;
696
697 for (i = 0; i < TYPE_N_BASECLASSES (search_type) && result_count < 2; ++i)
698 {
699 int offset;
700
701 if (! BASETYPE_VIA_PUBLIC (search_type, i))
702 continue;
703
704 offset = baseclass_offset (search_type, i, valaddr, embedded_offset,
705 address, val);
706 if (class_types_same_p (desired_type, TYPE_BASECLASS (search_type, i)))
707 {
708 ++result_count;
709 if (*result == NULL)
710 *result = value_at_lazy (TYPE_BASECLASS (search_type, i),
711 address + embedded_offset + offset);
712 }
713 else
714 result_count += dynamic_cast_check_2 (desired_type,
715 valaddr,
716 embedded_offset + offset,
717 address, val,
718 TYPE_BASECLASS (search_type, i),
719 result);
720 }
721
722 return result_count;
723 }
724
725 /* The C++ dynamic_cast operator. */
726
727 struct value *
728 value_dynamic_cast (struct type *type, struct value *arg)
729 {
730 int full, top, using_enc;
731 struct type *resolved_type = check_typedef (type);
732 struct type *arg_type = check_typedef (value_type (arg));
733 struct type *class_type, *rtti_type;
734 struct value *result, *tem, *original_arg = arg;
735 CORE_ADDR addr;
736 int is_ref = TYPE_CODE (resolved_type) == TYPE_CODE_REF;
737
738 if (TYPE_CODE (resolved_type) != TYPE_CODE_PTR
739 && TYPE_CODE (resolved_type) != TYPE_CODE_REF)
740 error (_("Argument to dynamic_cast must be a pointer or reference type"));
741 if (TYPE_CODE (TYPE_TARGET_TYPE (resolved_type)) != TYPE_CODE_VOID
742 && TYPE_CODE (TYPE_TARGET_TYPE (resolved_type)) != TYPE_CODE_CLASS)
743 error (_("Argument to dynamic_cast must be pointer to class or `void *'"));
744
745 class_type = check_typedef (TYPE_TARGET_TYPE (resolved_type));
746 if (TYPE_CODE (resolved_type) == TYPE_CODE_PTR)
747 {
748 if (TYPE_CODE (arg_type) != TYPE_CODE_PTR
749 && ! (TYPE_CODE (arg_type) == TYPE_CODE_INT
750 && value_as_long (arg) == 0))
751 error (_("Argument to dynamic_cast does not have pointer type"));
752 if (TYPE_CODE (arg_type) == TYPE_CODE_PTR)
753 {
754 arg_type = check_typedef (TYPE_TARGET_TYPE (arg_type));
755 if (TYPE_CODE (arg_type) != TYPE_CODE_CLASS)
756 error (_("Argument to dynamic_cast does "
757 "not have pointer to class type"));
758 }
759
760 /* Handle NULL pointers. */
761 if (value_as_long (arg) == 0)
762 return value_zero (type, not_lval);
763
764 arg = value_ind (arg);
765 }
766 else
767 {
768 if (TYPE_CODE (arg_type) != TYPE_CODE_CLASS)
769 error (_("Argument to dynamic_cast does not have class type"));
770 }
771
772 /* If the classes are the same, just return the argument. */
773 if (class_types_same_p (class_type, arg_type))
774 return value_cast (type, arg);
775
776 /* If the target type is a unique base class of the argument's
777 declared type, just cast it. */
778 if (is_ancestor (class_type, arg_type))
779 {
780 if (is_unique_ancestor (class_type, arg))
781 return value_cast (type, original_arg);
782 error (_("Ambiguous dynamic_cast"));
783 }
784
785 rtti_type = value_rtti_type (arg, &full, &top, &using_enc);
786 if (! rtti_type)
787 error (_("Couldn't determine value's most derived type for dynamic_cast"));
788
789 /* Compute the most derived object's address. */
790 addr = value_address (arg);
791 if (full)
792 {
793 /* Done. */
794 }
795 else if (using_enc)
796 addr += top;
797 else
798 addr += top + value_embedded_offset (arg);
799
800 /* dynamic_cast<void *> means to return a pointer to the
801 most-derived object. */
802 if (TYPE_CODE (resolved_type) == TYPE_CODE_PTR
803 && TYPE_CODE (TYPE_TARGET_TYPE (resolved_type)) == TYPE_CODE_VOID)
804 return value_at_lazy (type, addr);
805
806 tem = value_at (type, addr);
807 type = value_type (tem);
808
809 /* The first dynamic check specified in 5.2.7. */
810 if (is_public_ancestor (arg_type, TYPE_TARGET_TYPE (resolved_type)))
811 {
812 if (class_types_same_p (rtti_type, TYPE_TARGET_TYPE (resolved_type)))
813 return tem;
814 result = NULL;
815 if (dynamic_cast_check_1 (TYPE_TARGET_TYPE (resolved_type),
816 value_contents_for_printing (tem),
817 value_embedded_offset (tem),
818 value_address (tem), tem,
819 rtti_type, addr,
820 arg_type,
821 &result) == 1)
822 return value_cast (type,
823 is_ref ? value_ref (result) : value_addr (result));
824 }
825
826 /* The second dynamic check specified in 5.2.7. */
827 result = NULL;
828 if (is_public_ancestor (arg_type, rtti_type)
829 && dynamic_cast_check_2 (TYPE_TARGET_TYPE (resolved_type),
830 value_contents_for_printing (tem),
831 value_embedded_offset (tem),
832 value_address (tem), tem,
833 rtti_type, &result) == 1)
834 return value_cast (type,
835 is_ref ? value_ref (result) : value_addr (result));
836
837 if (TYPE_CODE (resolved_type) == TYPE_CODE_PTR)
838 return value_zero (type, not_lval);
839
840 error (_("dynamic_cast failed"));
841 }
842
843 /* Create a value of type TYPE that is zero, and return it. */
844
845 struct value *
846 value_zero (struct type *type, enum lval_type lv)
847 {
848 struct value *val = allocate_value (type);
849
850 VALUE_LVAL (val) = (lv == lval_computed ? not_lval : lv);
851 return val;
852 }
853
854 /* Create a not_lval value of numeric type TYPE that is one, and return it. */
855
856 struct value *
857 value_one (struct type *type)
858 {
859 struct type *type1 = check_typedef (type);
860 struct value *val;
861
862 if (TYPE_CODE (type1) == TYPE_CODE_DECFLOAT)
863 {
864 enum bfd_endian byte_order = gdbarch_byte_order (get_type_arch (type));
865 gdb_byte v[16];
866
867 decimal_from_string (v, TYPE_LENGTH (type), byte_order, "1");
868 val = value_from_decfloat (type, v);
869 }
870 else if (TYPE_CODE (type1) == TYPE_CODE_FLT)
871 {
872 val = value_from_double (type, (DOUBLEST) 1);
873 }
874 else if (is_integral_type (type1))
875 {
876 val = value_from_longest (type, (LONGEST) 1);
877 }
878 else if (TYPE_CODE (type1) == TYPE_CODE_ARRAY && TYPE_VECTOR (type1))
879 {
880 struct type *eltype = check_typedef (TYPE_TARGET_TYPE (type1));
881 int i;
882 LONGEST low_bound, high_bound;
883 struct value *tmp;
884
885 if (!get_array_bounds (type1, &low_bound, &high_bound))
886 error (_("Could not determine the vector bounds"));
887
888 val = allocate_value (type);
889 for (i = 0; i < high_bound - low_bound + 1; i++)
890 {
891 tmp = value_one (eltype);
892 memcpy (value_contents_writeable (val) + i * TYPE_LENGTH (eltype),
893 value_contents_all (tmp), TYPE_LENGTH (eltype));
894 }
895 }
896 else
897 {
898 error (_("Not a numeric type."));
899 }
900
901 /* The value_one result is never used as the target of an assignment. */
902 gdb_assert (VALUE_LVAL (val) == not_lval);
903
904 return val;
905 }
906
907 /* Helper function for value_at, value_at_lazy, and value_at_lazy_stack.
908 The type of the created value may differ from the passed type TYPE.
909 Make sure to retrieve the returned value's new type after this call,
910 e.g. in case the type is a variable length array. */
911
912 static struct value *
913 get_value_at (struct type *type, CORE_ADDR addr, int lazy)
914 {
915 struct value *val;
916
917 if (TYPE_CODE (check_typedef (type)) == TYPE_CODE_VOID)
918 error (_("Attempt to dereference a generic pointer."));
919
920 val = value_from_contents_and_address (type, NULL, addr);
921
922 if (!lazy)
923 value_fetch_lazy (val);
924
925 return val;
926 }
927
928 /* Return a value with type TYPE located at ADDR.
929
930 Call value_at only if the data needs to be fetched immediately;
931 if we can be 'lazy' and defer the fetch, perhaps indefinitely, call
932 value_at_lazy instead. value_at_lazy simply records the address of
933 the data and sets the lazy-evaluation-required flag. The lazy flag
934 is tested in the value_contents macro, which is used if and when
935 the contents are actually required. The type of the created value
936 may differ from the passed type TYPE. Make sure to retrieve the
937 returned value's new type after this call, e.g. in case the type
938 is a variable length array.
939
940 Note: value_at does *NOT* handle embedded offsets; perform such
941 adjustments before or after calling it. */
942
943 struct value *
944 value_at (struct type *type, CORE_ADDR addr)
945 {
946 return get_value_at (type, addr, 0);
947 }
948
949 /* Return a lazy value with type TYPE located at ADDR (cf. value_at).
950 The type of the created value may differ from the passed type TYPE.
951 Make sure to retrieve the returned value's new type after this call,
952 e.g. in case the type is a variable length array. */
953
954 struct value *
955 value_at_lazy (struct type *type, CORE_ADDR addr)
956 {
957 return get_value_at (type, addr, 1);
958 }
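/* Illustrative sketch (hypothetical usage, not compiled): create a lazy
   value for an int at a hypothetical target address ADDR; memory is
   only read when the contents are first needed.  */
#if 0
  struct value *v = value_at_lazy (builtin_type (gdbarch)->builtin_int, addr);
  LONGEST x = value_as_long (v);	/* Forces the deferred fetch.  */
#endif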
959
960 void
961 read_value_memory (struct value *val, int embedded_offset,
962 int stack, CORE_ADDR memaddr,
963 gdb_byte *buffer, size_t length)
964 {
965 ULONGEST xfered = 0;
966
967 while (xfered < length)
968 {
969 enum target_xfer_status status;
970 ULONGEST xfered_len;
971
972 status = target_xfer_partial (current_target.beneath,
973 TARGET_OBJECT_MEMORY, NULL,
974 buffer + xfered, NULL,
975 memaddr + xfered, length - xfered,
976 &xfered_len);
977
978 if (status == TARGET_XFER_OK)
979 /* nothing */;
980 else if (status == TARGET_XFER_UNAVAILABLE)
981 mark_value_bytes_unavailable (val, embedded_offset + xfered,
982 xfered_len);
983 else if (status == TARGET_XFER_EOF)
984 memory_error (TARGET_XFER_E_IO, memaddr + xfered);
985 else
986 memory_error (status, memaddr + xfered);
987
988 xfered += xfered_len;
989 QUIT;
990 }
991 }
992
993 /* Store the contents of FROMVAL into the location of TOVAL.
994 Return a new value with the location of TOVAL and contents of FROMVAL. */
995
996 struct value *
997 value_assign (struct value *toval, struct value *fromval)
998 {
999 struct type *type;
1000 struct value *val;
1001 struct frame_id old_frame;
1002
1003 if (!deprecated_value_modifiable (toval))
1004 error (_("Left operand of assignment is not a modifiable lvalue."));
1005
1006 toval = coerce_ref (toval);
1007
1008 type = value_type (toval);
1009 if (VALUE_LVAL (toval) != lval_internalvar)
1010 fromval = value_cast (type, fromval);
1011 else
1012 {
1013 /* Coerce arrays and functions to pointers, except for arrays
1014 which only live in GDB's storage. */
1015 if (!value_must_coerce_to_target (fromval))
1016 fromval = coerce_array (fromval);
1017 }
1018
1019 CHECK_TYPEDEF (type);
1020
1021 /* Since modifying a register can trash the frame chain, and
1022 modifying memory can trash the frame cache, we save the old frame
1023 and then restore the new frame afterwards. */
1024 old_frame = get_frame_id (deprecated_safe_get_selected_frame ());
1025
1026 switch (VALUE_LVAL (toval))
1027 {
1028 case lval_internalvar:
1029 set_internalvar (VALUE_INTERNALVAR (toval), fromval);
1030 return value_of_internalvar (get_type_arch (type),
1031 VALUE_INTERNALVAR (toval));
1032
1033 case lval_internalvar_component:
1034 {
1035 int offset = value_offset (toval);
1036
1037 /* Are we dealing with a bitfield?
1038
1039 It is important to mention that `value_parent (toval)' is
1040 non-NULL iff `value_bitsize (toval)' is non-zero. */
1041 if (value_bitsize (toval))
1042 {
1043 /* VALUE_INTERNALVAR below refers to the parent value, while
1044 the offset is relative to this parent value. */
1045 gdb_assert (value_parent (value_parent (toval)) == NULL);
1046 offset += value_offset (value_parent (toval));
1047 }
1048
1049 set_internalvar_component (VALUE_INTERNALVAR (toval),
1050 offset,
1051 value_bitpos (toval),
1052 value_bitsize (toval),
1053 fromval);
1054 }
1055 break;
1056
1057 case lval_memory:
1058 {
1059 const gdb_byte *dest_buffer;
1060 CORE_ADDR changed_addr;
1061 int changed_len;
1062 gdb_byte buffer[sizeof (LONGEST)];
1063
1064 if (value_bitsize (toval))
1065 {
1066 struct value *parent = value_parent (toval);
1067
1068 changed_addr = value_address (parent) + value_offset (toval);
1069 changed_len = (value_bitpos (toval)
1070 + value_bitsize (toval)
1071 + HOST_CHAR_BIT - 1)
1072 / HOST_CHAR_BIT;
1073
1074 /* If we can read-modify-write exactly the size of the
1075 containing type (e.g. short or int) then do so. This
1076 is safer for volatile bitfields mapped to hardware
1077 registers. */
1078 if (changed_len < TYPE_LENGTH (type)
1079 && TYPE_LENGTH (type) <= (int) sizeof (LONGEST)
1080 && ((LONGEST) changed_addr % TYPE_LENGTH (type)) == 0)
1081 changed_len = TYPE_LENGTH (type);
1082
1083 if (changed_len > (int) sizeof (LONGEST))
1084 error (_("Can't handle bitfields which "
1085 "don't fit in a %d bit word."),
1086 (int) sizeof (LONGEST) * HOST_CHAR_BIT);
1087
1088 read_memory (changed_addr, buffer, changed_len);
1089 modify_field (type, buffer, value_as_long (fromval),
1090 value_bitpos (toval), value_bitsize (toval));
1091 dest_buffer = buffer;
1092 }
1093 else
1094 {
1095 changed_addr = value_address (toval);
1096 changed_len = TYPE_LENGTH (type);
1097 dest_buffer = value_contents (fromval);
1098 }
1099
1100 write_memory_with_notification (changed_addr, dest_buffer, changed_len);
1101 }
1102 break;
1103
1104 case lval_register:
1105 {
1106 struct frame_info *frame;
1107 struct gdbarch *gdbarch;
1108 int value_reg;
1109
1110 /* Figure out which frame this is in currently. */
1111 frame = frame_find_by_id (VALUE_FRAME_ID (toval));
1112 value_reg = VALUE_REGNUM (toval);
1113
1114 if (!frame)
1115 error (_("Value being assigned to is no longer active."));
1116
1117 gdbarch = get_frame_arch (frame);
1118 if (gdbarch_convert_register_p (gdbarch, VALUE_REGNUM (toval), type))
1119 {
1120 /* If TOVAL is a special machine register requiring
1121 conversion of program values to a special raw
1122 format. */
1123 gdbarch_value_to_register (gdbarch, frame,
1124 VALUE_REGNUM (toval), type,
1125 value_contents (fromval));
1126 }
1127 else
1128 {
1129 if (value_bitsize (toval))
1130 {
1131 struct value *parent = value_parent (toval);
1132 int offset = value_offset (parent) + value_offset (toval);
1133 int changed_len;
1134 gdb_byte buffer[sizeof (LONGEST)];
1135 int optim, unavail;
1136
1137 changed_len = (value_bitpos (toval)
1138 + value_bitsize (toval)
1139 + HOST_CHAR_BIT - 1)
1140 / HOST_CHAR_BIT;
1141
1142 if (changed_len > (int) sizeof (LONGEST))
1143 error (_("Can't handle bitfields which "
1144 "don't fit in a %d bit word."),
1145 (int) sizeof (LONGEST) * HOST_CHAR_BIT);
1146
1147 if (!get_frame_register_bytes (frame, value_reg, offset,
1148 changed_len, buffer,
1149 &optim, &unavail))
1150 {
1151 if (optim)
1152 throw_error (OPTIMIZED_OUT_ERROR,
1153 _("value has been optimized out"));
1154 if (unavail)
1155 throw_error (NOT_AVAILABLE_ERROR,
1156 _("value is not available"));
1157 }
1158
1159 modify_field (type, buffer, value_as_long (fromval),
1160 value_bitpos (toval), value_bitsize (toval));
1161
1162 put_frame_register_bytes (frame, value_reg, offset,
1163 changed_len, buffer);
1164 }
1165 else
1166 {
1167 put_frame_register_bytes (frame, value_reg,
1168 value_offset (toval),
1169 TYPE_LENGTH (type),
1170 value_contents (fromval));
1171 }
1172 }
1173
1174 if (deprecated_register_changed_hook)
1175 deprecated_register_changed_hook (-1);
1176 break;
1177 }
1178
1179 case lval_computed:
1180 {
1181 const struct lval_funcs *funcs = value_computed_funcs (toval);
1182
1183 if (funcs->write != NULL)
1184 {
1185 funcs->write (toval, fromval);
1186 break;
1187 }
1188 }
1189 /* Fall through. */
1190
1191 default:
1192 error (_("Left operand of assignment is not an lvalue."));
1193 }
1194
1195 /* Assigning to the stack pointer, frame pointer, and other
1196 (architecture and calling convention specific) registers may
1197 cause the frame cache and regcache to be out of date. Assigning to memory
1198 also can. We just do this on all assignments to registers or
1199 memory, for simplicity's sake; I doubt the slowdown matters. */
1200 switch (VALUE_LVAL (toval))
1201 {
1202 case lval_memory:
1203 case lval_register:
1204 case lval_computed:
1205
1206 observer_notify_target_changed (&current_target);
1207
1208 /* Having destroyed the frame cache, restore the selected
1209 frame. */
1210
1211 /* FIXME: cagney/2002-11-02: There has to be a better way of
1212 doing this than constantly saving/restoring the frame. Why
1213 not create a get_selected_frame() function that, having saved
1214 the selected frame's ID, can automatically re-find the
1215 previously selected frame? */
1216
1217 {
1218 struct frame_info *fi = frame_find_by_id (old_frame);
1219
1220 if (fi != NULL)
1221 select_frame (fi);
1222 }
1223
1224 break;
1225 default:
1226 break;
1227 }
1228
1229 /* If the field does not entirely fill a LONGEST, then zero the sign
1230 bits. If the field is signed, and is negative, then sign
1231 extend. */
1232 if ((value_bitsize (toval) > 0)
1233 && (value_bitsize (toval) < 8 * (int) sizeof (LONGEST)))
1234 {
1235 LONGEST fieldval = value_as_long (fromval);
1236 LONGEST valmask = (((ULONGEST) 1) << value_bitsize (toval)) - 1;
1237
1238 fieldval &= valmask;
1239 if (!TYPE_UNSIGNED (type)
1240 && (fieldval & (valmask ^ (valmask >> 1))))
1241 fieldval |= ~valmask;
1242
1243 fromval = value_from_longest (type, fieldval);
1244 }
1245
1246 /* The return value is a copy of TOVAL so it shares its location
1247 information, but its contents are updated from FROMVAL. This
1248 implies the returned value is not lazy, even if TOVAL was. */
1249 val = value_copy (toval);
1250 set_value_lazy (val, 0);
1251 memcpy (value_contents_raw (val), value_contents (fromval),
1252 TYPE_LENGTH (type));
1253
1254 /* We copy over the enclosing type and pointed-to offset from FROMVAL
1255 in the case of pointer types. For object types, the enclosing type
1256 and embedded offset must *not* be copied: the target object referred
1257 to by TOVAL retains its original dynamic type after assignment. */
1258 if (TYPE_CODE (type) == TYPE_CODE_PTR)
1259 {
1260 set_value_enclosing_type (val, value_enclosing_type (fromval));
1261 set_value_pointed_to_offset (val, value_pointed_to_offset (fromval));
1262 }
1263
1264 return val;
1265 }
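/* Illustrative sketch (hypothetical usage, not compiled): overwrite a
   32-bit integer in target memory.  ADDR is a hypothetical target
   address; GDBARCH is assumed to be a valid architecture.  */
#if 0
  struct type *int32 = builtin_type (gdbarch)->builtin_int32;
  struct value *lhs = value_at_lazy (int32, addr);
  struct value *rhs = value_from_longest (int32, 7);
  struct value *updated = value_assign (lhs, rhs);
#endif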
1266
1267 /* Extend a value VAL to COUNT repetitions of its type. */
1268
1269 struct value *
1270 value_repeat (struct value *arg1, int count)
1271 {
1272 struct value *val;
1273
1274 if (VALUE_LVAL (arg1) != lval_memory)
1275 error (_("Only values in memory can be extended with '@'."));
1276 if (count < 1)
1277 error (_("Invalid number %d of repetitions."), count);
1278
1279 val = allocate_repeat_value (value_enclosing_type (arg1), count);
1280
1281 VALUE_LVAL (val) = lval_memory;
1282 set_value_address (val, value_address (arg1));
1283
1284 read_value_memory (val, 0, value_stack (val), value_address (val),
1285 value_contents_all_raw (val),
1286 TYPE_LENGTH (value_enclosing_type (val)));
1287
1288 return val;
1289 }
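/* Illustrative sketch (hypothetical usage, not compiled): the
   equivalent of the CLI artificial-array operator "print *ptr@10",
   where PTR_VAL is a hypothetical pointer value whose target lives in
   inferior memory.  */
#if 0
  struct value *elt = value_ind (ptr_val);
  struct value *ten = value_repeat (elt, 10);
#endif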
1290
1291 struct value *
1292 value_of_variable (struct symbol *var, const struct block *b)
1293 {
1294 struct frame_info *frame;
1295
1296 if (!symbol_read_needs_frame (var))
1297 frame = NULL;
1298 else if (!b)
1299 frame = get_selected_frame (_("No frame selected."));
1300 else
1301 {
1302 frame = block_innermost_frame (b);
1303 if (!frame)
1304 {
1305 if (BLOCK_FUNCTION (b) && !block_inlined_p (b)
1306 && SYMBOL_PRINT_NAME (BLOCK_FUNCTION (b)))
1307 error (_("No frame is currently executing in block %s."),
1308 SYMBOL_PRINT_NAME (BLOCK_FUNCTION (b)));
1309 else
1310 error (_("No frame is currently executing in specified block"));
1311 }
1312 }
1313
1314 return read_var_value (var, frame);
1315 }
1316
1317 struct value *
1318 address_of_variable (struct symbol *var, const struct block *b)
1319 {
1320 struct type *type = SYMBOL_TYPE (var);
1321 struct value *val;
1322
1323 /* Evaluate it first; if the result is a memory address, we're fine.
1324 Lazy evaluation pays off here. */
1325
1326 val = value_of_variable (var, b);
1327 type = value_type (val);
1328
1329 if ((VALUE_LVAL (val) == lval_memory && value_lazy (val))
1330 || TYPE_CODE (type) == TYPE_CODE_FUNC)
1331 {
1332 CORE_ADDR addr = value_address (val);
1333
1334 return value_from_pointer (lookup_pointer_type (type), addr);
1335 }
1336
1337 /* Not a memory address; check what the problem was. */
1338 switch (VALUE_LVAL (val))
1339 {
1340 case lval_register:
1341 {
1342 struct frame_info *frame;
1343 const char *regname;
1344
1345 frame = frame_find_by_id (VALUE_FRAME_ID (val));
1346 gdb_assert (frame);
1347
1348 regname = gdbarch_register_name (get_frame_arch (frame),
1349 VALUE_REGNUM (val));
1350 gdb_assert (regname && *regname);
1351
1352 error (_("Address requested for identifier "
1353 "\"%s\" which is in register $%s"),
1354 SYMBOL_PRINT_NAME (var), regname);
1355 break;
1356 }
1357
1358 default:
1359 error (_("Can't take address of \"%s\" which isn't an lvalue."),
1360 SYMBOL_PRINT_NAME (var));
1361 break;
1362 }
1363
1364 return val;
1365 }
1366
1367 /* Return one if VAL does not live in target memory, but should in order
1368 to operate on it. Otherwise return zero. */
1369
1370 int
1371 value_must_coerce_to_target (struct value *val)
1372 {
1373 struct type *valtype;
1374
1375 /* The only lval kinds which do not live in target memory. */
1376 if (VALUE_LVAL (val) != not_lval
1377 && VALUE_LVAL (val) != lval_internalvar
1378 && VALUE_LVAL (val) != lval_xcallable)
1379 return 0;
1380
1381 valtype = check_typedef (value_type (val));
1382
1383 switch (TYPE_CODE (valtype))
1384 {
1385 case TYPE_CODE_ARRAY:
1386 return TYPE_VECTOR (valtype) ? 0 : 1;
1387 case TYPE_CODE_STRING:
1388 return 1;
1389 default:
1390 return 0;
1391 }
1392 }
1393
1394 /* Make sure that VAL lives in target memory if it's supposed to. For
1395 instance, strings are constructed as character arrays in GDB's
1396 storage, and this function copies them to the target. */
1397
1398 struct value *
1399 value_coerce_to_target (struct value *val)
1400 {
1401 LONGEST length;
1402 CORE_ADDR addr;
1403
1404 if (!value_must_coerce_to_target (val))
1405 return val;
1406
1407 length = TYPE_LENGTH (check_typedef (value_type (val)));
1408 addr = allocate_space_in_inferior (length);
1409 write_memory (addr, value_contents (val), length);
1410 return value_at_lazy (value_type (val), addr);
1411 }
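/* Illustrative sketch (hypothetical usage, not compiled): push a
   GDB-side string value into inferior memory so that its address can be
   taken.  STR_VAL is a hypothetical value living in GDB's storage.  */
#if 0
  struct value *in_target = value_coerce_to_target (str_val);
  CORE_ADDR addr = value_address (in_target);
#endif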
1412
1413 /* Given a value which is an array, return a value which is a pointer
1414 to its first element, regardless of whether or not the array has a
1415 nonzero lower bound.
1416
1417 FIXME: A previous comment here indicated that this routine should
1418 be subtracting the array's lower bound. It's not clear to me that
1419 this is correct. Given an array subscripting operation, it would
1420 certainly work to do the adjustment here, essentially computing:
1421
1422 (&array[0] - (lowerbound * sizeof array[0])) + (index * sizeof array[0])
1423
1424 However I believe a more appropriate and logical place to account
1425 for the lower bound is to do so in value_subscript, essentially
1426 computing:
1427
1428 (&array[0] + ((index - lowerbound) * sizeof array[0]))
1429
1430 As further evidence consider what would happen with operations
1431 other than array subscripting, where the caller would get back a
1432 value that had an address somewhere before the actual first element
1433 of the array, and the information about the lower bound would be
1434 lost because of the coercion to pointer type. */
1435
1436 struct value *
1437 value_coerce_array (struct value *arg1)
1438 {
1439 struct type *type = check_typedef (value_type (arg1));
1440
1441 /* If the user tries to do something requiring a pointer with an
1442 array that has not yet been pushed to the target, then this would
1443 be a good time to do so. */
1444 arg1 = value_coerce_to_target (arg1);
1445
1446 if (VALUE_LVAL (arg1) != lval_memory)
1447 error (_("Attempt to take address of value not located in memory."));
1448
1449 return value_from_pointer (lookup_pointer_type (TYPE_TARGET_TYPE (type)),
1450 value_address (arg1));
1451 }
1452
1453 /* Given a value which is a function, return a value which is a pointer
1454 to it. */
1455
1456 struct value *
1457 value_coerce_function (struct value *arg1)
1458 {
1459 struct value *retval;
1460
1461 if (VALUE_LVAL (arg1) != lval_memory)
1462 error (_("Attempt to take address of value not located in memory."));
1463
1464 retval = value_from_pointer (lookup_pointer_type (value_type (arg1)),
1465 value_address (arg1));
1466 return retval;
1467 }
1468
1469 /* Return a pointer value for the object for which ARG1 is the
1470 contents. */
1471
1472 struct value *
1473 value_addr (struct value *arg1)
1474 {
1475 struct value *arg2;
1476 struct type *type = check_typedef (value_type (arg1));
1477
1478 if (TYPE_CODE (type) == TYPE_CODE_REF)
1479 {
1480 /* Copy the value, but change the type from (T&) to (T*). We
1481 keep the same location information, which is efficient, and
1482 allows &(&X) to get the location containing the reference. */
1483 arg2 = value_copy (arg1);
1484 deprecated_set_value_type (arg2,
1485 lookup_pointer_type (TYPE_TARGET_TYPE (type)));
1486 return arg2;
1487 }
1488 if (TYPE_CODE (type) == TYPE_CODE_FUNC)
1489 return value_coerce_function (arg1);
1490
1491 /* If this is an array that has not yet been pushed to the target,
1492 then this would be a good time to force it to memory. */
1493 arg1 = value_coerce_to_target (arg1);
1494
1495 if (VALUE_LVAL (arg1) != lval_memory)
1496 error (_("Attempt to take address of value not located in memory."));
1497
1498 /* Get target memory address. */
1499 arg2 = value_from_pointer (lookup_pointer_type (value_type (arg1)),
1500 (value_address (arg1)
1501 + value_embedded_offset (arg1)));
1502
1503 /* This may be a pointer to a base subobject; so remember the
1504 full derived object's type ... */
1505 set_value_enclosing_type (arg2,
1506 lookup_pointer_type (value_enclosing_type (arg1)));
1507 /* ... and also the relative position of the subobject in the full
1508 object. */
1509 set_value_pointed_to_offset (arg2, value_embedded_offset (arg1));
1510 return arg2;
1511 }
1512
1513 /* Return a reference value for the object for which ARG1 is the
1514 contents. */
1515
1516 struct value *
1517 value_ref (struct value *arg1)
1518 {
1519 struct value *arg2;
1520 struct type *type = check_typedef (value_type (arg1));
1521
1522 if (TYPE_CODE (type) == TYPE_CODE_REF)
1523 return arg1;
1524
1525 arg2 = value_addr (arg1);
1526 deprecated_set_value_type (arg2, lookup_reference_type (type));
1527 return arg2;
1528 }
1529
1530 /* Given a value of a pointer type, apply the C unary * operator to
1531 it. */
1532
1533 struct value *
1534 value_ind (struct value *arg1)
1535 {
1536 struct type *base_type;
1537 struct value *arg2;
1538
1539 arg1 = coerce_array (arg1);
1540
1541 base_type = check_typedef (value_type (arg1));
1542
1543 if (VALUE_LVAL (arg1) == lval_computed)
1544 {
1545 const struct lval_funcs *funcs = value_computed_funcs (arg1);
1546
1547 if (funcs->indirect)
1548 {
1549 struct value *result = funcs->indirect (arg1);
1550
1551 if (result)
1552 return result;
1553 }
1554 }
1555
1556 if (TYPE_CODE (base_type) == TYPE_CODE_PTR)
1557 {
1558 struct type *enc_type;
1559
1560 /* We may be pointing to something embedded in a larger object.
1561 Get the real type of the enclosing object. */
1562 enc_type = check_typedef (value_enclosing_type (arg1));
1563 enc_type = TYPE_TARGET_TYPE (enc_type);
1564
1565 if (TYPE_CODE (check_typedef (enc_type)) == TYPE_CODE_FUNC
1566 || TYPE_CODE (check_typedef (enc_type)) == TYPE_CODE_METHOD)
1567 /* For functions, go through find_function_addr, which knows
1568 how to handle function descriptors. */
1569 arg2 = value_at_lazy (enc_type,
1570 find_function_addr (arg1, NULL));
1571 else
1572 /* Retrieve the enclosing object pointed to. */
1573 arg2 = value_at_lazy (enc_type,
1574 (value_as_address (arg1)
1575 - value_pointed_to_offset (arg1)));
1576
1577 enc_type = value_type (arg2);
1578 return readjust_indirect_value_type (arg2, enc_type, base_type, arg1);
1579 }
1580
1581 error (_("Attempt to take contents of a non-pointer value."));
1582 return 0; /* For lint -- never reached. */
1583 }
1584 \f
1585 /* Create a value for an array by allocating space in GDB, copying the
1586 data into that space, and then setting up an array value.
1587
1588 The array bounds are set from LOWBOUND and HIGHBOUND, and the array
1589 is populated from the values passed in ELEMVEC.
1590
1591 The element type of the array is inherited from the type of the
1592 first element, and all elements must have the same size (though we
1593 don't currently enforce any restriction on their types). */
1594
1595 struct value *
1596 value_array (int lowbound, int highbound, struct value **elemvec)
1597 {
1598 int nelem;
1599 int idx;
1600 unsigned int typelength;
1601 struct value *val;
1602 struct type *arraytype;
1603
1604 /* Validate that the bounds are reasonable and that each of the
1605 elements have the same size. */
1606
1607 nelem = highbound - lowbound + 1;
1608 if (nelem <= 0)
1609 {
1610 error (_("bad array bounds (%d, %d)"), lowbound, highbound);
1611 }
1612 typelength = TYPE_LENGTH (value_enclosing_type (elemvec[0]));
1613 for (idx = 1; idx < nelem; idx++)
1614 {
1615 if (TYPE_LENGTH (value_enclosing_type (elemvec[idx])) != typelength)
1616 {
1617 error (_("array elements must all be the same size"));
1618 }
1619 }
1620
1621 arraytype = lookup_array_range_type (value_enclosing_type (elemvec[0]),
1622 lowbound, highbound);
1623
1624 if (!current_language->c_style_arrays)
1625 {
1626 val = allocate_value (arraytype);
1627 for (idx = 0; idx < nelem; idx++)
1628 value_contents_copy (val, idx * typelength, elemvec[idx], 0,
1629 typelength);
1630 return val;
1631 }
1632
1633 /* Allocate space to store the array, and then initialize it by
1634 copying in each element. */
1635
1636 val = allocate_value (arraytype);
1637 for (idx = 0; idx < nelem; idx++)
1638 value_contents_copy (val, idx * typelength, elemvec[idx], 0, typelength);
1639 return val;
1640 }
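/* Illustrative sketch (hypothetical usage, not compiled): build the
   GDB-side array {1, 2, 3} with a lower bound of 1.  GDBARCH is assumed
   to be a valid architecture.  */
#if 0
  struct value *elts[3];
  struct value *arr;
  int i;

  for (i = 0; i < 3; i++)
    elts[i] = value_from_longest (builtin_type (gdbarch)->builtin_int, i + 1);
  arr = value_array (1, 3, elts);
#endif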
1641
1642 struct value *
1643 value_cstring (char *ptr, ssize_t len, struct type *char_type)
1644 {
1645 struct value *val;
1646 int lowbound = current_language->string_lower_bound;
1647 ssize_t highbound = len / TYPE_LENGTH (char_type);
1648 struct type *stringtype
1649 = lookup_array_range_type (char_type, lowbound, highbound + lowbound - 1);
1650
1651 val = allocate_value (stringtype);
1652 memcpy (value_contents_raw (val), ptr, len);
1653 return val;
1654 }
1655
1656 /* Create a value for a string constant by allocating space in the
1657 inferior, copying the data into that space, and returning the
1658 address with type TYPE_CODE_STRING. PTR points to the string
1659 constant data; LEN is number of characters.
1660
1661 Note that string types are like array of char types with a lower
1662 bound of zero and an upper bound of LEN - 1. Also note that the
1663 string may contain embedded null bytes. */
1664
1665 struct value *
1666 value_string (char *ptr, ssize_t len, struct type *char_type)
1667 {
1668 struct value *val;
1669 int lowbound = current_language->string_lower_bound;
1670 ssize_t highbound = len / TYPE_LENGTH (char_type);
1671 struct type *stringtype
1672 = lookup_string_range_type (char_type, lowbound, highbound + lowbound - 1);
1673
1674 val = allocate_value (stringtype);
1675 memcpy (value_contents_raw (val), ptr, len);
1676 return val;
1677 }
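/* Illustrative sketch (hypothetical usage, not compiled): build a
   GDB-side string value holding "hi" using the target's char type.
   GDBARCH is assumed to be a valid architecture.  */
#if 0
  char buf[] = "hi";
  struct type *char_type = builtin_type (gdbarch)->builtin_char;
  struct value *s = value_cstring (buf, 2, char_type);
#endif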
1678
1679 \f
1680 /* See if we can pass arguments in T2 to a function which takes
1681 arguments of types T1. T1 is a list of NARGS arguments, and T2 is
1682 a NULL-terminated vector. If some arguments need coercion of some
1683 sort, then the coerced values are written into T2. Return value is
1684 0 if the arguments could be matched, or the position at which they
1685 differ if not.
1686
1687 STATICP is nonzero if the T1 argument list came from a static
1688 member function. T2 will still include the ``this'' pointer, but
1689 it will be skipped.
1690
1691 For non-static member functions, we ignore the first argument,
1692 which is the type of the instance variable. This is because we
1693 want to handle calls with objects from derived classes. This is
1694 not entirely correct: we should actually check to make sure that a
1695 requested operation is type secure, shouldn't we? FIXME. */
1696
1697 static int
1698 typecmp (int staticp, int varargs, int nargs,
1699 struct field t1[], struct value *t2[])
1700 {
1701 int i;
1702
1703 if (t2 == 0)
1704 internal_error (__FILE__, __LINE__,
1705 _("typecmp: no argument list"));
1706
1707 /* Skip ``this'' argument if applicable. T2 will always include
1708 THIS. */
1709 if (staticp)
1710 t2 ++;
1711
1712 for (i = 0;
1713 (i < nargs) && TYPE_CODE (t1[i].type) != TYPE_CODE_VOID;
1714 i++)
1715 {
1716 struct type *tt1, *tt2;
1717
1718 if (!t2[i])
1719 return i + 1;
1720
1721 tt1 = check_typedef (t1[i].type);
1722 tt2 = check_typedef (value_type (t2[i]));
1723
1724 if (TYPE_CODE (tt1) == TYPE_CODE_REF
1725 /* We should be doing hairy argument matching, as below. */
1726 && (TYPE_CODE (check_typedef (TYPE_TARGET_TYPE (tt1)))
1727 == TYPE_CODE (tt2)))
1728 {
1729 if (TYPE_CODE (tt2) == TYPE_CODE_ARRAY)
1730 t2[i] = value_coerce_array (t2[i]);
1731 else
1732 t2[i] = value_ref (t2[i]);
1733 continue;
1734 }
1735
1736 /* djb - 20000715 - Until the new type structure is in the
1737 place, and we can attempt things like implicit conversions,
1738 we need to do this so you can take something like a map<const
1739 char *>, and properly access map["hello"], because the
1740 argument to [] will be a reference to a pointer to a char,
1741 and the argument will be a pointer to a char. */
742 while (TYPE_CODE (tt1) == TYPE_CODE_REF
743 || TYPE_CODE (tt1) == TYPE_CODE_PTR)
744 {
745 tt1 = check_typedef (TYPE_TARGET_TYPE (tt1));
746 }
747 while (TYPE_CODE (tt2) == TYPE_CODE_ARRAY
748 || TYPE_CODE (tt2) == TYPE_CODE_PTR
749 || TYPE_CODE (tt2) == TYPE_CODE_REF)
750 {
751 tt2 = check_typedef (TYPE_TARGET_TYPE (tt2));
752 }
1753 if (TYPE_CODE (tt1) == TYPE_CODE (tt2))
1754 continue;
1755 /* Array to pointer is a `trivial conversion' according to the
1756 ARM. */
1757
1758 /* We should be doing much hairier argument matching (see
1759 section 13.2 of the ARM), but as a quick kludge, just check
1760 for the same type code. */
1761 if (TYPE_CODE (t1[i].type) != TYPE_CODE (value_type (t2[i])))
1762 return i + 1;
1763 }
1764 if (varargs || t2[i] == NULL)
1765 return 0;
1766 return i + 1;
1767 }
1768
1769 /* Helper class for do_search_struct_field that updates *RESULT_PTR
1770 and *LAST_BOFFSET, and possibly throws an exception if the field
1771 search has yielded ambiguous results. */
1772
1773 static void
1774 update_search_result (struct value **result_ptr, struct value *v,
1775 int *last_boffset, int boffset,
1776 const char *name, struct type *type)
1777 {
1778 if (v != NULL)
1779 {
1780 if (*result_ptr != NULL
1781 /* The result is not ambiguous if all the classes that are
1782 found occupy the same space. */
1783 && *last_boffset != boffset)
1784 error (_("base class '%s' is ambiguous in type '%s'"),
1785 name, TYPE_SAFE_NAME (type));
1786 *result_ptr = v;
1787 *last_boffset = boffset;
1788 }
1789 }
1790
1791 /* A helper for search_struct_field. This does all the work; most
1792 arguments are as passed to search_struct_field. The result is
1793 stored in *RESULT_PTR, which must be initialized to NULL.
1794 OUTERMOST_TYPE is the type of the initial type passed to
1795 search_struct_field; this is used for error reporting when the
1796 lookup is ambiguous. */
1797
1798 static void
1799 do_search_struct_field (const char *name, struct value *arg1, int offset,
1800 struct type *type, int looking_for_baseclass,
1801 struct value **result_ptr,
1802 int *last_boffset,
1803 struct type *outermost_type)
1804 {
1805 int i;
1806 int nbases;
1807
1808 CHECK_TYPEDEF (type);
1809 nbases = TYPE_N_BASECLASSES (type);
1810
1811 if (!looking_for_baseclass)
1812 for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--)
1813 {
1814 const char *t_field_name = TYPE_FIELD_NAME (type, i);
1815
1816 if (t_field_name && (strcmp_iw (t_field_name, name) == 0))
1817 {
1818 struct value *v;
1819
1820 if (field_is_static (&TYPE_FIELD (type, i)))
1821 v = value_static_field (type, i);
1822 else
1823 v = value_primitive_field (arg1, offset, i, type);
1824 *result_ptr = v;
1825 return;
1826 }
1827
1828 if (t_field_name
1829 && (t_field_name[0] == '\0'
1830 || (TYPE_CODE (type) == TYPE_CODE_UNION
1831 && (strcmp_iw (t_field_name, "else") == 0))))
1832 {
1833 struct type *field_type = TYPE_FIELD_TYPE (type, i);
1834
1835 if (TYPE_CODE (field_type) == TYPE_CODE_UNION
1836 || TYPE_CODE (field_type) == TYPE_CODE_STRUCT)
1837 {
1838 /* Look for a match through the fields of an anonymous
1839 union, or anonymous struct. C++ provides anonymous
1840 unions.
1841
1842 In the GNU Chill (now deleted from GDB)
1843 implementation of variant record types, each
1844 <alternative field> has an (anonymous) union type,
1845 each member of the union represents a <variant
1846 alternative>. Each <variant alternative> is
1847 represented as a struct, with a member for each
1848 <variant field>. */
1849
1850 struct value *v = NULL;
1851 int new_offset = offset;
1852
1853 /* This is pretty gross. In G++, the offset in an
1854 anonymous union is relative to the beginning of the
1855 enclosing struct. In the GNU Chill (now deleted
1856 from GDB) implementation of variant records, the
1857 bitpos is zero in an anonymous union field, so we
1858 have to add the offset of the union here. */
1859 if (TYPE_CODE (field_type) == TYPE_CODE_STRUCT
1860 || (TYPE_NFIELDS (field_type) > 0
1861 && TYPE_FIELD_BITPOS (field_type, 0) == 0))
1862 new_offset += TYPE_FIELD_BITPOS (type, i) / 8;
1863
1864 do_search_struct_field (name, arg1, new_offset,
1865 field_type,
1866 looking_for_baseclass, &v,
1867 last_boffset,
1868 outermost_type);
1869 if (v)
1870 {
1871 *result_ptr = v;
1872 return;
1873 }
1874 }
1875 }
1876 }
1877
1878 for (i = 0; i < nbases; i++)
1879 {
1880 struct value *v = NULL;
1881 struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
1882 /* If we are looking for baseclasses, this is what we get when
1883 we hit them. But it could happen that the base part's member
1884 name is not yet filled in. */
1885 int found_baseclass = (looking_for_baseclass
1886 && TYPE_BASECLASS_NAME (type, i) != NULL
1887 && (strcmp_iw (name,
1888 TYPE_BASECLASS_NAME (type,
1889 i)) == 0));
1890 int boffset = value_embedded_offset (arg1) + offset;
1891
1892 if (BASETYPE_VIA_VIRTUAL (type, i))
1893 {
1894 struct value *v2;
1895
1896 boffset = baseclass_offset (type, i,
1897 value_contents_for_printing (arg1),
1898 value_embedded_offset (arg1) + offset,
1899 value_address (arg1),
1900 arg1);
1901
1902 /* The virtual base class pointer might have been clobbered
1903 by the user program. Make sure that it still points to a
1904 valid memory location. */
1905
1906 boffset += value_embedded_offset (arg1) + offset;
1907 if (boffset < 0
1908 || boffset >= TYPE_LENGTH (value_enclosing_type (arg1)))
1909 {
1910 CORE_ADDR base_addr;
1911
1912 base_addr = value_address (arg1) + boffset;
1913 v2 = value_at_lazy (basetype, base_addr);
1914 if (target_read_memory (base_addr,
1915 value_contents_raw (v2),
1916 TYPE_LENGTH (value_type (v2))) != 0)
1917 error (_("virtual baseclass botch"));
1918 }
1919 else
1920 {
1921 v2 = value_copy (arg1);
1922 deprecated_set_value_type (v2, basetype);
1923 set_value_embedded_offset (v2, boffset);
1924 }
1925
1926 if (found_baseclass)
1927 v = v2;
1928 else
1929 {
1930 do_search_struct_field (name, v2, 0,
1931 TYPE_BASECLASS (type, i),
1932 looking_for_baseclass,
1933 result_ptr, last_boffset,
1934 outermost_type);
1935 }
1936 }
1937 else if (found_baseclass)
1938 v = value_primitive_field (arg1, offset, i, type);
1939 else
1940 {
1941 do_search_struct_field (name, arg1,
1942 offset + TYPE_BASECLASS_BITPOS (type,
1943 i) / 8,
1944 basetype, looking_for_baseclass,
1945 result_ptr, last_boffset,
1946 outermost_type);
1947 }
1948
1949 update_search_result (result_ptr, v, last_boffset,
1950 boffset, name, outermost_type);
1951 }
1952 }
1953
1954 /* Helper function used by value_struct_elt to recurse through
1955 baseclasses. Look for a field NAME in ARG1. Adjust the address of
1956 ARG1 by OFFSET bytes, and search in it assuming it has (class) type
1957 TYPE. If found, return value, else return NULL.
1958
1959 If LOOKING_FOR_BASECLASS, then instead of looking for struct
1960 fields, look for a baseclass named NAME. */
1961
1962 static struct value *
1963 search_struct_field (const char *name, struct value *arg1, int offset,
1964 struct type *type, int looking_for_baseclass)
1965 {
1966 struct value *result = NULL;
1967 int boffset = 0;
1968
1969 do_search_struct_field (name, arg1, offset, type, looking_for_baseclass,
1970 &result, &boffset, type);
1971 return result;
1972 }
1973
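/* Illustrative sketch (not part of GDB; the debuggee type below is
   hypothetical).  For

     struct S { int kind; union { int i; double d; }; };

   a lookup of "i" on a value of type S matches no named field of S
   itself, but the anonymous-union branch of do_search_struct_field
   recurses into the unnamed member and returns S::i at the correct
   offset within S.  */
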
1974 /* Helper function used by value_struct_elt to recurse through
1975 baseclasses. Look for a method named NAME in ARG1. Adjust the address of
1976 ARG1 by OFFSET bytes, and search in it assuming it has (class) type
1977 TYPE.
1978
1979 If found, return the value. If the name matched but the arguments
1980 did not, return (value) -1. Otherwise return NULL. */
1981
1982 static struct value *
1983 search_struct_method (const char *name, struct value **arg1p,
1984 struct value **args, int offset,
1985 int *static_memfuncp, struct type *type)
1986 {
1987 int i;
1988 struct value *v;
1989 int name_matched = 0;
1990 char dem_opname[64];
1991
1992 CHECK_TYPEDEF (type);
1993 for (i = TYPE_NFN_FIELDS (type) - 1; i >= 0; i--)
1994 {
1995 const char *t_field_name = TYPE_FN_FIELDLIST_NAME (type, i);
1996
1997 /* FIXME! May need to check for ARM demangling here. */
1998 if (strncmp (t_field_name, "__", 2) == 0
1999 || strncmp (t_field_name, "op", 2) == 0
2000 || strncmp (t_field_name, "type", 4) == 0)
2001 {
2002 if (cplus_demangle_opname (t_field_name, dem_opname, DMGL_ANSI))
2003 t_field_name = dem_opname;
2004 else if (cplus_demangle_opname (t_field_name, dem_opname, 0))
2005 t_field_name = dem_opname;
2006 }
2007 if (t_field_name && (strcmp_iw (t_field_name, name) == 0))
2008 {
2009 int j = TYPE_FN_FIELDLIST_LENGTH (type, i) - 1;
2010 struct fn_field *f = TYPE_FN_FIELDLIST1 (type, i);
2011
2012 name_matched = 1;
2013 check_stub_method_group (type, i);
2014 if (j > 0 && args == 0)
2015 error (_("cannot resolve overloaded method "
2016 "`%s': no arguments supplied"), name);
2017 else if (j == 0 && args == 0)
2018 {
2019 v = value_fn_field (arg1p, f, j, type, offset);
2020 if (v != NULL)
2021 return v;
2022 }
2023 else
2024 while (j >= 0)
2025 {
2026 if (!typecmp (TYPE_FN_FIELD_STATIC_P (f, j),
2027 TYPE_VARARGS (TYPE_FN_FIELD_TYPE (f, j)),
2028 TYPE_NFIELDS (TYPE_FN_FIELD_TYPE (f, j)),
2029 TYPE_FN_FIELD_ARGS (f, j), args))
2030 {
2031 if (TYPE_FN_FIELD_VIRTUAL_P (f, j))
2032 return value_virtual_fn_field (arg1p, f, j,
2033 type, offset);
2034 if (TYPE_FN_FIELD_STATIC_P (f, j)
2035 && static_memfuncp)
2036 *static_memfuncp = 1;
2037 v = value_fn_field (arg1p, f, j, type, offset);
2038 if (v != NULL)
2039 return v;
2040 }
2041 j--;
2042 }
2043 }
2044 }
2045
2046 for (i = TYPE_N_BASECLASSES (type) - 1; i >= 0; i--)
2047 {
2048 int base_offset;
2049 int this_offset;
2050
2051 if (BASETYPE_VIA_VIRTUAL (type, i))
2052 {
2053 struct type *baseclass = check_typedef (TYPE_BASECLASS (type, i));
2054 struct value *base_val;
2055 const gdb_byte *base_valaddr;
2056
2057 /* The virtual base class pointer might have been
2058 clobbered by the user program. Make sure that it
2059 still points to a valid memory location. */
2060
2061 if (offset < 0 || offset >= TYPE_LENGTH (type))
2062 {
2063 gdb_byte *tmp;
2064 struct cleanup *back_to;
2065 CORE_ADDR address;
2066
2067 tmp = xmalloc (TYPE_LENGTH (baseclass));
2068 back_to = make_cleanup (xfree, tmp);
2069 address = value_address (*arg1p);
2070
2071 if (target_read_memory (address + offset,
2072 tmp, TYPE_LENGTH (baseclass)) != 0)
2073 error (_("virtual baseclass botch"));
2074
2075 base_val = value_from_contents_and_address (baseclass,
2076 tmp,
2077 address + offset);
2078 base_valaddr = value_contents_for_printing (base_val);
2079 this_offset = 0;
2080 do_cleanups (back_to);
2081 }
2082 else
2083 {
2084 base_val = *arg1p;
2085 base_valaddr = value_contents_for_printing (*arg1p);
2086 this_offset = offset;
2087 }
2088
2089 base_offset = baseclass_offset (type, i, base_valaddr,
2090 this_offset, value_address (base_val),
2091 base_val);
2092 }
2093 else
2094 {
2095 base_offset = TYPE_BASECLASS_BITPOS (type, i) / 8;
2096 }
2097 v = search_struct_method (name, arg1p, args, base_offset + offset,
2098 static_memfuncp, TYPE_BASECLASS (type, i));
2099 if (v == (struct value *) - 1)
2100 {
2101 name_matched = 1;
2102 }
2103 else if (v)
2104 {
2105 /* FIXME-bothner: Why is this commented out? Why is it here? */
2106 /* *arg1p = arg1_tmp; */
2107 return v;
2108 }
2109 }
2110 if (name_matched)
2111 return (struct value *) - 1;
2112 else
2113 return NULL;
2114 }
2115
2116 /* Given *ARGP, a value of type (pointer to a)* structure/union,
2117 extract the component named NAME from the ultimate target
2118 structure/union and return it as a value with its appropriate type.
2119 ERR is used in the error message if *ARGP's type is wrong.
2120
2121 C++: ARGS is a list of argument types to aid in the selection of
2122 an appropriate method. Also, handle derived types.
2123
2124 STATIC_MEMFUNCP, if non-NULL, points to a caller-supplied location
2125 where the truthvalue of whether the function that was resolved was
2126 a static member function or not is stored.
2127
2128 ERR is an error message to be printed in case the field is not
2129 found. */
2130
2131 struct value *
2132 value_struct_elt (struct value **argp, struct value **args,
2133 const char *name, int *static_memfuncp, const char *err)
2134 {
2135 struct type *t;
2136 struct value *v;
2137
2138 *argp = coerce_array (*argp);
2139
2140 t = check_typedef (value_type (*argp));
2141
2142 /* Follow pointers until we get to a non-pointer. */
2143
2144 while (TYPE_CODE (t) == TYPE_CODE_PTR || TYPE_CODE (t) == TYPE_CODE_REF)
2145 {
2146 *argp = value_ind (*argp);
2147 /* Don't coerce fn pointer to fn and then back again! */
2148 if (TYPE_CODE (check_typedef (value_type (*argp))) != TYPE_CODE_FUNC)
2149 *argp = coerce_array (*argp);
2150 t = check_typedef (value_type (*argp));
2151 }
2152
2153 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
2154 && TYPE_CODE (t) != TYPE_CODE_UNION)
2155 error (_("Attempt to extract a component of a value that is not a %s."),
2156 err);
2157
2158 /* Assume it's not, unless we see that it is. */
2159 if (static_memfuncp)
2160 *static_memfuncp = 0;
2161
2162 if (!args)
2163 {
2164 /* No arguments: look NAME up as a field first, then as a method. */
2165
2166 /* Try as a field first, because if we succeed, there is less
2167 work to be done. */
2168 v = search_struct_field (name, *argp, 0, t, 0);
2169 if (v)
2170 return v;
2171
2172 /* C++: If it was not found as a data field, then try to
2173 return it as a pointer to a method. */
2174 v = search_struct_method (name, argp, args, 0,
2175 static_memfuncp, t);
2176
2177 if (v == (struct value *) - 1)
2178 error (_("Cannot take address of method %s."), name);
2179 else if (v == 0)
2180 {
2181 if (TYPE_NFN_FIELDS (t))
2182 error (_("There is no member or method named %s."), name);
2183 else
2184 error (_("There is no member named %s."), name);
2185 }
2186 return v;
2187 }
2188
2189 v = search_struct_method (name, argp, args, 0,
2190 static_memfuncp, t);
2191
2192 if (v == (struct value *) - 1)
2193 {
2194 error (_("One of the arguments you tried to pass to %s could not "
2195 "be converted to what the function wants."), name);
2196 }
2197 else if (v == 0)
2198 {
2199 /* See if user tried to invoke data as function. If so, hand it
2200 back. If it's not callable (i.e., not a pointer to a function),
2201 gdb should give an error. */
2202 v = search_struct_field (name, *argp, 0, t, 0);
2203 /* If we found an ordinary field, then it is not a method call.
2204 So, treat it as if it were a static member function. */
2205 if (v && static_memfuncp)
2206 *static_memfuncp = 1;
2207 }
2208
2209 if (!v)
2210 throw_error (NOT_FOUND_ERROR,
2211 _("Structure has no component named %s."), name);
2212 return v;
2213 }
2214
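/* Usage sketch (an assumption, not a quote of any caller).  A caller
   evaluating "obj.count" with no argument list might do roughly

     struct value *v
       = value_struct_elt (&obj, NULL, "count", NULL, "structure");

   where OBJ is a value of structure type.  A method call such as
   "obj.size ()" instead passes a NULL-terminated ARGS vector and
   receives the resolved (possibly virtual, possibly static) member
   function.  */
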
2215 /* Given *ARGP, a value of type structure or union, or a pointer/reference
2216 to a structure or union, extract and return its component (field) of
2217 type FTYPE at the specified BITPOS.
2218 Throw an exception on error. */
2219
2220 struct value *
2221 value_struct_elt_bitpos (struct value **argp, int bitpos, struct type *ftype,
2222 const char *err)
2223 {
2224 struct type *t;
2225 struct value *v;
2226 int i;
2227 int nbases;
2228
2229 *argp = coerce_array (*argp);
2230
2231 t = check_typedef (value_type (*argp));
2232
2233 while (TYPE_CODE (t) == TYPE_CODE_PTR || TYPE_CODE (t) == TYPE_CODE_REF)
2234 {
2235 *argp = value_ind (*argp);
2236 if (TYPE_CODE (check_typedef (value_type (*argp))) != TYPE_CODE_FUNC)
2237 *argp = coerce_array (*argp);
2238 t = check_typedef (value_type (*argp));
2239 }
2240
2241 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
2242 && TYPE_CODE (t) != TYPE_CODE_UNION)
2243 error (_("Attempt to extract a component of a value that is not a %s."),
2244 err);
2245
2246 for (i = TYPE_N_BASECLASSES (t); i < TYPE_NFIELDS (t); i++)
2247 {
2248 if (!field_is_static (&TYPE_FIELD (t, i))
2249 && bitpos == TYPE_FIELD_BITPOS (t, i)
2250 && types_equal (ftype, TYPE_FIELD_TYPE (t, i)))
2251 return value_primitive_field (*argp, 0, i, t);
2252 }
2253
2254 error (_("No field with matching bitpos and type."));
2255
2256 /* Never hit. */
2257 return NULL;
2258 }
2259
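/* Usage sketch (an assumption, not a quote of any caller).  Given a
   value V of a debuggee type such as

     struct P { int a; int b; };

   a caller that already knows the field's bit position and type can
   fetch "b" without naming it:

     struct value *f
       = value_struct_elt_bitpos (&v, 32, int_type, "structure");

   assuming a 32-bit int, no padding, and INT_TYPE being whatever
   `int' type the caller has at hand.  */
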
2260 /* Search through the methods of an object (and its bases) to find a
2261 specified method. Return in FN_LIST a pointer to the list of
2262 overloaded fn_field instances defined in the source language. If
2263 available and matching, a vector of matching xmethods defined in
2264 extension languages is also returned in XM_WORKER_VEC.
2265
2266 Helper function for value_find_oload_method_list.
2267 ARGP is a pointer to a pointer to a value (the object).
2268 METHOD is a string containing the method name.
2269 OFFSET is the offset within the value.
2270 TYPE is the assumed type of the object.
2271 FN_LIST is the pointer to matching overloaded instances defined in
2272 source language. Since this is a recursive function, *FN_LIST
2273 should be set to NULL when calling this function.
2274 NUM_FNS is the number of overloaded instances. *NUM_FNS should be set to
2275 0 when calling this function.
2276 XM_WORKER_VEC is the vector of matching xmethod workers. *XM_WORKER_VEC
2277 should also be set to NULL when calling this function.
2278 BASETYPE is set to the actual type of the subobject where the
2279 method is found.
2280 BOFFSET is the offset of the base subobject where the method is found. */
2281
2282 static void
2283 find_method_list (struct value **argp, const char *method,
2284 int offset, struct type *type,
2285 struct fn_field **fn_list, int *num_fns,
2286 VEC (xmethod_worker_ptr) **xm_worker_vec,
2287 struct type **basetype, int *boffset)
2288 {
2289 int i;
2290 struct fn_field *f = NULL;
2291 VEC (xmethod_worker_ptr) *worker_vec = NULL, *new_vec = NULL;
2292
2293 gdb_assert (fn_list != NULL && xm_worker_vec != NULL);
2294 CHECK_TYPEDEF (type);
2295
2296 /* First check in object itself.
2297 This function is called recursively to search through base classes.
2298 If there is a source method match found at some stage, then we need not
2299 look for source methods in subsequent recursive calls. */
2300 if ((*fn_list) == NULL)
2301 {
2302 for (i = TYPE_NFN_FIELDS (type) - 1; i >= 0; i--)
2303 {
2304 /* pai: FIXME What about operators and type conversions? */
2305 const char *fn_field_name = TYPE_FN_FIELDLIST_NAME (type, i);
2306
2307 if (fn_field_name && (strcmp_iw (fn_field_name, method) == 0))
2308 {
2309 int len = TYPE_FN_FIELDLIST_LENGTH (type, i);
2310 f = TYPE_FN_FIELDLIST1 (type, i);
2311 *fn_list = f;
2312
2313 *num_fns = len;
2314 *basetype = type;
2315 *boffset = offset;
2316
2317 /* Resolve any stub methods. */
2318 check_stub_method_group (type, i);
2319
2320 break;
2321 }
2322 }
2323 }
2324
2325 /* Unlike source methods, xmethods can be accumulated over successive
2326 recursive calls. In other words, an xmethod named 'm' in a class
2327 will not hide an xmethod named 'm' in its base class(es). We want
2328 it to be this way because xmethods are after all convenience functions
2329 and hence there is no point restricting them with something like method
2330 hiding. Moreover, if hiding is done for xmethods as well, then we will
2331 have to provide a mechanism to un-hide (like the 'using' construct). */
2332 worker_vec = get_matching_xmethod_workers (type, method);
2333 new_vec = VEC_merge (xmethod_worker_ptr, *xm_worker_vec, worker_vec);
2334
2335 VEC_free (xmethod_worker_ptr, *xm_worker_vec);
2336 VEC_free (xmethod_worker_ptr, worker_vec);
2337 *xm_worker_vec = new_vec;
2338
2339 /* If source methods are not found in current class, look for them in the
2340 base classes. We also have to go through the base classes to gather
2341 extension methods. */
2342 for (i = TYPE_N_BASECLASSES (type) - 1; i >= 0; i--)
2343 {
2344 int base_offset;
2345
2346 if (BASETYPE_VIA_VIRTUAL (type, i))
2347 {
2348 base_offset = baseclass_offset (type, i,
2349 value_contents_for_printing (*argp),
2350 value_offset (*argp) + offset,
2351 value_address (*argp), *argp);
2352 }
2353 else /* Non-virtual base, simply use bit position from debug
2354 info. */
2355 {
2356 base_offset = TYPE_BASECLASS_BITPOS (type, i) / 8;
2357 }
2358
2359 find_method_list (argp, method, base_offset + offset,
2360 TYPE_BASECLASS (type, i), fn_list, num_fns,
2361 xm_worker_vec, basetype, boffset);
2362 }
2363 }
2364
2365 /* Return the list of overloaded methods of a specified name. The methods
2366 could be those GDB finds in the binary, or xmethods. Methods found in
2367 the binary are returned in FN_LIST, and xmethods are returned in
2368 XM_WORKER_VEC.
2369
2370 ARGP is a pointer to a pointer to a value (the object).
2371 METHOD is the method name.
2372 OFFSET is the offset within the value contents.
2373 FN_LIST is the pointer to matching overloaded instances defined in
2374 source language.
2375 NUM_FNS is the number of overloaded instances.
2376 XM_WORKER_VEC is the vector of matching xmethod workers defined in
2377 extension languages.
2378 BASETYPE is set to the type of the base subobject that defines the
2379 method.
2380 BOFFSET is the offset of the base subobject which defines the method. */
2381
2382 static void
2383 value_find_oload_method_list (struct value **argp, const char *method,
2384 int offset, struct fn_field **fn_list,
2385 int *num_fns,
2386 VEC (xmethod_worker_ptr) **xm_worker_vec,
2387 struct type **basetype, int *boffset)
2388 {
2389 struct type *t;
2390
2391 t = check_typedef (value_type (*argp));
2392
2393 /* Code snarfed from value_struct_elt. */
2394 while (TYPE_CODE (t) == TYPE_CODE_PTR || TYPE_CODE (t) == TYPE_CODE_REF)
2395 {
2396 *argp = value_ind (*argp);
2397 /* Don't coerce fn pointer to fn and then back again! */
2398 if (TYPE_CODE (check_typedef (value_type (*argp))) != TYPE_CODE_FUNC)
2399 *argp = coerce_array (*argp);
2400 t = check_typedef (value_type (*argp));
2401 }
2402
2403 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
2404 && TYPE_CODE (t) != TYPE_CODE_UNION)
2405 error (_("Attempt to extract a component of a "
2406 "value that is not a struct or union"));
2407
2408 gdb_assert (fn_list != NULL && xm_worker_vec != NULL);
2409
2410 /* Clear the lists. */
2411 *fn_list = NULL;
2412 *num_fns = 0;
2413 *xm_worker_vec = NULL;
2414
2415 find_method_list (argp, method, 0, t, fn_list, num_fns, xm_worker_vec,
2416 basetype, boffset);
2417 }
2418
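/* Usage sketch (an assumption, not a quote of any caller).  Overload
   resolution for "obj.insert (x)" first collects the candidates:

     struct fn_field *fns;
     int nfns, boff;
     VEC (xmethod_worker_ptr) *xms;
     struct type *btype;

     value_find_oload_method_list (&objp, "insert", 0, &fns, &nfns,
                                   &xms, &btype, &boff);

   and then ranks FNS and XMS against the actual arguments via
   find_oload_champ below.  */
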
2419 /* Given an array of arguments (ARGS) (which includes an
2420 entry for "this" in the case of C++ methods), the number of
2421 arguments NARGS, the NAME of a function, and whether it's a method or
2422 not (METHOD), find the best function that matches on the argument types
2423 according to the overload resolution rules.
2424
2425 METHOD can be one of three values:
2426 NON_METHOD: for non-member functions.
2427 METHOD: for member functions.
2428 BOTH: used for overload resolution of operators where the
2429 candidates are expected to be either member or non-member
2430 functions. In this case the first argument in ARGS
2431 (representing 'this') is expected to be a reference to the
2432 target object, and will be dereferenced when attempting the
2433 non-member search.
2434
2435 In the case of class methods, the parameter OBJ is an object value
2436 in which to search for overloaded methods.
2437
2438 In the case of non-method functions, the parameter FSYM is a symbol
2439 corresponding to one of the overloaded functions.
2440
2441 Return value is an integer: 0 -> good match, 10 -> debugger applied
2442 non-standard coercions, 100 -> incompatible.
2443
2444 If a method is being searched for, VALP will hold the value.
2445 If a non-method is being searched for, SYMP will hold the symbol
2446 for it.
2447
2448 If a method is being searched for, and it is a static method,
2449 then STATICP will point to a non-zero value.
2450
2451 If NO_ADL is non-zero, argument-dependent lookup is disabled. This is
2452 used to prevent ADL overload candidates when performing overload
2453 resolution for a fully qualified name.
2454
2455 Note: This function does *not* check the value of
2456 overload_resolution. Caller must check it to see whether overload
2457 resolution is permitted. */
2458
2459 int
2460 find_overload_match (struct value **args, int nargs,
2461 const char *name, enum oload_search_type method,
2462 struct value **objp, struct symbol *fsym,
2463 struct value **valp, struct symbol **symp,
2464 int *staticp, const int no_adl)
2465 {
2466 struct value *obj = (objp ? *objp : NULL);
2467 struct type *obj_type = obj ? value_type (obj) : NULL;
2468 /* Index of best overloaded function. */
2469 int func_oload_champ = -1;
2470 int method_oload_champ = -1;
2471 int src_method_oload_champ = -1;
2472 int ext_method_oload_champ = -1;
2473 int src_and_ext_equal = 0;
2474
2475 /* The measure for the current best match. */
2476 struct badness_vector *method_badness = NULL;
2477 struct badness_vector *func_badness = NULL;
2478 struct badness_vector *ext_method_badness = NULL;
2479 struct badness_vector *src_method_badness = NULL;
2480
2481 struct value *temp = obj;
2482 /* For methods, the list of overloaded methods. */
2483 struct fn_field *fns_ptr = NULL;
2484 /* For non-methods, the list of overloaded function symbols. */
2485 struct symbol **oload_syms = NULL;
2486 /* For xmethods, the VEC of xmethod workers. */
2487 VEC (xmethod_worker_ptr) *xm_worker_vec = NULL;
2488 /* Number of overloaded instances being considered. */
2489 int num_fns = 0;
2490 struct type *basetype = NULL;
2491 int boffset;
2492
2493 struct cleanup *all_cleanups = make_cleanup (null_cleanup, NULL);
2494
2495 const char *obj_type_name = NULL;
2496 const char *func_name = NULL;
2497 enum oload_classification match_quality;
2498 enum oload_classification method_match_quality = INCOMPATIBLE;
2499 enum oload_classification src_method_match_quality = INCOMPATIBLE;
2500 enum oload_classification ext_method_match_quality = INCOMPATIBLE;
2501 enum oload_classification func_match_quality = INCOMPATIBLE;
2502
2503 /* Get the list of overloaded methods or functions. */
2504 if (method == METHOD || method == BOTH)
2505 {
2506 gdb_assert (obj);
2507
2508 /* OBJ may be a pointer value rather than the object itself. */
2509 obj = coerce_ref (obj);
2510 while (TYPE_CODE (check_typedef (value_type (obj))) == TYPE_CODE_PTR)
2511 obj = coerce_ref (value_ind (obj));
2512 obj_type_name = TYPE_NAME (value_type (obj));
2513
2514 /* First check whether this is a data member, e.g. a pointer to
2515 a function. */
2516 if (TYPE_CODE (check_typedef (value_type (obj))) == TYPE_CODE_STRUCT)
2517 {
2518 *valp = search_struct_field (name, obj, 0,
2519 check_typedef (value_type (obj)), 0);
2520 if (*valp)
2521 {
2522 *staticp = 1;
2523 do_cleanups (all_cleanups);
2524 return 0;
2525 }
2526 }
2527
2528 /* Retrieve the list of methods with the name NAME. */
2529 value_find_oload_method_list (&temp, name, 0, &fns_ptr, &num_fns,
2530 &xm_worker_vec, &basetype, &boffset);
2531 /* If this is a method-only search, and no methods were found,
2532 the search has failed. */
2533 if (method == METHOD && (!fns_ptr || !num_fns) && !xm_worker_vec)
2534 error (_("Couldn't find method %s%s%s"),
2535 obj_type_name,
2536 (obj_type_name && *obj_type_name) ? "::" : "",
2537 name);
2538 /* If we are dealing with stub method types, they should have
2539 been resolved by find_method_list via
2540 value_find_oload_method_list above. */
2541 if (fns_ptr)
2542 {
2543 gdb_assert (TYPE_DOMAIN_TYPE (fns_ptr[0].type) != NULL);
2544
2545 src_method_oload_champ = find_oload_champ (args, nargs,
2546 num_fns, fns_ptr, NULL,
2547 NULL, &src_method_badness);
2548
2549 src_method_match_quality = classify_oload_match
2550 (src_method_badness, nargs,
2551 oload_method_static_p (fns_ptr, src_method_oload_champ));
2552
2553 make_cleanup (xfree, src_method_badness);
2554 }
2555
2556 if (VEC_length (xmethod_worker_ptr, xm_worker_vec) > 0)
2557 {
2558 ext_method_oload_champ = find_oload_champ (args, nargs,
2559 0, NULL, xm_worker_vec,
2560 NULL, &ext_method_badness);
2561 ext_method_match_quality = classify_oload_match (ext_method_badness,
2562 nargs, 0);
2563 make_cleanup (xfree, ext_method_badness);
2564 make_cleanup (free_xmethod_worker_vec, xm_worker_vec);
2565 }
2566
2567 if (src_method_oload_champ >= 0 && ext_method_oload_champ >= 0)
2568 {
2569 switch (compare_badness (ext_method_badness, src_method_badness))
2570 {
2571 case 0: /* Src method and xmethod are equally good. */
2572 src_and_ext_equal = 1;
2573 /* If src method and xmethod are equally good, then
2574 xmethod should be the winner. Hence, fall through to the
2575 case where an xmethod is better than the source
2576 method, except when the xmethod match quality is
2577 non-standard. */
2578 /* FALLTHROUGH */
2579 case 1: /* Src method and ext method are incompatible. */
2580 /* If ext method match is not standard, then let source method
2581 win. Otherwise, fallthrough to let xmethod win. */
2582 if (ext_method_match_quality != STANDARD)
2583 {
2584 method_oload_champ = src_method_oload_champ;
2585 method_badness = src_method_badness;
2586 ext_method_oload_champ = -1;
2587 method_match_quality = src_method_match_quality;
2588 break;
2589 }
2590 /* FALLTHROUGH */
2591 case 2: /* Ext method is champion. */
2592 method_oload_champ = ext_method_oload_champ;
2593 method_badness = ext_method_badness;
2594 src_method_oload_champ = -1;
2595 method_match_quality = ext_method_match_quality;
2596 break;
2597 case 3: /* Src method is champion. */
2598 method_oload_champ = src_method_oload_champ;
2599 method_badness = src_method_badness;
2600 ext_method_oload_champ = -1;
2601 method_match_quality = src_method_match_quality;
2602 break;
2603 default:
2604 gdb_assert_not_reached ("Unexpected overload comparison "
2605 "result");
2606 break;
2607 }
2608 }
2609 else if (src_method_oload_champ >= 0)
2610 {
2611 method_oload_champ = src_method_oload_champ;
2612 method_badness = src_method_badness;
2613 method_match_quality = src_method_match_quality;
2614 }
2615 else if (ext_method_oload_champ >= 0)
2616 {
2617 method_oload_champ = ext_method_oload_champ;
2618 method_badness = ext_method_badness;
2619 method_match_quality = ext_method_match_quality;
2620 }
2621 }
2622
2623 if (method == NON_METHOD || method == BOTH)
2624 {
2625 const char *qualified_name = NULL;
2626
2627 /* If the overload match is being searched for both as a method
2628 and as a non-member function, the first argument must now be
2629 dereferenced. */
2630 if (method == BOTH)
2631 args[0] = value_ind (args[0]);
2632
2633 if (fsym)
2634 {
2635 qualified_name = SYMBOL_NATURAL_NAME (fsym);
2636
2637 /* If we have a function with a C++ name, try to extract just
2638 the function part. Do not try this for non-functions (e.g.
2639 function pointers). */
2640 if (qualified_name
2641 && TYPE_CODE (check_typedef (SYMBOL_TYPE (fsym)))
2642 == TYPE_CODE_FUNC)
2643 {
2644 char *temp;
2645
2646 temp = cp_func_name (qualified_name);
2647
2648 /* If cp_func_name did not remove anything, the name of the
2649 symbol did not include scope or argument types - it was
2650 probably a C-style function. */
2651 if (temp)
2652 {
2653 make_cleanup (xfree, temp);
2654 if (strcmp (temp, qualified_name) == 0)
2655 func_name = NULL;
2656 else
2657 func_name = temp;
2658 }
2659 }
2660 }
2661 else
2662 {
2663 func_name = name;
2664 qualified_name = name;
2665 }
2666
2667 /* If there was no C++ name, this must be a C-style function or
2668 not a function at all. Just return the same symbol. Do the
2669 same if cp_func_name fails for some reason. */
2670 if (func_name == NULL)
2671 {
2672 *symp = fsym;
2673 do_cleanups (all_cleanups);
2674 return 0;
2675 }
2676
2677 func_oload_champ = find_oload_champ_namespace (args, nargs,
2678 func_name,
2679 qualified_name,
2680 &oload_syms,
2681 &func_badness,
2682 no_adl);
2683
2684 if (func_oload_champ >= 0)
2685 func_match_quality = classify_oload_match (func_badness, nargs, 0);
2686
2687 make_cleanup (xfree, oload_syms);
2688 make_cleanup (xfree, func_badness);
2689 }
2690
2691 /* Did we find a match? */
2692 if (method_oload_champ == -1 && func_oload_champ == -1)
2693 throw_error (NOT_FOUND_ERROR,
2694 _("No symbol \"%s\" in current context."),
2695 name);
2696
2697 /* If we have found both a method match and a function
2698 match, find out which one is better, and calculate match
2699 quality. */
2700 if (method_oload_champ >= 0 && func_oload_champ >= 0)
2701 {
2702 switch (compare_badness (func_badness, method_badness))
2703 {
2704 case 0: /* Top two contenders are equally good. */
2705 /* FIXME: GDB does not support the general ambiguous case.
2706 All candidates should be collected and presented to the
2707 user. */
2708 error (_("Ambiguous overload resolution"));
2709 break;
2710 case 1: /* Incomparable top contenders. */
2711 /* This is an error; incompatible candidates
2712 should not have been proposed. */
2713 error (_("Internal error: incompatible "
2714 "overload candidates proposed"));
2715 break;
2716 case 2: /* Function champion. */
2717 method_oload_champ = -1;
2718 match_quality = func_match_quality;
2719 break;
2720 case 3: /* Method champion. */
2721 func_oload_champ = -1;
2722 match_quality = method_match_quality;
2723 break;
2724 default:
2725 error (_("Internal error: unexpected overload comparison result"));
2726 break;
2727 }
2728 }
2729 else
2730 {
2731 /* We have either a method match or a function match. */
2732 if (method_oload_champ >= 0)
2733 match_quality = method_match_quality;
2734 else
2735 match_quality = func_match_quality;
2736 }
2737
2738 if (match_quality == INCOMPATIBLE)
2739 {
2740 if (method == METHOD)
2741 error (_("Cannot resolve method %s%s%s to any overloaded instance"),
2742 obj_type_name,
2743 (obj_type_name && *obj_type_name) ? "::" : "",
2744 name);
2745 else
2746 error (_("Cannot resolve function %s to any overloaded instance"),
2747 func_name);
2748 }
2749 else if (match_quality == NON_STANDARD)
2750 {
2751 if (method == METHOD)
2752 warning (_("Using non-standard conversion to match "
2753 "method %s%s%s to supplied arguments"),
2754 obj_type_name,
2755 (obj_type_name && *obj_type_name) ? "::" : "",
2756 name);
2757 else
2758 warning (_("Using non-standard conversion to match "
2759 "function %s to supplied arguments"),
2760 func_name);
2761 }
2762
2763 if (staticp != NULL)
2764 *staticp = oload_method_static_p (fns_ptr, method_oload_champ);
2765
2766 if (method_oload_champ >= 0)
2767 {
2768 if (src_method_oload_champ >= 0)
2769 {
2770 if (TYPE_FN_FIELD_VIRTUAL_P (fns_ptr, method_oload_champ))
2771 *valp = value_virtual_fn_field (&temp, fns_ptr, method_oload_champ,
2772 basetype, boffset);
2773 else
2774 *valp = value_fn_field (&temp, fns_ptr, method_oload_champ,
2775 basetype, boffset);
2776 }
2777 else
2778 {
2779 *valp = value_of_xmethod (clone_xmethod_worker
2780 (VEC_index (xmethod_worker_ptr, xm_worker_vec,
2781 ext_method_oload_champ)));
2782 }
2783 }
2784 else
2785 *symp = oload_syms[func_oload_champ];
2786
2787 if (objp)
2788 {
2789 struct type *temp_type = check_typedef (value_type (temp));
2790 struct type *objtype = check_typedef (obj_type);
2791
2792 if (TYPE_CODE (temp_type) != TYPE_CODE_PTR
2793 && (TYPE_CODE (objtype) == TYPE_CODE_PTR
2794 || TYPE_CODE (objtype) == TYPE_CODE_REF))
2795 {
2796 temp = value_addr (temp);
2797 }
2798 *objp = temp;
2799 }
2800
2801 do_cleanups (all_cleanups);
2802
2803 switch (match_quality)
2804 {
2805 case INCOMPATIBLE:
2806 return 100;
2807 case NON_STANDARD:
2808 return 10;
2809 default: /* STANDARD */
2810 return 0;
2811 }
2812 }
2813
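/* Usage sketch (an assumption, not a quote of any caller).  For a
   debuggee call such as "obj.f (1, 2.0)" a caller might resolve the
   method with

     struct value *fn = NULL;
     struct symbol *sym = NULL;
     int staticp = 0;

     (void) find_overload_match (args, nargs, "f", METHOD,
                                 &obj, NULL, &fn, &sym, &staticp, 0);

   after verifying that overload resolution is enabled; FN then holds
   the champion method (or xmethod) value, ready to be called.  */
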
2814 /* Find the best overload match, searching for FUNC_NAME in namespaces
2815 contained in QUALIFIED_NAME until it either finds a good match or
2816 runs out of namespaces. It stores the overloaded functions in
2817 *OLOAD_SYMS, and the badness vector in *OLOAD_CHAMP_BV. The
2818 calling function is responsible for freeing *OLOAD_SYMS and
2819 *OLOAD_CHAMP_BV. If NO_ADL, argument dependent lookup is not
2820 performed. */
2821
2822 static int
2823 find_oload_champ_namespace (struct value **args, int nargs,
2824 const char *func_name,
2825 const char *qualified_name,
2826 struct symbol ***oload_syms,
2827 struct badness_vector **oload_champ_bv,
2828 const int no_adl)
2829 {
2830 int oload_champ;
2831
2832 find_oload_champ_namespace_loop (args, nargs,
2833 func_name,
2834 qualified_name, 0,
2835 oload_syms, oload_champ_bv,
2836 &oload_champ,
2837 no_adl);
2838
2839 return oload_champ;
2840 }
2841
2842 /* Helper function for find_oload_champ_namespace; NAMESPACE_LEN is
2843 how deep we've looked for namespaces, and the champ is stored in
2844 OLOAD_CHAMP. The return value is 1 if the champ is a good one, 0
2845 if it isn't. Other arguments are the same as in
2846 find_oload_champ_namespace.
2847
2848 It is the caller's responsibility to free *OLOAD_SYMS and
2849 *OLOAD_CHAMP_BV. */
2850
2851 static int
2852 find_oload_champ_namespace_loop (struct value **args, int nargs,
2853 const char *func_name,
2854 const char *qualified_name,
2855 int namespace_len,
2856 struct symbol ***oload_syms,
2857 struct badness_vector **oload_champ_bv,
2858 int *oload_champ,
2859 const int no_adl)
2860 {
2861 int next_namespace_len = namespace_len;
2862 int searched_deeper = 0;
2863 int num_fns = 0;
2864 struct cleanup *old_cleanups;
2865 int new_oload_champ;
2866 struct symbol **new_oload_syms;
2867 struct badness_vector *new_oload_champ_bv;
2868 char *new_namespace;
2869
2870 if (next_namespace_len != 0)
2871 {
2872 gdb_assert (qualified_name[next_namespace_len] == ':');
2873 next_namespace_len += 2;
2874 }
2875 next_namespace_len +=
2876 cp_find_first_component (qualified_name + next_namespace_len);
2877
2878 /* Initialize these to values that can safely be xfree'd. */
2879 *oload_syms = NULL;
2880 *oload_champ_bv = NULL;
2881
2882 /* First, see if we have a deeper namespace we can search in.
2883 If we get a good match there, use it. */
2884
2885 if (qualified_name[next_namespace_len] == ':')
2886 {
2887 searched_deeper = 1;
2888
2889 if (find_oload_champ_namespace_loop (args, nargs,
2890 func_name, qualified_name,
2891 next_namespace_len,
2892 oload_syms, oload_champ_bv,
2893 oload_champ, no_adl))
2894 {
2895 return 1;
2896 }
2897 }
2898
2899 /* If we reach here, either we're in the deepest namespace or we
2900 didn't find a good match in a deeper namespace. But, in the
2901 latter case, we still have a bad match in a deeper namespace;
2902 note that we might not find any match at all in the current
2903 namespace. (There's always a match in the deepest namespace,
2904 because this overload mechanism only gets called if there's a
2905 function symbol to start off with.) */
2906
2907 old_cleanups = make_cleanup (xfree, *oload_syms);
2908 make_cleanup (xfree, *oload_champ_bv);
2909 new_namespace = alloca (namespace_len + 1);
2910 strncpy (new_namespace, qualified_name, namespace_len);
2911 new_namespace[namespace_len] = '\0';
2912 new_oload_syms = make_symbol_overload_list (func_name,
2913 new_namespace);
2914
2915 /* If we have reached the deepest level, perform
2916 argument-dependent lookup. */
2917 if (!searched_deeper && !no_adl)
2918 {
2919 int ix;
2920 struct type **arg_types;
2921
2922 /* Prepare list of argument types for overload resolution. */
2923 arg_types = (struct type **)
2924 alloca (nargs * (sizeof (struct type *)));
2925 for (ix = 0; ix < nargs; ix++)
2926 arg_types[ix] = value_type (args[ix]);
2927 make_symbol_overload_list_adl (arg_types, nargs, func_name);
2928 }
2929
2930 while (new_oload_syms[num_fns])
2931 ++num_fns;
2932
2933 new_oload_champ = find_oload_champ (args, nargs, num_fns,
2934 NULL, NULL, new_oload_syms,
2935 &new_oload_champ_bv);
2936
2937 /* Case 1: We found a good match. Free earlier matches (if any),
2938 and return it. Case 2: We didn't find a good match, but we're
2939 not the deepest function. Then go with the bad match that the
2940 deeper function found. Case 3: We found a bad match, and we're
2941 the deepest function. Then return what we found, even though
2942 it's a bad match. */
2943
2944 if (new_oload_champ != -1
2945 && classify_oload_match (new_oload_champ_bv, nargs, 0) == STANDARD)
2946 {
2947 *oload_syms = new_oload_syms;
2948 *oload_champ = new_oload_champ;
2949 *oload_champ_bv = new_oload_champ_bv;
2950 do_cleanups (old_cleanups);
2951 return 1;
2952 }
2953 else if (searched_deeper)
2954 {
2955 xfree (new_oload_syms);
2956 xfree (new_oload_champ_bv);
2957 discard_cleanups (old_cleanups);
2958 return 0;
2959 }
2960 else
2961 {
2962 *oload_syms = new_oload_syms;
2963 *oload_champ = new_oload_champ;
2964 *oload_champ_bv = new_oload_champ_bv;
2965 do_cleanups (old_cleanups);
2966 return 0;
2967 }
2968 }
2969
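/* Illustrative sketch (an assumption, not part of GDB).  For
   QUALIFIED_NAME "A::B::f" the recursion above searches the
   enclosing scopes from the inside out (first namespace "A::B",
   then "A", then the global namespace "") and stops at the first
   STANDARD match; if no scope yields a STANDARD match, the result
   found in the deepest scope is kept.  Argument-dependent lookup is
   attempted only at the deepest level, and only when NO_ADL is
   zero.  */
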
2970 /* Look for a function to take NARGS args of ARGS. Find
2971 the best match from among the overloaded methods or functions
2972 given by FNS_PTR or OLOAD_SYMS or XM_WORKER_VEC, respectively.
2973 One, and only one, of FNS_PTR, OLOAD_SYMS and XM_WORKER_VEC must be
2974 non-NULL.
2975
2976 If XM_WORKER_VEC is NULL, then the length of the arrays FNS_PTR
2977 or OLOAD_SYMS (whichever is non-NULL) is specified in NUM_FNS.
2978
2979 Return the index of the best match; store an indication of the
2980 quality of the match in OLOAD_CHAMP_BV.
2981
2982 It is the caller's responsibility to free *OLOAD_CHAMP_BV. */
2983
2984 static int
2985 find_oload_champ (struct value **args, int nargs,
2986 int num_fns, struct fn_field *fns_ptr,
2987 VEC (xmethod_worker_ptr) *xm_worker_vec,
2988 struct symbol **oload_syms,
2989 struct badness_vector **oload_champ_bv)
2990 {
2991 int ix;
2992 int fn_count;
2993 int xm_worker_vec_n = VEC_length (xmethod_worker_ptr, xm_worker_vec);
2994 /* A measure of how good an overloaded instance is. */
2995 struct badness_vector *bv;
2996 /* Index of best overloaded function. */
2997 int oload_champ = -1;
2998 /* Current ambiguity state for overload resolution. */
2999 int oload_ambiguous = 0;
3000 /* 0 => no ambiguity, 1 => two good funcs, 2 => incomparable funcs. */
3001
3002 /* A champion can be found among methods alone, or among functions
3003 alone, or in xmethods alone, but not in more than one of these
3004 groups. */
3005 gdb_assert ((fns_ptr != NULL) + (oload_syms != NULL) + (xm_worker_vec != NULL)
3006 == 1);
3007
3008 *oload_champ_bv = NULL;
3009
3010 fn_count = (xm_worker_vec != NULL
3011 ? VEC_length (xmethod_worker_ptr, xm_worker_vec)
3012 : num_fns);
3013 /* Consider each candidate in turn. */
3014 for (ix = 0; ix < fn_count; ix++)
3015 {
3016 int jj;
3017 int static_offset = 0;
3018 int nparms;
3019 struct type **parm_types;
3020 struct xmethod_worker *worker = NULL;
3021
3022 if (xm_worker_vec != NULL)
3023 {
3024 worker = VEC_index (xmethod_worker_ptr, xm_worker_vec, ix);
3025 parm_types = get_xmethod_arg_types (worker, &nparms);
3026 }
3027 else
3028 {
3029 if (fns_ptr != NULL)
3030 {
3031 nparms = TYPE_NFIELDS (TYPE_FN_FIELD_TYPE (fns_ptr, ix));
3032 static_offset = oload_method_static_p (fns_ptr, ix);
3033 }
3034 else
3035 nparms = TYPE_NFIELDS (SYMBOL_TYPE (oload_syms[ix]));
3036
3037 parm_types = (struct type **)
3038 xmalloc (nparms * (sizeof (struct type *)));
3039 for (jj = 0; jj < nparms; jj++)
3040 parm_types[jj] = (fns_ptr != NULL
3041 ? (TYPE_FN_FIELD_ARGS (fns_ptr, ix)[jj].type)
3042 : TYPE_FIELD_TYPE (SYMBOL_TYPE (oload_syms[ix]),
3043 jj));
3044 }
3045
3046 /* Compare parameter types to supplied argument types. Skip
3047 THIS for static methods. */
3048 bv = rank_function (parm_types, nparms,
3049 args + static_offset,
3050 nargs - static_offset);
3051
3052 if (!*oload_champ_bv)
3053 {
3054 *oload_champ_bv = bv;
3055 oload_champ = 0;
3056 }
3057 else /* See whether current candidate is better or worse than
3058 previous best. */
3059 switch (compare_badness (bv, *oload_champ_bv))
3060 {
3061 case 0: /* Top two contenders are equally good. */
3062 oload_ambiguous = 1;
3063 break;
3064 case 1: /* Incomparable top contenders. */
3065 oload_ambiguous = 2;
3066 break;
3067 case 2: /* New champion, record details. */
3068 *oload_champ_bv = bv;
3069 oload_ambiguous = 0;
3070 oload_champ = ix;
3071 break;
3072 case 3:
3073 default:
3074 break;
3075 }
3076 xfree (parm_types);
3077 if (overload_debug)
3078 {
3079 if (fns_ptr != NULL)
3080 fprintf_filtered (gdb_stderr,
3081 "Overloaded method instance %s, # of parms %d\n",
3082 fns_ptr[ix].physname, nparms);
3083 else if (xm_worker_vec != NULL)
3084 fprintf_filtered (gdb_stderr,
3085 "Xmethod worker, # of parms %d\n",
3086 nparms);
3087 else
3088 fprintf_filtered (gdb_stderr,
3089 "Overloaded function instance "
3090 "%s # of parms %d\n",
3091 SYMBOL_DEMANGLED_NAME (oload_syms[ix]),
3092 nparms);
3093 for (jj = 0; jj < nargs - static_offset; jj++)
3094 fprintf_filtered (gdb_stderr,
3095 "...Badness @ %d : %d\n",
3096 jj, bv->rank[jj].rank);
3097 fprintf_filtered (gdb_stderr, "Overload resolution "
3098 "champion is %d, ambiguous? %d\n",
3099 oload_champ, oload_ambiguous);
3100 }
3101 }
3102
3103 return oload_champ;
3104 }
3105
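/* Illustrative sketch (an assumption; the candidates below are
   hypothetical).  With the overloads

     void f (int);
     void f (double);

   and the call "f (1)", rank_function scores the `int' candidate as
   an exact match and the `double' candidate as needing a conversion,
   so compare_badness reports a new champion when the better vector
   is seen and the returned index selects `f (int)'.  */
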
3106 /* Return 1 if we're looking at a static method, 0 if we're looking at
3107 a non-static method or a function that isn't a method. */
3108
3109 static int
3110 oload_method_static_p (struct fn_field *fns_ptr, int index)
3111 {
3112 if (fns_ptr && index >= 0 && TYPE_FN_FIELD_STATIC_P (fns_ptr, index))
3113 return 1;
3114 else
3115 return 0;
3116 }
3117
3118 /* Check how good an overload match OLOAD_CHAMP_BV represents. */
3119
3120 static enum oload_classification
3121 classify_oload_match (struct badness_vector *oload_champ_bv,
3122 int nargs,
3123 int static_offset)
3124 {
3125 int ix;
3126 enum oload_classification worst = STANDARD;
3127
3128 for (ix = 1; ix <= nargs - static_offset; ix++)
3129 {
3130 /* If this conversion is as bad as INCOMPATIBLE_TYPE_BADNESS
3131 or worse return INCOMPATIBLE. */
3132 if (compare_ranks (oload_champ_bv->rank[ix],
3133 INCOMPATIBLE_TYPE_BADNESS) <= 0)
3134 return INCOMPATIBLE; /* Truly mismatched types. */
3135 /* Otherwise, if this conversion is as bad as
3136 NS_POINTER_CONVERSION_BADNESS or worse, return NON_STANDARD. */
3137 else if (compare_ranks (oload_champ_bv->rank[ix],
3138 NS_POINTER_CONVERSION_BADNESS) <= 0)
3139 worst = NON_STANDARD; /* Non-standard type conversions
3140 needed. */
3141 }
3142
3143 /* If no INCOMPATIBLE classification was found, return the worst one
3144 that was found (if any). */
3145 return worst;
3146 }
3147
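/* Illustrative sketch (an assumption).  If every argument of the
   champion ranked as an exact or standard conversion, the result is
   STANDARD.  If, say, a structure value were passed where an `int'
   parameter is expected, that argument ranks at least as badly as
   INCOMPATIBLE_TYPE_BADNESS, the whole match is INCOMPATIBLE, and
   find_overload_match rejects the call.  */
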
3148 /* C++: return 1 if NAME is a legitimate name for the destructor of
3149 type TYPE. If TYPE does not have a destructor, or if NAME is
3150 inappropriate for TYPE, an error is signaled. Parameter TYPE should
3151 not yet have CHECK_TYPEDEF applied; this function will apply it itself. */
3152
3153 int
3154 destructor_name_p (const char *name, struct type *type)
3155 {
3156 if (name[0] == '~')
3157 {
3158 const char *dname = type_name_no_tag_or_error (type);
3159 const char *cp = strchr (dname, '<');
3160 unsigned int len;
3161
3162 /* Do not compare the template part for template classes. */
3163 if (cp == NULL)
3164 len = strlen (dname);
3165 else
3166 len = cp - dname;
3167 if (strlen (name + 1) != len || strncmp (dname, name + 1, len) != 0)
3168 error (_("name of destructor must equal name of class"));
3169 else
3170 return 1;
3171 }
3172 return 0;
3173 }
3174
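/* Illustrative sketch (an assumption; the class names are
   hypothetical).  For a class "Foo" the name "~Foo" is accepted; for
   a template instantiation "Foo<int>" the comparison stops at the
   '<', so "~Foo" is accepted there as well, while "~Bar" triggers
   the "name of destructor must equal name of class" error.  */
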
3175 /* Find an enum constant named NAME in TYPE. TYPE must be an "enum
3176 class". If the name is found, return a value representing it;
3177 otherwise throw an exception. */
3178
3179 static struct value *
3180 enum_constant_from_type (struct type *type, const char *name)
3181 {
3182 int i;
3183 int name_len = strlen (name);
3184
3185 gdb_assert (TYPE_CODE (type) == TYPE_CODE_ENUM
3186 && TYPE_DECLARED_CLASS (type));
3187
3188 for (i = TYPE_N_BASECLASSES (type); i < TYPE_NFIELDS (type); ++i)
3189 {
3190 const char *fname = TYPE_FIELD_NAME (type, i);
3191 int len;
3192
3193 if (TYPE_FIELD_LOC_KIND (type, i) != FIELD_LOC_KIND_ENUMVAL
3194 || fname == NULL)
3195 continue;
3196
3197 /* Look for the trailing "::NAME", since enum class constant
3198 names are qualified here. */
3199 len = strlen (fname);
3200 if (len >= name_len + 2
3201 && fname[len - name_len - 2] == ':'
3202 && fname[len - name_len - 1] == ':'
3203 && strcmp (&fname[len - name_len], name) == 0)
3204 return value_from_longest (type, TYPE_FIELD_ENUMVAL (type, i));
3205 }
3206
3207 error (_("no constant named \"%s\" in enum \"%s\""),
3208 name, TYPE_TAG_NAME (type));
3209 }
3210
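/* Illustrative sketch (an assumption; the enum below is
   hypothetical).  For

     enum class color { red, green };

   the recorded field names are qualified, e.g. "color::red", so a
   request for "red" matches the trailing "::red" and yields a value
   of type `color' with the constant's enumeration value.  */
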
3211 /* C++: Given an aggregate type CURTYPE, and a member name NAME,
3212 return the appropriate member (or the address of the member, if
3213 WANT_ADDRESS). This function is used to resolve user expressions
3214 of the form "DOMAIN::NAME". For more details on what happens, see
3215 the comment before value_struct_elt_for_reference. */
3216
3217 struct value *
3218 value_aggregate_elt (struct type *curtype, const char *name,
3219 struct type *expect_type, int want_address,
3220 enum noside noside)
3221 {
3222 switch (TYPE_CODE (curtype))
3223 {
3224 case TYPE_CODE_STRUCT:
3225 case TYPE_CODE_UNION:
3226 return value_struct_elt_for_reference (curtype, 0, curtype,
3227 name, expect_type,
3228 want_address, noside);
3229 case TYPE_CODE_NAMESPACE:
3230 return value_namespace_elt (curtype, name,
3231 want_address, noside);
3232
3233 case TYPE_CODE_ENUM:
3234 return enum_constant_from_type (curtype, name);
3235
3236 default:
3237 internal_error (__FILE__, __LINE__,
3238 _("non-aggregate type in value_aggregate_elt"));
3239 }
3240 }
3241
3242 /* Compares the two method/function types T1 and T2 for "equality"
3243 with respect to the methods' parameters. If the types of the
3244 two parameter lists are the same, returns 1; 0 otherwise. This
3245 comparison may ignore any artificial parameters in T1 if
3246 SKIP_ARTIFICIAL is non-zero. This function will ALWAYS skip
3247 the first artificial parameter in T1, assumed to be a 'this' pointer.
3248
3249 The type T2 is expected to have come from make_params (in eval.c). */
3250
3251 static int
3252 compare_parameters (struct type *t1, struct type *t2, int skip_artificial)
3253 {
3254 int start = 0;
3255
3256 if (TYPE_NFIELDS (t1) > 0 && TYPE_FIELD_ARTIFICIAL (t1, 0))
3257 ++start;
3258
3259 /* If skipping artificial fields, find the first real field
3260 in T1. */
3261 if (skip_artificial)
3262 {
3263 while (start < TYPE_NFIELDS (t1)
3264 && TYPE_FIELD_ARTIFICIAL (t1, start))
3265 ++start;
3266 }
3267
3268 /* Now compare parameters. */
3269
3270 /* Special case: a method taking void. T1 will contain no
3271 non-artificial fields, and T2 will contain TYPE_CODE_VOID. */
3272 if ((TYPE_NFIELDS (t1) - start) == 0 && TYPE_NFIELDS (t2) == 1
3273 && TYPE_CODE (TYPE_FIELD_TYPE (t2, 0)) == TYPE_CODE_VOID)
3274 return 1;
3275
3276 if ((TYPE_NFIELDS (t1) - start) == TYPE_NFIELDS (t2))
3277 {
3278 int i;
3279
3280 for (i = 0; i < TYPE_NFIELDS (t2); ++i)
3281 {
3282 if (compare_ranks (rank_one_type (TYPE_FIELD_TYPE (t1, start + i),
3283 TYPE_FIELD_TYPE (t2, i), NULL),
3284 EXACT_MATCH_BADNESS) != 0)
3285 return 0;
3286 }
3287
3288 return 1;
3289 }
3290
3291 return 0;
3292 }
3293
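/* Illustrative sketch (an assumption; the methods below are
   hypothetical).  Comparing the type of "int S::get ()" against a
   user-written instantiation "S::get()" succeeds through the
   void-parameter special case above; comparing "int S::get (int)"
   against "S::get(int)" ranks the single `int' parameter as an exact
   match after skipping the artificial `this' argument of T1.  */
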
3294 /* C++: Given an aggregate type CURTYPE, and a member name NAME,
3295 return the address of this member as a "pointer to member" type.
3296 If INTYPE is non-null, then it will be the type of the member we
3297 are looking for. This will help us resolve "pointers to member
3298 functions". This function is used to resolve user expressions of
3299 the form "DOMAIN::NAME". */
3300
3301 static struct value *
3302 value_struct_elt_for_reference (struct type *domain, int offset,
3303 struct type *curtype, const char *name,
3304 struct type *intype,
3305 int want_address,
3306 enum noside noside)
3307 {
3308 struct type *t = curtype;
3309 int i;
3310 struct value *v, *result;
3311
3312 if (TYPE_CODE (t) != TYPE_CODE_STRUCT
3313 && TYPE_CODE (t) != TYPE_CODE_UNION)
3314 error (_("Internal error: non-aggregate type "
3315 "to value_struct_elt_for_reference"));
3316
3317 for (i = TYPE_NFIELDS (t) - 1; i >= TYPE_N_BASECLASSES (t); i--)
3318 {
3319 const char *t_field_name = TYPE_FIELD_NAME (t, i);
3320
3321 if (t_field_name && strcmp (t_field_name, name) == 0)
3322 {
3323 if (field_is_static (&TYPE_FIELD (t, i)))
3324 {
3325 v = value_static_field (t, i);
3326 if (want_address)
3327 v = value_addr (v);
3328 return v;
3329 }
3330 if (TYPE_FIELD_PACKED (t, i))
3331 error (_("pointers to bitfield members not allowed"));
3332
3333 if (want_address)
3334 return value_from_longest
3335 (lookup_memberptr_type (TYPE_FIELD_TYPE (t, i), domain),
3336 offset + (LONGEST) (TYPE_FIELD_BITPOS (t, i) >> 3));
3337 else if (noside != EVAL_NORMAL)
3338 return allocate_value (TYPE_FIELD_TYPE (t, i));
3339 else
3340 {
3341 /* Try to evaluate NAME as a qualified name with implicit
3342 this pointer. In this case, attempt to return the
3343 equivalent to `this->*(&TYPE::NAME)'. */
3344 v = value_of_this_silent (current_language);
3345 if (v != NULL)
3346 {
3347 struct value *ptr;
3348 long mem_offset;
3349 struct type *type, *tmp;
3350
3351 ptr = value_aggregate_elt (domain, name, NULL, 1, noside);
3352 type = check_typedef (value_type (ptr));
3353 gdb_assert (type != NULL
3354 && TYPE_CODE (type) == TYPE_CODE_MEMBERPTR);
3355 tmp = lookup_pointer_type (TYPE_DOMAIN_TYPE (type));
3356 v = value_cast_pointers (tmp, v, 1);
3357 mem_offset = value_as_long (ptr);
3358 tmp = lookup_pointer_type (TYPE_TARGET_TYPE (type));
3359 result = value_from_pointer (tmp,
3360 value_as_long (v) + mem_offset);
3361 return value_ind (result);
3362 }
3363
3364 error (_("Cannot reference non-static field \"%s\""), name);
3365 }
3366 }
3367 }
3368
3369 /* C++: If it was not found as a data field, then try to return it
3370 as a pointer to a method. */
3371
3372 /* Perform all necessary dereferencing. */
3373 while (intype && TYPE_CODE (intype) == TYPE_CODE_PTR)
3374 intype = TYPE_TARGET_TYPE (intype);
3375
3376 for (i = TYPE_NFN_FIELDS (t) - 1; i >= 0; --i)
3377 {
3378 const char *t_field_name = TYPE_FN_FIELDLIST_NAME (t, i);
3379 char dem_opname[64];
3380
3381 if (strncmp (t_field_name, "__", 2) == 0
3382 || strncmp (t_field_name, "op", 2) == 0
3383 || strncmp (t_field_name, "type", 4) == 0)
3384 {
3385 if (cplus_demangle_opname (t_field_name,
3386 dem_opname, DMGL_ANSI))
3387 t_field_name = dem_opname;
3388 else if (cplus_demangle_opname (t_field_name,
3389 dem_opname, 0))
3390 t_field_name = dem_opname;
3391 }
3392 if (t_field_name && strcmp (t_field_name, name) == 0)
3393 {
3394 int j;
3395 int len = TYPE_FN_FIELDLIST_LENGTH (t, i);
3396 struct fn_field *f = TYPE_FN_FIELDLIST1 (t, i);
3397
3398 check_stub_method_group (t, i);
3399
3400 if (intype)
3401 {
3402 for (j = 0; j < len; ++j)
3403 {
3404 if (compare_parameters (TYPE_FN_FIELD_TYPE (f, j), intype, 0)
3405 || compare_parameters (TYPE_FN_FIELD_TYPE (f, j),
3406 intype, 1))
3407 break;
3408 }
3409
3410 if (j == len)
3411 error (_("no member function matches "
3412 "that type instantiation"));
3413 }
3414 else
3415 {
3416 int ii;
3417
3418 j = -1;
3419 for (ii = 0; ii < len; ++ii)
3420 {
3421 /* Skip artificial methods. This is necessary if,
3422 for example, the user wants to "print
3423 subclass::subclass" with only one user-defined
3424 constructor. There is no ambiguity in this case.
3425 We are careful here to allow artificial methods
3426 if they are the unique result. */
3427 if (TYPE_FN_FIELD_ARTIFICIAL (f, ii))
3428 {
3429 if (j == -1)
3430 j = ii;
3431 continue;
3432 }
3433
3434 /* Desired method is ambiguous if more than one
3435 method is defined. */
3436 if (j != -1 && !TYPE_FN_FIELD_ARTIFICIAL (f, j))
3437 error (_("non-unique member `%s' requires "
3438 "type instantiation"), name);
3439
3440 j = ii;
3441 }
3442
3443 if (j == -1)
3444 error (_("no matching member function"));
3445 }
3446
3447 if (TYPE_FN_FIELD_STATIC_P (f, j))
3448 {
3449 struct symbol *s =
3450 lookup_symbol (TYPE_FN_FIELD_PHYSNAME (f, j),
3451 0, VAR_DOMAIN, 0);
3452
3453 if (s == NULL)
3454 return NULL;
3455
3456 if (want_address)
3457 return value_addr (read_var_value (s, 0));
3458 else
3459 return read_var_value (s, 0);
3460 }
3461
3462 if (TYPE_FN_FIELD_VIRTUAL_P (f, j))
3463 {
3464 if (want_address)
3465 {
3466 result = allocate_value
3467 (lookup_methodptr_type (TYPE_FN_FIELD_TYPE (f, j)));
3468 cplus_make_method_ptr (value_type (result),
3469 value_contents_writeable (result),
3470 TYPE_FN_FIELD_VOFFSET (f, j), 1);
3471 }
3472 else if (noside == EVAL_AVOID_SIDE_EFFECTS)
3473 return allocate_value (TYPE_FN_FIELD_TYPE (f, j));
3474 else
3475 error (_("Cannot reference virtual member function \"%s\""),
3476 name);
3477 }
3478 else
3479 {
3480 struct symbol *s =
3481 lookup_symbol (TYPE_FN_FIELD_PHYSNAME (f, j),
3482 0, VAR_DOMAIN, 0);
3483
3484 if (s == NULL)
3485 return NULL;
3486
3487 v = read_var_value (s, 0);
3488 if (!want_address)
3489 result = v;
3490 else
3491 {
3492 result = allocate_value (lookup_methodptr_type (TYPE_FN_FIELD_TYPE (f, j)));
3493 cplus_make_method_ptr (value_type (result),
3494 value_contents_writeable (result),
3495 value_address (v), 0);
3496 }
3497 }
3498 return result;
3499 }
3500 }
3501 for (i = TYPE_N_BASECLASSES (t) - 1; i >= 0; i--)
3502 {
3503 struct value *v;
3504 int base_offset;
3505
3506 if (BASETYPE_VIA_VIRTUAL (t, i))
3507 base_offset = 0;
3508 else
3509 base_offset = TYPE_BASECLASS_BITPOS (t, i) / 8;
3510 v = value_struct_elt_for_reference (domain,
3511 offset + base_offset,
3512 TYPE_BASECLASS (t, i),
3513 name, intype,
3514 want_address, noside);
3515 if (v)
3516 return v;
3517 }
3518
3519 /* As a last chance, pretend that CURTYPE is a namespace, and look
3520 it up that way; this (frequently) works for types nested inside
3521 classes. */
3522
3523 return value_maybe_namespace_elt (curtype, name,
3524 want_address, noside);
3525 }
3526
3527 /* C++: Return the member NAME of the namespace given by the type
3528 CURTYPE. */
3529
3530 static struct value *
3531 value_namespace_elt (const struct type *curtype,
3532 const char *name, int want_address,
3533 enum noside noside)
3534 {
3535 struct value *retval = value_maybe_namespace_elt (curtype, name,
3536 want_address,
3537 noside);
3538
3539 if (retval == NULL)
3540 error (_("No symbol \"%s\" in namespace \"%s\"."),
3541 name, TYPE_TAG_NAME (curtype));
3542
3543 return retval;
3544 }
3545
3546 /* A helper function used by value_namespace_elt and
3547 value_struct_elt_for_reference. It looks up NAME inside the
3548 context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE
3549 is a class and NAME refers to a type in CURTYPE itself (as opposed
3550 to, say, some base class of CURTYPE). */
3551
3552 static struct value *
3553 value_maybe_namespace_elt (const struct type *curtype,
3554 const char *name, int want_address,
3555 enum noside noside)
3556 {
3557 const char *namespace_name = TYPE_TAG_NAME (curtype);
3558 struct symbol *sym;
3559 struct value *result;
3560
3561 sym = cp_lookup_symbol_namespace (namespace_name, name,
3562 get_selected_block (0), VAR_DOMAIN);
3563
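/* If the namespace-aware lookup failed, retry with the fully
   qualified "NAMESPACE::NAME" spelling as a static symbol.  */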
3564 if (sym == NULL)
3565 {
3566 char *concatenated_name = alloca (strlen (namespace_name) + 2
3567 + strlen (name) + 1);
3568
3569 sprintf (concatenated_name, "%s::%s", namespace_name, name);
3570 sym = lookup_static_symbol_aux (concatenated_name, VAR_DOMAIN);
3571 }
3572
3573 if (sym == NULL)
3574 return NULL;
3575 else if ((noside == EVAL_AVOID_SIDE_EFFECTS)
3576 && (SYMBOL_CLASS (sym) == LOC_TYPEDEF))
3577 result = allocate_value (SYMBOL_TYPE (sym));
3578 else
3579 result = value_of_variable (sym, get_selected_block (0));
3580
3581 if (result && want_address)
3582 result = value_addr (result);
3583
3584 return result;
3585 }
3586
3587 /* Given a pointer or a reference value V, find its real (RTTI) type.
3588
3589 The other parameters FULL, TOP, and USING_ENC are as for value_rtti_type ()
3590 and refer to the values computed for the object pointed to. */
3591
3592 struct type *
3593 value_rtti_indirect_type (struct value *v, int *full,
3594 int *top, int *using_enc)
3595 {
3596 struct value *target;
3597 struct type *type, *real_type, *target_type;
3598
3599 type = value_type (v);
3600 type = check_typedef (type);
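/* Strip the pointer or reference so value_rtti_type can examine the
   object itself; anything else has no indirect RTTI type.  */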
3601 if (TYPE_CODE (type) == TYPE_CODE_REF)
3602 target = coerce_ref (v);
3603 else if (TYPE_CODE (type) == TYPE_CODE_PTR)
3604 target = value_ind (v);
3605 else
3606 return NULL;
3607
3608 real_type = value_rtti_type (target, full, top, using_enc);
3609
3610 if (real_type)
3611 {
3612 /* Copy qualifiers to the referenced object. */
3613 target_type = value_type (target);
3614 real_type = make_cv_type (TYPE_CONST (target_type),
3615 TYPE_VOLATILE (target_type), real_type, NULL);
3616 if (TYPE_CODE (type) == TYPE_CODE_REF)
3617 real_type = lookup_reference_type (real_type);
3618 else if (TYPE_CODE (type) == TYPE_CODE_PTR)
3619 real_type = lookup_pointer_type (real_type);
3620 else
3621 internal_error (__FILE__, __LINE__, _("Unexpected value type."));
3622
3623 /* Copy qualifiers to the pointer/reference. */
3624 real_type = make_cv_type (TYPE_CONST (type), TYPE_VOLATILE (type),
3625 real_type, NULL);
3626 }
3627
3628 return real_type;
3629 }
3630
3631 /* Given a value pointed to by ARGP, check its real run-time type, and
3632 if that is different from the enclosing type, create a new value
3633 using the real run-time type as the enclosing type (and of the same
3634 type as ARGP) and return it, with the embedded offset adjusted to
3635 be the correct offset to the enclosed object. RTYPE is the type,
3636 and XFULL, XTOP, and XUSING_ENC are the other parameters, computed
3637 by value_rtti_type(). If these are available, they can be supplied
3638 and a second call to value_rtti_type() is avoided. (Pass RTYPE ==
3639 NULL if they're not available.)  */
3640
3641 struct value *
3642 value_full_object (struct value *argp,
3643 struct type *rtype,
3644 int xfull, int xtop,
3645 int xusing_enc)
3646 {
3647 struct type *real_type;
3648 int full = 0;
3649 int top = -1;
3650 int using_enc = 0;
3651 struct value *new_val;
3652
3653 if (rtype)
3654 {
3655 real_type = rtype;
3656 full = xfull;
3657 top = xtop;
3658 using_enc = xusing_enc;
3659 }
3660 else
3661 real_type = value_rtti_type (argp, &full, &top, &using_enc);
3662
3663 /* If no RTTI data, or if object is already complete, do nothing. */
3664 if (!real_type || real_type == value_enclosing_type (argp))
3665 return argp;
3666
3667 /* In a destructor we might see a real type that is a superclass of
3668 the object's type. In this case it is better to leave the object
3669 as-is. */
3670 if (full
3671 && TYPE_LENGTH (real_type) < TYPE_LENGTH (value_enclosing_type (argp)))
3672 return argp;
3673
3674 /* If we have the full object, but for some reason the enclosing
3675 type is wrong, set it. */
3676 /* pai: FIXME -- sounds iffy */
3677 if (full)
3678 {
3679 argp = value_copy (argp);
3680 set_value_enclosing_type (argp, real_type);
3681 return argp;
3682 }
3683
3684 /* Check if object is in memory. */
3685 if (VALUE_LVAL (argp) != lval_memory)
3686 {
3687 warning (_("Couldn't retrieve complete object of RTTI "
3688 "type %s; object may be in register(s)."),
3689 TYPE_NAME (real_type));
3690
3691 return argp;
3692 }
3693
3694 /* All other cases -- retrieve the complete object. */
3695 /* Go back by the computed top_offset from the beginning of the
3696 object, adjusting for the embedded offset of argp if that's what
3697 value_rtti_type used for its computation. */
3698 new_val = value_at_lazy (real_type, value_address (argp) - top +
3699 (using_enc ? 0 : value_embedded_offset (argp)));
3700 deprecated_set_value_type (new_val, value_type (argp));
3701 set_value_embedded_offset (new_val, (using_enc
3702 ? top + value_embedded_offset (argp)
3703 : top));
3704 return new_val;
3705 }
3706
3707
3708 /* Return the value of the `this' local variable, if one exists. Throw an
3709 error otherwise, e.g. if the request is made in an inappropriate context. */
3710
3711 struct value *
3712 value_of_this (const struct language_defn *lang)
3713 {
3714 struct symbol *sym;
3715 struct block *b;
3716 struct frame_info *frame;
3717
3718 if (!lang->la_name_of_this)
3719 error (_("no `this' in current language"));
3720
3721 frame = get_selected_frame (_("no frame selected"));
3722
3723 b = get_frame_block (frame, NULL);
3724
3725 sym = lookup_language_this (lang, b);
3726 if (sym == NULL)
3727 error (_("current stack frame does not contain a variable named `%s'"),
3728 lang->la_name_of_this);
3729
3730 return read_var_value (sym, frame);
3731 }
3732
3733 /* Return the value of the `this' local variable, if one exists. Return
3734 NULL otherwise; never throw an error. */
3735
3736 struct value *
3737 value_of_this_silent (const struct language_defn *lang)
3738 {
3739 struct value *ret = NULL;
3740 volatile struct gdb_exception except;
3741
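/* Swallow any error thrown by value_of_this so callers get NULL
   rather than an exception.  */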
3742 TRY_CATCH (except, RETURN_MASK_ERROR)
3743 {
3744 ret = value_of_this (lang);
3745 }
3746
3747 return ret;
3748 }
3749
3750 /* Create a slice (sub-string, sub-array) of ARRAY that is LENGTH
3751 elements long, starting at LOWBOUND. The result has the same lower
3752 bound as the original ARRAY. */
3753
3754 struct value *
3755 value_slice (struct value *array, int lowbound, int length)
3756 {
3757 struct type *slice_range_type, *slice_type, *range_type;
3758 LONGEST lowerbound, upperbound;
3759 struct value *slice;
3760 struct type *array_type;
3761
3762 array_type = check_typedef (value_type (array));
3763 if (TYPE_CODE (array_type) != TYPE_CODE_ARRAY
3764 && TYPE_CODE (array_type) != TYPE_CODE_STRING)
3765 error (_("cannot take slice of non-array"));
3766
3767 range_type = TYPE_INDEX_TYPE (array_type);
3768 if (get_discrete_bounds (range_type, &lowerbound, &upperbound) < 0)
3769 error (_("slice from bad array or bitstring"));
3770
3771 if (lowbound < lowerbound || length < 0
3772 || lowbound + length - 1 > upperbound)
3773 error (_("slice out of range"));
3774
3775 /* FIXME-type-allocation: need a way to free this type when we are
3776 done with it. */
3777 slice_range_type = create_static_range_type ((struct type *) NULL,
3778 TYPE_TARGET_TYPE (range_type),
3779 lowbound,
3780 lowbound + length - 1);
3781
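/* Build the slice's array type and a value of that type that refers
   to ARRAY's storage.  */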
3782 {
3783 struct type *element_type = TYPE_TARGET_TYPE (array_type);
3784 LONGEST offset
3785 = (lowbound - lowerbound) * TYPE_LENGTH (check_typedef (element_type));
3786
3787 slice_type = create_array_type ((struct type *) NULL,
3788 element_type,
3789 slice_range_type);
3790 TYPE_CODE (slice_type) = TYPE_CODE (array_type);
3791
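/* If ARRAY is a lazy lvalue in memory, keep the slice lazy too;
   otherwise copy the selected elements now.  */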
3792 if (VALUE_LVAL (array) == lval_memory && value_lazy (array))
3793 slice = allocate_value_lazy (slice_type);
3794 else
3795 {
3796 slice = allocate_value (slice_type);
3797 value_contents_copy (slice, 0, array, offset,
3798 TYPE_LENGTH (slice_type));
3799 }
3800
3801 set_value_component_location (slice, array);
3802 VALUE_FRAME_ID (slice) = VALUE_FRAME_ID (array);
3803 set_value_offset (slice, value_offset (array) + offset);
3804 }
3805
3806 return slice;
3807 }
3808
3809 /* Create a value for a FORTRAN complex number. Currently most of the
3810 time values are coerced to COMPLEX*16 (i.e. a complex number
3811 composed of 2 doubles). This really should be a smarter routine
3812 that figures out precision intelligently as opposed to assuming
3813 doubles. FIXME: fmb */
3814
3815 struct value *
3816 value_literal_complex (struct value *arg1,
3817 struct value *arg2,
3818 struct type *type)
3819 {
3820 struct value *val;
3821 struct type *real_type = TYPE_TARGET_TYPE (type);
3822
3823 val = allocate_value (type);
3824 arg1 = value_cast (real_type, arg1);
3825 arg2 = value_cast (real_type, arg2);
3826
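/* Lay out the complex value as the real part immediately followed by
   the imaginary part.  */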
3827 memcpy (value_contents_raw (val),
3828 value_contents (arg1), TYPE_LENGTH (real_type));
3829 memcpy (value_contents_raw (val) + TYPE_LENGTH (real_type),
3830 value_contents (arg2), TYPE_LENGTH (real_type));
3831 return val;
3832 }
3833
3834 /* Cast a value into the appropriate complex data type. */
3835
3836 static struct value *
3837 cast_into_complex (struct type *type, struct value *val)
3838 {
3839 struct type *real_type = TYPE_TARGET_TYPE (type);
3840
3841 if (TYPE_CODE (value_type (val)) == TYPE_CODE_COMPLEX)
3842 {
3843 struct type *val_real_type = TYPE_TARGET_TYPE (value_type (val));
3844 struct value *re_val = allocate_value (val_real_type);
3845 struct value *im_val = allocate_value (val_real_type);
3846
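/* Split VAL into its real and imaginary components, then rebuild it
   with the requested component type.  */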
3847 memcpy (value_contents_raw (re_val),
3848 value_contents (val), TYPE_LENGTH (val_real_type));
3849 memcpy (value_contents_raw (im_val),
3850 value_contents (val) + TYPE_LENGTH (val_real_type),
3851 TYPE_LENGTH (val_real_type));
3852
3853 return value_literal_complex (re_val, im_val, type);
3854 }
3855 else if (TYPE_CODE (value_type (val)) == TYPE_CODE_FLT
3856 || TYPE_CODE (value_type (val)) == TYPE_CODE_INT)
3857 return value_literal_complex (val,
3858 value_zero (real_type, not_lval),
3859 type);
3860 else
3861 error (_("cannot cast non-number to complex"));
3862 }
3863
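/* Register the "set/show overload-resolution" boolean, e.g.
   "set overload-resolution off" disables C++ overload resolution
   when evaluating expressions.  It is enabled by default.  */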
3864 void
3865 _initialize_valops (void)
3866 {
3867 add_setshow_boolean_cmd ("overload-resolution", class_support,
3868 &overload_resolution, _("\
3869 Set overload resolution in evaluating C++ functions."), _("\
3870 Show overload resolution in evaluating C++ functions."),
3871 NULL, NULL,
3872 show_overload_resolution,
3873 &setlist, &showlist);
3874 overload_resolution = 1;
3875 }