| File: | src/gnu/usr.bin/binutils/gdb/valops.c |
| Warning: | line 2083, column 3 Value stored to 'old_cleanups' is never read |
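For readers unfamiliar with this class of finding, a hedged, generic illustration of a dead-store report follows. It is not the code at valops.c:2083 (which lies outside this excerpt); `buf` is a hypothetical local, and the snippet assumes GDB's internal headers (defs.h) for `struct cleanup`, `make_cleanup` and `xfree`.

```c
/* Hypothetical illustration only -- not the code at valops.c:2083.  */
struct cleanup *old_cleanups;
old_cleanups = make_cleanup (xfree, buf);   /* value stored to 'old_cleanups' here ...  */
/* ... but 'old_cleanups' is never read again before the function returns,
   so the analyzer reports the store as dead.  */
```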
| 1 | /* Perform non-arithmetic operations on values, for GDB. |
| 2 | Copyright 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, |
| 3 | 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004 |
| 4 | Free Software Foundation, Inc. |
| 5 | |
| 6 | This file is part of GDB. |
| 7 | |
| 8 | This program is free software; you can redistribute it and/or modify |
| 9 | it under the terms of the GNU General Public License as published by |
| 10 | the Free Software Foundation; either version 2 of the License, or |
| 11 | (at your option) any later version. |
| 12 | |
| 13 | This program is distributed in the hope that it will be useful, |
| 14 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 16 | GNU General Public License for more details. |
| 17 | |
| 18 | You should have received a copy of the GNU General Public License |
| 19 | along with this program; if not, write to the Free Software |
| 20 | Foundation, Inc., 59 Temple Place - Suite 330, |
| 21 | Boston, MA 02111-1307, USA. */ |
| 22 | |
| 23 | #include "defs.h" |
| 24 | #include "symtab.h" |
| 25 | #include "gdbtypes.h" |
| 26 | #include "value.h" |
| 27 | #include "frame.h" |
| 28 | #include "inferior.h" |
| 29 | #include "gdbcore.h" |
| 30 | #include "target.h" |
| 31 | #include "demangle.h" |
| 32 | #include "language.h" |
| 33 | #include "gdbcmd.h" |
| 34 | #include "regcache.h" |
| 35 | #include "cp-abi.h" |
| 36 | #include "block.h" |
| 37 | #include "infcall.h" |
| 38 | #include "dictionary.h" |
| 39 | #include "cp-support.h" |
| 40 | |
| 41 | #include <errno.h> |
| 42 | #include "gdb_string.h" |
| 43 | #include "gdb_assert.h" |
| 44 | #include "cp-support.h" |
| 45 | #include "observer.h" |
| 46 | |
| 47 | extern int overload_debug; |
| 48 | /* Local functions. */ |
| 49 | |
| 50 | static int typecmp (int staticp, int varargs, int nargs, |
| 51 | struct field t1[], struct value *t2[]); |
| 52 | |
| 53 | static struct value *search_struct_field (char *, struct value *, int, |
| 54 | struct type *, int); |
| 55 | |
| 56 | static struct value *search_struct_method (char *, struct value **, |
| 57 | struct value **, |
| 58 | int, int *, struct type *); |
| 59 | |
| 60 | static int find_oload_champ_namespace (struct type **arg_types, int nargs, |
| 61 | const char *func_name, |
| 62 | const char *qualified_name, |
| 63 | struct symbol ***oload_syms, |
| 64 | struct badness_vector **oload_champ_bv); |
| 65 | |
| 66 | static |
| 67 | int find_oload_champ_namespace_loop (struct type **arg_types, int nargs, |
| 68 | const char *func_name, |
| 69 | const char *qualified_name, |
| 70 | int namespace_len, |
| 71 | struct symbol ***oload_syms, |
| 72 | struct badness_vector **oload_champ_bv, |
| 73 | int *oload_champ); |
| 74 | |
| 75 | static int find_oload_champ (struct type **arg_types, int nargs, int method, |
| 76 | int num_fns, |
| 77 | struct fn_field *fns_ptr, |
| 78 | struct symbol **oload_syms, |
| 79 | struct badness_vector **oload_champ_bv); |
| 80 | |
| 81 | static int oload_method_static (int method, struct fn_field *fns_ptr, |
| 82 | int index); |
| 83 | |
| 84 | enum oload_classification { STANDARD, NON_STANDARD, INCOMPATIBLE }; |
| 85 | |
| 86 | static enum |
| 87 | oload_classification classify_oload_match (struct badness_vector |
| 88 | * oload_champ_bv, |
| 89 | int nargs, |
| 90 | int static_offset); |
| 91 | |
| 92 | static int check_field_in (struct type *, const char *); |
| 93 | |
| 94 | static struct value *value_struct_elt_for_reference (struct type *domain, |
| 95 | int offset, |
| 96 | struct type *curtype, |
| 97 | char *name, |
| 98 | struct type *intype, |
| 99 | enum noside noside); |
| 100 | |
| 101 | static struct value *value_namespace_elt (const struct type *curtype, |
| 102 | char *name, |
| 103 | enum noside noside); |
| 104 | |
| 105 | static struct value *value_maybe_namespace_elt (const struct type *curtype, |
| 106 | char *name, |
| 107 | enum noside noside); |
| 108 | |
| 109 | static CORE_ADDR allocate_space_in_inferior (int); |
| 110 | |
| 111 | static struct value *cast_into_complex (struct type *, struct value *); |
| 112 | |
| 113 | static struct fn_field *find_method_list (struct value ** argp, char *method, |
| 114 | int offset, |
| 115 | struct type *type, int *num_fns, |
| 116 | struct type **basetype, |
| 117 | int *boffset); |
| 118 | |
| 119 | void _initialize_valops (void); |
| 120 | |
| 121 | /* Flag for whether we want to abandon failed expression evals by default. */ |
| 122 | |
| 123 | #if 0 |
| 124 | static int auto_abandon = 0; |
| 125 | #endif |
| 126 | |
| 127 | int overload_resolution = 0; |
| 128 | |
| 129 | /* Find the address of function name NAME in the inferior. */ |
| 130 | |
| 131 | struct value * |
| 132 | find_function_in_inferior (const char *name) |
| 133 | { |
| 134 | struct symbol *sym; |
| 135 | struct minimal_symbol *msymbol; |
| 136 | |
| 137 | sym = lookup_symbol (name, 0, VAR_DOMAIN, 0, NULL); |
| 138 | if (sym != NULL) |
| 139 | { |
| 140 | if (SYMBOL_CLASS (sym) != LOC_BLOCK) |
| 141 | error (_("\"%s\" exists in this program but is not a function."), |
| 142 | name); |
| 143 | |
| 144 | if (TYPE_PROTOTYPED (SYMBOL_TYPE (sym))) |
| 145 | return value_of_variable (sym, NULL); |
| 146 | } |
| 147 | |
| 148 | msymbol = lookup_minimal_symbol (name, NULL, NULL); |
| 149 | if (msymbol != NULL) |
| 150 | { |
| 151 | struct type *type; |
| 152 | CORE_ADDR maddr; |
| 153 | |
| 154 | type = lookup_pointer_type (builtin_type_char); |
| 155 | type = lookup_function_type (type); |
| 156 | type = lookup_pointer_type (type); |
| 157 | maddr = SYMBOL_VALUE_ADDRESS (msymbol); |
| 158 | return value_from_pointer (type, maddr); |
| 159 | } |
| 160 | |
| 161 | if (!target_has_execution) |
| 162 | error ("evaluation of this expression requires the target program to be active"); |
| 163 | else |
| 164 | error ("evaluation of this expression requires the program to have a function \"%s\".", name); |
| 165 | } |
| 166 | |
| 167 | /* Allocate NBYTES of space in the inferior using the inferior's malloc |
| 168 | and return a value that is a pointer to the allocated space. */ |
| 169 | |
| 170 | struct value * |
| 171 | value_allocate_space_in_inferior (int len) |
| 172 | { |
| 173 | struct value *blocklen; |
| 174 | struct value *val = find_function_in_inferior (NAME_OF_MALLOC); |
| 175 | |
| 176 | blocklen = value_from_longest (builtin_type_int, (LONGEST) len); |
| 177 | val = call_function_by_hand (val, 1, &blocklen); |
| 178 | if (value_logical_not (val)) |
| 179 | { |
| 180 | if (!target_has_execution) |
| 181 | error ("No memory available to program now: you need to start the target first"); |
| 182 | else |
| 183 | error ("No memory available to program: call to malloc failed"); |
| 184 | } |
| 185 | return val; |
| 186 | } |
| 187 | |
| 188 | static CORE_ADDR |
| 189 | allocate_space_in_inferior (int len) |
| 190 | { |
| 191 | return value_as_long (value_allocate_space_in_inferior (len)); |
| 192 | } |
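A hedged sketch of how these two allocation helpers get used further down in this file (for instance by value_string): allocate a block with the inferior's own malloc, then copy host bytes into it. `ptr` and `len` stand for a caller's hypothetical host buffer and its size, and the snippet assumes GDB's internal headers (defs.h, gdbcore.h).

```c
/* Sketch only; "ptr"/"len" are a hypothetical host buffer and its size.  */
CORE_ADDR addr = allocate_space_in_inferior (len);  /* inferior-side malloc (len)  */
write_memory (addr, ptr, len);                      /* copy host bytes into it     */
```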
| 193 | |
| 194 | /* Cast value ARG2 to type TYPE and return as a value. |
| 195 | More general than a C cast: accepts any two types of the same length, |
| 196 | and if ARG2 is an lvalue it can be cast into anything at all. */ |
| 197 | /* In C++, casts may change pointer or object representations. */ |
| 198 | |
| 199 | struct value * |
| 200 | value_cast (struct type *type, struct value *arg2) |
| 201 | { |
| 202 | enum type_code code1; |
| 203 | enum type_code code2; |
| 204 | int scalar; |
| 205 | struct type *type2; |
| 206 | |
| 207 | int convert_to_boolean = 0; |
| 208 | |
| 209 | if (VALUE_TYPE (arg2) == type) |
| 210 | return arg2; |
| 211 | |
| 212 | CHECK_TYPEDEF (type); |
| 213 | code1 = TYPE_CODE (type); |
| 214 | COERCE_REF (arg2); |
| 215 | type2 = check_typedef (VALUE_TYPE (arg2)); |
| 216 | |
| 217 | /* A cast to an undetermined-length array_type, such as (TYPE [])OBJECT, |
| 218 | is treated like a cast to (TYPE [N])OBJECT, |
| 219 | where N is sizeof(OBJECT)/sizeof(TYPE). */ |
| 220 | if (code1 == TYPE_CODE_ARRAY) |
| 221 | { |
| 222 | struct type *element_type = TYPE_TARGET_TYPE (type); |
| 223 | unsigned element_length = TYPE_LENGTH (check_typedef (element_type)); |
| 224 | if (element_length > 0 |
| 225 | && TYPE_ARRAY_UPPER_BOUND_TYPE (type) == BOUND_CANNOT_BE_DETERMINED) |
| 226 | { |
| 227 | struct type *range_type = TYPE_INDEX_TYPE (type); |
| 228 | int val_length = TYPE_LENGTH (type2); |
| 229 | LONGEST low_bound, high_bound, new_length; |
| 230 | if (get_discrete_bounds (range_type, &low_bound, &high_bound) < 0) |
| 231 | low_bound = 0, high_bound = 0; |
| 232 | new_length = val_length / element_length; |
| 233 | if (val_length % element_length != 0) |
| 234 | warning ("array element type size does not divide object size in cast"); |
| 235 | /* FIXME-type-allocation: need a way to free this type when we are |
| 236 | done with it. */ |
| 237 | range_type = create_range_type ((struct type *) NULL, |
| 238 | TYPE_TARGET_TYPE (range_type), |
| 239 | low_bound, |
| 240 | new_length + low_bound - 1); |
| 241 | VALUE_TYPE (arg2) = create_array_type ((struct type *) NULL, |
| 242 | element_type, range_type); |
| 243 | return arg2; |
| 244 | } |
| 245 | } |
| 246 | |
| 247 | if (current_language->c_style_arrays |
| 248 | && TYPE_CODE (type2) == TYPE_CODE_ARRAY) |
| 249 | arg2 = value_coerce_array (arg2); |
| 250 | |
| 251 | if (TYPE_CODE (type2) == TYPE_CODE_FUNC) |
| 252 | arg2 = value_coerce_function (arg2); |
| 253 | |
| 254 | type2 = check_typedef (VALUE_TYPE (arg2)); |
| 255 | COERCE_VARYING_ARRAY (arg2, type2); |
| 256 | code2 = TYPE_CODE (type2); |
| 257 | |
| 258 | if (code1 == TYPE_CODE_COMPLEX) |
| 259 | return cast_into_complex (type, arg2); |
| 260 | if (code1 == TYPE_CODE_BOOL) |
| 261 | { |
| 262 | code1 = TYPE_CODE_INT; |
| 263 | convert_to_boolean = 1; |
| 264 | } |
| 265 | if (code1 == TYPE_CODE_CHAR) |
| 266 | code1 = TYPE_CODE_INT; |
| 267 | if (code2 == TYPE_CODE_BOOL || code2 == TYPE_CODE_CHAR) |
| 268 | code2 = TYPE_CODE_INT; |
| 269 | |
| 270 | scalar = (code2 == TYPE_CODE_INT || code2 == TYPE_CODE_FLT |
| 271 | || code2 == TYPE_CODE_ENUM || code2 == TYPE_CODE_RANGE); |
| 272 | |
| 273 | if (code1 == TYPE_CODE_STRUCT |
| 274 | && code2 == TYPE_CODE_STRUCT |
| 275 | && TYPE_NAME (type) != 0) |
| 276 | { |
| 277 | /* Look in the type of the source to see if it contains the |
| 278 | type of the target as a superclass. If so, we'll need to |
| 279 | offset the object in addition to changing its type. */ |
| 280 | struct value *v = search_struct_field (type_name_no_tag (type), |
| 281 | arg2, 0, type2, 1); |
| 282 | if (v) |
| 283 | { |
| 284 | VALUE_TYPE (v) = type; |
| 285 | return v; |
| 286 | } |
| 287 | } |
| 288 | if (code1 == TYPE_CODE_FLT && scalar) |
| 289 | return value_from_double (type, value_as_double (arg2)); |
| 290 | else if ((code1 == TYPE_CODE_INT || code1 == TYPE_CODE_ENUM |
| 291 | || code1 == TYPE_CODE_RANGE) |
| 292 | && (scalar || code2 == TYPE_CODE_PTR)) |
| 293 | { |
| 294 | LONGEST longest; |
| 295 | |
| 296 | if (deprecated_hp_som_som_object_present /* if target compiled by HP aCC */ |
| 297 | && (code2 == TYPE_CODE_PTR)) |
| 298 | { |
| 299 | unsigned int *ptr; |
| 300 | struct value *retvalp; |
| 301 | |
| 302 | switch (TYPE_CODE (TYPE_TARGET_TYPE (type2))) |
| 303 | { |
| 304 | /* With HP aCC, pointers to data members have a bias */ |
| 305 | case TYPE_CODE_MEMBER: |
| 306 | retvalp = value_from_longest (type, value_as_long (arg2)); |
| 307 | /* force evaluation */ |
| 308 | ptr = (unsigned int *) VALUE_CONTENTS (retvalp); |
| 309 | *ptr &= ~0x20000000; /* zap 29th bit to remove bias */ |
| 310 | return retvalp; |
| 311 | |
| 312 | /* While pointers to methods don't really point to a function */ |
| 313 | case TYPE_CODE_METHOD: |
| 314 | error ("Pointers to methods not supported with HP aCC"); |
| 315 | |
| 316 | default: |
| 317 | break; /* fall out and go to normal handling */ |
| 318 | } |
| 319 | } |
| 320 | |
| 321 | /* When we cast pointers to integers, we mustn't use |
| 322 | POINTER_TO_ADDRESS to find the address the pointer |
| 323 | represents, as value_as_long would. GDB should evaluate |
| 324 | expressions just as the compiler would --- and the compiler |
| 325 | sees a cast as a simple reinterpretation of the pointer's |
| 326 | bits. */ |
| 327 | if (code2 == TYPE_CODE_PTR) |
| 328 | longest = extract_unsigned_integer (VALUE_CONTENTS (arg2), |
| 329 | TYPE_LENGTH (type2)); |
| 330 | else |
| 331 | longest = value_as_long (arg2); |
| 332 | return value_from_longest (type, convert_to_boolean ? |
| 333 | (LONGEST) (longest ? 1 : 0) : longest); |
| 334 | } |
| 335 | else if (code1 == TYPE_CODE_PTR && (code2 == TYPE_CODE_INT || |
| 336 | code2 == TYPE_CODE_ENUM || |
| 337 | code2 == TYPE_CODE_RANGE)) |
| 338 | { |
| 339 | /* TYPE_LENGTH (type) is the length of a pointer, but we really |
| 340 | want the length of an address! -- we are really dealing with |
| 341 | addresses (i.e., gdb representations) not pointers (i.e., |
| 342 | target representations) here. |
| 343 | |
| 344 | This allows things like "print *(int *)0x01000234" to work |
| 345 | without printing a misleading message -- which would |
| 346 | otherwise occur when dealing with a target having two byte |
| 347 | pointers and four byte addresses. */ |
| 348 | |
| 349 | int addr_bit = TARGET_ADDR_BIT; |
| 350 | |
| 351 | LONGEST longest = value_as_long (arg2); |
| 352 | if (addr_bit < sizeof (LONGEST) * HOST_CHAR_BIT) |
| 353 | { |
| 354 | if (longest >= ((LONGEST) 1 << addr_bit) |
| 355 | || longest <= -((LONGEST) 1 << addr_bit)) |
| 356 | warning ("value truncated"); |
| 357 | } |
| 358 | return value_from_longest (type, longest); |
| 359 | } |
| 360 | else if (TYPE_LENGTH (type) == TYPE_LENGTH (type2)) |
| 361 | { |
| 362 | if (code1 == TYPE_CODE_PTR && code2 == TYPE_CODE_PTR) |
| 363 | { |
| 364 | struct type *t1 = check_typedef (TYPE_TARGET_TYPE (type)); |
| 365 | struct type *t2 = check_typedef (TYPE_TARGET_TYPE (type2)); |
| 366 | if (TYPE_CODE (t1) == TYPE_CODE_STRUCT |
| 367 | && TYPE_CODE (t2) == TYPE_CODE_STRUCT |
| 368 | && !value_logical_not (arg2)) |
| 369 | { |
| 370 | struct value *v; |
| 371 | |
| 372 | /* Look in the type of the source to see if it contains the |
| 373 | type of the target as a superclass. If so, we'll need to |
| 374 | offset the pointer rather than just change its type. */ |
| 375 | if (TYPE_NAME (t1) != NULL) |
| 376 | { |
| 377 | v = search_struct_field (type_name_no_tag (t1), |
| 378 | value_ind (arg2), 0, t2, 1); |
| 379 | if (v) |
| 380 | { |
| 381 | v = value_addr (v); |
| 382 | VALUE_TYPE (v) = type; |
| 383 | return v; |
| 384 | } |
| 385 | } |
| 386 | |
| 387 | /* Look in the type of the target to see if it contains the |
| 388 | type of the source as a superclass. If so, we'll need to |
| 389 | offset the pointer rather than just change its type. |
| 390 | FIXME: This fails silently with virtual inheritance. */ |
| 391 | if (TYPE_NAME (t2) != NULL) |
| 392 | { |
| 393 | v = search_struct_field (type_name_no_tag (t2), |
| 394 | value_zero (t1, not_lval), 0, t1, 1); |
| 395 | if (v) |
| 396 | { |
| 397 | CORE_ADDR addr2 = value_as_address (arg2); |
| 398 | addr2 -= (VALUE_ADDRESS (v) |
| 399 | + VALUE_OFFSET (v) |
| 400 | + VALUE_EMBEDDED_OFFSET (v)); |
| 401 | return value_from_pointer (type, addr2); |
| 402 | } |
| 403 | } |
| 404 | } |
| 405 | /* No superclass found, just fall through to change ptr type. */ |
| 406 | } |
| 407 | VALUE_TYPE (arg2) = type; |
| 408 | arg2 = value_change_enclosing_type (arg2, type); |
| 409 | VALUE_POINTED_TO_OFFSET (arg2) = 0; /* pai: chk_val */ |
| 410 | return arg2; |
| 411 | } |
| 412 | else if (VALUE_LVAL (arg2) == lval_memory) |
| 413 | { |
| 414 | return value_at_lazy (type, VALUE_ADDRESS (arg2) + VALUE_OFFSET (arg2), |
| 415 | VALUE_BFD_SECTION (arg2)); |
| 416 | } |
| 417 | else if (code1 == TYPE_CODE_VOID) |
| 418 | { |
| 419 | return value_zero (builtin_type_void, not_lval); |
| 420 | } |
| 421 | else |
| 422 | { |
| 423 | error ("Invalid cast."); |
| 424 | return 0; |
| 425 | } |
| 426 | } |
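A hedged worked example of the undetermined-length array cast handled near the top of value_cast: the new length N is just sizeof(OBJECT)/sizeof(TYPE), so a 12-byte object cast to `(int [])` behaves like a cast to `int [3]`. The 4-byte int and 12-byte object here are hypothetical sizes chosen for illustration.

```c
/* Illustration only; element and object sizes are hypothetical.  */
int object_size = 12, element_size = 4;
int n = object_size / element_size;   /* the new_length computed in value_cast  */
/* so "(int []) obj" is treated as "(int [3]) obj"; a nonzero remainder would
   trigger the "does not divide object size in cast" warning instead.  */
```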
| 427 | |
| 428 | /* Create a value of type TYPE that is zero, and return it. */ |
| 429 | |
| 430 | struct value * |
| 431 | value_zero (struct type *type, enum lval_type lv) |
| 432 | { |
| 433 | struct value *val = allocate_value (type); |
| 434 | |
| 435 | memset (VALUE_CONTENTS (val), 0, TYPE_LENGTH (check_typedef (type))); |
| 436 | VALUE_LVAL (val) = lv; |
| 437 | |
| 438 | return val; |
| 439 | } |
| 440 | |
| 441 | /* Return a value with type TYPE located at ADDR. |
| 442 | |
| 443 | Call value_at only if the data needs to be fetched immediately; |
| 444 | if we can be 'lazy' and defer the fetch, perhaps indefinately, call |
| 445 | value_at_lazy instead. value_at_lazy simply records the address of |
| 446 | the data and sets the lazy-evaluation-required flag. The lazy flag |
| 447 | is tested in the VALUE_CONTENTS macro, which is used if and when |
| 448 | the contents are actually required. |
| 449 | |
| 450 | Note: value_at does *NOT* handle embedded offsets; perform such |
| 451 | adjustments before or after calling it. */ |
| 452 | |
| 453 | struct value * |
| 454 | value_at (struct type *type, CORE_ADDR addr, asection *sect) |
| 455 | { |
| 456 | struct value *val; |
| 457 | |
| 458 | if (TYPE_CODE (check_typedef (type)) == TYPE_CODE_VOID) |
| 459 | error ("Attempt to dereference a generic pointer."); |
| 460 | |
| 461 | val = allocate_value (type); |
| 462 | |
| 463 | read_memory (addr, VALUE_CONTENTS_ALL_RAW (val), TYPE_LENGTH (type)); |
| 464 | |
| 465 | VALUE_LVAL (val) = lval_memory; |
| 466 | VALUE_ADDRESS (val) = addr; |
| 467 | VALUE_BFD_SECTION (val) = sect; |
| 468 | |
| 469 | return val; |
| 470 | } |
| 471 | |
| 472 | /* Return a lazy value with type TYPE located at ADDR (cf. value_at). */ |
| 473 | |
| 474 | struct value * |
| 475 | value_at_lazy (struct type *type, CORE_ADDR addr, asection *sect) |
| 476 | { |
| 477 | struct value *val; |
| 478 | |
| 479 | if (TYPE_CODE (check_typedef (type)) == TYPE_CODE_VOID) |
| 480 | error ("Attempt to dereference a generic pointer."); |
| 481 | |
| 482 | val = allocate_value (type); |
| 483 | |
| 484 | VALUE_LVAL (val) = lval_memory; |
| 485 | VALUE_ADDRESS (val) = addr; |
| 486 | VALUE_LAZY (val) = 1; |
| 487 | VALUE_BFD_SECTION (val) = sect; |
| 488 | |
| 489 | return val; |
| 490 | } |
| 491 | |
| 492 | /* Called only from the VALUE_CONTENTS and VALUE_CONTENTS_ALL macros, |
| 493 | if the current data for a variable needs to be loaded into |
| 494 | VALUE_CONTENTS(VAL). Fetches the data from the user's process, and |
| 495 | clears the lazy flag to indicate that the data in the buffer is valid. |
| 496 | |
| 497 | If the value is zero-length, we avoid calling read_memory, which would |
| 498 | abort. We mark the value as fetched anyway -- all 0 bytes of it. |
| 499 | |
| 500 | This function returns a value because it is used in the VALUE_CONTENTS |
| 501 | macro as part of an expression, where a void would not work. The |
| 502 | value is ignored. */ |
| 503 | |
| 504 | int |
| 505 | value_fetch_lazy (struct value *val) |
| 506 | { |
| 507 | CORE_ADDR addr = VALUE_ADDRESS (val) + VALUE_OFFSET (val); |
| 508 | int length = TYPE_LENGTH (VALUE_ENCLOSING_TYPE (val)); |
| 509 | |
| 510 | struct type *type = VALUE_TYPE (val); |
| 511 | if (length) |
| 512 | read_memory (addr, VALUE_CONTENTS_ALL_RAW (val), length); |
| 513 | |
| 514 | VALUE_LAZY (val) = 0; |
| 515 | return 0; |
| 516 | } |
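A hedged sketch of the lazy-fetch protocol the two comments above describe: value_at_lazy only records the address, and the first use of the VALUE_CONTENTS macro triggers value_fetch_lazy, which reads target memory and clears the lazy flag. `type` and `addr` are hypothetical, and the snippet assumes GDB's internal headers.

```c
/* Sketch only; "type" and "addr" are hypothetical.  */
struct value *v = value_at_lazy (type, addr, NULL);  /* no target access yet; lazy flag set   */
char *bytes = VALUE_CONTENTS (v);                    /* macro sees the lazy flag, calls        */
                                                     /* value_fetch_lazy, then clears the flag */
```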
| 517 | |
| 518 | |
| 519 | /* Store the contents of FROMVAL into the location of TOVAL. |
| 520 | Return a new value with the location of TOVAL and contents of FROMVAL. */ |
| 521 | |
| 522 | struct value * |
| 523 | value_assign (struct value *toval, struct value *fromval) |
| 524 | { |
| 525 | struct type *type; |
| 526 | struct value *val; |
| 527 | struct frame_id old_frame; |
| 528 | |
| 529 | if (!toval->modifiable) |
| 530 | error ("Left operand of assignment is not a modifiable lvalue."); |
| 531 | |
| 532 | COERCE_REF (toval); |
| 533 | |
| 534 | type = VALUE_TYPE (toval); |
| 535 | if (VALUE_LVAL (toval) != lval_internalvar) |
| 536 | fromval = value_cast (type, fromval); |
| 537 | else |
| 538 | COERCE_ARRAY (fromval); |
| 539 | CHECK_TYPEDEF (type); |
| 540 | |
| 541 | /* Since modifying a register can trash the frame chain, and modifying memory |
| 542 | can trash the frame cache, we save the old frame and then restore the new |
| 543 | frame afterwards. */ |
| 544 | old_frame = get_frame_id (deprecated_selected_frame); |
| 545 | |
| 546 | switch (VALUE_LVAL (toval)) |
| 547 | { |
| 548 | case lval_internalvar: |
| 549 | set_internalvar (VALUE_INTERNALVAR (toval), fromval); |
| 550 | val = value_copy (VALUE_INTERNALVAR (toval)->value); |
| 551 | val = value_change_enclosing_type (val, VALUE_ENCLOSING_TYPE (fromval)); |
| 552 | VALUE_EMBEDDED_OFFSET (val) = VALUE_EMBEDDED_OFFSET (fromval); |
| 553 | VALUE_POINTED_TO_OFFSET (val) = VALUE_POINTED_TO_OFFSET (fromval); |
| 554 | return val; |
| 555 | |
| 556 | case lval_internalvar_component: |
| 557 | set_internalvar_component (VALUE_INTERNALVAR (toval), |
| 558 | VALUE_OFFSET (toval), |
| 559 | VALUE_BITPOS (toval), |
| 560 | VALUE_BITSIZE (toval), |
| 561 | fromval); |
| 562 | break; |
| 563 | |
| 564 | case lval_memory: |
| 565 | { |
| 566 | char *dest_buffer; |
| 567 | CORE_ADDR changed_addr; |
| 568 | int changed_len; |
| 569 | char buffer[sizeof (LONGEST)]; |
| 570 | |
| 571 | if (VALUE_BITSIZE (toval)) |
| 572 | { |
| 573 | /* We assume that the argument to read_memory is in units of |
| 574 | host chars. FIXME: Is that correct? */ |
| 575 | changed_len = (VALUE_BITPOS (toval) |
| 576 | + VALUE_BITSIZE (toval) |
| 577 | + HOST_CHAR_BIT - 1) |
| 578 | / HOST_CHAR_BIT; |
| 579 | |
| 580 | if (changed_len > (int) sizeof (LONGEST)) |
| 581 | error ("Can't handle bitfields which don't fit in a %d bit word.", |
| 582 | (int) sizeof (LONGEST) * HOST_CHAR_BIT); |
| 583 | |
| 584 | read_memory (VALUE_ADDRESS (toval) + VALUE_OFFSET (toval), |
| 585 | buffer, changed_len); |
| 586 | modify_field (buffer, value_as_long (fromval), |
| 587 | VALUE_BITPOS (toval), VALUE_BITSIZE (toval)); |
| 588 | changed_addr = VALUE_ADDRESS (toval) + VALUE_OFFSET (toval); |
| 589 | dest_buffer = buffer; |
| 590 | } |
| 591 | else |
| 592 | { |
| 593 | changed_addr = VALUE_ADDRESS (toval) + VALUE_OFFSET (toval); |
| 594 | changed_len = TYPE_LENGTH (type); |
| 595 | dest_buffer = VALUE_CONTENTS (fromval); |
| 596 | } |
| 597 | |
| 598 | write_memory (changed_addr, dest_buffer, changed_len); |
| 599 | if (deprecated_memory_changed_hook) |
| 600 | deprecated_memory_changed_hook (changed_addr, changed_len); |
| 601 | } |
| 602 | break; |
| 603 | |
| 604 | case lval_reg_frame_relative: |
| 605 | case lval_register: |
| 606 | { |
| 607 | struct frame_info *frame; |
| 608 | int value_reg; |
| 609 | |
| 610 | /* Figure out which frame this is in currently. */ |
| 611 | if (VALUE_LVAL (toval) == lval_register) |
| 612 | { |
| 613 | frame = get_current_frame (); |
| 614 | value_reg = VALUE_REGNO (toval); |
| 615 | } |
| 616 | else |
| 617 | { |
| 618 | frame = frame_find_by_id (VALUE_FRAME_ID (toval)); |
| 619 | value_reg = VALUE_FRAME_REGNUM (toval); |
| 620 | } |
| 621 | |
| 622 | if (!frame) |
| 623 | error ("Value being assigned to is no longer active."); |
| 624 | |
| 625 | if (VALUE_LVAL (toval) == lval_reg_frame_relative |
| 626 | && CONVERT_REGISTER_P (VALUE_FRAME_REGNUM (toval), type)) |
| 627 | { |
| 628 | /* If TOVAL is a special machine register requiring |
| 629 | conversion of program values to a special raw format. */ |
| 630 | VALUE_TO_REGISTER (frame, VALUE_FRAME_REGNUM (toval), |
| 631 | type, VALUE_CONTENTS (fromval)); |
| 632 | } |
| 633 | else |
| 634 | { |
| 635 | /* TOVAL is stored in a series of registers in the frame |
| 636 | specified by the structure. Copy that value out, |
| 637 | modify it, and copy it back in. */ |
| 638 | int amount_copied; |
| 639 | int amount_to_copy; |
| 640 | char *buffer; |
| 641 | int reg_offset; |
| 642 | int byte_offset; |
| 643 | int regno; |
| 644 | |
| 645 | /* Locate the first register that falls in the value that |
| 646 | needs to be transfered. Compute the offset of the |
| 647 | value in that register. */ |
| 648 | { |
| 649 | int offset; |
| 650 | for (reg_offset = value_reg, offset = 0; |
| 651 | offset + register_size (current_gdbarch, reg_offset) <= VALUE_OFFSET (toval); |
| 652 | reg_offset++); |
| 653 | byte_offset = VALUE_OFFSET (toval) - offset; |
| 654 | } |
| 655 | |
| 656 | /* Compute the number of register aligned values that need |
| 657 | to be copied. */ |
| 658 | if (VALUE_BITSIZE (toval)) |
| 659 | amount_to_copy = byte_offset + 1; |
| 660 | else |
| 661 | amount_to_copy = byte_offset + TYPE_LENGTH (type); |
| 662 | |
| 663 | /* And a bounce buffer. Be slightly over generous. */ |
| 664 | buffer = (char *) alloca (amount_to_copy + MAX_REGISTER_SIZE); |
| 665 | |
| 666 | /* Copy it in. */ |
| 667 | for (regno = reg_offset, amount_copied = 0; |
| 668 | amount_copied < amount_to_copy; |
| 669 | amount_copied += register_size (current_gdbarch, regno), regno++) |
| 670 | frame_register_read (frame, regno, buffer + amount_copied); |
| 671 | |
| 672 | /* Modify what needs to be modified. */ |
| 673 | if (VALUE_BITSIZE (toval)) |
| 674 | modify_field (buffer + byte_offset, |
| 675 | value_as_long (fromval), |
| 676 | VALUE_BITPOS (toval), VALUE_BITSIZE (toval)); |
| 677 | else |
| 678 | memcpy (buffer + byte_offset, VALUE_CONTENTS (fromval), |
| 679 | TYPE_LENGTH (type)); |
| 680 | |
| 681 | /* Copy it out. */ |
| 682 | for (regno = reg_offset, amount_copied = 0; |
| 683 | amount_copied < amount_to_copy; |
| 684 | amount_copied += register_size (current_gdbarch, regno), regno++) |
| 685 | put_frame_register (frame, regno, buffer + amount_copied); |
| 686 | |
| 687 | } |
| 688 | if (deprecated_register_changed_hook) |
| 689 | deprecated_register_changed_hook (-1); |
| 690 | observer_notify_target_changed (&current_target); |
| 691 | break; |
| 692 | } |
| 693 | |
| 694 | default: |
| 695 | error ("Left operand of assignment is not an lvalue."); |
| 696 | } |
| 697 | |
| 698 | /* Assigning to the stack pointer, frame pointer, and other |
| 699 | (architecture and calling convention specific) registers may |
| 700 | cause the frame cache to be out of date. Assigning to memory |
| 701 | also can. We just do this on all assignments to registers or |
| 702 | memory, for simplicity's sake; I doubt the slowdown matters. */ |
| 703 | switch (VALUE_LVAL (toval)) |
| 704 | { |
| 705 | case lval_memory: |
| 706 | case lval_register: |
| 707 | case lval_reg_frame_relative: |
| 708 | |
| 709 | reinit_frame_cache (); |
| 710 | |
| 711 | /* Having destoroyed the frame cache, restore the selected frame. */ |
| 712 | |
| 713 | /* FIXME: cagney/2002-11-02: There has to be a better way of |
| 714 | doing this. Instead of constantly saving/restoring the |
| 715 | frame. Why not create a get_selected_frame() function that, |
| 716 | having saved the selected frame's ID can automatically |
| 717 | re-find the previously selected frame automatically. */ |
| 718 | |
| 719 | { |
| 720 | struct frame_info *fi = frame_find_by_id (old_frame); |
| 721 | if (fi != NULL) |
| 722 | select_frame (fi); |
| 723 | } |
| 724 | |
| 725 | break; |
| 726 | default: |
| 727 | break; |
| 728 | } |
| 729 | |
| 730 | /* If the field does not entirely fill a LONGEST, then zero the sign bits. |
| 731 | If the field is signed, and is negative, then sign extend. */ |
| 732 | if ((VALUE_BITSIZE (toval) > 0) |
| 733 | && (VALUE_BITSIZE (toval) < 8 * (int) sizeof (LONGEST))) |
| 734 | { |
| 735 | LONGEST fieldval = value_as_long (fromval); |
| 736 | LONGEST valmask = (((ULONGEST) 1) << VALUE_BITSIZE (toval)) - 1; |
| 737 | |
| 738 | fieldval &= valmask; |
| 739 | if (!TYPE_UNSIGNED (type) && (fieldval & (valmask ^ (valmask >> 1)))) |
| 740 | fieldval |= ~valmask; |
| 741 | |
| 742 | fromval = value_from_longest (type, fieldval); |
| 743 | } |
| 744 | |
| 745 | val = value_copy (toval); |
| 746 | memcpy (VALUE_CONTENTS_RAW (val), VALUE_CONTENTS (fromval), |
| 747 | TYPE_LENGTH (type)); |
| 748 | VALUE_TYPE (val) = type; |
| 749 | val = value_change_enclosing_type (val, VALUE_ENCLOSING_TYPE (fromval)); |
| 750 | VALUE_EMBEDDED_OFFSET (val) = VALUE_EMBEDDED_OFFSET (fromval); |
| 751 | VALUE_POINTED_TO_OFFSET (val) = VALUE_POINTED_TO_OFFSET (fromval); |
| 752 | |
| 753 | return val; |
| 754 | } |
| 755 | |
| 756 | /* Extend a value VAL to COUNT repetitions of its type. */ |
| 757 | |
| 758 | struct value * |
| 759 | value_repeat (struct value *arg1, int count) |
| 760 | { |
| 761 | struct value *val; |
| 762 | |
| 763 | if (VALUE_LVAL (arg1) != lval_memory) |
| 764 | error ("Only values in memory can be extended with '@'."); |
| 765 | if (count < 1) |
| 766 | error ("Invalid number %d of repetitions.", count); |
| 767 | |
| 768 | val = allocate_repeat_value (VALUE_ENCLOSING_TYPE (arg1), count); |
| 769 | |
| 770 | read_memory (VALUE_ADDRESS (arg1) + VALUE_OFFSET (arg1), |
| 771 | VALUE_CONTENTS_ALL_RAW (val), |
| 772 | TYPE_LENGTH (VALUE_ENCLOSING_TYPE (val))); |
| 773 | VALUE_LVAL (val) = lval_memory; |
| 774 | VALUE_ADDRESS (val) = VALUE_ADDRESS (arg1) + VALUE_OFFSET (arg1); |
| 775 | |
| 776 | return val; |
| 777 | } |
| 778 | |
| 779 | struct value * |
| 780 | value_of_variable (struct symbol *var, struct block *b) |
| 781 | { |
| 782 | struct value *val; |
| 783 | struct frame_info *frame = NULL; |
| 784 | |
| 785 | if (!b) |
| 786 | frame = NULL; /* Use selected frame. */ |
| 787 | else if (symbol_read_needs_frame (var)) |
| 788 | { |
| 789 | frame = block_innermost_frame (b); |
| 790 | if (!frame) |
| 791 | { |
| 792 | if (BLOCK_FUNCTION (b) |
| 793 | && SYMBOL_PRINT_NAME (BLOCK_FUNCTION (b))) |
| 794 | error ("No frame is currently executing in block %s.", |
| 795 | SYMBOL_PRINT_NAME (BLOCK_FUNCTION (b))); |
| 796 | else |
| 797 | error ("No frame is currently executing in specified block"); |
| 798 | } |
| 799 | } |
| 800 | |
| 801 | val = read_var_value (var, frame); |
| 802 | if (!val) |
| 803 | error ("Address of symbol \"%s\" is unknown.", SYMBOL_PRINT_NAME (var)(demangle ? (symbol_natural_name (&(var)->ginfo)) : (var )->ginfo.name)); |
| 804 | |
| 805 | return val; |
| 806 | } |
| 807 | |
| 808 | /* Given a value which is an array, return a value which is a pointer to its |
| 809 | first element, regardless of whether or not the array has a nonzero lower |
| 810 | bound. |
| 811 | |
| 812 | FIXME: A previous comment here indicated that this routine should be |
| 813 | substracting the array's lower bound. It's not clear to me that this |
| 814 | is correct. Given an array subscripting operation, it would certainly |
| 815 | work to do the adjustment here, essentially computing: |
| 816 | |
| 817 | (&array[0] - (lowerbound * sizeof array[0])) + (index * sizeof array[0]) |
| 818 | |
| 819 | However I believe a more appropriate and logical place to account for |
| 820 | the lower bound is to do so in value_subscript, essentially computing: |
| 821 | |
| 822 | (&array[0] + ((index - lowerbound) * sizeof array[0])) |
| 823 | |
| 824 | As further evidence consider what would happen with operations other |
| 825 | than array subscripting, where the caller would get back a value that |
| 826 | had an address somewhere before the actual first element of the array, |
| 827 | and the information about the lower bound would be lost because of |
| 828 | the coercion to pointer type. |
| 829 | */ |
| 830 | |
| 831 | struct value * |
| 832 | value_coerce_array (struct value *arg1) |
| 833 | { |
| 834 | struct type *type = check_typedef (VALUE_TYPE (arg1)); |
| 835 | |
| 836 | if (VALUE_LVAL (arg1) != lval_memory) |
| 837 | error ("Attempt to take address of value not located in memory."); |
| 838 | |
| 839 | return value_from_pointer (lookup_pointer_type (TYPE_TARGET_TYPE (type)), |
| 840 | (VALUE_ADDRESS (arg1) + VALUE_OFFSET (arg1))); |
| 841 | } |
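A hedged worked example of the two formulas in the comment above value_coerce_array, using plain byte offsets: with a hypothetical lower bound of 1 and 4-byte elements, both expressions land on the same element, but only the subscript-time adjustment keeps the coerced pointer aimed at the array's real first element.

```c
/* Worked illustration only; lower bound, index and element size are hypothetical.  */
int lowerbound = 1, index = 3, elsize = 4;
int off_subscript = (index - lowerbound) * elsize;            /* adjust in value_subscript       */
int off_coercion  = -(lowerbound * elsize) + index * elsize;  /* adjust at coercion (rejected)   */
/* off_subscript == off_coercion == 8, but the second form implies a base
   address that no longer points at element 0 of the array.  */
```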
| 842 | |
| 843 | /* Given a value which is a function, return a value which is a pointer |
| 844 | to it. */ |
| 845 | |
| 846 | struct value * |
| 847 | value_coerce_function (struct value *arg1) |
| 848 | { |
| 849 | struct value *retval; |
| 850 | |
| 851 | if (VALUE_LVAL (arg1) != lval_memory) |
| 852 | error ("Attempt to take address of value not located in memory."); |
| 853 | |
| 854 | retval = value_from_pointer (lookup_pointer_type (VALUE_TYPE (arg1)), |
| 855 | (VALUE_ADDRESS (arg1) + VALUE_OFFSET (arg1))); |
| 856 | VALUE_BFD_SECTION (retval) = VALUE_BFD_SECTION (arg1); |
| 857 | return retval; |
| 858 | } |
| 859 | |
| 860 | /* Return a pointer value for the object for which ARG1 is the contents. */ |
| 861 | |
| 862 | struct value * |
| 863 | value_addr (struct value *arg1) |
| 864 | { |
| 865 | struct value *arg2; |
| 866 | |
| 867 | struct type *type = check_typedef (VALUE_TYPE (arg1)); |
| 868 | if (TYPE_CODE (type) == TYPE_CODE_REF) |
| 869 | { |
| 870 | /* Copy the value, but change the type from (T&) to (T*). |
| 871 | We keep the same location information, which is efficient, |
| 872 | and allows &(&X) to get the location containing the reference. */ |
| 873 | arg2 = value_copy (arg1); |
| 874 | VALUE_TYPE (arg2) = lookup_pointer_type (TYPE_TARGET_TYPE (type)); |
| 875 | return arg2; |
| 876 | } |
| 877 | if (TYPE_CODE (type) == TYPE_CODE_FUNC) |
| 878 | return value_coerce_function (arg1); |
| 879 | |
| 880 | if (VALUE_LVAL (arg1) != lval_memory) |
| 881 | error ("Attempt to take address of value not located in memory."); |
| 882 | |
| 883 | /* Get target memory address */ |
| 884 | arg2 = value_from_pointer (lookup_pointer_type (VALUE_TYPE (arg1)), |
| 885 | (VALUE_ADDRESS (arg1) |
| 886 | + VALUE_OFFSET (arg1) |
| 887 | + VALUE_EMBEDDED_OFFSET (arg1))); |
| 888 | |
| 889 | /* This may be a pointer to a base subobject; so remember the |
| 890 | full derived object's type ... */ |
| 891 | arg2 = value_change_enclosing_type (arg2, lookup_pointer_type (VALUE_ENCLOSING_TYPE (arg1))); |
| 892 | /* ... and also the relative position of the subobject in the full object */ |
| 893 | VALUE_POINTED_TO_OFFSET (arg2) = VALUE_EMBEDDED_OFFSET (arg1); |
| 894 | VALUE_BFD_SECTION (arg2) = VALUE_BFD_SECTION (arg1); |
| 895 | return arg2; |
| 896 | } |
| 897 | |
| 898 | /* Given a value of a pointer type, apply the C unary * operator to it. */ |
| 899 | |
| 900 | struct value * |
| 901 | value_ind (struct value *arg1) |
| 902 | { |
| 903 | struct type *base_type; |
| 904 | struct value *arg2; |
| 905 | |
| 906 | COERCE_ARRAY (arg1); |
| 907 | |
| 908 | base_type = check_typedef (VALUE_TYPE (arg1)); |
| 909 | |
| 910 | if (TYPE_CODE (base_type) == TYPE_CODE_MEMBER) |
| 911 | error ("not implemented: member types in value_ind"); |
| 912 | |
| 913 | /* Allow * on an integer so we can cast it to whatever we want. |
| 914 | This returns an int, which seems like the most C-like thing |
| 915 | to do. "long long" variables are rare enough that |
| 916 | BUILTIN_TYPE_LONGEST would seem to be a mistake. */ |
| 917 | if (TYPE_CODE (base_type) == TYPE_CODE_INT) |
| 918 | return value_at_lazy (builtin_type_int, |
| 919 | (CORE_ADDR) value_as_long (arg1), |
| 920 | VALUE_BFD_SECTION (arg1)); |
| 921 | else if (TYPE_CODE (base_type) == TYPE_CODE_PTR) |
| 922 | { |
| 923 | struct type *enc_type; |
| 924 | /* We may be pointing to something embedded in a larger object */ |
| 925 | /* Get the real type of the enclosing object */ |
| 926 | enc_type = check_typedef (VALUE_ENCLOSING_TYPE (arg1)); |
| 927 | enc_type = TYPE_TARGET_TYPE (enc_type); |
| 928 | /* Retrieve the enclosing object pointed to */ |
| 929 | arg2 = value_at_lazy (enc_type, |
| 930 | value_as_address (arg1) - VALUE_POINTED_TO_OFFSET (arg1), |
| 931 | VALUE_BFD_SECTION (arg1)); |
| 932 | /* Re-adjust type */ |
| 933 | VALUE_TYPE (arg2) = TYPE_TARGET_TYPE (base_type); |
| 934 | /* Add embedding info */ |
| 935 | arg2 = value_change_enclosing_type (arg2, enc_type); |
| 936 | VALUE_EMBEDDED_OFFSET (arg2) = VALUE_POINTED_TO_OFFSET (arg1); |
| 937 | |
| 938 | /* We may be pointing to an object of some derived type */ |
| 939 | arg2 = value_full_object (arg2, NULL, 0, 0, 0); |
| 940 | return arg2; |
| 941 | } |
| 942 | |
| 943 | error ("Attempt to take contents of a non-pointer value."); |
| 944 | return 0; /* For lint -- never reached */ |
| 945 | } |
| 946 | |
| 947 | /* Pushing small parts of stack frames. */ |
| 948 | |
| 949 | /* Push one word (the size of object that a register holds). */ |
| 950 | |
| 951 | CORE_ADDR |
| 952 | push_word (CORE_ADDR sp, ULONGESTunsigned long word) |
| 953 | { |
| 954 | int len = DEPRECATED_REGISTER_SIZE; |
| 955 | char buffer[MAX_REGISTER_SIZE]; |
| 956 | |
| 957 | store_unsigned_integer (buffer, len, word); |
| 958 | if (INNER_THAN (1, 2)) |
| 959 | { |
| 960 | /* stack grows downward */ |
| 961 | sp -= len; |
| 962 | write_memory (sp, buffer, len); |
| 963 | } |
| 964 | else |
| 965 | { |
| 966 | /* stack grows upward */ |
| 967 | write_memory (sp, buffer, len); |
| 968 | sp += len; |
| 969 | } |
| 970 | |
| 971 | return sp; |
| 972 | } |
| 973 | |
| 974 | /* Push LEN bytes with data at BUFFER. */ |
| 975 | |
| 976 | CORE_ADDR |
| 977 | push_bytes (CORE_ADDR sp, char *buffer, int len) |
| 978 | { |
| 979 | if (INNER_THAN (1, 2)) |
| 980 | { |
| 981 | /* stack grows downward */ |
| 982 | sp -= len; |
| 983 | write_memory (sp, buffer, len); |
| 984 | } |
| 985 | else |
| 986 | { |
| 987 | /* stack grows upward */ |
| 988 | write_memory (sp, buffer, len); |
| 989 | sp += len; |
| 990 | } |
| 991 | |
| 992 | return sp; |
| 993 | } |
| 994 | |
| 995 | /* Create a value for an array by allocating space in the inferior, copying |
| 996 | the data into that space, and then setting up an array value. |
| 997 | |
| 998 | The array bounds are set from LOWBOUND and HIGHBOUND, and the array is |
| 999 | populated from the values passed in ELEMVEC. |
| 1000 | |
| 1001 | The element type of the array is inherited from the type of the |
| 1002 | first element, and all elements must have the same size (though we |
| 1003 | don't currently enforce any restriction on their types). */ |
| 1004 | |
| 1005 | struct value * |
| 1006 | value_array (int lowbound, int highbound, struct value **elemvec) |
| 1007 | { |
| 1008 | int nelem; |
| 1009 | int idx; |
| 1010 | unsigned int typelength; |
| 1011 | struct value *val; |
| 1012 | struct type *rangetype; |
| 1013 | struct type *arraytype; |
| 1014 | CORE_ADDR addr; |
| 1015 | |
| 1016 | /* Validate that the bounds are reasonable and that each of the elements |
| 1017 | have the same size. */ |
| 1018 | |
| 1019 | nelem = highbound - lowbound + 1; |
| 1020 | if (nelem <= 0) |
| 1021 | { |
| 1022 | error ("bad array bounds (%d, %d)", lowbound, highbound); |
| 1023 | } |
| 1024 | typelength = TYPE_LENGTH (VALUE_ENCLOSING_TYPE (elemvec[0])); |
| 1025 | for (idx = 1; idx < nelem; idx++) |
| 1026 | { |
| 1027 | if (TYPE_LENGTH (VALUE_ENCLOSING_TYPE (elemvec[idx])) != typelength) |
| 1028 | { |
| 1029 | error ("array elements must all be the same size"); |
| 1030 | } |
| 1031 | } |
| 1032 | |
| 1033 | rangetype = create_range_type ((struct type *) NULL, builtin_type_int, |
| 1034 | lowbound, highbound); |
| 1035 | arraytype = create_array_type ((struct type *) NULL, |
| 1036 | VALUE_ENCLOSING_TYPE (elemvec[0]), rangetype); |
| 1037 | |
| 1038 | if (!current_language->c_style_arrays) |
| 1039 | { |
| 1040 | val = allocate_value (arraytype); |
| 1041 | for (idx = 0; idx < nelem; idx++) |
| 1042 | { |
| 1043 | memcpy (VALUE_CONTENTS_ALL_RAW (val) + (idx * typelength), |
| 1044 | VALUE_CONTENTS_ALL (elemvec[idx]), |
| 1045 | typelength); |
| 1046 | } |
| 1047 | VALUE_BFD_SECTION (val) = VALUE_BFD_SECTION (elemvec[0]); |
| 1048 | return val; |
| 1049 | } |
| 1050 | |
| 1051 | /* Allocate space to store the array in the inferior, and then initialize |
| 1052 | it by copying in each element. FIXME: Is it worth it to create a |
| 1053 | local buffer in which to collect each value and then write all the |
| 1054 | bytes in one operation? */ |
| 1055 | |
| 1056 | addr = allocate_space_in_inferior (nelem * typelength); |
| 1057 | for (idx = 0; idx < nelem; idx++) |
| 1058 | { |
| 1059 | write_memory (addr + (idx * typelength), VALUE_CONTENTS_ALL (elemvec[idx]), |
| 1060 | typelength); |
| 1061 | } |
| 1062 | |
| 1063 | /* Create the array type and set up an array value to be evaluated lazily. */ |
| 1064 | |
| 1065 | val = value_at_lazy (arraytype, addr, VALUE_BFD_SECTION (elemvec[0])); |
| 1066 | return (val); |
| 1067 | } |
| 1068 | |
| 1069 | /* Create a value for a string constant by allocating space in the inferior, |
| 1070 | copying the data into that space, and returning the address with type |
| 1071 | TYPE_CODE_STRING. PTR points to the string constant data; LEN is number |
| 1072 | of characters. |
| 1073 | Note that string types are like array of char types with a lower bound of |
| 1074 | zero and an upper bound of LEN - 1. Also note that the string may contain |
| 1075 | embedded null bytes. */ |
| 1076 | |
| 1077 | struct value * |
| 1078 | value_string (char *ptr, int len) |
| 1079 | { |
| 1080 | struct value *val; |
| 1081 | int lowbound = current_language->string_lower_bound; |
| 1082 | struct type *rangetype = create_range_type ((struct type *) NULL, |
| 1083 | builtin_type_int, |
| 1084 | lowbound, len + lowbound - 1); |
| 1085 | struct type *stringtype |
| 1086 | = create_string_type ((struct type *) NULL, rangetype); |
| 1087 | CORE_ADDR addr; |
| 1088 | |
| 1089 | if (current_language->c_style_arrays == 0) |
| 1090 | { |
| 1091 | val = allocate_value (stringtype); |
| 1092 | memcpy (VALUE_CONTENTS_RAW (val), ptr, len); |
| 1093 | return val; |
| 1094 | } |
| 1095 | |
| 1096 | |
| 1097 | /* Allocate space to store the string in the inferior, and then |
| 1098 | copy LEN bytes from PTR in gdb to that address in the inferior. */ |
| 1099 | |
| 1100 | addr = allocate_space_in_inferior (len); |
| 1101 | write_memory (addr, ptr, len); |
| 1102 | |
| 1103 | val = value_at_lazy (stringtype, addr, NULL); |
| 1104 | return (val); |
| 1105 | } |
| 1106 | |
| 1107 | struct value * |
| 1108 | value_bitstring (char *ptr, int len) |
| 1109 | { |
| 1110 | struct value *val; |
| 1111 | struct type *domain_type = create_range_type (NULL, builtin_type_int, |
| 1112 | 0, len - 1); |
| 1113 | struct type *type = create_set_type ((struct type *) NULL, domain_type); |
| 1114 | TYPE_CODE (type) = TYPE_CODE_BITSTRING; |
| 1115 | val = allocate_value (type); |
| 1116 | memcpy (VALUE_CONTENTS_RAW (val), ptr, TYPE_LENGTH (type)); |
| 1117 | return val; |
| 1118 | } |
| 1119 | |
| 1120 | /* See if we can pass arguments in T2 to a function which takes arguments |
| 1121 | of types T1. T1 is a list of NARGS arguments, and T2 is a NULL-terminated |
| 1122 | vector. If some arguments need coercion of some sort, then the coerced |
| 1123 | values are written into T2. Return value is 0 if the arguments could be |
| 1124 | matched, or the position at which they differ if not. |
| 1125 | |
| 1126 | STATICP is nonzero if the T1 argument list came from a |
| 1127 | static member function. T2 will still include the ``this'' pointer, |
| 1128 | but it will be skipped. |
| 1129 | |
| 1130 | For non-static member functions, we ignore the first argument, |
| 1131 | which is the type of the instance variable. This is because we want |
| 1132 | to handle calls with objects from derived classes. This is not |
| 1133 | entirely correct: we should actually check to make sure that a |
| 1134 | requested operation is type secure, shouldn't we? FIXME. */ |
| 1135 | |
| 1136 | static int |
| 1137 | typecmp (int staticp, int varargs, int nargs, |
| 1138 | struct field t1[], struct value *t2[]) |
| 1139 | { |
| 1140 | int i; |
| 1141 | |
| 1142 | if (t2 == 0) |
| 1143 | internal_error (__FILE__, __LINE__, "typecmp: no argument list"); |
| 1144 | |
| 1145 | /* Skip ``this'' argument if applicable. T2 will always include THIS. */ |
| 1146 | if (staticp) |
| 1147 | t2 ++; |
| 1148 | |
| 1149 | for (i = 0; |
| 1150 | (i < nargs) && TYPE_CODE (t1[i].type) != TYPE_CODE_VOID; |
| 1151 | i++) |
| 1152 | { |
| 1153 | struct type *tt1, *tt2; |
| 1154 | |
| 1155 | if (!t2[i]) |
| 1156 | return i + 1; |
| 1157 | |
| 1158 | tt1 = check_typedef (t1[i].type); |
| 1159 | tt2 = check_typedef (VALUE_TYPE (t2[i])); |
| 1160 | |
| 1161 | if (TYPE_CODE (tt1) == TYPE_CODE_REF |
| 1162 | /* We should be doing hairy argument matching, as below. */ |
| 1163 | && (TYPE_CODE (check_typedef (TYPE_TARGET_TYPE (tt1))) == TYPE_CODE (tt2))) |
| 1164 | { |
| 1165 | if (TYPE_CODE (tt2) == TYPE_CODE_ARRAY) |
| 1166 | t2[i] = value_coerce_array (t2[i]); |
| 1167 | else |
| 1168 | t2[i] = value_addr (t2[i]); |
| 1169 | continue; |
| 1170 | } |
| 1171 | |
| 1172 | /* djb - 20000715 - Until the new type structure is in the |
| 1173 | place, and we can attempt things like implicit conversions, |
| 1174 | we need to do this so you can take something like a map<const |
| 1175 | char *>, and properly access map["hello"], because the |
| 1176 | argument to [] will be a reference to a pointer to a char, |
| 1177 | and the argument will be a pointer to a char. */ |
| 1178 | while ( TYPE_CODE(tt1) == TYPE_CODE_REF || |
| 1179 | TYPE_CODE (tt1) == TYPE_CODE_PTR) |
| 1180 | { |
| 1181 | tt1 = check_typedef( TYPE_TARGET_TYPE(tt1) ); |
| 1182 | } |
| 1183 | while ( TYPE_CODE(tt2) == TYPE_CODE_ARRAY || |
| 1184 | TYPE_CODE(tt2) == TYPE_CODE_PTR || |
| 1185 | TYPE_CODE(tt2) == TYPE_CODE_REF) |
| 1186 | { |
| 1187 | tt2 = check_typedef( TYPE_TARGET_TYPE(tt2) ); |
| 1188 | } |
| 1189 | if (TYPE_CODE (tt1) == TYPE_CODE (tt2)) |
| 1190 | continue; |
| 1191 | /* Array to pointer is a `trivial conversion' according to the ARM. */ |
| 1192 | |
| 1193 | /* We should be doing much hairier argument matching (see section 13.2 |
| 1194 | of the ARM), but as a quick kludge, just check for the same type |
| 1195 | code. */ |
| 1196 | if (TYPE_CODE (t1[i].type) != TYPE_CODE (VALUE_TYPE (t2[i]))) |
| 1197 | return i + 1; |
| 1198 | } |
| 1199 | if (varargs || t2[i] == NULL) |
| 1200 | return 0; |
| 1201 | return i + 1; |
| 1202 | } |
| 1203 | |
| 1204 | /* Helper function used by value_struct_elt to recurse through baseclasses. |
| 1205 | Look for a field NAME in ARG1. Adjust the address of ARG1 by OFFSET bytes, |
| 1206 | and search in it assuming it has (class) type TYPE. |
| 1207 | If found, return value, else return NULL. |
| 1208 | |
| 1209 | If LOOKING_FOR_BASECLASS, then instead of looking for struct fields, |
| 1210 | look for a baseclass named NAME. */ |
| 1211 | |
| 1212 | static struct value * |
| 1213 | search_struct_field (char *name, struct value *arg1, int offset, |
| 1214 | struct type *type, int looking_for_baseclass) |
| 1215 | { |
| 1216 | int i; |
| 1217 | int nbases = TYPE_N_BASECLASSES (type); |
| 1218 | |
| 1219 | CHECK_TYPEDEF (type); |
| 1220 | |
| 1221 | if (!looking_for_baseclass) |
| 1222 | for (i = TYPE_NFIELDS (type) - 1; i >= nbases; i--) |
| 1223 | { |
| 1224 | char *t_field_name = TYPE_FIELD_NAME (type, i); |
| 1225 | |
| 1226 | if (t_field_name && (strcmp_iw (t_field_name, name) == 0)) |
| 1227 | { |
| 1228 | struct value *v; |
| 1229 | if (TYPE_FIELD_STATIC (type, i)) |
| 1230 | { |
| 1231 | v = value_static_field (type, i); |
| 1232 | if (v == 0) |
| 1233 | error ("field %s is nonexistent or has been optimised out", |
| 1234 | name); |
| 1235 | } |
| 1236 | else |
| 1237 | { |
| 1238 | v = value_primitive_field (arg1, offset, i, type); |
| 1239 | if (v == 0) |
| 1240 | error ("there is no field named %s", name); |
| 1241 | } |
| 1242 | return v; |
| 1243 | } |
| 1244 | |
| 1245 | if (t_field_name |
| 1246 | && (t_field_name[0] == '\0' |
| 1247 | || (TYPE_CODE (type)(type)->main_type->code == TYPE_CODE_UNION |
| 1248 | && (strcmp_iw (t_field_name, "else") == 0)))) |
| 1249 | { |
| 1250 | struct type *field_type = TYPE_FIELD_TYPE (type, i)(((type)->main_type->fields[i]).type); |
| 1251 | if (TYPE_CODE (field_type)(field_type)->main_type->code == TYPE_CODE_UNION |
| 1252 | || TYPE_CODE (field_type)(field_type)->main_type->code == TYPE_CODE_STRUCT) |
| 1253 | { |
| 1254 | /* Look for a match through the fields of an anonymous union, |
| 1255 | or anonymous struct. C++ provides anonymous unions. |
| 1256 | |
| 1257 | In the GNU Chill (now deleted from GDB) |
| 1258 | implementation of variant record types, each |
| 1259 | <alternative field> has an (anonymous) union type, |
| 1260 | each member of the union represents a <variant |
| 1261 | alternative>. Each <variant alternative> is |
| 1262 | represented as a struct, with a member for each |
| 1263 | <variant field>. */ |
| 1264 | |
| 1265 | struct value *v; |
| 1266 | int new_offset = offset; |
| 1267 | |
| 1268 | /* This is pretty gross. In G++, the offset in an |
| 1269 | anonymous union is relative to the beginning of the |
| 1270 | enclosing struct. In the GNU Chill (now deleted |
| 1271 | from GDB) implementation of variant records, the |
| 1272 | bitpos is zero in an anonymous union field, so we |
| 1273 | have to add the offset of the union here. */ |
| 1274 | if (TYPE_CODE (field_type)(field_type)->main_type->code == TYPE_CODE_STRUCT |
| 1275 | || (TYPE_NFIELDS (field_type)(field_type)->main_type->nfields > 0 |
| 1276 | && TYPE_FIELD_BITPOS (field_type, 0)(((field_type)->main_type->fields[0]).loc.bitpos) == 0)) |
| 1277 | new_offset += TYPE_FIELD_BITPOS (type, i)(((type)->main_type->fields[i]).loc.bitpos) / 8; |
| 1278 | |
| 1279 | v = search_struct_field (name, arg1, new_offset, field_type, |
| 1280 | looking_for_baseclass); |
| 1281 | if (v) |
| 1282 | return v; |
| 1283 | } |
| 1284 | } |
| 1285 | } |
| 1286 | |
| 1287 | for (i = 0; i < nbases; i++) |
| 1288 | { |
| 1289 | struct value *v; |
| 1290 | struct type *basetype = check_typedef (TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type); |
| 1291 | /* If we are looking for baseclasses, this is what we get when we |
| 1292 | hit them. But it could happen that the base part's member name |
| 1293 | is not yet filled in. */ |
| 1294 | int found_baseclass = (looking_for_baseclass |
| 1295 | && TYPE_BASECLASS_NAME (type, i)(type)->main_type->fields[i].name != NULL((void*)0) |
| 1296 | && (strcmp_iw (name, TYPE_BASECLASS_NAME (type, i)(type)->main_type->fields[i].name) == 0)); |
| 1297 | |
| 1298 | if (BASETYPE_VIA_VIRTUAL (type, i)((type)->main_type->type_specific.cplus_stuff->virtual_field_bits == ((void*)0) ? 0 : (((type)->main_type->type_specific .cplus_stuff->virtual_field_bits)[((i))>>3] & (1 << (((i))&7))))) |
| 1299 | { |
| 1300 | int boffset; |
| 1301 | struct value *v2 = allocate_value (basetype); |
| 1302 | |
| 1303 | boffset = baseclass_offset (type, i, |
| 1304 | VALUE_CONTENTS (arg1)((void)((arg1)->lazy && value_fetch_lazy(arg1)), ( (char *) (arg1)->aligner.contents + (arg1)->embedded_offset )) + offset, |
| 1305 | VALUE_ADDRESS (arg1)(arg1)->location.address |
| 1306 | + VALUE_OFFSET (arg1)(arg1)->offset + offset); |
| 1307 | if (boffset == -1) |
| 1308 | error ("virtual baseclass botch"); |
| 1309 | |
| 1310 | /* The virtual base class pointer might have been clobbered by the |
| 1311 | user program. Make sure that it still points to a valid memory |
| 1312 | location. */ |
| 1313 | |
| 1314 | boffset += offset; |
| 1315 | if (boffset < 0 || boffset >= TYPE_LENGTH (type)(type)->length) |
| 1316 | { |
| 1317 | CORE_ADDR base_addr; |
| 1318 | |
| 1319 | base_addr = VALUE_ADDRESS (arg1)(arg1)->location.address + VALUE_OFFSET (arg1)(arg1)->offset + boffset; |
| 1320 | if (target_read_memory (base_addr, VALUE_CONTENTS_RAW (v2)((char *) (v2)->aligner.contents + (v2)->embedded_offset ), |
| 1321 | TYPE_LENGTH (basetype)(basetype)->length) != 0) |
| 1322 | error ("virtual baseclass botch"); |
| 1323 | VALUE_LVAL (v2)(v2)->lval = lval_memory; |
| 1324 | VALUE_ADDRESS (v2)(v2)->location.address = base_addr; |
| 1325 | } |
| 1326 | else |
| 1327 | { |
| 1328 | VALUE_LVAL (v2)(v2)->lval = VALUE_LVAL (arg1)(arg1)->lval; |
| 1329 | VALUE_ADDRESS (v2)(v2)->location.address = VALUE_ADDRESS (arg1)(arg1)->location.address; |
| 1330 | VALUE_OFFSET (v2)(v2)->offset = VALUE_OFFSET (arg1)(arg1)->offset + boffset; |
| 1331 | if (VALUE_LAZY (arg1)(arg1)->lazy) |
| 1332 | VALUE_LAZY (v2)(v2)->lazy = 1; |
| 1333 | else |
| 1334 | memcpy (VALUE_CONTENTS_RAW (v2)((char *) (v2)->aligner.contents + (v2)->embedded_offset ), |
| 1335 | VALUE_CONTENTS_RAW (arg1)((char *) (arg1)->aligner.contents + (arg1)->embedded_offset ) + boffset, |
| 1336 | TYPE_LENGTH (basetype)(basetype)->length); |
| 1337 | } |
| 1338 | |
| 1339 | if (found_baseclass) |
| 1340 | return v2; |
| 1341 | v = search_struct_field (name, v2, 0, TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type, |
| 1342 | looking_for_baseclass); |
| 1343 | } |
| 1344 | else if (found_baseclass) |
| 1345 | v = value_primitive_field (arg1, offset, i, type); |
| 1346 | else |
| 1347 | v = search_struct_field (name, arg1, |
| 1348 | offset + TYPE_BASECLASS_BITPOS (type, i)(((type)->main_type->fields[i]).loc.bitpos) / 8, |
| 1349 | basetype, looking_for_baseclass); |
| 1350 | if (v) |
| 1351 | return v; |
| 1352 | } |
| 1353 | return NULL((void*)0); |
| 1354 | } |
| 1355 | |
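/* Illustrative sketch (not GDB code, requires C11 anonymous unions): why
   search_struct_field adds the union's own byte offset before recursing.
   Members of an anonymous union live at the union's offset inside the
   enclosing struct, so when the debug info records their positions
   relative to the union (bitpos 0), the union's bitpos / 8 must be added,
   which is what new_offset does above.  */
#include <stdio.h>
#include <stddef.h>

struct outer
{
  int a;                        /* at offset 0 */
  union { int b; char c; };     /* anonymous union at a nonzero offset */
};

int
main (void)
{
  /* b's offset within `struct outer' equals the union's offset, not 0.  */
  printf ("offsetof (struct outer, b) = %zu\n", offsetof (struct outer, b));
  return 0;
}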
| 1356 | |
| 1357 | /* Return the offset (in bytes) of the virtual base of type BASETYPE |
| 1358 | * in an object pointed to by VALADDR (on the host), assumed to be of |
| 1359 | * type TYPE. OFFSET is number of bytes beyond start of ARG to start |
| 1360 | * looking (in case VALADDR is the contents of an enclosing object). |
| 1361 | * |
| 1362 | * This routine recurses on the primary base of the derived class because |
| 1363 | * the virtual base entries of the primary base appear before the other |
| 1364 | * virtual base entries. |
| 1365 | * |
| 1366 | * If the virtual base is not found, a negative integer is returned. |
| 1367 | * The magnitude of the negative integer is the number of entries in |
| 1368 | * the virtual table to skip over (entries corresponding to various |
| 1369 | * ancestral classes in the chain of primary bases). |
| 1370 | * |
| 1371 | * Important: This assumes the HP / Taligent C++ runtime |
| 1372 | * conventions. Use baseclass_offset() instead to deal with g++ |
| 1373 | * conventions. */ |
| 1374 | |
| 1375 | void |
| 1376 | find_rt_vbase_offset (struct type *type, struct type *basetype, char *valaddr, |
| 1377 | int offset, int *boffset_p, int *skip_p) |
| 1378 | { |
| 1379 | int boffset; /* offset of virtual base */ |
| 1380 | int index; /* displacement to use in virtual table */ |
| 1381 | int skip; |
| 1382 | |
| 1383 | struct value *vp; |
| 1384 | CORE_ADDR vtbl; /* the virtual table pointer */ |
| 1385 | struct type *pbc; /* the primary base class */ |
| 1386 | |
| 1387 | /* Look for the virtual base recursively in the primary base, first. |
| 1388 | * This is because the derived class object and its primary base |
| 1389 | * subobject share the primary virtual table. */ |
| 1390 | |
| 1391 | boffset = 0; |
| 1392 | pbc = TYPE_PRIMARY_BASE (type)(((type)->main_type->type_specific.cplus_stuff->runtime_ptr )->primary_base); |
| 1393 | if (pbc) |
| 1394 | { |
| 1395 | find_rt_vbase_offset (pbc, basetype, valaddr, offset, &boffset, &skip); |
| 1396 | if (skip < 0) |
| 1397 | { |
| 1398 | *boffset_p = boffset; |
| 1399 | *skip_p = -1; |
| 1400 | return; |
| 1401 | } |
| 1402 | } |
| 1403 | else |
| 1404 | skip = 0; |
| 1405 | |
| 1406 | |
| 1407 | /* Find the index of the virtual base according to HP/Taligent |
| 1408 | runtime spec. (Depth-first, left-to-right.) */ |
| 1409 | index = virtual_base_index_skip_primaries (basetype, type); |
| 1410 | |
| 1411 | if (index < 0) |
| 1412 | { |
| 1413 | *skip_p = skip + virtual_base_list_length_skip_primaries (type); |
| 1414 | *boffset_p = 0; |
| 1415 | return; |
| 1416 | } |
| 1417 | |
| 1418 | /* pai: FIXME -- 32x64 possible problem */ |
| 1419 | /* First word (4 bytes) in object layout is the vtable pointer */ |
| 1420 | vtbl = *(CORE_ADDR *) (valaddr + offset); |
| 1421 | |
| 1422 | /* Before the constructor is invoked, things are usually zero'd out. */ |
| 1423 | if (vtbl == 0) |
| 1424 | error ("Couldn't find virtual table -- object may not be constructed yet."); |
| 1425 | |
| 1426 | |
| 1427 | /* Find virtual base's offset -- jump over entries for primary base |
| 1428 | * ancestors, then use the index computed above. But also adjust by |
| 1429 | * HP_ACC_VBASE_START for the vtable slots before the start of the |
| 1430 | * virtual base entries. Offset is negative -- virtual base entries |
| 1431 | * appear _before_ the address point of the virtual table. */ |
| 1432 | |
| 1433 | /* pai: FIXME -- 32x64 problem, if word = 8 bytes, change multiplier |
| 1434 | & use long type */ |
| 1435 | |
| 1436 | /* epstein : FIXME -- added param for overlay section. May not be correct */ |
| 1437 | vp = value_at (builtin_type_int, vtbl + 4 * (-skip - index - HP_ACC_VBASE_START2), NULL((void*)0)); |
| 1438 | boffset = value_as_long (vp); |
| 1439 | *skip_p = -1; |
| 1440 | *boffset_p = boffset; |
| 1441 | return; |
| 1442 | } |
| 1443 | |
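/* Illustrative sketch (not GDB code): the slot arithmetic used above under
   the HP/Taligent layout, where virtual-base entries sit at negative
   displacements from the vtable's address point.  The 4-byte word size and
   the 2-slot header mirror the hard-coded values in find_rt_vbase_offset
   and are assumptions of that runtime model.  */
#include <stdio.h>

static long
vbase_slot_addr (long vtbl, int skip, int index, int vbase_start)
{
  /* Step back over entries for primary-base ancestors (skip), this
     base's own index, and the fixed header slots.  */
  return vtbl + 4L * (-skip - index - vbase_start);
}

int
main (void)
{
  /* Nothing to skip, first virtual base, 2 header slots: the entry is
     read 8 bytes before the address point.  */
  printf ("%#lx\n", (unsigned long) vbase_slot_addr (0x1000, 0, 0, 2));
  return 0;
}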
| 1444 | |
| 1445 | /* Helper function used by value_struct_elt to recurse through baseclasses. |
| 1446 | Look for a field NAME in ARG1. Adjust the address of ARG1 by OFFSET bytes, |
| 1447 | and search in it assuming it has (class) type TYPE. |
| 1448 | If found, return the value.  If the name matched but the arguments |
| 1449 | did not, return (struct value *) -1.  Otherwise return NULL. */ |
| 1450 | |
| 1451 | static struct value * |
| 1452 | search_struct_method (char *name, struct value **arg1p, |
| 1453 | struct value **args, int offset, |
| 1454 | int *static_memfuncp, struct type *type) |
| 1455 | { |
| 1456 | int i; |
| 1457 | struct value *v; |
| 1458 | int name_matched = 0; |
| 1459 | char dem_opname[64]; |
| 1460 | |
| 1461 | CHECK_TYPEDEF (type)(type) = check_typedef (type); |
| 1462 | for (i = TYPE_NFN_FIELDS (type)(type)->main_type->type_specific.cplus_stuff->nfn_fields - 1; i >= 0; i--) |
| 1463 | { |
| 1464 | char *t_field_name = TYPE_FN_FIELDLIST_NAME (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].name; |
| 1465 | /* FIXME! May need to check for ARM demangling here */ |
| 1466 | if (strncmp (t_field_name, "__", 2) == 0 || |
| 1467 | strncmp (t_field_name, "op", 2) == 0 || |
| 1468 | strncmp (t_field_name, "type", 4) == 0) |
| 1469 | { |
| 1470 | if (cplus_demangle_opname (t_field_name, dem_opname, DMGL_ANSI(1 << 1))) |
| 1471 | t_field_name = dem_opname; |
| 1472 | else if (cplus_demangle_opname (t_field_name, dem_opname, 0)) |
| 1473 | t_field_name = dem_opname; |
| 1474 | } |
| 1475 | if (t_field_name && (strcmp_iw (t_field_name, name) == 0)) |
| 1476 | { |
| 1477 | int j = TYPE_FN_FIELDLIST_LENGTH (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].length - 1; |
| 1478 | struct fn_field *f = TYPE_FN_FIELDLIST1 (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].fn_fields; |
| 1479 | name_matched = 1; |
| 1480 | |
| 1481 | check_stub_method_group (type, i); |
| 1482 | if (j > 0 && args == 0) |
| 1483 | error ("cannot resolve overloaded method `%s': no arguments supplied", name); |
| 1484 | else if (j == 0 && args == 0) |
| 1485 | { |
| 1486 | v = value_fn_field (arg1p, f, j, type, offset); |
| 1487 | if (v != NULL((void*)0)) |
| 1488 | return v; |
| 1489 | } |
| 1490 | else |
| 1491 | while (j >= 0) |
| 1492 | { |
| 1493 | if (!typecmp (TYPE_FN_FIELD_STATIC_P (f, j)((f)[j].voffset == 1), |
| 1494 | TYPE_VARARGS (TYPE_FN_FIELD_TYPE (f, j))(((f)[j].type)->main_type->flags & (1 << 11)), |
| 1495 | TYPE_NFIELDS (TYPE_FN_FIELD_TYPE (f, j))((f)[j].type)->main_type->nfields, |
| 1496 | TYPE_FN_FIELD_ARGS (f, j)((f)[j].type)->main_type->fields, args)) |
| 1497 | { |
| 1498 | if (TYPE_FN_FIELD_VIRTUAL_P (f, j)((f)[j].voffset > 1)) |
| 1499 | return value_virtual_fn_field (arg1p, f, j, type, offset); |
| 1500 | if (TYPE_FN_FIELD_STATIC_P (f, j)((f)[j].voffset == 1) && static_memfuncp) |
| 1501 | *static_memfuncp = 1; |
| 1502 | v = value_fn_field (arg1p, f, j, type, offset); |
| 1503 | if (v != NULL((void*)0)) |
| 1504 | return v; |
| 1505 | } |
| 1506 | j--; |
| 1507 | } |
| 1508 | } |
| 1509 | } |
| 1510 | |
| 1511 | for (i = TYPE_N_BASECLASSES (type)(type)->main_type->type_specific.cplus_stuff->n_baseclasses - 1; i >= 0; i--) |
| 1512 | { |
| 1513 | int base_offset; |
| 1514 | |
| 1515 | if (BASETYPE_VIA_VIRTUAL (type, i)((type)->main_type->type_specific.cplus_stuff->virtual_field_bits == ((void*)0) ? 0 : (((type)->main_type->type_specific .cplus_stuff->virtual_field_bits)[((i))>>3] & (1 << (((i))&7))))) |
| 1516 | { |
| 1517 | if (TYPE_HAS_VTABLE (type)(((type)->main_type->type_specific.cplus_stuff->runtime_ptr ) && (((type)->main_type->type_specific.cplus_stuff ->runtime_ptr)->has_vtable))) |
| 1518 | { |
| 1519 | /* HP aCC compiled type, search for virtual base offset |
| 1520 | according to HP/Taligent runtime spec. */ |
| 1521 | int skip; |
| 1522 | find_rt_vbase_offset (type, TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type, |
| 1523 | VALUE_CONTENTS_ALL (*arg1p)((void) ((*arg1p)->lazy && value_fetch_lazy(*arg1p )), ((char *) (*arg1p)->aligner.contents)), |
| 1524 | offset + VALUE_EMBEDDED_OFFSET (*arg1p)((*arg1p)->embedded_offset), |
| 1525 | &base_offset, &skip); |
| 1526 | if (skip >= 0) |
| 1527 | error ("Virtual base class offset not found in vtable"); |
| 1528 | } |
| 1529 | else |
| 1530 | { |
| 1531 | struct type *baseclass = check_typedef (TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type); |
| 1532 | char *base_valaddr; |
| 1533 | |
| 1534 | /* The virtual base class pointer might have been clobbered by the |
| 1535 | user program. Make sure that it still points to a valid memory |
| 1536 | location. */ |
| 1537 | |
| 1538 | if (offset < 0 || offset >= TYPE_LENGTH (type)(type)->length) |
| 1539 | { |
| 1540 | base_valaddr = (char *) alloca (TYPE_LENGTH (baseclass))__builtin_alloca((baseclass)->length); |
| 1541 | if (target_read_memory (VALUE_ADDRESS (*arg1p)(*arg1p)->location.address |
| 1542 | + VALUE_OFFSET (*arg1p)(*arg1p)->offset + offset, |
| 1543 | base_valaddr, |
| 1544 | TYPE_LENGTH (baseclass)(baseclass)->length) != 0) |
| 1545 | error ("virtual baseclass botch"); |
| 1546 | } |
| 1547 | else |
| 1548 | base_valaddr = VALUE_CONTENTS (*arg1p)((void)((*arg1p)->lazy && value_fetch_lazy(*arg1p) ), ((char *) (*arg1p)->aligner.contents + (*arg1p)->embedded_offset )) + offset; |
| 1549 | |
| 1550 | base_offset = |
| 1551 | baseclass_offset (type, i, base_valaddr, |
| 1552 | VALUE_ADDRESS (*arg1p)(*arg1p)->location.address |
| 1553 | + VALUE_OFFSET (*arg1p)(*arg1p)->offset + offset); |
| 1554 | if (base_offset == -1) |
| 1555 | error ("virtual baseclass botch"); |
| 1556 | } |
| 1557 | } |
| 1558 | else |
| 1559 | { |
| 1560 | base_offset = TYPE_BASECLASS_BITPOS (type, i)(((type)->main_type->fields[i]).loc.bitpos) / 8; |
| 1561 | } |
| 1562 | v = search_struct_method (name, arg1p, args, base_offset + offset, |
| 1563 | static_memfuncp, TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type); |
| 1564 | if (v == (struct value *) - 1) |
| 1565 | { |
| 1566 | name_matched = 1; |
| 1567 | } |
| 1568 | else if (v) |
| 1569 | { |
| 1570 | /* FIXME-bothner: Why is this commented out? Why is it here? */ |
| 1571 | /* *arg1p = arg1_tmp; */ |
| 1572 | return v; |
| 1573 | } |
| 1574 | } |
| 1575 | if (name_matched) |
| 1576 | return (struct value *) - 1; |
| 1577 | else |
| 1578 | return NULL((void*)0); |
| 1579 | } |
| 1580 | |
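/* Illustrative sketch (not GDB code): the three-way result convention of
   search_struct_method, which callers such as value_struct_elt test
   below.  NULL means no member of that name exists; the sentinel
   (struct value *) -1 means the name matched but no overload accepted
   the arguments; any other pointer is the resolved member function.  */
#include <stdio.h>

struct value;                                   /* opaque, as for a caller */
#define NAME_MATCHED_BAD_ARGS ((struct value *) -1)

static const char *
describe_result (struct value *v)
{
  if (v == NAME_MATCHED_BAD_ARGS)
    return "name found, but the arguments match no overload";
  if (v == 0)
    return "no member or method with that name";
  return "resolved member function";
}

int
main (void)
{
  printf ("%s\n", describe_result (NAME_MATCHED_BAD_ARGS));
  printf ("%s\n", describe_result ((struct value *) 0));
  return 0;
}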
| 1581 | /* Given *ARGP, a value of type (pointer to a)* structure/union, |
| 1582 | extract the component named NAME from the ultimate target structure/union |
| 1583 | and return it as a value with its appropriate type. |
| 1584 | ERR is used in the error message if *ARGP's type is wrong. |
| 1585 | |
| 1586 | C++: ARGS is a list of argument types to aid in the selection of |
| 1587 | an appropriate method. Also, handle derived types. |
| 1588 | |
| 1589 | STATIC_MEMFUNCP, if non-NULL, points to a caller-supplied location |
| 1590 | where the truthvalue of whether the function that was resolved was |
| 1591 | a static member function or not is stored. |
| 1592 | |
| 1593 | ERR is an error message to be printed in case the field is not found. */ |
| 1594 | |
| 1595 | struct value * |
| 1596 | value_struct_elt (struct value **argp, struct value **args, |
| 1597 | char *name, int *static_memfuncp, char *err) |
| 1598 | { |
| 1599 | struct type *t; |
| 1600 | struct value *v; |
| 1601 | |
| 1602 | COERCE_ARRAY (*argp)do { do { struct type *value_type_arg_tmp = check_typedef ((* argp)->type); if ((value_type_arg_tmp)->main_type->code == TYPE_CODE_REF) *argp = value_at_lazy ((value_type_arg_tmp )->main_type->target_type, unpack_pointer ((*argp)-> type, ((void)((*argp)->lazy && value_fetch_lazy(*argp )), ((char *) (*argp)->aligner.contents + (*argp)->embedded_offset ))), ((*argp)->bfd_section)); } while (0); if (current_language ->c_style_arrays && ((*argp)->type)->main_type ->code == TYPE_CODE_ARRAY) *argp = value_coerce_array (*argp ); if (((*argp)->type)->main_type->code == TYPE_CODE_FUNC ) *argp = value_coerce_function (*argp); } while (0); |
| 1603 | |
| 1604 | t = check_typedef (VALUE_TYPE (*argp)(*argp)->type); |
| 1605 | |
| 1606 | /* Follow pointers until we get to a non-pointer. */ |
| 1607 | |
| 1608 | while (TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_PTR || TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_REF) |
| 1609 | { |
| 1610 | *argp = value_ind (*argp); |
| 1611 | /* Don't coerce fn pointer to fn and then back again! */ |
| 1612 | if (TYPE_CODE (VALUE_TYPE (*argp))((*argp)->type)->main_type->code != TYPE_CODE_FUNC) |
| 1613 | COERCE_ARRAY (*argp)do { do { struct type *value_type_arg_tmp = check_typedef ((* argp)->type); if ((value_type_arg_tmp)->main_type->code == TYPE_CODE_REF) *argp = value_at_lazy ((value_type_arg_tmp )->main_type->target_type, unpack_pointer ((*argp)-> type, ((void)((*argp)->lazy && value_fetch_lazy(*argp )), ((char *) (*argp)->aligner.contents + (*argp)->embedded_offset ))), ((*argp)->bfd_section)); } while (0); if (current_language ->c_style_arrays && ((*argp)->type)->main_type ->code == TYPE_CODE_ARRAY) *argp = value_coerce_array (*argp ); if (((*argp)->type)->main_type->code == TYPE_CODE_FUNC ) *argp = value_coerce_function (*argp); } while (0); |
| 1614 | t = check_typedef (VALUE_TYPE (*argp)(*argp)->type); |
| 1615 | } |
| 1616 | |
| 1617 | if (TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_MEMBER) |
| 1618 | error ("not implemented: member type in value_struct_elt"); |
| 1619 | |
| 1620 | if (TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_STRUCT |
| 1621 | && TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_UNION) |
| 1622 | error ("Attempt to extract a component of a value that is not a %s.", err); |
| 1623 | |
| 1624 | /* Assume it's not, unless we see that it is. */ |
| 1625 | if (static_memfuncp) |
| 1626 | *static_memfuncp = 0; |
| 1627 | |
| 1628 | if (!args) |
| 1629 | { |
| 1630 | /* No arguments were supplied; just look NAME up. */ |
| 1631 | |
| 1632 | /* Try as a field first, because if we succeed, there |
| 1633 | is less work to be done. */ |
| 1634 | v = search_struct_field (name, *argp, 0, t, 0); |
| 1635 | if (v) |
| 1636 | return v; |
| 1637 | |
| 1638 | /* C++: If it was not found as a data field, then try to |
| 1639 | return it as a pointer to a method. */ |
| 1640 | |
| 1641 | if (destructor_name_p (name, t)) |
| 1642 | error ("Cannot get value of destructor"); |
| 1643 | |
| 1644 | v = search_struct_method (name, argp, args, 0, static_memfuncp, t); |
| 1645 | |
| 1646 | if (v == (struct value *) - 1) |
| 1647 | error ("Cannot take address of a method"); |
| 1648 | else if (v == 0) |
| 1649 | { |
| 1650 | if (TYPE_NFN_FIELDS (t)(t)->main_type->type_specific.cplus_stuff->nfn_fields) |
| 1651 | error ("There is no member or method named %s.", name); |
| 1652 | else |
| 1653 | error ("There is no member named %s.", name); |
| 1654 | } |
| 1655 | return v; |
| 1656 | } |
| 1657 | |
| 1658 | if (destructor_name_p (name, t)) |
| 1659 | { |
| 1660 | if (!args[1]) |
| 1661 | { |
| 1662 | /* Destructors are a special case. */ |
| 1663 | int m_index, f_index; |
| 1664 | |
| 1665 | v = NULL((void*)0); |
| 1666 | if (get_destructor_fn_field (t, &m_index, &f_index)) |
| 1667 | { |
| 1668 | v = value_fn_field (NULL((void*)0), TYPE_FN_FIELDLIST1 (t, m_index)(t)->main_type->type_specific.cplus_stuff->fn_fieldlists [m_index].fn_fields, |
| 1669 | f_index, NULL((void*)0), 0); |
| 1670 | } |
| 1671 | if (v == NULL((void*)0)) |
| 1672 | error ("could not find destructor function named %s.", name); |
| 1673 | else |
| 1674 | return v; |
| 1675 | } |
| 1676 | else |
| 1677 | { |
| 1678 | error ("destructor should not have any argument"); |
| 1679 | } |
| 1680 | } |
| 1681 | else |
| 1682 | v = search_struct_method (name, argp, args, 0, static_memfuncp, t); |
| 1683 | |
| 1684 | if (v == (struct value *) - 1) |
| 1685 | { |
| 1686 | error ("One of the arguments you tried to pass to %s could not be converted to what the function wants.", name); |
| 1687 | } |
| 1688 | else if (v == 0) |
| 1689 | { |
| 1690 | /* See if the user tried to invoke data as a function. If so, |
| 1691 | hand it back. If it is not callable (i.e., not a pointer to a |
| 1692 | function), gdb should give an error. */ |
| 1693 | v = search_struct_field (name, *argp, 0, t, 0); |
| 1694 | } |
| 1695 | |
| 1696 | if (!v) |
| 1697 | error ("Structure has no component named %s.", name); |
| 1698 | return v; |
| 1699 | } |
| 1700 | |
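/* Illustrative usage (debugger expressions, not C): in this version of GDB,
   member accesses typed at the prompt, such as

       (gdb) print obj.count
       (gdb) print objptr->name (0)

   are typically serviced by value_struct_elt.  In the second form the
   pointer is stripped by the "follow pointers" loop above before the
   search starts, and the argument list sends the lookup through
   search_struct_method rather than search_struct_field.  */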
| 1701 | /* Search through the methods of an object (and its bases) |
| 1702 | * to find a specified method. Return the pointer to the |
| 1703 | * fn_field list of overloaded instances. |
| 1704 | * Helper function for value_find_oload_list. |
| 1705 | * ARGP is a pointer to a pointer to a value (the object) |
| 1706 | * METHOD is a string containing the method name |
| 1707 | * OFFSET is the offset within the value |
| 1708 | * TYPE is the assumed type of the object |
| 1709 | * NUM_FNS is the number of overloaded instances |
| 1710 | * BASETYPE is set to the actual type of the subobject where the method is found |
| 1711 | * BOFFSET is the offset of the base subobject where the method is found */ |
| 1712 | |
| 1713 | static struct fn_field * |
| 1714 | find_method_list (struct value **argp, char *method, int offset, |
| 1715 | struct type *type, int *num_fns, |
| 1716 | struct type **basetype, int *boffset) |
| 1717 | { |
| 1718 | int i; |
| 1719 | struct fn_field *f; |
| 1720 | CHECK_TYPEDEF (type)(type) = check_typedef (type); |
| 1721 | |
| 1722 | *num_fns = 0; |
| 1723 | |
| 1724 | /* First check in object itself */ |
| 1725 | for (i = TYPE_NFN_FIELDS (type)(type)->main_type->type_specific.cplus_stuff->nfn_fields - 1; i >= 0; i--) |
| 1726 | { |
| 1727 | /* pai: FIXME What about operators and type conversions? */ |
| 1728 | char *fn_field_name = TYPE_FN_FIELDLIST_NAME (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].name; |
| 1729 | if (fn_field_name && (strcmp_iw (fn_field_name, method) == 0)) |
| 1730 | { |
| 1731 | int len = TYPE_FN_FIELDLIST_LENGTH (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].length; |
| 1732 | struct fn_field *f = TYPE_FN_FIELDLIST1 (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].fn_fields; |
| 1733 | |
| 1734 | *num_fns = len; |
| 1735 | *basetype = type; |
| 1736 | *boffset = offset; |
| 1737 | |
| 1738 | /* Resolve any stub methods. */ |
| 1739 | check_stub_method_group (type, i); |
| 1740 | |
| 1741 | return f; |
| 1742 | } |
| 1743 | } |
| 1744 | |
| 1745 | /* Not found in object, check in base subobjects */ |
| 1746 | for (i = TYPE_N_BASECLASSES (type)(type)->main_type->type_specific.cplus_stuff->n_baseclasses - 1; i >= 0; i--) |
| 1747 | { |
| 1748 | int base_offset; |
| 1749 | if (BASETYPE_VIA_VIRTUAL (type, i)((type)->main_type->type_specific.cplus_stuff->virtual_field_bits == ((void*)0) ? 0 : (((type)->main_type->type_specific .cplus_stuff->virtual_field_bits)[((i))>>3] & (1 << (((i))&7))))) |
| 1750 | { |
| 1751 | if (TYPE_HAS_VTABLE (type)(((type)->main_type->type_specific.cplus_stuff->runtime_ptr ) && (((type)->main_type->type_specific.cplus_stuff ->runtime_ptr)->has_vtable))) |
| 1752 | { |
| 1753 | /* HP aCC compiled type, search for virtual base offset |
| 1754 | * according to HP/Taligent runtime spec. */ |
| 1755 | int skip; |
| 1756 | find_rt_vbase_offset (type, TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type, |
| 1757 | VALUE_CONTENTS_ALL (*argp)((void) ((*argp)->lazy && value_fetch_lazy(*argp)) , ((char *) (*argp)->aligner.contents)), |
| 1758 | offset + VALUE_EMBEDDED_OFFSET (*argp)((*argp)->embedded_offset), |
| 1759 | &base_offset, &skip); |
| 1760 | if (skip >= 0) |
| 1761 | error ("Virtual base class offset not found in vtable"); |
| 1762 | } |
| 1763 | else |
| 1764 | { |
| 1765 | /* probably g++ runtime model */ |
| 1766 | base_offset = VALUE_OFFSET (*argp)(*argp)->offset + offset; |
| 1767 | base_offset = |
| 1768 | baseclass_offset (type, i, |
| 1769 | VALUE_CONTENTS (*argp)((void)((*argp)->lazy && value_fetch_lazy(*argp)), ((char *) (*argp)->aligner.contents + (*argp)->embedded_offset )) + base_offset, |
| 1770 | VALUE_ADDRESS (*argp)(*argp)->location.address + base_offset); |
| 1771 | if (base_offset == -1) |
| 1772 | error ("virtual baseclass botch"); |
| 1773 | } |
| 1774 | } |
| 1775 | else |
| 1776 | /* non-virtual base, simply use bit position from debug info */ |
| 1777 | { |
| 1778 | base_offset = TYPE_BASECLASS_BITPOS (type, i)(((type)->main_type->fields[i]).loc.bitpos) / 8; |
| 1779 | } |
| 1780 | f = find_method_list (argp, method, base_offset + offset, |
| 1781 | TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type, num_fns, basetype, |
| 1782 | boffset); |
| 1783 | if (f) |
| 1784 | return f; |
| 1785 | } |
| 1786 | return NULL((void*)0); |
| 1787 | } |
| 1788 | |
| 1789 | /* Return the list of overloaded methods of a specified name. |
| 1790 | * ARGP is a pointer to a pointer to a value (the object) |
| 1791 | * METHOD is the method name |
| 1792 | * OFFSET is the offset within the value contents |
| 1793 | * NUM_FNS is the number of overloaded instances |
| 1794 | * BASETYPE is set to the type of the base subobject that defines the method |
| 1795 | * BOFFSET is the offset of the base subobject which defines the method */ |
| 1796 | |
| 1797 | struct fn_field * |
| 1798 | value_find_oload_method_list (struct value **argp, char *method, int offset, |
| 1799 | int *num_fns, struct type **basetype, |
| 1800 | int *boffset) |
| 1801 | { |
| 1802 | struct type *t; |
| 1803 | |
| 1804 | t = check_typedef (VALUE_TYPE (*argp)(*argp)->type); |
| 1805 | |
| 1806 | /* code snarfed from value_struct_elt */ |
| 1807 | while (TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_PTR || TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_REF) |
| 1808 | { |
| 1809 | *argp = value_ind (*argp); |
| 1810 | /* Don't coerce fn pointer to fn and then back again! */ |
| 1811 | if (TYPE_CODE (VALUE_TYPE (*argp))((*argp)->type)->main_type->code != TYPE_CODE_FUNC) |
| 1812 | COERCE_ARRAY (*argp)do { do { struct type *value_type_arg_tmp = check_typedef ((* argp)->type); if ((value_type_arg_tmp)->main_type->code == TYPE_CODE_REF) *argp = value_at_lazy ((value_type_arg_tmp )->main_type->target_type, unpack_pointer ((*argp)-> type, ((void)((*argp)->lazy && value_fetch_lazy(*argp )), ((char *) (*argp)->aligner.contents + (*argp)->embedded_offset ))), ((*argp)->bfd_section)); } while (0); if (current_language ->c_style_arrays && ((*argp)->type)->main_type ->code == TYPE_CODE_ARRAY) *argp = value_coerce_array (*argp ); if (((*argp)->type)->main_type->code == TYPE_CODE_FUNC ) *argp = value_coerce_function (*argp); } while (0); |
| 1813 | t = check_typedef (VALUE_TYPE (*argp)(*argp)->type); |
| 1814 | } |
| 1815 | |
| 1816 | if (TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_MEMBER) |
| 1817 | error ("Not implemented: member type in value_find_oload_lis"); |
| 1818 | |
| 1819 | if (TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_STRUCT |
| 1820 | && TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_UNION) |
| 1821 | error ("Attempt to extract a component of a value that is not a struct or union"); |
| 1822 | |
| 1823 | return find_method_list (argp, method, 0, t, num_fns, basetype, boffset); |
| 1824 | } |
| 1825 | |
| 1826 | /* Given an array of argument types (ARGTYPES) (which includes an |
| 1827 | entry for "this" in the case of C++ methods), the number of |
| 1828 | arguments NARGS, the NAME of a function, whether it's a method or |
| 1829 | not (METHOD), and the degree of laxness (LAX) in conforming to |
| 1830 | overload resolution rules in ANSI C++, find the best function that |
| 1831 | matches on the argument types according to the overload resolution |
| 1832 | rules. |
| 1833 | |
| 1834 | In the case of class methods, the parameter OBJ is an object value |
| 1835 | in which to search for overloaded methods. |
| 1836 | |
| 1837 | In the case of non-method functions, the parameter FSYM is a symbol |
| 1838 | corresponding to one of the overloaded functions. |
| 1839 | |
| 1840 | Return value is an integer: 0 -> good match, 10 -> debugger applied |
| 1841 | non-standard coercions, 100 -> incompatible. |
| 1842 | |
| 1843 | If a method is being searched for, VALP will hold the value. |
| 1844 | If a non-method is being searched for, SYMP will hold the symbol for it. |
| 1845 | |
| 1846 | If a method is being searched for, and it is a static method, |
| 1847 | then STATICP will point to a non-zero value. |
| 1848 | |
| 1849 | Note: This function does *not* check the value of |
| 1850 | overload_resolution. Caller must check it to see whether overload |
| 1851 | resolution is permitted. |
| 1852 | */ |
| 1853 | |
| 1854 | int |
| 1855 | find_overload_match (struct type **arg_types, int nargs, char *name, int method, |
| 1856 | int lax, struct value **objp, struct symbol *fsym, |
| 1857 | struct value **valp, struct symbol **symp, int *staticp) |
| 1858 | { |
| 1859 | struct value *obj = (objp ? *objp : NULL((void*)0)); |
| 1860 | |
| 1861 | int oload_champ; /* Index of best overloaded function */ |
| 1862 | |
| 1863 | struct badness_vector *oload_champ_bv = NULL((void*)0); /* The measure for the current best match */ |
| 1864 | |
| 1865 | struct value *temp = obj; |
| 1866 | struct fn_field *fns_ptr = NULL((void*)0); /* For methods, the list of overloaded methods */ |
| 1867 | struct symbol **oload_syms = NULL((void*)0); /* For non-methods, the list of overloaded function symbols */ |
| 1868 | int num_fns = 0; /* Number of overloaded instances being considered */ |
| 1869 | struct type *basetype = NULL((void*)0); |
| 1870 | int boffset; |
| 1871 | int ix; |
| 1872 | int static_offset; |
| 1873 | struct cleanup *old_cleanups = NULL((void*)0); |
| 1874 | |
| 1875 | const char *obj_type_name = NULL((void*)0); |
| 1876 | char *func_name = NULL((void*)0); |
| 1877 | enum oload_classification match_quality; |
| 1878 | |
| 1879 | /* Get the list of overloaded methods or functions */ |
| 1880 | if (method) |
| 1881 | { |
| 1882 | obj_type_name = TYPE_NAME (VALUE_TYPE (obj))((obj)->type)->main_type->name; |
| 1883 | /* Hack: evaluate_subexp_standard often passes in a pointer |
| 1884 | value rather than the object itself, so try again */ |
| 1885 | if ((!obj_type_name || !*obj_type_name) && |
| 1886 | (TYPE_CODE (VALUE_TYPE (obj))((obj)->type)->main_type->code == TYPE_CODE_PTR)) |
| 1887 | obj_type_name = TYPE_NAME (TYPE_TARGET_TYPE (VALUE_TYPE (obj)))(((obj)->type)->main_type->target_type)->main_type ->name; |
| 1888 | |
| 1889 | fns_ptr = value_find_oload_method_list (&temp, name, 0, |
| 1890 | &num_fns, |
| 1891 | &basetype, &boffset); |
| 1892 | if (!fns_ptr || !num_fns) |
| 1893 | error ("Couldn't find method %s%s%s", |
| 1894 | obj_type_name, |
| 1895 | (obj_type_name && *obj_type_name) ? "::" : "", |
| 1896 | name); |
| 1897 | /* If we are dealing with stub method types, they should have |
| 1898 | been resolved by find_method_list via value_find_oload_method_list |
| 1899 | above. */ |
| 1900 | gdb_assert (TYPE_DOMAIN_TYPE (fns_ptr[0].type) != NULL)((void) (((fns_ptr[0].type)->main_type->vptr_basetype != ((void*)0)) ? 0 : (internal_error ("/usr/src/gnu/usr.bin/binutils/gdb/valops.c" , 1900, "%s: Assertion `%s' failed.", __PRETTY_FUNCTION__, "TYPE_DOMAIN_TYPE (fns_ptr[0].type) != NULL" ), 0))); |
| 1901 | oload_champ = find_oload_champ (arg_types, nargs, method, num_fns, |
| 1902 | fns_ptr, oload_syms, &oload_champ_bv); |
| 1903 | } |
| 1904 | else |
| 1905 | { |
| 1906 | const char *qualified_name = SYMBOL_CPLUS_DEMANGLED_NAME (fsym)(fsym)->ginfo.language_specific.cplus_specific.demangled_name; |
| 1907 | func_name = cp_func_name (qualified_name); |
| 1908 | |
| 1909 | /* If the name is NULL this must be a C-style function. |
| 1910 | Just return the same symbol. */ |
| 1911 | if (func_name == NULL((void*)0)) |
| 1912 | { |
| 1913 | *symp = fsym; |
| 1914 | return 0; |
| 1915 | } |
| 1916 | |
| 1917 | old_cleanups = make_cleanup (xfree, func_name); |
| 1918 | make_cleanup (xfree, oload_syms); |
| 1919 | make_cleanup (xfree, oload_champ_bv); |
| 1920 | |
| 1921 | oload_champ = find_oload_champ_namespace (arg_types, nargs, |
| 1922 | func_name, |
| 1923 | qualified_name, |
| 1924 | &oload_syms, |
| 1925 | &oload_champ_bv); |
| 1926 | } |
| 1927 | |
| 1928 | /* Check how bad the best match is. */ |
| 1929 | |
| 1930 | match_quality |
| 1931 | = classify_oload_match (oload_champ_bv, nargs, |
| 1932 | oload_method_static (method, fns_ptr, |
| 1933 | oload_champ)); |
| 1934 | |
| 1935 | if (match_quality == INCOMPATIBLE) |
| 1936 | { |
| 1937 | if (method) |
| 1938 | error ("Cannot resolve method %s%s%s to any overloaded instance", |
| 1939 | obj_type_name, |
| 1940 | (obj_type_name && *obj_type_name) ? "::" : "", |
| 1941 | name); |
| 1942 | else |
| 1943 | error ("Cannot resolve function %s to any overloaded instance", |
| 1944 | func_name); |
| 1945 | } |
| 1946 | else if (match_quality == NON_STANDARD) |
| 1947 | { |
| 1948 | if (method) |
| 1949 | warning ("Using non-standard conversion to match method %s%s%s to supplied arguments", |
| 1950 | obj_type_name, |
| 1951 | (obj_type_name && *obj_type_name) ? "::" : "", |
| 1952 | name); |
| 1953 | else |
| 1954 | warning ("Using non-standard conversion to match function %s to supplied arguments", |
| 1955 | func_name); |
| 1956 | } |
| 1957 | |
| 1958 | if (method) |
| 1959 | { |
| 1960 | if (staticp != NULL((void*)0)) |
| 1961 | *staticp = oload_method_static (method, fns_ptr, oload_champ); |
| 1962 | if (TYPE_FN_FIELD_VIRTUAL_P (fns_ptr, oload_champ)((fns_ptr)[oload_champ].voffset > 1)) |
| 1963 | *valp = value_virtual_fn_field (&temp, fns_ptr, oload_champ, basetype, boffset); |
| 1964 | else |
| 1965 | *valp = value_fn_field (&temp, fns_ptr, oload_champ, basetype, boffset); |
| 1966 | } |
| 1967 | else |
| 1968 | { |
| 1969 | *symp = oload_syms[oload_champ]; |
| 1970 | } |
| 1971 | |
| 1972 | if (objp) |
| 1973 | { |
| 1974 | if (TYPE_CODE (VALUE_TYPE (temp))((temp)->type)->main_type->code != TYPE_CODE_PTR |
| 1975 | && TYPE_CODE (VALUE_TYPE (*objp))((*objp)->type)->main_type->code == TYPE_CODE_PTR) |
| 1976 | { |
| 1977 | temp = value_addr (temp); |
| 1978 | } |
| 1979 | *objp = temp; |
| 1980 | } |
| 1981 | if (old_cleanups != NULL((void*)0)) |
| 1982 | do_cleanups (old_cleanups); |
| 1983 | |
| 1984 | switch (match_quality) |
| 1985 | { |
| 1986 | case INCOMPATIBLE: |
| 1987 | return 100; |
| 1988 | case NON_STANDARD: |
| 1989 | return 10; |
| 1990 | default: /* STANDARD */ |
| 1991 | return 0; |
| 1992 | } |
| 1993 | } |
| 1994 | |
| 1995 | /* Find the best overload match, searching for FUNC_NAME in namespaces |
| 1996 | contained in QUALIFIED_NAME until it either finds a good match or |
| 1997 | runs out of namespaces. It stores the overloaded functions in |
| 1998 | *OLOAD_SYMS, and the badness vector in *OLOAD_CHAMP_BV. The |
| 1999 | calling function is responsible for freeing *OLOAD_SYMS and |
| 2000 | *OLOAD_CHAMP_BV. */ |
| 2001 | |
| 2002 | static int |
| 2003 | find_oload_champ_namespace (struct type **arg_types, int nargs, |
| 2004 | const char *func_name, |
| 2005 | const char *qualified_name, |
| 2006 | struct symbol ***oload_syms, |
| 2007 | struct badness_vector **oload_champ_bv) |
| 2008 | { |
| 2009 | int oload_champ; |
| 2010 | |
| 2011 | find_oload_champ_namespace_loop (arg_types, nargs, |
| 2012 | func_name, |
| 2013 | qualified_name, 0, |
| 2014 | oload_syms, oload_champ_bv, |
| 2015 | &oload_champ); |
| 2016 | |
| 2017 | return oload_champ; |
| 2018 | } |
| 2019 | |
| 2020 | /* Helper function for find_oload_champ_namespace; NAMESPACE_LEN is |
| 2021 | how deep we've looked for namespaces, and the champ is stored in |
| 2022 | OLOAD_CHAMP. The return value is 1 if the champ is a good one, 0 |
| 2023 | if it isn't. |
| 2024 | |
| 2025 | It is the caller's responsibility to free *OLOAD_SYMS and |
| 2026 | *OLOAD_CHAMP_BV. */ |
| 2027 | |
| 2028 | static int |
| 2029 | find_oload_champ_namespace_loop (struct type **arg_types, int nargs, |
| 2030 | const char *func_name, |
| 2031 | const char *qualified_name, |
| 2032 | int namespace_len, |
| 2033 | struct symbol ***oload_syms, |
| 2034 | struct badness_vector **oload_champ_bv, |
| 2035 | int *oload_champ) |
| 2036 | { |
| 2037 | int next_namespace_len = namespace_len; |
| 2038 | int searched_deeper = 0; |
| 2039 | int num_fns = 0; |
| 2040 | struct cleanup *old_cleanups; |
| 2041 | int new_oload_champ; |
| 2042 | struct symbol **new_oload_syms; |
| 2043 | struct badness_vector *new_oload_champ_bv; |
| 2044 | char *new_namespace; |
| 2045 | |
| 2046 | if (next_namespace_len != 0) |
| 2047 | { |
| 2048 | gdb_assert (qualified_name[next_namespace_len] == ':')((void) ((qualified_name[next_namespace_len] == ':') ? 0 : (internal_error ("/usr/src/gnu/usr.bin/binutils/gdb/valops.c", 2048, "%s: Assertion `%s' failed." , __PRETTY_FUNCTION__, "qualified_name[next_namespace_len] == ':'" ), 0))); |
| 2049 | next_namespace_len += 2; |
| 2050 | } |
| 2051 | next_namespace_len |
| 2052 | += cp_find_first_component (qualified_name + next_namespace_len); |
| 2053 | |
| 2054 | /* Initialize these to values that can safely be xfree'd. */ |
| 2055 | *oload_syms = NULL((void*)0); |
| 2056 | *oload_champ_bv = NULL((void*)0); |
| 2057 | |
| 2058 | /* First, see if we have a deeper namespace we can search in. If we |
| 2059 | get a good match there, use it. */ |
| 2060 | |
| 2061 | if (qualified_name[next_namespace_len] == ':') |
| 2062 | { |
| 2063 | searched_deeper = 1; |
| 2064 | |
| 2065 | if (find_oload_champ_namespace_loop (arg_types, nargs, |
| 2066 | func_name, qualified_name, |
| 2067 | next_namespace_len, |
| 2068 | oload_syms, oload_champ_bv, |
| 2069 | oload_champ)) |
| 2070 | { |
| 2071 | return 1; |
| 2072 | } |
| 2073 | } |
| 2074 | |
| 2075 | /* If we reach here, either we're in the deepest namespace or we |
| 2076 | didn't find a good match in a deeper namespace. But, in the |
| 2077 | latter case, we still have a bad match in a deeper namespace; |
| 2078 | note that we might not find any match at all in the current |
| 2079 | namespace. (There's always a match in the deepest namespace, |
| 2080 | because this overload mechanism only gets called if there's a |
| 2081 | function symbol to start off with.) */ |
| 2082 | |
| 2083 | old_cleanups = make_cleanup (xfree, *oload_syms); |
Value stored to 'old_cleanups' is never read | |
| 2084 | make_cleanup (xfree, *oload_champ_bv); |
| 2085 | new_namespace = alloca (namespace_len + 1)__builtin_alloca(namespace_len + 1); |
| 2086 | strncpy (new_namespace, qualified_name, namespace_len); |
| 2087 | new_namespace[namespace_len] = '\0'; |
| 2088 | new_oload_syms = make_symbol_overload_list (func_name, |
| 2089 | new_namespace); |
| 2090 | while (new_oload_syms[num_fns]) |
| 2091 | ++num_fns; |
| 2092 | |
| 2093 | new_oload_champ = find_oload_champ (arg_types, nargs, 0, num_fns, |
| 2094 | NULL((void*)0), new_oload_syms, |
| 2095 | &new_oload_champ_bv); |
| 2096 | |
| 2097 | /* Case 1: We found a good match. Free earlier matches (if any), |
| 2098 | and return it. Case 2: We didn't find a good match, but we're |
| 2099 | not the deepest function. Then go with the bad match that the |
| 2100 | deeper function found. Case 3: We found a bad match, and we're |
| 2101 | the deepest function. Then return what we found, even though |
| 2102 | it's a bad match. */ |
| 2103 | |
| 2104 | if (new_oload_champ != -1 |
| 2105 | && classify_oload_match (new_oload_champ_bv, nargs, 0) == STANDARD) |
| 2106 | { |
| 2107 | *oload_syms = new_oload_syms; |
| 2108 | *oload_champ = new_oload_champ; |
| 2109 | *oload_champ_bv = new_oload_champ_bv; |
| 2110 | do_cleanups (old_cleanups); |
| 2111 | return 1; |
| 2112 | } |
| 2113 | else if (searched_deeper) |
| 2114 | { |
| 2115 | xfree (new_oload_syms); |
| 2116 | xfree (new_oload_champ_bv); |
| 2117 | discard_cleanups (old_cleanups); |
| 2118 | return 0; |
| 2119 | } |
| 2120 | else |
| 2121 | { |
| 2122 | gdb_assert (new_oload_champ != -1)((void) ((new_oload_champ != -1) ? 0 : (internal_error ("/usr/src/gnu/usr.bin/binutils/gdb/valops.c" , 2122, "%s: Assertion `%s' failed.", __PRETTY_FUNCTION__, "new_oload_champ != -1" ), 0))); |
| 2123 | *oload_syms = new_oload_syms; |
| 2124 | *oload_champ = new_oload_champ; |
| 2125 | *oload_champ_bv = new_oload_champ_bv; |
| 2126 | discard_cleanups (old_cleanups); |
| 2127 | return 0; |
| 2128 | } |
| 2129 | } |
| 2130 | |
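/* Illustrative sketch (not GDB code): how the loop above walks a qualified
   name such as "A::B::f" through successively longer namespace prefixes.
   first_component_len is a simplified stand-in for GDB's
   cp_find_first_component; it ignores template arguments and operator
   names, which the real function handles.  */
#include <stdio.h>
#include <string.h>

static size_t
first_component_len (const char *name)
{
  const char *sep = strstr (name, "::");
  return sep ? (size_t) (sep - name) : strlen (name);
}

int
main (void)
{
  const char *qualified = "A::B::f";
  size_t len = 0;

  /* Prefixes considered: "", "A", "A::B".  Because the recursion in
     find_oload_champ_namespace_loop happens before the local search, the
     deepest namespace is tried first, matching the comments above.  */
  for (;;)
    {
      printf ("namespace prefix: \"%.*s\"\n", (int) len, qualified);
      if (len != 0)
        len += 2;                       /* skip the "::" separator */
      len += first_component_len (qualified + len);
      if (qualified[len] != ':')
        break;
    }
  return 0;
}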
| 2131 | /* Look for a function to take NARGS args of types ARG_TYPES. Find |
| 2132 | the best match from among the overloaded methods or functions |
| 2133 | (depending on METHOD) given by FNS_PTR or OLOAD_SYMS, respectively. |
| 2134 | The number of methods/functions in the list is given by NUM_FNS. |
| 2135 | Return the index of the best match; store an indication of the |
| 2136 | quality of the match in OLOAD_CHAMP_BV. |
| 2137 | |
| 2138 | It is the caller's responsibility to free *OLOAD_CHAMP_BV. */ |
| 2139 | |
| 2140 | static int |
| 2141 | find_oload_champ (struct type **arg_types, int nargs, int method, |
| 2142 | int num_fns, struct fn_field *fns_ptr, |
| 2143 | struct symbol **oload_syms, |
| 2144 | struct badness_vector **oload_champ_bv) |
| 2145 | { |
| 2146 | int ix; |
| 2147 | struct badness_vector *bv; /* A measure of how good an overloaded instance is */ |
| 2148 | int oload_champ = -1; /* Index of best overloaded function */ |
| 2149 | int oload_ambiguous = 0; /* Current ambiguity state for overload resolution */ |
| 2150 | /* 0 => no ambiguity, 1 => two good funcs, 2 => incomparable funcs */ |
| 2151 | |
| 2152 | *oload_champ_bv = NULL((void*)0); |
| 2153 | |
| 2154 | /* Consider each candidate in turn */ |
| 2155 | for (ix = 0; ix < num_fns; ix++) |
| 2156 | { |
| 2157 | int jj; |
| 2158 | int static_offset = oload_method_static (method, fns_ptr, ix); |
| 2159 | int nparms; |
| 2160 | struct type **parm_types; |
| 2161 | |
| 2162 | if (method) |
| 2163 | { |
| 2164 | nparms = TYPE_NFIELDS (TYPE_FN_FIELD_TYPE (fns_ptr, ix))((fns_ptr)[ix].type)->main_type->nfields; |
| 2165 | } |
| 2166 | else |
| 2167 | { |
| 2168 | /* If it's not a method, this is the proper place */ |
| 2169 | nparms=TYPE_NFIELDS(SYMBOL_TYPE(oload_syms[ix]))((oload_syms[ix])->type)->main_type->nfields; |
| 2170 | } |
| 2171 | |
| 2172 | /* Prepare array of parameter types */ |
| 2173 | parm_types = (struct type **) xmalloc (nparms * (sizeof (struct type *))); |
| 2174 | for (jj = 0; jj < nparms; jj++) |
| 2175 | parm_types[jj] = (method |
| 2176 | ? (TYPE_FN_FIELD_ARGS (fns_ptr, ix)((fns_ptr)[ix].type)->main_type->fields[jj].type) |
| 2177 | : TYPE_FIELD_TYPE (SYMBOL_TYPE (oload_syms[ix]), jj)((((oload_syms[ix])->type)->main_type->fields[jj]).type )); |
| 2178 | |
| 2179 | /* Compare parameter types to supplied argument types. Skip THIS for |
| 2180 | static methods. */ |
| 2181 | bv = rank_function (parm_types, nparms, arg_types + static_offset, |
| 2182 | nargs - static_offset); |
| 2183 | |
| 2184 | if (!*oload_champ_bv) |
| 2185 | { |
| 2186 | *oload_champ_bv = bv; |
| 2187 | oload_champ = 0; |
| 2188 | } |
| 2189 | else |
| 2190 | /* See whether current candidate is better or worse than previous best */ |
| 2191 | switch (compare_badness (bv, *oload_champ_bv)) |
| 2192 | { |
| 2193 | case 0: |
| 2194 | oload_ambiguous = 1; /* top two contenders are equally good */ |
| 2195 | break; |
| 2196 | case 1: |
| 2197 | oload_ambiguous = 2; /* incomparable top contenders */ |
| 2198 | break; |
| 2199 | case 2: |
| 2200 | *oload_champ_bv = bv; /* new champion, record details */ |
| 2201 | oload_ambiguous = 0; |
| 2202 | oload_champ = ix; |
| 2203 | break; |
| 2204 | case 3: |
| 2205 | default: |
| 2206 | break; |
| 2207 | } |
| 2208 | xfree (parm_types); |
| 2209 | if (overload_debug) |
| 2210 | { |
| 2211 | if (method) |
| 2212 | fprintf_filtered (gdb_stderr,"Overloaded method instance %s, # of parms %d\n", fns_ptr[ix].physname, nparms); |
| 2213 | else |
| 2214 | fprintf_filtered (gdb_stderr,"Overloaded function instance %s # of parms %d\n", SYMBOL_DEMANGLED_NAME (oload_syms[ix])(symbol_demangled_name (&(oload_syms[ix])->ginfo)), nparms); |
| 2215 | for (jj = 0; jj < nargs - static_offset; jj++) |
| 2216 | fprintf_filtered (gdb_stderr,"...Badness @ %d : %d\n", jj, bv->rank[jj]); |
| 2217 | fprintf_filtered (gdb_stderr,"Overload resolution champion is %d, ambiguous? %d\n", oload_champ, oload_ambiguous); |
| 2218 | } |
| 2219 | } |
| 2220 | |
| 2221 | return oload_champ; |
| 2222 | } |
| 2223 | |
| 2224 | /* Return 1 if we're looking at a static method, 0 if we're looking at |
| 2225 | a non-static method or a function that isn't a method. */ |
| 2226 | |
| 2227 | static int |
| 2228 | oload_method_static (int method, struct fn_field *fns_ptr, int index) |
| 2229 | { |
| 2230 | if (method && TYPE_FN_FIELD_STATIC_P (fns_ptr, index)((fns_ptr)[index].voffset == 1)) |
| 2231 | return 1; |
| 2232 | else |
| 2233 | return 0; |
| 2234 | } |
| 2235 | |
| 2236 | /* Check how good an overload match OLOAD_CHAMP_BV represents. */ |
| 2237 | |
| 2238 | static enum oload_classification |
| 2239 | classify_oload_match (struct badness_vector *oload_champ_bv, |
| 2240 | int nargs, |
| 2241 | int static_offset) |
| 2242 | { |
| 2243 | int ix; |
| 2244 | |
| 2245 | for (ix = 1; ix <= nargs - static_offset; ix++) |
| 2246 | { |
| 2247 | if (oload_champ_bv->rank[ix] >= 100) |
| 2248 | return INCOMPATIBLE; /* truly mismatched types */ |
| 2249 | else if (oload_champ_bv->rank[ix] >= 10) |
| 2250 | return NON_STANDARD; /* non-standard type conversions needed */ |
| 2251 | } |
| 2252 | |
| 2253 | return STANDARD; /* Only standard conversions needed. */ |
| 2254 | } |
| 2255 | |
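/* Illustrative sketch (not GDB code): the thresholds above are what turn
   a badness vector into the 0 / 10 / 100 codes documented for
   find_overload_match.  Slot 0 of the ranks is skipped, matching the
   "ix = 1" start of the loop.  */
#include <stdio.h>

enum sketch_classification { STD = 0, NONSTD = 1, INCOMPAT = 2 };

static enum sketch_classification
classify (const int *rank, int nargs)
{
  int ix;
  for (ix = 1; ix <= nargs; ix++)
    {
      if (rank[ix] >= 100)
        return INCOMPAT;                /* truly mismatched argument */
      if (rank[ix] >= 10)
        return NONSTD;                  /* needs a non-standard conversion */
    }
  return STD;
}

int
main (void)
{
  int exact[] = { 0, 0, 2 };            /* ranks for two arguments */
  int fuzzy[] = { 0, 0, 10 };
  int bad[] = { 0, 100, 0 };

  printf ("%d %d %d\n", classify (exact, 2), classify (fuzzy, 2),
          classify (bad, 2));           /* prints 0 1 2 */
  return 0;
}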
| 2256 | /* C++: return 1 if NAME is a legitimate name for the destructor |
| 2257 | of type TYPE. If TYPE does not have a destructor, or |
| 2258 | if NAME is inappropriate for TYPE, an error is signaled. */ |
| 2259 | int |
| 2260 | destructor_name_p (const char *name, const struct type *type) |
| 2261 | { |
| 2262 | /* destructors are a special case. */ |
| 2263 | |
| 2264 | if (name[0] == '~') |
| 2265 | { |
| 2266 | char *dname = type_name_no_tag (type); |
| 2267 | char *cp = strchr (dname, '<'); |
| 2268 | unsigned int len; |
| 2269 | |
| 2270 | /* Do not compare the template part for template classes. */ |
| 2271 | if (cp == NULL((void*)0)) |
| 2272 | len = strlen (dname); |
| 2273 | else |
| 2274 | len = cp - dname; |
| 2275 | if (strlen (name + 1) != len || strncmp (dname, name + 1, len) != 0) |
| 2276 | error ("name of destructor must equal name of class"); |
| 2277 | else |
| 2278 | return 1; |
| 2279 | } |
| 2280 | return 0; |
| 2281 | } |
| 2282 | |
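/* Illustrative sketch (not GDB code): the comparison destructor_name_p
   performs.  For a template class the tag name carries its arguments
   (e.g. "Foo<int>"), so only the part before '<' is compared against the
   name following the '~'.  Unlike the real function, this sketch returns
   0 on a mismatch instead of signalling an error.  */
#include <stdio.h>
#include <string.h>

static int
dtor_name_matches (const char *name, const char *class_name)
{
  const char *cp = strchr (class_name, '<');
  size_t len = cp ? (size_t) (cp - class_name) : strlen (class_name);

  return name[0] == '~'
         && strlen (name + 1) == len
         && strncmp (class_name, name + 1, len) == 0;
}

int
main (void)
{
  printf ("%d\n", dtor_name_matches ("~Foo", "Foo<int>"));  /* prints 1 */
  printf ("%d\n", dtor_name_matches ("~Foo", "Bar"));       /* prints 0 */
  return 0;
}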
| 2283 | /* Helper function for check_field: Given TYPE, a structure/union, |
| 2284 | return 1 if the component named NAME from the ultimate |
| 2285 | target structure/union is defined, otherwise, return 0. */ |
| 2286 | |
| 2287 | static int |
| 2288 | check_field_in (struct type *type, const char *name) |
| 2289 | { |
| 2290 | int i; |
| 2291 | |
| 2292 | for (i = TYPE_NFIELDS (type)(type)->main_type->nfields - 1; i >= TYPE_N_BASECLASSES (type)(type)->main_type->type_specific.cplus_stuff->n_baseclasses; i--) |
| 2293 | { |
| 2294 | char *t_field_name = TYPE_FIELD_NAME (type, i)(((type)->main_type->fields[i]).name); |
| 2295 | if (t_field_name && (strcmp_iw (t_field_name, name) == 0)) |
| 2296 | return 1; |
| 2297 | } |
| 2298 | |
| 2299 | /* C++: If it was not found as a data field, then try to |
| 2300 | return it as a pointer to a method. */ |
| 2301 | |
| 2302 | /* Destructors are a special case. */ |
| 2303 | if (destructor_name_p (name, type)) |
| 2304 | { |
| 2305 | int m_index, f_index; |
| 2306 | |
| 2307 | return get_destructor_fn_field (type, &m_index, &f_index); |
| 2308 | } |
| 2309 | |
| 2310 | for (i = TYPE_NFN_FIELDS (type)(type)->main_type->type_specific.cplus_stuff->nfn_fields - 1; i >= 0; --i) |
| 2311 | { |
| 2312 | if (strcmp_iw (TYPE_FN_FIELDLIST_NAME (type, i)(type)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].name, name) == 0) |
| 2313 | return 1; |
| 2314 | } |
| 2315 | |
| 2316 | for (i = TYPE_N_BASECLASSES (type)(type)->main_type->type_specific.cplus_stuff->n_baseclasses - 1; i >= 0; i--) |
| 2317 | if (check_field_in (TYPE_BASECLASS (type, i)(type)->main_type->fields[i].type, name)) |
| 2318 | return 1; |
| 2319 | |
| 2320 | return 0; |
| 2321 | } |
| 2322 | |
| 2323 | |
| 2324 | /* C++: Given ARG1, a value of type (pointer to a)* structure/union, |
| 2325 | return 1 if the component named NAME from the ultimate |
| 2326 | target structure/union is defined, otherwise, return 0. */ |
| 2327 | |
| 2328 | int |
| 2329 | check_field (struct value *arg1, const char *name) |
| 2330 | { |
| 2331 | struct type *t; |
| 2332 | |
| 2333 | COERCE_ARRAY (arg1)do { do { struct type *value_type_arg_tmp = check_typedef ((arg1 )->type); if ((value_type_arg_tmp)->main_type->code == TYPE_CODE_REF) arg1 = value_at_lazy ((value_type_arg_tmp)-> main_type->target_type, unpack_pointer ((arg1)->type, ( (void)((arg1)->lazy && value_fetch_lazy(arg1)), (( char *) (arg1)->aligner.contents + (arg1)->embedded_offset ))), ((arg1)->bfd_section)); } while (0); if (current_language ->c_style_arrays && ((arg1)->type)->main_type ->code == TYPE_CODE_ARRAY) arg1 = value_coerce_array (arg1 ); if (((arg1)->type)->main_type->code == TYPE_CODE_FUNC ) arg1 = value_coerce_function (arg1); } while (0); |
| 2334 | |
| 2335 | t = VALUE_TYPE (arg1)(arg1)->type; |
| 2336 | |
| 2337 | /* Follow pointers until we get to a non-pointer. */ |
| 2338 | |
| 2339 | for (;;) |
| 2340 | { |
| 2341 | CHECK_TYPEDEF (t)(t) = check_typedef (t); |
| 2342 | if (TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_PTR && TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_REF) |
| 2343 | break; |
| 2344 | t = TYPE_TARGET_TYPE (t)(t)->main_type->target_type; |
| 2345 | } |
| 2346 | |
| 2347 | if (TYPE_CODE (t)(t)->main_type->code == TYPE_CODE_MEMBER) |
| 2348 | error ("not implemented: member type in check_field"); |
| 2349 | |
| 2350 | if (TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_STRUCT |
| 2351 | && TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_UNION) |
| 2352 | error ("Internal error: `this' is not an aggregate"); |
| 2353 | |
| 2354 | return check_field_in (t, name); |
| 2355 | } |
| 2356 | |
| 2357 | /* C++: Given an aggregate type CURTYPE, and a member name NAME, |
| 2358 | return the appropriate member. This function is used to resolve |
| 2359 | user expressions of the form "DOMAIN::NAME". For more details on |
| 2360 | what happens, see the comment before |
| 2361 | value_struct_elt_for_reference. */ |
| 2362 | |
| 2363 | struct value * |
| 2364 | value_aggregate_elt (struct type *curtype, |
| 2365 | char *name, |
| 2366 | enum noside noside) |
| 2367 | { |
| 2368 | switch (TYPE_CODE (curtype)(curtype)->main_type->code) |
| 2369 | { |
| 2370 | case TYPE_CODE_STRUCT: |
| 2371 | case TYPE_CODE_UNION: |
| 2372 | return value_struct_elt_for_reference (curtype, 0, curtype, name, NULL((void*)0), |
| 2373 | noside); |
| 2374 | case TYPE_CODE_NAMESPACE: |
| 2375 | return value_namespace_elt (curtype, name, noside); |
| 2376 | default: |
| 2377 | internal_error (__FILE__"/usr/src/gnu/usr.bin/binutils/gdb/valops.c", __LINE__2377, |
| 2378 | "non-aggregate type in value_aggregate_elt"); |
| 2379 | } |
| 2380 | } |
| 2381 | |
| 2382 | /* C++: Given an aggregate type CURTYPE, and a member name NAME, |
| 2383 | return the address of this member as a "pointer to member" |
| 2384 | type. If INTYPE is non-null, then it will be the type |
| 2385 | of the member we are looking for. This will help us resolve |
| 2386 | "pointers to member functions". This function is used |
| 2387 | to resolve user expressions of the form "DOMAIN::NAME". */ |
| 2388 | |
| 2389 | static struct value * |
| 2390 | value_struct_elt_for_reference (struct type *domain, int offset, |
| 2391 | struct type *curtype, char *name, |
| 2392 | struct type *intype, |
| 2393 | enum noside noside) |
| 2394 | { |
| 2395 | struct type *t = curtype; |
| 2396 | int i; |
| 2397 | struct value *v; |
| 2398 | |
| 2399 | if (TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_STRUCT |
| 2400 | && TYPE_CODE (t)(t)->main_type->code != TYPE_CODE_UNION) |
| 2401 | error ("Internal error: non-aggregate type to value_struct_elt_for_reference"); |
| 2402 | |
| 2403 | for (i = TYPE_NFIELDS (t)(t)->main_type->nfields - 1; i >= TYPE_N_BASECLASSES (t)(t)->main_type->type_specific.cplus_stuff->n_baseclasses; i--) |
| 2404 | { |
| 2405 | char *t_field_name = TYPE_FIELD_NAME (t, i)(((t)->main_type->fields[i]).name); |
| 2406 | |
| 2407 | if (t_field_name && strcmp (t_field_name, name) == 0) |
| 2408 | { |
| 2409 | if (TYPE_FIELD_STATIC (t, i)((t)->main_type->fields[i].static_kind != 0)) |
| 2410 | { |
| 2411 | v = value_static_field (t, i); |
| 2412 | if (v == NULL((void*)0)) |
| 2413 | error ("static field %s has been optimized out", |
| 2414 | name); |
| 2415 | return v; |
| 2416 | } |
| 2417 | if (TYPE_FIELD_PACKED (t, i)((((t)->main_type->fields[i]).bitsize)!=0)) |
| 2418 | error ("pointers to bitfield members not allowed"); |
| 2419 | |
| 2420 | return value_from_longest |
| 2421 | (lookup_reference_type (lookup_member_type (TYPE_FIELD_TYPE (t, i)(((t)->main_type->fields[i]).type), |
| 2422 | domain)), |
| 2423 | offset + (LONGESTlong) (TYPE_FIELD_BITPOS (t, i)(((t)->main_type->fields[i]).loc.bitpos) >> 3)); |
| 2424 | } |
| 2425 | } |
| 2426 | |
| 2427 | /* C++: If it was not found as a data field, then try to |
| 2428 | return it as a pointer to a method. */ |
| 2429 | |
| 2430 | /* Destructors are a special case. */ |
| 2431 | if (destructor_name_p (name, t)) |
| 2432 | { |
| 2433 | error ("member pointers to destructors not implemented yet"); |
| 2434 | } |
| 2435 | |
| 2436 | /* Perform all necessary dereferencing. */ |
| 2437 | while (intype && TYPE_CODE (intype)(intype)->main_type->code == TYPE_CODE_PTR) |
| 2438 | intype = TYPE_TARGET_TYPE (intype)(intype)->main_type->target_type; |
| 2439 | |
| 2440 | for (i = TYPE_NFN_FIELDS (t)(t)->main_type->type_specific.cplus_stuff->nfn_fields - 1; i >= 0; --i) |
| 2441 | { |
| 2442 | char *t_field_name = TYPE_FN_FIELDLIST_NAME (t, i)(t)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].name; |
| 2443 | char dem_opname[64]; |
| 2444 | |
| 2445 | if (strncmp (t_field_name, "__", 2) == 0 || |
| 2446 | strncmp (t_field_name, "op", 2) == 0 || |
| 2447 | strncmp (t_field_name, "type", 4) == 0) |
| 2448 | { |
| 2449 | if (cplus_demangle_opname (t_field_name, dem_opname, DMGL_ANSI(1 << 1))) |
| 2450 | t_field_name = dem_opname; |
| 2451 | else if (cplus_demangle_opname (t_field_name, dem_opname, 0)) |
| 2452 | t_field_name = dem_opname; |
| 2453 | } |
| 2454 | if (t_field_name && strcmp (t_field_name, name) == 0) |
| 2455 | { |
| 2456 | int j = TYPE_FN_FIELDLIST_LENGTH (t, i)(t)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].length; |
| 2457 | struct fn_field *f = TYPE_FN_FIELDLIST1 (t, i)(t)->main_type->type_specific.cplus_stuff->fn_fieldlists [i].fn_fields; |
| 2458 | |
| 2459 | check_stub_method_group (t, i); |
| 2460 | |
| 2461 | if (intype == 0 && j > 1) |
| 2462 | error ("non-unique member `%s' requires type instantiation", name); |
| 2463 | if (intype) |
| 2464 | { |
| 2465 | while (j--) |
| 2466 | if (TYPE_FN_FIELD_TYPE (f, j)(f)[j].type == intype) |
| 2467 | break; |
| 2468 | if (j < 0) |
| 2469 | error ("no member function matches that type instantiation"); |
| 2470 | } |
| 2471 | else |
| 2472 | j = 0; |
| 2473 | |
| 2474 |           if (TYPE_FN_FIELD_VIRTUAL_P (f, j))
| 2475 |             {
| 2476 |               return value_from_longest
| 2477 |                 (lookup_reference_type
| 2478 |                  (lookup_member_type (TYPE_FN_FIELD_TYPE (f, j),
| 2479 |                                       domain)),
| 2480 |                  (LONGEST) METHOD_PTR_FROM_VOFFSET (TYPE_FN_FIELD_VOFFSET (f, j)));
| 2481 |             }
| 2482 |           else
| 2483 |             {
| 2484 |               struct symbol *s = lookup_symbol (TYPE_FN_FIELD_PHYSNAME (f, j),
| 2485 |                                                 0, VAR_DOMAIN, 0, NULL);
| 2486 |               if (s == NULL)
| 2487 |                 {
| 2488 |                   v = 0;
| 2489 |                 }
| 2490 |               else
| 2491 |                 {
| 2492 |                   v = read_var_value (s, 0);
| 2493 | #if 0
| 2494 |                   VALUE_TYPE (v) = lookup_reference_type
| 2495 |                     (lookup_member_type (TYPE_FN_FIELD_TYPE (f, j),
| 2496 | domain)); |
| 2497 | #endif |
| 2498 | } |
| 2499 | return v; |
| 2500 | } |
| 2501 | } |
| 2502 | } |
| 2503 |   for (i = TYPE_N_BASECLASSES (t) - 1; i >= 0; i--)
| 2504 |     {
| 2505 |       struct value *v;
| 2506 |       int base_offset;
| 2507 |
| 2508 |       if (BASETYPE_VIA_VIRTUAL (t, i))
| 2509 |         base_offset = 0;
| 2510 |       else
| 2511 |         base_offset = TYPE_BASECLASS_BITPOS (t, i) / 8;
| 2512 |       v = value_struct_elt_for_reference (domain,
| 2513 |                                           offset + base_offset,
| 2514 |                                           TYPE_BASECLASS (t, i),
| 2515 | name, |
| 2516 | intype, |
| 2517 | noside); |
| 2518 | if (v) |
| 2519 | return v; |
| 2520 | } |
| 2521 | |
| 2522 | /* As a last chance, pretend that CURTYPE is a namespace, and look |
| 2523 | it up that way; this (frequently) works for types nested inside |
| 2524 | classes. */ |
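|  |   /* An illustrative (hypothetical) case: with "struct Outer { struct
|  |      Inner { ... }; };", the name "Inner" is neither a field nor a
|  |      method of Outer, but treating Outer as a namespace lets the
|  |      lookup below find the nested type.  */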
| 2525 | |
| 2526 | return value_maybe_namespace_elt (curtype, name, noside); |
| 2527 | } |
| 2528 | |
| 2529 | /* C++: Return the member NAME of the namespace given by the type |
| 2530 | CURTYPE. */ |
| 2531 | |
| 2532 | static struct value * |
| 2533 | value_namespace_elt (const struct type *curtype, |
| 2534 | char *name, |
| 2535 | enum noside noside) |
| 2536 | { |
| 2537 | struct value *retval = value_maybe_namespace_elt (curtype, name, |
| 2538 | noside); |
| 2539 | |
| 2540 |   if (retval == NULL)
| 2541 |     error ("No symbol \"%s\" in namespace \"%s\".", name,
| 2542 |            TYPE_TAG_NAME (curtype));
| 2543 | |
| 2544 | return retval; |
| 2545 | } |
| 2546 | |
| 2547 | /* A helper function used by value_namespace_elt and |
| 2548 | value_struct_elt_for_reference. It looks up NAME inside the |
| 2549 | context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE |
| 2550 | is a class and NAME refers to a type in CURTYPE itself (as opposed |
| 2551 | to, say, some base class of CURTYPE). */ |
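|  | /* For example (illustrative only): with CURTYPE naming the namespace
|  |    "std" and NAME "cout", the symbol "std::cout" is looked up; with
|  |    CURTYPE naming a class and NAME a type nested directly in it, the
|  |    typedef symbol is found instead.  NULL is returned when no such
|  |    symbol exists.  */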
| 2552 | |
| 2553 | static struct value * |
| 2554 | value_maybe_namespace_elt (const struct type *curtype, |
| 2555 | char *name, |
| 2556 | enum noside noside) |
| 2557 | { |
| 2558 |   const char *namespace_name = TYPE_TAG_NAME (curtype);
| 2559 |   struct symbol *sym;
| 2560 |
| 2561 |   sym = cp_lookup_symbol_namespace (namespace_name, name, NULL,
| 2562 |                                     get_selected_block (0), VAR_DOMAIN,
| 2563 |                                     NULL);
| 2564 |
| 2565 |   if (sym == NULL)
| 2566 |     return NULL;
| 2567 |   else if ((noside == EVAL_AVOID_SIDE_EFFECTS)
| 2568 |            && (SYMBOL_CLASS (sym) == LOC_TYPEDEF))
| 2569 |     return allocate_value (SYMBOL_TYPE (sym));
| 2570 | else |
| 2571 | return value_of_variable (sym, get_selected_block (0)); |
| 2572 | } |
| 2573 | |
| 2574 | /* Given a pointer value V, find the real (RTTI) type |
| 2575 | of the object it points to. |
| 2576 | Other parameters FULL, TOP, USING_ENC as with value_rtti_type() |
| 2577 | and refer to the values computed for the object pointed to. */ |
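|  | /* Sketch of the intended use (not from the original sources): if V
|  |    holds a "Base *" that actually points to a Derived object, the
|  |    pointer is dereferenced and value_rtti_type () reports Derived,
|  |    with *FULL, *TOP and *USING_ENC describing the enclosed object.  */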
| 2578 | |
| 2579 | struct type * |
| 2580 | value_rtti_target_type (struct value *v, int *full, int *top, int *using_enc) |
| 2581 | { |
| 2582 | struct value *target; |
| 2583 | |
| 2584 | target = value_ind (v); |
| 2585 | |
| 2586 | return value_rtti_type (target, full, top, using_enc); |
| 2587 | } |
| 2588 | |
| 2589 | /* Given a value pointed to by ARGP, check its real run-time type, and |
| 2590 | if that is different from the enclosing type, create a new value |
| 2591 | using the real run-time type as the enclosing type (and of the same |
| 2592 | type as ARGP) and return it, with the embedded offset adjusted to |
| 2593 | be the correct offset to the enclosed object.
| 2594 | RTYPE is the type, and XFULL, XTOP, and XUSING_ENC are the other |
| 2595 | parameters, computed by value_rtti_type(). If these are available, |
| 2596 | they can be supplied and a second call to value_rtti_type() is avoided. |
| 2597 | (Pass RTYPE == NULL if they're not available.)  */
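|  | /* A rough example (illustrative, with hypothetical types): for a value
|  |    of declared type Base that is really the Base subobject of a Derived
|  |    object in memory, the returned value has Derived as its enclosing
|  |    type and an embedded offset locating the Base part within it.  */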
| 2598 | |
| 2599 | struct value * |
| 2600 | value_full_object (struct value *argp, struct type *rtype, int xfull, int xtop, |
| 2601 | int xusing_enc) |
| 2602 | { |
| 2603 | struct type *real_type; |
| 2604 | int full = 0; |
| 2605 | int top = -1; |
| 2606 | int using_enc = 0; |
| 2607 | struct value *new_val; |
| 2608 | |
| 2609 | if (rtype) |
| 2610 | { |
| 2611 | real_type = rtype; |
| 2612 | full = xfull; |
| 2613 | top = xtop; |
| 2614 | using_enc = xusing_enc; |
| 2615 | } |
| 2616 | else |
| 2617 | real_type = value_rtti_type (argp, &full, &top, &using_enc); |
| 2618 | |
| 2619 | /* If no RTTI data, or if object is already complete, do nothing */ |
| 2620 |   if (!real_type || real_type == VALUE_ENCLOSING_TYPE (argp))
| 2621 | return argp; |
| 2622 | |
| 2623 | /* If we have the full object, but for some reason the enclosing |
| 2624 | type is wrong, set it *//* pai: FIXME -- sounds iffy */ |
| 2625 | if (full) |
| 2626 | { |
| 2627 | argp = value_change_enclosing_type (argp, real_type); |
| 2628 | return argp; |
| 2629 | } |
| 2630 | |
| 2631 | /* Check if object is in memory */ |
| 2632 |   if (VALUE_LVAL (argp) != lval_memory)
| 2633 |     {
| 2634 |       warning ("Couldn't retrieve complete object of RTTI type %s; object may be in register(s).", TYPE_NAME (real_type));
| 2635 | |
| 2636 | return argp; |
| 2637 | } |
| 2638 | |
| 2639 | /* All other cases -- retrieve the complete object */ |
| 2640 | /* Go back by the computed top_offset from the beginning of the object, |
| 2641 | adjusting for the embedded offset of argp if that's what value_rtti_type |
| 2642 | used for its computation. */ |
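|  |   /* As a worked example (hypothetical numbers): if ARGP sits at address
|  |      0x1010, TOP is 16 and USING_ENC is 0 with an embedded offset of 0,
|  |      the complete object is read starting at 0x1010 - 16 = 0x1000.  */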
| 2643 |   new_val = value_at_lazy (real_type, VALUE_ADDRESS (argp) - top +
| 2644 |                            (using_enc ? 0 : VALUE_EMBEDDED_OFFSET (argp)),
| 2645 |                            VALUE_BFD_SECTION (argp));
| 2646 |   VALUE_TYPE (new_val) = VALUE_TYPE (argp);
| 2647 |   VALUE_EMBEDDED_OFFSET (new_val) = using_enc ? top + VALUE_EMBEDDED_OFFSET (argp) : top;
| 2648 | return new_val; |
| 2649 | } |
| 2650 | |
| 2651 | |
| 2652 | |
| 2653 | |
| 2654 | /* Return the value of the local variable, if one exists. |
| 2655 | Flag COMPLAIN signals an error if the request is made in an |
| 2656 | inappropriate context. */ |
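|  | /* For instance (illustrative): value_of_this () below calls this with
|  |    NAME set to "this" or "self"; with COMPLAIN zero the function simply
|  |    returns 0 when no frame, function, or matching symbol is available.  */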
| 2657 | |
| 2658 | struct value * |
| 2659 | value_of_local (const char *name, int complain) |
| 2660 | { |
| 2661 | struct symbol *func, *sym; |
| 2662 | struct block *b; |
| 2663 | struct value * ret; |
| 2664 | |
| 2665 | if (deprecated_selected_frame == 0) |
| 2666 | { |
| 2667 | if (complain) |
| 2668 | error ("no frame selected"); |
| 2669 | else |
| 2670 | return 0; |
| 2671 | } |
| 2672 | |
| 2673 | func = get_frame_function (deprecated_selected_frame); |
| 2674 | if (!func) |
| 2675 | { |
| 2676 | if (complain) |
| 2677 | error ("no `%s' in nameless context", name); |
| 2678 | else |
| 2679 | return 0; |
| 2680 | } |
| 2681 | |
| 2682 |   b = SYMBOL_BLOCK_VALUE (func);
| 2683 |   if (dict_empty (BLOCK_DICT (b)))
| 2684 | { |
| 2685 | if (complain) |
| 2686 | error ("no args, no `%s'", name); |
| 2687 | else |
| 2688 | return 0; |
| 2689 | } |
| 2690 | |
| 2691 | /* Calling lookup_block_symbol is necessary to get the LOC_REGISTER |
| 2692 | symbol instead of the LOC_ARG one (if both exist). */ |
| 2693 |   sym = lookup_block_symbol (b, name, NULL, VAR_DOMAIN);
| 2694 |   if (sym == NULL)
| 2695 |     {
| 2696 |       if (complain)
| 2697 |         error ("current stack frame does not contain a variable named `%s'", name);
| 2698 |       else
| 2699 |         return NULL;
| 2700 | } |
| 2701 | |
| 2702 | ret = read_var_value (sym, deprecated_selected_frame); |
| 2703 | if (ret == 0 && complain) |
| 2704 | error ("`%s' argument unreadable", name); |
| 2705 | return ret; |
| 2706 | } |
| 2707 | |
| 2708 | /* C++/Objective-C: return the value of the class instance variable, |
| 2709 | if one exists. Flag COMPLAIN signals an error if the request is |
| 2710 | made in an inappropriate context. */ |
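|  | /* Example (illustrative): stopping inside a C++ member function, the
|  |    expression "this->field" leads the evaluator to call value_of_this;
|  |    in an Objective-C method the receiver is named "self" instead.  */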
| 2711 | |
| 2712 | struct value * |
| 2713 | value_of_this (int complain) |
| 2714 | { |
| 2715 | if (current_language->la_language == language_objc) |
| 2716 | return value_of_local ("self", complain); |
| 2717 | else |
| 2718 | return value_of_local ("this", complain); |
| 2719 | } |
| 2720 | |
| 2721 | /* Create a slice (sub-string, sub-array) of ARRAY, that is LENGTH elements |
| 2722 | long, starting at LOWBOUND. The result has the same lower bound as |
| 2723 | the original ARRAY. */ |
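|  | /* For example (illustrative): for an array with bounds 1..10, a
|  |    request such as value_slice (arr, 4, 3) checks that 4 .. 4 + 3 - 1
|  |    lies within 1..10 and builds a three-element slice; bitstrings are
|  |    handled bit by bit in the first branch below.  */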
| 2724 | |
| 2725 | struct value * |
| 2726 | value_slice (struct value *array, int lowbound, int length) |
| 2727 | { |
| 2728 | struct type *slice_range_type, *slice_type, *range_type; |
| 2729 |   LONGEST lowerbound, upperbound;
| 2730 | struct value *slice; |
| 2731 | struct type *array_type; |
| 2732 |   array_type = check_typedef (VALUE_TYPE (array));
| 2733 |   COERCE_VARYING_ARRAY (array, array_type);
| 2734 |   if (TYPE_CODE (array_type) != TYPE_CODE_ARRAY
| 2735 |       && TYPE_CODE (array_type) != TYPE_CODE_STRING
| 2736 |       && TYPE_CODE (array_type) != TYPE_CODE_BITSTRING)
| 2737 |     error ("cannot take slice of non-array");
| 2738 |   range_type = TYPE_INDEX_TYPE (array_type);
| 2739 | if (get_discrete_bounds (range_type, &lowerbound, &upperbound) < 0) |
| 2740 | error ("slice from bad array or bitstring"); |
| 2741 | if (lowbound < lowerbound || length < 0 |
| 2742 | || lowbound + length - 1 > upperbound) |
| 2743 | error ("slice out of range"); |
| 2744 | /* FIXME-type-allocation: need a way to free this type when we are |
| 2745 | done with it. */ |
| 2746 |   slice_range_type = create_range_type ((struct type *) NULL,
| 2747 |                                         TYPE_TARGET_TYPE (range_type),
| 2748 |                                         lowbound, lowbound + length - 1);
| 2749 |   if (TYPE_CODE (array_type) == TYPE_CODE_BITSTRING)
| 2750 |     {
| 2751 |       int i;
| 2752 |       slice_type = create_set_type ((struct type *) NULL, slice_range_type);
| 2753 |       TYPE_CODE (slice_type) = TYPE_CODE_BITSTRING;
| 2754 | slice = value_zero (slice_type, not_lval); |
| 2755 | for (i = 0; i < length; i++) |
| 2756 | { |
| 2757 | int element = value_bit_index (array_type, |
| 2758 |                                          VALUE_CONTENTS (array),
| 2759 | lowbound + i); |
| 2760 | if (element < 0) |
| 2761 | error ("internal error accessing bitstring"); |
| 2762 | else if (element > 0) |
| 2763 | { |
| 2764 |               int j = i % TARGET_CHAR_BIT;
| 2765 |               if (BITS_BIG_ENDIAN)
| 2766 |                 j = TARGET_CHAR_BIT - 1 - j;
| 2767 |               VALUE_CONTENTS_RAW (slice)[i / TARGET_CHAR_BIT] |= (1 << j);
| 2768 | } |
| 2769 | } |
| 2770 |       /* We should set the address, bitsize, and bitpos, so the slice
| 2771 | can be used on the LHS, but that may require extensions to |
| 2772 | value_assign. For now, just leave as a non_lval. FIXME. */ |
| 2773 | } |
| 2774 | else |
| 2775 | { |
| 2776 |       struct type *element_type = TYPE_TARGET_TYPE (array_type);
| 2777 |       LONGEST offset
| 2778 |         = (lowbound - lowerbound) * TYPE_LENGTH (check_typedef (element_type));
| 2779 |       slice_type = create_array_type ((struct type *) NULL, element_type,
| 2780 |                                       slice_range_type);
| 2781 |       TYPE_CODE (slice_type) = TYPE_CODE (array_type);
| 2782 |       slice = allocate_value (slice_type);
| 2783 |       if (VALUE_LAZY (array))
| 2784 |         VALUE_LAZY (slice) = 1;
| 2785 |       else
| 2786 |         memcpy (VALUE_CONTENTS (slice), VALUE_CONTENTS (array) + offset,
| 2787 |                 TYPE_LENGTH (slice_type));
| 2788 |       if (VALUE_LVAL (array) == lval_internalvar)
| 2789 |         VALUE_LVAL (slice) = lval_internalvar_component;
| 2790 |       else
| 2791 |         VALUE_LVAL (slice) = VALUE_LVAL (array);
| 2792 |       VALUE_ADDRESS (slice) = VALUE_ADDRESS (array);
| 2793 |       VALUE_OFFSET (slice) = VALUE_OFFSET (array) + offset;
| 2794 | } |
| 2795 | return slice; |
| 2796 | } |
| 2797 | |
| 2798 | /* Create a value for a FORTRAN complex number.  Currently most of
| 2799 |    the time values are coerced to COMPLEX*16 (i.e. a complex number
| 2800 |    composed of 2 doubles).  This really should be a smarter routine
| 2801 |    that figures out precision intelligently as opposed to assuming
| 2802 |    doubles.  FIXME: fmb */
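|  | /* Layout sketch (illustrative): the real part occupies the first
|  |    TYPE_LENGTH (real_type) bytes of the result and the imaginary part
|  |    the following TYPE_LENGTH (real_type) bytes, as the two memcpy
|  |    calls below show.  */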
| 2803 | |
| 2804 | struct value * |
| 2805 | value_literal_complex (struct value *arg1, struct value *arg2, struct type *type) |
| 2806 | { |
| 2807 | struct value *val; |
| 2808 |   struct type *real_type = TYPE_TARGET_TYPE (type);
| 2809 | |
| 2810 | val = allocate_value (type); |
| 2811 | arg1 = value_cast (real_type, arg1); |
| 2812 | arg2 = value_cast (real_type, arg2); |
| 2813 | |
| 2814 |   memcpy (VALUE_CONTENTS_RAW (val),
| 2815 |           VALUE_CONTENTS (arg1), TYPE_LENGTH (real_type));
| 2816 |   memcpy (VALUE_CONTENTS_RAW (val) + TYPE_LENGTH (real_type),
| 2817 |           VALUE_CONTENTS (arg2), TYPE_LENGTH (real_type));
| 2818 | return val; |
| 2819 | } |
| 2820 | |
| 2821 | /* Cast a value into the appropriate complex data type. */ |
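|  | /* Roughly (illustrative): casting a complex value re-extracts its real
|  |    and imaginary parts into TYPE's component type, while casting a
|  |    plain float or integer X yields X + 0i; anything else is an error.  */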
| 2822 | |
| 2823 | static struct value * |
| 2824 | cast_into_complex (struct type *type, struct value *val) |
| 2825 | { |
| 2826 |   struct type *real_type = TYPE_TARGET_TYPE (type);
| 2827 |   if (TYPE_CODE (VALUE_TYPE (val)) == TYPE_CODE_COMPLEX)
| 2828 |     {
| 2829 |       struct type *val_real_type = TYPE_TARGET_TYPE (VALUE_TYPE (val));
| 2830 |       struct value *re_val = allocate_value (val_real_type);
| 2831 |       struct value *im_val = allocate_value (val_real_type);
| 2832 |
| 2833 |       memcpy (VALUE_CONTENTS_RAW (re_val),
| 2834 |               VALUE_CONTENTS (val), TYPE_LENGTH (val_real_type));
| 2835 |       memcpy (VALUE_CONTENTS_RAW (im_val),
| 2836 |               VALUE_CONTENTS (val) + TYPE_LENGTH (val_real_type),
| 2837 |               TYPE_LENGTH (val_real_type));
| 2838 |
| 2839 |       return value_literal_complex (re_val, im_val, type);
| 2840 |     }
| 2841 |   else if (TYPE_CODE (VALUE_TYPE (val)) == TYPE_CODE_FLT
| 2842 |            || TYPE_CODE (VALUE_TYPE (val)) == TYPE_CODE_INT)
| 2843 | return value_literal_complex (val, value_zero (real_type, not_lval), type); |
| 2844 | else |
| 2845 | error ("cannot cast non-number to complex"); |
| 2846 | } |
| 2847 | |
| 2848 | void |
| 2849 | _initialize_valops (void) |
| 2850 | { |
| 2851 | #if 0 |
| 2852 | deprecated_add_show_from_set |
| 2853 | (add_set_cmd ("abandon", class_support, var_boolean, (char *) &auto_abandon, |
| 2854 | "Set automatic abandonment of expressions upon failure.", |
| 2855 | &setlist), |
| 2856 | &showlist); |
| 2857 | #endif |
| 2858 | |
| 2859 | deprecated_add_show_from_set |
| 2860 | (add_set_cmd ("overload-resolution", class_support, var_boolean, (char *) &overload_resolution, |
| 2861 | "Set overload resolution in evaluating C++ functions.", |
| 2862 | &setlist), |
| 2863 | &showlist); |
| 2864 | overload_resolution = 1; |
| 2865 | } |