/* DWARF 2 Expression Evaluator.

   Copyright (C) 2001-2022 Free Software Foundation, Inc.

   Contributed by Daniel Berlin (dan@dberlin.org)

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"
#include "block.h"
#include "symtab.h"
#include "gdbtypes.h"
#include "value.h"
#include "gdbcore.h"
#include "dwarf2.h"
#include "dwarf2/expr.h"
#include "dwarf2/loc.h"
#include "dwarf2/read.h"
#include "frame.h"
#include "gdbsupport/underlying.h"
#include "gdbarch.h"
#include "objfiles.h"

/* This holds gdbarch-specific types used by the DWARF expression
   evaluator.  See comments in execute_stack_op.  */

struct dwarf_gdbarch_types
{
  struct type *dw_types[3] {};
};

/* Cookie for gdbarch data.  */

static const registry<gdbarch>::key<dwarf_gdbarch_types> dwarf_arch_cookie;

/* Ensure that a FRAME is defined, throw an exception otherwise.  */

static void
ensure_have_frame (frame_info *frame, const char *op_name)
{
  if (frame == nullptr)
    throw_error (GENERIC_ERROR,
                 _("%s evaluation requires a frame."), op_name);
}

/* Ensure that a PER_CU is defined and throw an exception otherwise.  */

static void
ensure_have_per_cu (dwarf2_per_cu_data *per_cu, const char *op_name)
{
  if (per_cu == nullptr)
    throw_error (GENERIC_ERROR,
                 _("%s evaluation requires a compilation unit."), op_name);
}

/* Return the number of bytes overlapping a contiguous chunk of N_BITS
   bits whose first bit is located at bit offset START.  */

static size_t
bits_to_bytes (ULONGEST start, ULONGEST n_bits)
{
  return (start % HOST_CHAR_BIT + n_bits + HOST_CHAR_BIT - 1) / HOST_CHAR_BIT;
}

/* See expr.h.  */

CORE_ADDR
read_addr_from_reg (frame_info *frame, int reg)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  int regnum = dwarf_reg_to_regnum_or_error (gdbarch, reg);

  return address_from_register (regnum, frame);
}

struct piece_closure
{
  /* Reference count.  */
  int refc = 0;

  /* The objfile from which this closure's expression came.  */
  dwarf2_per_objfile *per_objfile = nullptr;

  /* The CU from which this closure's expression came.  */
  dwarf2_per_cu_data *per_cu = nullptr;

  /* The pieces describing this variable.  */
  std::vector<dwarf_expr_piece> pieces;

  /* Frame ID of frame to which a register value is relative, used
     only by DWARF_VALUE_REGISTER.  */
  struct frame_id frame_id;
};

/* Allocate a closure for a value formed from separately-described
   PIECES.  */

static piece_closure *
allocate_piece_closure (dwarf2_per_cu_data *per_cu,
                        dwarf2_per_objfile *per_objfile,
                        std::vector<dwarf_expr_piece> &&pieces,
                        frame_info *frame)
{
  piece_closure *c = new piece_closure;

  c->refc = 1;
  /* We must capture this here due to sharing of DWARF state.  */
  c->per_objfile = per_objfile;
  c->per_cu = per_cu;
  c->pieces = std::move (pieces);
  if (frame == nullptr)
    c->frame_id = null_frame_id;
  else
    c->frame_id = get_frame_id (frame);

  for (dwarf_expr_piece &piece : c->pieces)
    if (piece.location == DWARF_VALUE_STACK)
      value_incref (piece.v.value);

  return c;
}

/* Read or write a pieced value V.  If FROM != NULL, operate in "write
   mode": copy FROM into the pieces comprising V.  If FROM == NULL,
   operate in "read mode": fetch the contents of the (lazy) value V by
   composing it from its pieces.  If CHECK_OPTIMIZED is true, then no
   reading or writing is done; instead the return value of this
   function is true if any piece is optimized out.  When
   CHECK_OPTIMIZED is true, FROM must be nullptr.  */

static bool
rw_pieced_value (value *v, value *from, bool check_optimized)
{
  int i;
  LONGEST offset = 0, max_offset;
  gdb_byte *v_contents;
  const gdb_byte *from_contents;
  piece_closure *c
    = (piece_closure *) value_computed_closure (v);
  gdb::byte_vector buffer;
  bool bits_big_endian = type_byte_order (value_type (v)) == BFD_ENDIAN_BIG;

  gdb_assert (!check_optimized || from == nullptr);
  if (from != nullptr)
    {
      from_contents = value_contents (from).data ();
      v_contents = nullptr;
    }
  else
    {
      if (value_type (v) != value_enclosing_type (v))
        internal_error (__FILE__, __LINE__,
                        _("Should not be able to create a lazy value with "
                          "an enclosing type"));
      if (check_optimized)
        v_contents = nullptr;
      else
        v_contents = value_contents_raw (v).data ();
      from_contents = nullptr;
    }

  ULONGEST bits_to_skip = 8 * value_offset (v);
  if (value_bitsize (v))
    {
      bits_to_skip += (8 * value_offset (value_parent (v))
                       + value_bitpos (v));
      if (from != nullptr
          && (type_byte_order (value_type (from))
              == BFD_ENDIAN_BIG))
        {
          /* Use the least significant bits of FROM.  */
          max_offset = 8 * TYPE_LENGTH (value_type (from));
          offset = max_offset - value_bitsize (v);
        }
      else
        max_offset = value_bitsize (v);
    }
  else
    max_offset = 8 * TYPE_LENGTH (value_type (v));

  /* Advance to the first non-skipped piece.  */
  for (i = 0; i < c->pieces.size () && bits_to_skip >= c->pieces[i].size; i++)
    bits_to_skip -= c->pieces[i].size;

  for (; i < c->pieces.size () && offset < max_offset; i++)
    {
      dwarf_expr_piece *p = &c->pieces[i];
      size_t this_size_bits, this_size;

      this_size_bits = p->size - bits_to_skip;
      if (this_size_bits > max_offset - offset)
        this_size_bits = max_offset - offset;

      switch (p->location)
        {
        case DWARF_VALUE_REGISTER:
          {
            frame_info *frame = frame_find_by_id (c->frame_id);
            gdbarch *arch = get_frame_arch (frame);
            int gdb_regnum = dwarf_reg_to_regnum_or_error (arch, p->v.regno);
            ULONGEST reg_bits = 8 * register_size (arch, gdb_regnum);
            int optim, unavail;

            if (gdbarch_byte_order (arch) == BFD_ENDIAN_BIG
                && p->offset + p->size < reg_bits)
              {
                /* Big-endian, and we want less than full size.  */
                bits_to_skip += reg_bits - (p->offset + p->size);
              }
            else
              bits_to_skip += p->offset;

            this_size = bits_to_bytes (bits_to_skip, this_size_bits);
            buffer.resize (this_size);

            if (from == nullptr)
              {
                /* Read mode.  */
                if (!get_frame_register_bytes (frame, gdb_regnum,
                                               bits_to_skip / 8,
                                               buffer, &optim, &unavail))
                  {
                    if (optim)
                      {
                        if (check_optimized)
                          return true;
                        mark_value_bits_optimized_out (v, offset,
                                                       this_size_bits);
                      }
                    if (unavail && !check_optimized)
                      mark_value_bits_unavailable (v, offset,
                                                   this_size_bits);
                    break;
                  }

                if (!check_optimized)
                  copy_bitwise (v_contents, offset,
                                buffer.data (), bits_to_skip % 8,
                                this_size_bits, bits_big_endian);
              }
            else
              {
                /* Write mode.  */
                if (bits_to_skip % 8 != 0 || this_size_bits % 8 != 0)
                  {
                    /* Data is copied non-byte-aligned into the register.
                       Need some bits from original register value.  */
                    get_frame_register_bytes (frame, gdb_regnum,
                                              bits_to_skip / 8,
                                              buffer, &optim, &unavail);
                    if (optim)
                      throw_error (OPTIMIZED_OUT_ERROR,
                                   _("Can't do read-modify-write to "
                                     "update bitfield; containing word "
                                     "has been optimized out"));
                    if (unavail)
                      throw_error (NOT_AVAILABLE_ERROR,
                                   _("Can't do read-modify-write to "
                                     "update bitfield; containing word "
                                     "is unavailable"));
                  }

                copy_bitwise (buffer.data (), bits_to_skip % 8,
                              from_contents, offset,
                              this_size_bits, bits_big_endian);
                put_frame_register_bytes (frame, gdb_regnum,
                                          bits_to_skip / 8,
                                          buffer);
              }
          }
          break;

        case DWARF_VALUE_MEMORY:
          {
            if (check_optimized)
              break;

            bits_to_skip += p->offset;

            CORE_ADDR start_addr = p->v.mem.addr + bits_to_skip / 8;

            if (bits_to_skip % 8 == 0 && this_size_bits % 8 == 0
                && offset % 8 == 0)
              {
                /* Everything is byte-aligned; no buffer needed.  */
                if (from != nullptr)
                  write_memory_with_notification (start_addr,
                                                  (from_contents
                                                   + offset / 8),
                                                  this_size_bits / 8);
                else
                  read_value_memory (v, offset,
                                     p->v.mem.in_stack_memory,
                                     p->v.mem.addr + bits_to_skip / 8,
                                     v_contents + offset / 8,
                                     this_size_bits / 8);
                break;
              }

            this_size = bits_to_bytes (bits_to_skip, this_size_bits);
            buffer.resize (this_size);

            if (from == nullptr)
              {
                /* Read mode.  */
                read_value_memory (v, offset,
                                   p->v.mem.in_stack_memory,
                                   p->v.mem.addr + bits_to_skip / 8,
                                   buffer.data (), this_size);
                copy_bitwise (v_contents, offset,
                              buffer.data (), bits_to_skip % 8,
                              this_size_bits, bits_big_endian);
              }
            else
              {
                /* Write mode.  */
                if (bits_to_skip % 8 != 0 || this_size_bits % 8 != 0)
                  {
                    if (this_size <= 8)
                      {
                        /* Perform a single read for small sizes.  */
                        read_memory (start_addr, buffer.data (),
                                     this_size);
                      }
                    else
                      {
                        /* Only the first and last bytes can possibly have
                           any bits reused.  */
                        read_memory (start_addr, buffer.data (), 1);
                        read_memory (start_addr + this_size - 1,
                                     &buffer[this_size - 1], 1);
                      }
                  }

                copy_bitwise (buffer.data (), bits_to_skip % 8,
                              from_contents, offset,
                              this_size_bits, bits_big_endian);
                write_memory_with_notification (start_addr,
                                                buffer.data (),
                                                this_size);
              }
          }
          break;

        case DWARF_VALUE_STACK:
          {
            if (check_optimized)
              break;

            if (from != nullptr)
              {
                mark_value_bits_optimized_out (v, offset, this_size_bits);
                break;
              }

            gdbarch *objfile_gdbarch = c->per_objfile->objfile->arch ();
            ULONGEST stack_value_size_bits
              = 8 * TYPE_LENGTH (value_type (p->v.value));

            /* Use zeroes if piece reaches beyond stack value.  */
            if (p->offset + p->size > stack_value_size_bits)
              break;

            /* Piece is anchored at least significant bit end.  */
            if (gdbarch_byte_order (objfile_gdbarch) == BFD_ENDIAN_BIG)
              bits_to_skip += stack_value_size_bits - p->offset - p->size;
            else
              bits_to_skip += p->offset;

            copy_bitwise (v_contents, offset,
                          value_contents_all (p->v.value).data (),
                          bits_to_skip,
                          this_size_bits, bits_big_endian);
          }
          break;

        case DWARF_VALUE_LITERAL:
          {
            if (check_optimized)
              break;

            if (from != nullptr)
              {
                mark_value_bits_optimized_out (v, offset, this_size_bits);
                break;
              }

            ULONGEST literal_size_bits = 8 * p->v.literal.length;
            size_t n = this_size_bits;

            /* Cut off at the end of the implicit value.  */
            bits_to_skip += p->offset;
            if (bits_to_skip >= literal_size_bits)
              break;
            if (n > literal_size_bits - bits_to_skip)
              n = literal_size_bits - bits_to_skip;

            copy_bitwise (v_contents, offset,
                          p->v.literal.data, bits_to_skip,
                          n, bits_big_endian);
          }
          break;

        case DWARF_VALUE_IMPLICIT_POINTER:
          if (from != nullptr)
            {
              mark_value_bits_optimized_out (v, offset, this_size_bits);
              break;
            }

          /* These bits show up as zeros -- but do not cause the value to
             be considered optimized-out.  */
          break;

        case DWARF_VALUE_OPTIMIZED_OUT:
          if (check_optimized)
            return true;
          mark_value_bits_optimized_out (v, offset, this_size_bits);
          break;

        default:
          internal_error (__FILE__, __LINE__, _("invalid location type"));
        }

      offset += this_size_bits;
      bits_to_skip = 0;
    }

  return false;
}
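
/* An implementation of an lval_funcs method to read a pieced value V
   by composing it from its pieces.  */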

static void
read_pieced_value (value *v)
{
  rw_pieced_value (v, nullptr, false);
}
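
/* An implementation of an lval_funcs method to write the pieced value
   TO from the contents of FROM.  */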

static void
write_pieced_value (value *to, value *from)
{
  rw_pieced_value (to, from, false);
}
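
/* An implementation of an lval_funcs method to check whether any piece
   of the value V has been optimized out.  */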

static bool
is_optimized_out_pieced_value (value *v)
{
  return rw_pieced_value (v, nullptr, true);
}

/* An implementation of an lval_funcs method to see whether a value is
   a synthetic pointer.  */

static int
check_pieced_synthetic_pointer (const value *value, LONGEST bit_offset,
                                int bit_length)
{
  piece_closure *c = (piece_closure *) value_computed_closure (value);
  int i;

  bit_offset += 8 * value_offset (value);
  if (value_bitsize (value))
    bit_offset += value_bitpos (value);

  for (i = 0; i < c->pieces.size () && bit_length > 0; i++)
    {
      dwarf_expr_piece *p = &c->pieces[i];
      size_t this_size_bits = p->size;

      if (bit_offset > 0)
        {
          if (bit_offset >= this_size_bits)
            {
              bit_offset -= this_size_bits;
              continue;
            }

          bit_length -= this_size_bits - bit_offset;
          bit_offset = 0;
        }
      else
        bit_length -= this_size_bits;

      if (p->location != DWARF_VALUE_IMPLICIT_POINTER)
        return 0;
    }

  return 1;
}

/* An implementation of an lval_funcs method to indirect through a
   pointer.  This handles the synthetic pointer case when needed.  */

static value *
indirect_pieced_value (value *value)
{
  piece_closure *c
    = (piece_closure *) value_computed_closure (value);
  int i;
  dwarf_expr_piece *piece = NULL;

  struct type *type = check_typedef (value_type (value));
  if (type->code () != TYPE_CODE_PTR)
    return NULL;

  int bit_length = 8 * TYPE_LENGTH (type);
  LONGEST bit_offset = 8 * value_offset (value);
  if (value_bitsize (value))
    bit_offset += value_bitpos (value);

  for (i = 0; i < c->pieces.size () && bit_length > 0; i++)
    {
      dwarf_expr_piece *p = &c->pieces[i];
      size_t this_size_bits = p->size;

      if (bit_offset > 0)
        {
          if (bit_offset >= this_size_bits)
            {
              bit_offset -= this_size_bits;
              continue;
            }

          bit_length -= this_size_bits - bit_offset;
          bit_offset = 0;
        }
      else
        bit_length -= this_size_bits;

      if (p->location != DWARF_VALUE_IMPLICIT_POINTER)
        return NULL;

      if (bit_length != 0)
        error (_("Invalid use of DW_OP_implicit_pointer"));

      piece = p;
      break;
    }

  gdb_assert (piece != NULL && c->per_cu != nullptr);
  frame_info *frame = get_selected_frame (_("No frame selected."));

  /* This is an offset requested by GDB, such as value subscripts.
     However, due to how synthetic pointers are implemented, this is
     always presented to us as a pointer type.  This means we have to
     sign-extend it manually as appropriate.  Use raw
     extract_signed_integer directly rather than value_as_address and
     sign extend afterwards on architectures that would need it
     (mostly everywhere except MIPS, which has signed addresses) as
     the latter would go through gdbarch_pointer_to_address and thus
     return a CORE_ADDR with high bits set on architectures that
     encode address spaces and other things in CORE_ADDR.  */
  bfd_endian byte_order = gdbarch_byte_order (get_frame_arch (frame));
  LONGEST byte_offset
    = extract_signed_integer (value_contents (value), byte_order);
  byte_offset += piece->v.ptr.offset;

  return indirect_synthetic_pointer (piece->v.ptr.die_sect_off,
                                     byte_offset, c->per_cu,
                                     c->per_objfile, frame, type);
}

/* Implementation of the coerce_ref method of lval_funcs for synthetic C++
   references.  */

static value *
coerce_pieced_ref (const value *value)
{
  struct type *type = check_typedef (value_type (value));

  if (value_bits_synthetic_pointer (value, value_embedded_offset (value),
                                    TARGET_CHAR_BIT * TYPE_LENGTH (type)))
    {
      const piece_closure *closure
        = (piece_closure *) value_computed_closure (value);
      frame_info *frame
        = get_selected_frame (_("No frame selected."));

      /* gdb represents synthetic pointers as pieced values with a single
         piece.  */
      gdb_assert (closure != NULL);
      gdb_assert (closure->pieces.size () == 1);

      return indirect_synthetic_pointer
        (closure->pieces[0].v.ptr.die_sect_off,
         closure->pieces[0].v.ptr.offset,
         closure->per_cu, closure->per_objfile, frame, type);
    }
  else
    {
      /* Else: not a synthetic reference; do nothing.  */
      return NULL;
    }
}
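
/* An implementation of an lval_funcs method to copy the closure of a
   pieced value; this just increments the closure's reference count.  */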

static void *
copy_pieced_value_closure (const value *v)
{
  piece_closure *c = (piece_closure *) value_computed_closure (v);

  ++c->refc;
  return c;
}
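
/* An implementation of an lval_funcs method to free the closure of a
   pieced value once its reference count drops to zero.  */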

static void
free_pieced_value_closure (value *v)
{
  piece_closure *c = (piece_closure *) value_computed_closure (v);

  --c->refc;
  if (c->refc == 0)
    {
      for (dwarf_expr_piece &p : c->pieces)
        if (p.location == DWARF_VALUE_STACK)
          value_decref (p.v.value);

      delete c;
    }
}

/* Functions for accessing a variable described by DW_OP_piece.  */

static const struct lval_funcs pieced_value_funcs = {
  read_pieced_value,
  write_pieced_value,
  is_optimized_out_pieced_value,
  indirect_pieced_value,
  coerce_pieced_ref,
  check_pieced_synthetic_pointer,
  copy_pieced_value_closure,
  free_pieced_value_closure
};
645 /* Given context CTX, section offset SECT_OFF, and compilation unit
646 data PER_CU, execute the "variable value" operation on the DIE
647 found at SECT_OFF. */
649 static value *
650 sect_variable_value (sect_offset sect_off,
651 dwarf2_per_cu_data *per_cu,
652 dwarf2_per_objfile *per_objfile)
654 const char *var_name = nullptr;
655 struct type *die_type
656 = dwarf2_fetch_die_type_sect_off (sect_off, per_cu, per_objfile,
657 &var_name);
659 if (die_type == NULL)
660 error (_("Bad DW_OP_GNU_variable_value DIE."));
662 /* Note: Things still work when the following test is removed. This
663 test and error is here to conform to the proposed specification. */
664 if (die_type->code () != TYPE_CODE_INT
665 && die_type->code () != TYPE_CODE_ENUM
666 && die_type->code () != TYPE_CODE_RANGE
667 && die_type->code () != TYPE_CODE_PTR)
668 error (_("Type of DW_OP_GNU_variable_value DIE must be an integer or pointer."));
670 if (var_name != nullptr)
672 value *result = compute_var_value (var_name);
673 if (result != nullptr)
674 return result;
677 struct type *type = lookup_pointer_type (die_type);
678 frame_info *frame = get_selected_frame (_("No frame selected."));
679 return indirect_synthetic_pointer (sect_off, 0, per_cu, per_objfile, frame,
680 type, true);
683 /* Return the type used for DWARF operations where the type is
684 unspecified in the DWARF spec. Only certain sizes are
685 supported. */
687 struct type *
688 dwarf_expr_context::address_type () const
690 gdbarch *arch = this->m_per_objfile->objfile->arch ();
691 dwarf_gdbarch_types *types = dwarf_arch_cookie.get (arch);
692 if (types == nullptr)
693 types = dwarf_arch_cookie.emplace (arch);
694 int ndx;
696 if (this->m_addr_size == 2)
697 ndx = 0;
698 else if (this->m_addr_size == 4)
699 ndx = 1;
700 else if (this->m_addr_size == 8)
701 ndx = 2;
702 else
703 error (_("Unsupported address size in DWARF expressions: %d bits"),
704 8 * this->m_addr_size);
706 if (types->dw_types[ndx] == NULL)
707 types->dw_types[ndx]
708 = arch_integer_type (arch, 8 * this->m_addr_size,
709 0, "<signed DWARF address type>");
711 return types->dw_types[ndx];
714 /* Create a new context for the expression evaluator. */
716 dwarf_expr_context::dwarf_expr_context (dwarf2_per_objfile *per_objfile,
717 int addr_size)
718 : m_addr_size (addr_size),
719 m_per_objfile (per_objfile)
723 /* Push VALUE onto the stack. */
725 void
726 dwarf_expr_context::push (struct value *value, bool in_stack_memory)
728 this->m_stack.emplace_back (value, in_stack_memory);
731 /* Push VALUE onto the stack. */
733 void
734 dwarf_expr_context::push_address (CORE_ADDR value, bool in_stack_memory)
736 push (value_from_ulongest (address_type (), value), in_stack_memory);
739 /* Pop the top item off of the stack. */
741 void
742 dwarf_expr_context::pop ()
744 if (this->m_stack.empty ())
745 error (_("dwarf expression stack underflow"));
747 this->m_stack.pop_back ();
750 /* Retrieve the N'th item on the stack. */
752 struct value *
753 dwarf_expr_context::fetch (int n)
755 if (this->m_stack.size () <= n)
756 error (_("Asked for position %d of stack, "
757 "stack only has %zu elements on it."),
758 n, this->m_stack.size ());
759 return this->m_stack[this->m_stack.size () - (1 + n)].value;
762 /* See expr.h. */
764 void
765 dwarf_expr_context::get_frame_base (const gdb_byte **start,
766 size_t * length)
768 ensure_have_frame (this->m_frame, "DW_OP_fbreg");
770 const block *bl = get_frame_block (this->m_frame, NULL);
772 if (bl == NULL)
773 error (_("frame address is not available."));
775 /* Use block_linkage_function, which returns a real (not inlined)
776 function, instead of get_frame_function, which may return an
777 inlined function. */
778 symbol *framefunc = block_linkage_function (bl);
780 /* If we found a frame-relative symbol then it was certainly within
781 some function associated with a frame. If we can't find the frame,
782 something has gone wrong. */
783 gdb_assert (framefunc != NULL);
785 func_get_frame_base_dwarf_block (framefunc,
786 get_frame_address_in_block (this->m_frame),
787 start, length);
790 /* See expr.h. */
792 struct type *
793 dwarf_expr_context::get_base_type (cu_offset die_cu_off)
795 if (this->m_per_cu == nullptr)
796 return builtin_type (this->m_per_objfile->objfile->arch ())->builtin_int;
798 struct type *result = dwarf2_get_die_type (die_cu_off, this->m_per_cu,
799 this->m_per_objfile);
801 if (result == nullptr)
802 error (_("Could not find type for operation"));
804 return result;
807 /* See expr.h. */
809 void
810 dwarf_expr_context::dwarf_call (cu_offset die_cu_off)
812 ensure_have_per_cu (this->m_per_cu, "DW_OP_call");
814 frame_info *frame = this->m_frame;
816 auto get_pc_from_frame = [frame] ()
818 ensure_have_frame (frame, "DW_OP_call");
819 return get_frame_address_in_block (frame);
822 dwarf2_locexpr_baton block
823 = dwarf2_fetch_die_loc_cu_off (die_cu_off, this->m_per_cu,
824 this->m_per_objfile, get_pc_from_frame);
826 /* DW_OP_call_ref is currently not supported. */
827 gdb_assert (block.per_cu == this->m_per_cu);
829 this->eval (block.data, block.size);
832 /* See expr.h. */
834 void
835 dwarf_expr_context::read_mem (gdb_byte *buf, CORE_ADDR addr,
836 size_t length)
838 if (length == 0)
839 return;
841 /* Prefer the passed-in memory, if it exists. */
842 if (this->m_addr_info != nullptr)
844 CORE_ADDR offset = addr - this->m_addr_info->addr;
846 if (offset < this->m_addr_info->valaddr.size ()
847 && offset + length <= this->m_addr_info->valaddr.size ())
849 memcpy (buf, this->m_addr_info->valaddr.data (), length);
850 return;
854 read_memory (addr, buf, length);
857 /* See expr.h. */
859 void
860 dwarf_expr_context::push_dwarf_reg_entry_value (call_site_parameter_kind kind,
861 call_site_parameter_u kind_u,
862 int deref_size)
864 ensure_have_per_cu (this->m_per_cu, "DW_OP_entry_value");
865 ensure_have_frame (this->m_frame, "DW_OP_entry_value");
867 dwarf2_per_cu_data *caller_per_cu;
868 dwarf2_per_objfile *caller_per_objfile;
869 frame_info *caller_frame = get_prev_frame (this->m_frame);
870 call_site_parameter *parameter
871 = dwarf_expr_reg_to_entry_parameter (this->m_frame, kind, kind_u,
872 &caller_per_cu,
873 &caller_per_objfile);
874 const gdb_byte *data_src
875 = deref_size == -1 ? parameter->value : parameter->data_value;
876 size_t size
877 = deref_size == -1 ? parameter->value_size : parameter->data_value_size;
879 /* DEREF_SIZE size is not verified here. */
880 if (data_src == nullptr)
881 throw_error (NO_ENTRY_VALUE_ERROR,
882 _("Cannot resolve DW_AT_call_data_value"));
  /* We are about to evaluate an expression in the context of the caller
     of the current frame.  This evaluation context may be different from
     the current (callee's) context, so temporarily set the caller's context.

     It is possible for the caller to be from a different objfile from the
     callee if the call is made through a function pointer.  */
890 scoped_restore save_frame = make_scoped_restore (&this->m_frame,
891 caller_frame);
892 scoped_restore save_per_cu = make_scoped_restore (&this->m_per_cu,
893 caller_per_cu);
894 scoped_restore save_addr_info = make_scoped_restore (&this->m_addr_info,
895 nullptr);
896 scoped_restore save_per_objfile = make_scoped_restore (&this->m_per_objfile,
897 caller_per_objfile);
899 scoped_restore save_addr_size = make_scoped_restore (&this->m_addr_size);
900 this->m_addr_size = this->m_per_cu->addr_size ();
902 this->eval (data_src, size);
905 /* See expr.h. */
907 value *
908 dwarf_expr_context::fetch_result (struct type *type, struct type *subobj_type,
909 LONGEST subobj_offset, bool as_lval)
911 value *retval = nullptr;
912 gdbarch *arch = this->m_per_objfile->objfile->arch ();
914 if (type == nullptr)
915 type = address_type ();
917 if (subobj_type == nullptr)
918 subobj_type = type;
920 if (this->m_pieces.size () > 0)
922 ULONGEST bit_size = 0;
924 for (dwarf_expr_piece &piece : this->m_pieces)
925 bit_size += piece.size;
926 /* Complain if the expression is larger than the size of the
927 outer type. */
928 if (bit_size > 8 * TYPE_LENGTH (type))
929 invalid_synthetic_pointer ();
931 piece_closure *c
932 = allocate_piece_closure (this->m_per_cu, this->m_per_objfile,
933 std::move (this->m_pieces), this->m_frame);
934 retval = allocate_computed_value (subobj_type,
935 &pieced_value_funcs, c);
936 set_value_offset (retval, subobj_offset);
938 else
940 /* If AS_LVAL is false, means that the implicit conversion
941 from a location description to value is expected. */
942 if (!as_lval)
943 this->m_location = DWARF_VALUE_STACK;
945 switch (this->m_location)
947 case DWARF_VALUE_REGISTER:
949 gdbarch *f_arch = get_frame_arch (this->m_frame);
950 int dwarf_regnum
951 = longest_to_int (value_as_long (this->fetch (0)));
952 int gdb_regnum = dwarf_reg_to_regnum_or_error (f_arch,
953 dwarf_regnum);
955 if (subobj_offset != 0)
956 error (_("cannot use offset on synthetic pointer to register"));
958 gdb_assert (this->m_frame != NULL);
960 retval = value_from_register (subobj_type, gdb_regnum,
961 this->m_frame);
962 if (value_optimized_out (retval))
964 /* This means the register has undefined value / was
965 not saved. As we're computing the location of some
966 variable etc. in the program, not a value for
967 inspecting a register ($pc, $sp, etc.), return a
968 generic optimized out value instead, so that we show
969 <optimized out> instead of <not saved>. */
970 value *tmp = allocate_value (subobj_type);
971 value_contents_copy (tmp, 0, retval, 0,
972 TYPE_LENGTH (subobj_type));
973 retval = tmp;
976 break;
978 case DWARF_VALUE_MEMORY:
980 struct type *ptr_type;
981 CORE_ADDR address = this->fetch_address (0);
982 bool in_stack_memory = this->fetch_in_stack_memory (0);
984 /* DW_OP_deref_size (and possibly other operations too) may
985 create a pointer instead of an address. Ideally, the
986 pointer to address conversion would be performed as part
987 of those operations, but the type of the object to
988 which the address refers is not known at the time of
989 the operation. Therefore, we do the conversion here
990 since the type is readily available. */
992 switch (subobj_type->code ())
994 case TYPE_CODE_FUNC:
995 case TYPE_CODE_METHOD:
996 ptr_type = builtin_type (arch)->builtin_func_ptr;
997 break;
998 default:
999 ptr_type = builtin_type (arch)->builtin_data_ptr;
1000 break;
1002 address = value_as_address (value_from_pointer (ptr_type, address));
1004 retval = value_at_lazy (subobj_type,
1005 address + subobj_offset);
1006 if (in_stack_memory)
1007 set_value_stack (retval, 1);
1009 break;
1011 case DWARF_VALUE_STACK:
1013 value *val = this->fetch (0);
1014 size_t n = TYPE_LENGTH (value_type (val));
1015 size_t len = TYPE_LENGTH (subobj_type);
1016 size_t max = TYPE_LENGTH (type);
1018 if (subobj_offset + len > max)
1019 invalid_synthetic_pointer ();
1021 retval = allocate_value (subobj_type);
1023 /* The given offset is relative to the actual object. */
1024 if (gdbarch_byte_order (arch) == BFD_ENDIAN_BIG)
1025 subobj_offset += n - max;
1027 copy (value_contents_all (val).slice (subobj_offset, len),
1028 value_contents_raw (retval));
1030 break;
1032 case DWARF_VALUE_LITERAL:
1034 size_t n = TYPE_LENGTH (subobj_type);
1036 if (subobj_offset + n > this->m_len)
1037 invalid_synthetic_pointer ();
1039 retval = allocate_value (subobj_type);
1040 bfd_byte *contents = value_contents_raw (retval).data ();
1041 memcpy (contents, this->m_data + subobj_offset, n);
1043 break;
1045 case DWARF_VALUE_OPTIMIZED_OUT:
1046 retval = allocate_optimized_out_value (subobj_type);
1047 break;
1049 /* DWARF_VALUE_IMPLICIT_POINTER was converted to a pieced
1050 operation by execute_stack_op. */
1051 case DWARF_VALUE_IMPLICIT_POINTER:
1052 /* DWARF_VALUE_OPTIMIZED_OUT can't occur in this context --
1053 it can only be encountered when making a piece. */
1054 default:
1055 internal_error (__FILE__, __LINE__, _("invalid location type"));
1059 set_value_initialized (retval, this->m_initialized);
1061 return retval;
1064 /* See expr.h. */
1066 value *
1067 dwarf_expr_context::evaluate (const gdb_byte *addr, size_t len, bool as_lval,
1068 dwarf2_per_cu_data *per_cu, frame_info *frame,
1069 const struct property_addr_info *addr_info,
1070 struct type *type, struct type *subobj_type,
1071 LONGEST subobj_offset)
1073 this->m_per_cu = per_cu;
1074 this->m_frame = frame;
1075 this->m_addr_info = addr_info;
1077 eval (addr, len);
1078 return fetch_result (type, subobj_type, subobj_offset, as_lval);
1081 /* Require that TYPE be an integral type; throw an exception if not. */
1083 static void
1084 dwarf_require_integral (struct type *type)
1086 if (type->code () != TYPE_CODE_INT
1087 && type->code () != TYPE_CODE_CHAR
1088 && type->code () != TYPE_CODE_BOOL)
1089 error (_("integral type expected in DWARF expression"));
1092 /* Return the unsigned form of TYPE. TYPE is necessarily an integral
1093 type. */
1095 static struct type *
1096 get_unsigned_type (struct gdbarch *gdbarch, struct type *type)
1098 switch (TYPE_LENGTH (type))
1100 case 1:
1101 return builtin_type (gdbarch)->builtin_uint8;
1102 case 2:
1103 return builtin_type (gdbarch)->builtin_uint16;
1104 case 4:
1105 return builtin_type (gdbarch)->builtin_uint32;
1106 case 8:
1107 return builtin_type (gdbarch)->builtin_uint64;
1108 default:
1109 error (_("no unsigned variant found for type, while evaluating "
1110 "DWARF expression"));
1114 /* Return the signed form of TYPE. TYPE is necessarily an integral
1115 type. */
1117 static struct type *
1118 get_signed_type (struct gdbarch *gdbarch, struct type *type)
1120 switch (TYPE_LENGTH (type))
1122 case 1:
1123 return builtin_type (gdbarch)->builtin_int8;
1124 case 2:
1125 return builtin_type (gdbarch)->builtin_int16;
1126 case 4:
1127 return builtin_type (gdbarch)->builtin_int32;
1128 case 8:
1129 return builtin_type (gdbarch)->builtin_int64;
1130 default:
1131 error (_("no signed variant found for type, while evaluating "
1132 "DWARF expression"));
1136 /* Retrieve the N'th item on the stack, converted to an address. */
1138 CORE_ADDR
1139 dwarf_expr_context::fetch_address (int n)
1141 gdbarch *arch = this->m_per_objfile->objfile->arch ();
1142 value *result_val = fetch (n);
1143 bfd_endian byte_order = gdbarch_byte_order (arch);
1144 ULONGEST result;
1146 dwarf_require_integral (value_type (result_val));
1147 result = extract_unsigned_integer (value_contents (result_val), byte_order);
1149 /* For most architectures, calling extract_unsigned_integer() alone
1150 is sufficient for extracting an address. However, some
1151 architectures (e.g. MIPS) use signed addresses and using
1152 extract_unsigned_integer() will not produce a correct
1153 result. Make sure we invoke gdbarch_integer_to_address()
1154 for those architectures which require it. */
1155 if (gdbarch_integer_to_address_p (arch))
1157 gdb_byte *buf = (gdb_byte *) alloca (this->m_addr_size);
1158 type *int_type = get_unsigned_type (arch,
1159 value_type (result_val));
1161 store_unsigned_integer (buf, this->m_addr_size, byte_order, result);
1162 return gdbarch_integer_to_address (arch, int_type, buf);
1165 return (CORE_ADDR) result;
1168 /* Retrieve the in_stack_memory flag of the N'th item on the stack. */
1170 bool
1171 dwarf_expr_context::fetch_in_stack_memory (int n)
1173 if (this->m_stack.size () <= n)
1174 error (_("Asked for position %d of stack, "
1175 "stack only has %zu elements on it."),
1176 n, this->m_stack.size ());
1177 return this->m_stack[this->m_stack.size () - (1 + n)].in_stack_memory;
1180 /* Return true if the expression stack is empty. */
1182 bool
1183 dwarf_expr_context::stack_empty_p () const
1185 return m_stack.empty ();
1188 /* Add a new piece to the dwarf_expr_context's piece list. */
1189 void
1190 dwarf_expr_context::add_piece (ULONGEST size, ULONGEST offset)
1192 this->m_pieces.emplace_back ();
1193 dwarf_expr_piece &p = this->m_pieces.back ();
1195 p.location = this->m_location;
1196 p.size = size;
1197 p.offset = offset;
1199 if (p.location == DWARF_VALUE_LITERAL)
1201 p.v.literal.data = this->m_data;
1202 p.v.literal.length = this->m_len;
1204 else if (stack_empty_p ())
1206 p.location = DWARF_VALUE_OPTIMIZED_OUT;
1207 /* Also reset the context's location, for our callers. This is
1208 a somewhat strange approach, but this lets us avoid setting
1209 the location to DWARF_VALUE_MEMORY in all the individual
1210 cases in the evaluator. */
1211 this->m_location = DWARF_VALUE_OPTIMIZED_OUT;
1213 else if (p.location == DWARF_VALUE_MEMORY)
1215 p.v.mem.addr = fetch_address (0);
1216 p.v.mem.in_stack_memory = fetch_in_stack_memory (0);
1218 else if (p.location == DWARF_VALUE_IMPLICIT_POINTER)
1220 p.v.ptr.die_sect_off = (sect_offset) this->m_len;
1221 p.v.ptr.offset = value_as_long (fetch (0));
1223 else if (p.location == DWARF_VALUE_REGISTER)
1224 p.v.regno = value_as_long (fetch (0));
1225 else
1227 p.v.value = fetch (0);
1231 /* Evaluate the expression at ADDR (LEN bytes long). */
1233 void
1234 dwarf_expr_context::eval (const gdb_byte *addr, size_t len)
1236 int old_recursion_depth = this->m_recursion_depth;
1238 execute_stack_op (addr, addr + len);
1240 /* RECURSION_DEPTH becomes invalid if an exception was thrown here. */
1242 gdb_assert (this->m_recursion_depth == old_recursion_depth);
/* Helper to read a uleb128 value or throw an error.  */

const gdb_byte *
safe_read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end,
                   uint64_t *r)
{
  buf = gdb_read_uleb128 (buf, buf_end, r);
  if (buf == NULL)
    error (_("DWARF expression error: ran off end of buffer reading uleb128 value"));
  return buf;
}

/* Helper to read a sleb128 value or throw an error.  */

const gdb_byte *
safe_read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end,
                   int64_t *r)
{
  buf = gdb_read_sleb128 (buf, buf_end, r);
  if (buf == NULL)
    error (_("DWARF expression error: ran off end of buffer reading sleb128 value"));
  return buf;
}
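
/* Helper to skip a leb128 value or throw an error.  */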

const gdb_byte *
safe_skip_leb128 (const gdb_byte *buf, const gdb_byte *buf_end)
{
  buf = gdb_skip_leb128 (buf, buf_end);
  if (buf == NULL)
    error (_("DWARF expression error: ran off end of buffer reading leb128 value"));
  return buf;
}
1279 /* Check that the current operator is either at the end of an
1280 expression, or that it is followed by a composition operator or by
1281 DW_OP_GNU_uninit (which should terminate the expression). */
1283 void
1284 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
1285 const char *op_name)
1287 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece
1288 && *op_ptr != DW_OP_GNU_uninit)
1289 error (_("DWARF-2 expression error: `%s' operations must be "
1290 "used either alone or in conjunction with DW_OP_piece "
1291 "or DW_OP_bit_piece."),
1292 op_name);
1295 /* Return true iff the types T1 and T2 are "the same". This only does
1296 checks that might reasonably be needed to compare DWARF base
1297 types. */
1299 static int
1300 base_types_equal_p (struct type *t1, struct type *t2)
1302 if (t1->code () != t2->code ())
1303 return 0;
1304 if (t1->is_unsigned () != t2->is_unsigned ())
1305 return 0;
1306 return TYPE_LENGTH (t1) == TYPE_LENGTH (t2);
1309 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_reg* return the
1310 DWARF register number. Otherwise return -1. */
int
dwarf_block_to_dwarf_reg (const gdb_byte *buf, const gdb_byte *buf_end)
1315 uint64_t dwarf_reg;
1317 if (buf_end <= buf)
1318 return -1;
1319 if (*buf >= DW_OP_reg0 && *buf <= DW_OP_reg31)
1321 if (buf_end - buf != 1)
1322 return -1;
1323 return *buf - DW_OP_reg0;
1326 if (*buf == DW_OP_regval_type || *buf == DW_OP_GNU_regval_type)
1328 buf++;
1329 buf = gdb_read_uleb128 (buf, buf_end, &dwarf_reg);
1330 if (buf == NULL)
1331 return -1;
1332 buf = gdb_skip_leb128 (buf, buf_end);
1333 if (buf == NULL)
1334 return -1;
1336 else if (*buf == DW_OP_regx)
1338 buf++;
1339 buf = gdb_read_uleb128 (buf, buf_end, &dwarf_reg);
1340 if (buf == NULL)
1341 return -1;
1343 else
1344 return -1;
1345 if (buf != buf_end || (int) dwarf_reg != dwarf_reg)
1346 return -1;
1347 return dwarf_reg;
1350 /* If <BUF..BUF_END] contains DW_FORM_block* with just DW_OP_breg*(0) and
1351 DW_OP_deref* return the DWARF register number. Otherwise return -1.
1352 DEREF_SIZE_RETURN contains -1 for DW_OP_deref; otherwise it contains the
1353 size from DW_OP_deref_size. */
int
dwarf_block_to_dwarf_reg_deref (const gdb_byte *buf, const gdb_byte *buf_end,
                                CORE_ADDR *deref_size_return)
1359 uint64_t dwarf_reg;
1360 int64_t offset;
1362 if (buf_end <= buf)
1363 return -1;
1365 if (*buf >= DW_OP_breg0 && *buf <= DW_OP_breg31)
1367 dwarf_reg = *buf - DW_OP_breg0;
1368 buf++;
1369 if (buf >= buf_end)
1370 return -1;
1372 else if (*buf == DW_OP_bregx)
1374 buf++;
1375 buf = gdb_read_uleb128 (buf, buf_end, &dwarf_reg);
1376 if (buf == NULL)
1377 return -1;
1378 if ((int) dwarf_reg != dwarf_reg)
1379 return -1;
1381 else
1382 return -1;
1384 buf = gdb_read_sleb128 (buf, buf_end, &offset);
1385 if (buf == NULL)
1386 return -1;
1387 if (offset != 0)
1388 return -1;
1390 if (*buf == DW_OP_deref)
1392 buf++;
1393 *deref_size_return = -1;
1395 else if (*buf == DW_OP_deref_size)
1397 buf++;
1398 if (buf >= buf_end)
1399 return -1;
1400 *deref_size_return = *buf++;
1402 else
1403 return -1;
1405 if (buf != buf_end)
1406 return -1;
1408 return dwarf_reg;
1411 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_fbreg(X) fill
1412 in FB_OFFSET_RETURN with the X offset and return 1. Otherwise return 0. */
int
dwarf_block_to_fb_offset (const gdb_byte *buf, const gdb_byte *buf_end,
                          CORE_ADDR *fb_offset_return)
1418 int64_t fb_offset;
1420 if (buf_end <= buf)
1421 return 0;
1423 if (*buf != DW_OP_fbreg)
1424 return 0;
1425 buf++;
1427 buf = gdb_read_sleb128 (buf, buf_end, &fb_offset);
1428 if (buf == NULL)
1429 return 0;
1430 *fb_offset_return = fb_offset;
1431 if (buf != buf_end || fb_offset != (LONGEST) *fb_offset_return)
1432 return 0;
1434 return 1;
1437 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_bregSP(X) fill
1438 in SP_OFFSET_RETURN with the X offset and return 1. Otherwise return 0.
1439 The matched SP register number depends on GDBARCH. */
int
dwarf_block_to_sp_offset (struct gdbarch *gdbarch, const gdb_byte *buf,
                          const gdb_byte *buf_end, CORE_ADDR *sp_offset_return)
1445 uint64_t dwarf_reg;
1446 int64_t sp_offset;
1448 if (buf_end <= buf)
1449 return 0;
1450 if (*buf >= DW_OP_breg0 && *buf <= DW_OP_breg31)
1452 dwarf_reg = *buf - DW_OP_breg0;
1453 buf++;
1455 else
1457 if (*buf != DW_OP_bregx)
1458 return 0;
1459 buf++;
1460 buf = gdb_read_uleb128 (buf, buf_end, &dwarf_reg);
1461 if (buf == NULL)
1462 return 0;
1465 if (dwarf_reg_to_regnum (gdbarch, dwarf_reg)
1466 != gdbarch_sp_regnum (gdbarch))
1467 return 0;
1469 buf = gdb_read_sleb128 (buf, buf_end, &sp_offset);
1470 if (buf == NULL)
1471 return 0;
1472 *sp_offset_return = sp_offset;
1473 if (buf != buf_end || sp_offset != (LONGEST) *sp_offset_return)
1474 return 0;
1476 return 1;
1479 /* The engine for the expression evaluator. Using the context in this
1480 object, evaluate the expression between OP_PTR and OP_END. */
1482 void
1483 dwarf_expr_context::execute_stack_op (const gdb_byte *op_ptr,
1484 const gdb_byte *op_end)
1486 gdbarch *arch = this->m_per_objfile->objfile->arch ();
1487 bfd_endian byte_order = gdbarch_byte_order (arch);
1488 /* Old-style "untyped" DWARF values need special treatment in a
1489 couple of places, specifically DW_OP_mod and DW_OP_shr. We need
1490 a special type for these values so we can distinguish them from
1491 values that have an explicit type, because explicitly-typed
1492 values do not need special treatment. This special type must be
1493 different (in the `==' sense) from any base type coming from the
1494 CU. */
1495 type *address_type = this->address_type ();
1497 this->m_location = DWARF_VALUE_MEMORY;
1498 this->m_initialized = 1; /* Default is initialized. */
1500 if (this->m_recursion_depth > this->m_max_recursion_depth)
1501 error (_("DWARF-2 expression error: Loop detected (%d)."),
1502 this->m_recursion_depth);
1503 this->m_recursion_depth++;
1505 while (op_ptr < op_end)
1507 dwarf_location_atom op = (dwarf_location_atom) *op_ptr++;
1508 ULONGEST result;
1509 /* Assume the value is not in stack memory.
1510 Code that knows otherwise sets this to true.
1511 Some arithmetic on stack addresses can probably be assumed to still
1512 be a stack address, but we skip this complication for now.
1513 This is just an optimization, so it's always ok to punt
1514 and leave this as false. */
1515 bool in_stack_memory = false;
1516 uint64_t uoffset, reg;
1517 int64_t offset;
1518 value *result_val = NULL;
1520 /* The DWARF expression might have a bug causing an infinite
1521 loop. In that case, quitting is the only way out. */
1522 QUIT;
1524 switch (op)
1526 case DW_OP_lit0:
1527 case DW_OP_lit1:
1528 case DW_OP_lit2:
1529 case DW_OP_lit3:
1530 case DW_OP_lit4:
1531 case DW_OP_lit5:
1532 case DW_OP_lit6:
1533 case DW_OP_lit7:
1534 case DW_OP_lit8:
1535 case DW_OP_lit9:
1536 case DW_OP_lit10:
1537 case DW_OP_lit11:
1538 case DW_OP_lit12:
1539 case DW_OP_lit13:
1540 case DW_OP_lit14:
1541 case DW_OP_lit15:
1542 case DW_OP_lit16:
1543 case DW_OP_lit17:
1544 case DW_OP_lit18:
1545 case DW_OP_lit19:
1546 case DW_OP_lit20:
1547 case DW_OP_lit21:
1548 case DW_OP_lit22:
1549 case DW_OP_lit23:
1550 case DW_OP_lit24:
1551 case DW_OP_lit25:
1552 case DW_OP_lit26:
1553 case DW_OP_lit27:
1554 case DW_OP_lit28:
1555 case DW_OP_lit29:
1556 case DW_OP_lit30:
1557 case DW_OP_lit31:
1558 result = op - DW_OP_lit0;
1559 result_val = value_from_ulongest (address_type, result);
1560 break;
1562 case DW_OP_addr:
1563 result = extract_unsigned_integer (op_ptr,
1564 this->m_addr_size, byte_order);
1565 op_ptr += this->m_addr_size;
1566 /* Some versions of GCC emit DW_OP_addr before
1567 DW_OP_GNU_push_tls_address. In this case the value is an
1568 index, not an address. We don't support things like
1569 branching between the address and the TLS op. */
1570 if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address)
1571 result += this->m_per_objfile->objfile->text_section_offset ();
1572 result_val = value_from_ulongest (address_type, result);
1573 break;
1575 case DW_OP_addrx:
1576 case DW_OP_GNU_addr_index:
1577 ensure_have_per_cu (this->m_per_cu, "DW_OP_addrx");
1579 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
1580 result = dwarf2_read_addr_index (this->m_per_cu, this->m_per_objfile,
1581 uoffset);
1582 result += this->m_per_objfile->objfile->text_section_offset ();
1583 result_val = value_from_ulongest (address_type, result);
1584 break;
1585 case DW_OP_GNU_const_index:
1586 ensure_have_per_cu (this->m_per_cu, "DW_OP_GNU_const_index");
1588 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
1589 result = dwarf2_read_addr_index (this->m_per_cu, this->m_per_objfile,
1590 uoffset);
1591 result_val = value_from_ulongest (address_type, result);
1592 break;
1594 case DW_OP_const1u:
1595 result = extract_unsigned_integer (op_ptr, 1, byte_order);
1596 result_val = value_from_ulongest (address_type, result);
1597 op_ptr += 1;
1598 break;
1599 case DW_OP_const1s:
1600 result = extract_signed_integer (op_ptr, 1, byte_order);
1601 result_val = value_from_ulongest (address_type, result);
1602 op_ptr += 1;
1603 break;
1604 case DW_OP_const2u:
1605 result = extract_unsigned_integer (op_ptr, 2, byte_order);
1606 result_val = value_from_ulongest (address_type, result);
1607 op_ptr += 2;
1608 break;
1609 case DW_OP_const2s:
1610 result = extract_signed_integer (op_ptr, 2, byte_order);
1611 result_val = value_from_ulongest (address_type, result);
1612 op_ptr += 2;
1613 break;
1614 case DW_OP_const4u:
1615 result = extract_unsigned_integer (op_ptr, 4, byte_order);
1616 result_val = value_from_ulongest (address_type, result);
1617 op_ptr += 4;
1618 break;
1619 case DW_OP_const4s:
1620 result = extract_signed_integer (op_ptr, 4, byte_order);
1621 result_val = value_from_ulongest (address_type, result);
1622 op_ptr += 4;
1623 break;
1624 case DW_OP_const8u:
1625 result = extract_unsigned_integer (op_ptr, 8, byte_order);
1626 result_val = value_from_ulongest (address_type, result);
1627 op_ptr += 8;
1628 break;
1629 case DW_OP_const8s:
1630 result = extract_signed_integer (op_ptr, 8, byte_order);
1631 result_val = value_from_ulongest (address_type, result);
1632 op_ptr += 8;
1633 break;
1634 case DW_OP_constu:
1635 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
1636 result = uoffset;
1637 result_val = value_from_ulongest (address_type, result);
1638 break;
1639 case DW_OP_consts:
1640 op_ptr = safe_read_sleb128 (op_ptr, op_end, &offset);
1641 result = offset;
1642 result_val = value_from_ulongest (address_type, result);
1643 break;
1645 /* The DW_OP_reg operations are required to occur alone in
1646 location expressions. */
1647 case DW_OP_reg0:
1648 case DW_OP_reg1:
1649 case DW_OP_reg2:
1650 case DW_OP_reg3:
1651 case DW_OP_reg4:
1652 case DW_OP_reg5:
1653 case DW_OP_reg6:
1654 case DW_OP_reg7:
1655 case DW_OP_reg8:
1656 case DW_OP_reg9:
1657 case DW_OP_reg10:
1658 case DW_OP_reg11:
1659 case DW_OP_reg12:
1660 case DW_OP_reg13:
1661 case DW_OP_reg14:
1662 case DW_OP_reg15:
1663 case DW_OP_reg16:
1664 case DW_OP_reg17:
1665 case DW_OP_reg18:
1666 case DW_OP_reg19:
1667 case DW_OP_reg20:
1668 case DW_OP_reg21:
1669 case DW_OP_reg22:
1670 case DW_OP_reg23:
1671 case DW_OP_reg24:
1672 case DW_OP_reg25:
1673 case DW_OP_reg26:
1674 case DW_OP_reg27:
1675 case DW_OP_reg28:
1676 case DW_OP_reg29:
1677 case DW_OP_reg30:
1678 case DW_OP_reg31:
1679 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_reg");
1681 result = op - DW_OP_reg0;
1682 result_val = value_from_ulongest (address_type, result);
1683 this->m_location = DWARF_VALUE_REGISTER;
1684 break;
1686 case DW_OP_regx:
1687 op_ptr = safe_read_uleb128 (op_ptr, op_end, &reg);
1688 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
1690 result = reg;
1691 result_val = value_from_ulongest (address_type, result);
1692 this->m_location = DWARF_VALUE_REGISTER;
1693 break;
1695 case DW_OP_implicit_value:
1697 uint64_t len;
1699 op_ptr = safe_read_uleb128 (op_ptr, op_end, &len);
1700 if (op_ptr + len > op_end)
1701 error (_("DW_OP_implicit_value: too few bytes available."));
1702 this->m_len = len;
1703 this->m_data = op_ptr;
1704 this->m_location = DWARF_VALUE_LITERAL;
1705 op_ptr += len;
1706 dwarf_expr_require_composition (op_ptr, op_end,
1707 "DW_OP_implicit_value");
1709 goto no_push;
1711 case DW_OP_stack_value:
1712 this->m_location = DWARF_VALUE_STACK;
1713 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
1714 goto no_push;
1716 case DW_OP_implicit_pointer:
1717 case DW_OP_GNU_implicit_pointer:
1719 int64_t len;
1720 ensure_have_per_cu (this->m_per_cu, "DW_OP_implicit_pointer");
1722 int ref_addr_size = this->m_per_cu->ref_addr_size ();
1724 /* The referred-to DIE of sect_offset kind. */
1725 this->m_len = extract_unsigned_integer (op_ptr, ref_addr_size,
1726 byte_order);
1727 op_ptr += ref_addr_size;
1729 /* The byte offset into the data. */
1730 op_ptr = safe_read_sleb128 (op_ptr, op_end, &len);
1731 result = (ULONGEST) len;
1732 result_val = value_from_ulongest (address_type, result);
1734 this->m_location = DWARF_VALUE_IMPLICIT_POINTER;
1735 dwarf_expr_require_composition (op_ptr, op_end,
1736 "DW_OP_implicit_pointer");
1738 break;
1740 case DW_OP_breg0:
1741 case DW_OP_breg1:
1742 case DW_OP_breg2:
1743 case DW_OP_breg3:
1744 case DW_OP_breg4:
1745 case DW_OP_breg5:
1746 case DW_OP_breg6:
1747 case DW_OP_breg7:
1748 case DW_OP_breg8:
1749 case DW_OP_breg9:
1750 case DW_OP_breg10:
1751 case DW_OP_breg11:
1752 case DW_OP_breg12:
1753 case DW_OP_breg13:
1754 case DW_OP_breg14:
1755 case DW_OP_breg15:
1756 case DW_OP_breg16:
1757 case DW_OP_breg17:
1758 case DW_OP_breg18:
1759 case DW_OP_breg19:
1760 case DW_OP_breg20:
1761 case DW_OP_breg21:
1762 case DW_OP_breg22:
1763 case DW_OP_breg23:
1764 case DW_OP_breg24:
1765 case DW_OP_breg25:
1766 case DW_OP_breg26:
1767 case DW_OP_breg27:
1768 case DW_OP_breg28:
1769 case DW_OP_breg29:
1770 case DW_OP_breg30:
1771 case DW_OP_breg31:
1773 op_ptr = safe_read_sleb128 (op_ptr, op_end, &offset);
1774 ensure_have_frame (this->m_frame, "DW_OP_breg");
1776 result = read_addr_from_reg (this->m_frame, op - DW_OP_breg0);
1777 result += offset;
1778 result_val = value_from_ulongest (address_type, result);
1780 break;
1781 case DW_OP_bregx:
1783 op_ptr = safe_read_uleb128 (op_ptr, op_end, &reg);
1784 op_ptr = safe_read_sleb128 (op_ptr, op_end, &offset);
1785 ensure_have_frame (this->m_frame, "DW_OP_bregx");
1787 result = read_addr_from_reg (this->m_frame, reg);
1788 result += offset;
1789 result_val = value_from_ulongest (address_type, result);
1791 break;
1792 case DW_OP_fbreg:
1794 const gdb_byte *datastart;
1795 size_t datalen;
1797 op_ptr = safe_read_sleb128 (op_ptr, op_end, &offset);
1799 /* Rather than create a whole new context, we simply
1800 backup the current stack locally and install a new empty stack,
1801 then reset it afterwards, effectively erasing whatever the
1802 recursive call put there. */
1803 std::vector<dwarf_stack_value> saved_stack = std::move (this->m_stack);
1804 this->m_stack.clear ();
1806 /* FIXME: cagney/2003-03-26: This code should be using
1807 get_frame_base_address(), and then implement a dwarf2
1808 specific this_base method. */
1809 this->get_frame_base (&datastart, &datalen);
1810 eval (datastart, datalen);
1811 if (this->m_location == DWARF_VALUE_MEMORY)
1812 result = fetch_address (0);
1813 else if (this->m_location == DWARF_VALUE_REGISTER)
1814 result
1815 = read_addr_from_reg (this->m_frame, value_as_long (fetch (0)));
1816 else
1817 error (_("Not implemented: computing frame "
1818 "base using explicit value operator"));
1819 result = result + offset;
1820 result_val = value_from_ulongest (address_type, result);
1821 in_stack_memory = true;
1823 /* Restore the content of the original stack. */
1824 this->m_stack = std::move (saved_stack);
1826 this->m_location = DWARF_VALUE_MEMORY;
1828 break;
1830 case DW_OP_dup:
1831 result_val = fetch (0);
1832 in_stack_memory = fetch_in_stack_memory (0);
1833 break;
1835 case DW_OP_drop:
1836 pop ();
1837 goto no_push;
1839 case DW_OP_pick:
1840 offset = *op_ptr++;
1841 result_val = fetch (offset);
1842 in_stack_memory = fetch_in_stack_memory (offset);
1843 break;
1845 case DW_OP_swap:
1847 if (this->m_stack.size () < 2)
1848 error (_("Not enough elements for "
1849 "DW_OP_swap. Need 2, have %zu."),
1850 this->m_stack.size ());
1852 dwarf_stack_value &t1 = this->m_stack[this->m_stack.size () - 1];
1853 dwarf_stack_value &t2 = this->m_stack[this->m_stack.size () - 2];
1854 std::swap (t1, t2);
1855 goto no_push;
1858 case DW_OP_over:
1859 result_val = fetch (1);
1860 in_stack_memory = fetch_in_stack_memory (1);
1861 break;
1863 case DW_OP_rot:
1865 if (this->m_stack.size () < 3)
1866 error (_("Not enough elements for "
1867 "DW_OP_rot. Need 3, have %zu."),
1868 this->m_stack.size ());
1870 dwarf_stack_value temp = this->m_stack[this->m_stack.size () - 1];
1871 this->m_stack[this->m_stack.size () - 1]
1872 = this->m_stack[this->m_stack.size () - 2];
1873 this->m_stack[this->m_stack.size () - 2]
1874 = this->m_stack[this->m_stack.size () - 3];
1875 this->m_stack[this->m_stack.size () - 3] = temp;
1876 goto no_push;
1879 case DW_OP_deref:
1880 case DW_OP_deref_size:
1881 case DW_OP_deref_type:
1882 case DW_OP_GNU_deref_type:
1884 int addr_size = (op == DW_OP_deref ? this->m_addr_size : *op_ptr++);
1885 gdb_byte *buf = (gdb_byte *) alloca (addr_size);
1886 CORE_ADDR addr = fetch_address (0);
1887 struct type *type;
1889 pop ();
1891 if (op == DW_OP_deref_type || op == DW_OP_GNU_deref_type)
1893 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
1894 cu_offset type_die_cu_off = (cu_offset) uoffset;
1895 type = get_base_type (type_die_cu_off);
1897 else
1898 type = address_type;
1900 this->read_mem (buf, addr, addr_size);
1902 /* If the size of the object read from memory is different
1903 from the type length, we need to zero-extend it. */
1904 if (TYPE_LENGTH (type) != addr_size)
1906 ULONGEST datum =
1907 extract_unsigned_integer (buf, addr_size, byte_order);
1909 buf = (gdb_byte *) alloca (TYPE_LENGTH (type));
1910 store_unsigned_integer (buf, TYPE_LENGTH (type),
1911 byte_order, datum);
1914 result_val = value_from_contents_and_address (type, buf, addr);
1915 break;
1918 case DW_OP_abs:
1919 case DW_OP_neg:
1920 case DW_OP_not:
1921 case DW_OP_plus_uconst:
1923 /* Unary operations. */
1924 result_val = fetch (0);
1925 pop ();
1927 switch (op)
1929 case DW_OP_abs:
1930 if (value_less (result_val,
1931 value_zero (value_type (result_val), not_lval)))
1932 result_val = value_neg (result_val);
1933 break;
1934 case DW_OP_neg:
1935 result_val = value_neg (result_val);
1936 break;
1937 case DW_OP_not:
1938 dwarf_require_integral (value_type (result_val));
1939 result_val = value_complement (result_val);
1940 break;
1941 case DW_OP_plus_uconst:
1942 dwarf_require_integral (value_type (result_val));
1943 result = value_as_long (result_val);
1944 op_ptr = safe_read_uleb128 (op_ptr, op_end, &reg);
1945 result += reg;
1946 result_val = value_from_ulongest (address_type, result);
1947 break;
1950 break;
1952 case DW_OP_and:
1953 case DW_OP_div:
1954 case DW_OP_minus:
1955 case DW_OP_mod:
1956 case DW_OP_mul:
1957 case DW_OP_or:
1958 case DW_OP_plus:
1959 case DW_OP_shl:
1960 case DW_OP_shr:
1961 case DW_OP_shra:
1962 case DW_OP_xor:
1963 case DW_OP_le:
1964 case DW_OP_ge:
1965 case DW_OP_eq:
1966 case DW_OP_lt:
1967 case DW_OP_gt:
1968 case DW_OP_ne:
1970 /* Binary operations. */
1971 struct value *first, *second;
1973 second = fetch (0);
1974 pop ();
1976 first = fetch (0);
1977 pop ();
1979 if (! base_types_equal_p (value_type (first), value_type (second)))
1980 error (_("Incompatible types on DWARF stack"));
1982 switch (op)
1984 case DW_OP_and:
1985 dwarf_require_integral (value_type (first));
1986 dwarf_require_integral (value_type (second));
1987 result_val = value_binop (first, second, BINOP_BITWISE_AND);
1988 break;
1989 case DW_OP_div:
1990 result_val = value_binop (first, second, BINOP_DIV);
1991 break;
1992 case DW_OP_minus:
1993 result_val = value_binop (first, second, BINOP_SUB);
1994 break;
1995 case DW_OP_mod:
1997 int cast_back = 0;
1998 struct type *orig_type = value_type (first);
2000 /* We have to special-case "old-style" untyped values
2001 -- these must have mod computed using unsigned
2002 math. */
2003 if (orig_type == address_type)
2005 struct type *utype = get_unsigned_type (arch, orig_type);
2007 cast_back = 1;
2008 first = value_cast (utype, first);
2009 second = value_cast (utype, second);
2011 /* Note that value_binop doesn't handle float or
2012 decimal float here. This seems unimportant. */
2013 result_val = value_binop (first, second, BINOP_MOD);
2014 if (cast_back)
2015 result_val = value_cast (orig_type, result_val);
2017 break;
2018 case DW_OP_mul:
2019 result_val = value_binop (first, second, BINOP_MUL);
2020 break;
2021 case DW_OP_or:
2022 dwarf_require_integral (value_type (first));
2023 dwarf_require_integral (value_type (second));
2024 result_val = value_binop (first, second, BINOP_BITWISE_IOR);
2025 break;
2026 case DW_OP_plus:
2027 result_val = value_binop (first, second, BINOP_ADD);
2028 break;
2029 case DW_OP_shl:
2030 dwarf_require_integral (value_type (first));
2031 dwarf_require_integral (value_type (second));
2032 result_val = value_binop (first, second, BINOP_LSH);
2033 break;
2034 case DW_OP_shr:
2035 dwarf_require_integral (value_type (first));
2036 dwarf_require_integral (value_type (second));
2037 if (!value_type (first)->is_unsigned ())
2039 struct type *utype
2040 = get_unsigned_type (arch, value_type (first));
2042 first = value_cast (utype, first);
2045 result_val = value_binop (first, second, BINOP_RSH);
2046 /* Make sure we wind up with the same type we started
2047 with. */
2048 if (value_type (result_val) != value_type (second))
2049 result_val = value_cast (value_type (second), result_val);
2050 break;
2051 case DW_OP_shra:
2052 dwarf_require_integral (value_type (first));
2053 dwarf_require_integral (value_type (second));
2054 if (value_type (first)->is_unsigned ())
2056 struct type *stype
2057 = get_signed_type (arch, value_type (first));
2059 first = value_cast (stype, first);
2062 result_val = value_binop (first, second, BINOP_RSH);
2063 /* Make sure we wind up with the same type we started
2064 with. */
2065 if (value_type (result_val) != value_type (second))
2066 result_val = value_cast (value_type (second), result_val);
2067 break;
2068 case DW_OP_xor:
2069 dwarf_require_integral (value_type (first));
2070 dwarf_require_integral (value_type (second));
2071 result_val = value_binop (first, second, BINOP_BITWISE_XOR);
2072 break;
2073 case DW_OP_le:
2074 /* A <= B is !(B < A). */
2075 result = ! value_less (second, first);
2076 result_val = value_from_ulongest (address_type, result);
2077 break;
2078 case DW_OP_ge:
2079 /* A >= B is !(A < B). */
2080 result = ! value_less (first, second);
2081 result_val = value_from_ulongest (address_type, result);
2082 break;
2083 case DW_OP_eq:
2084 result = value_equal (first, second);
2085 result_val = value_from_ulongest (address_type, result);
2086 break;
2087 case DW_OP_lt:
2088 result = value_less (first, second);
2089 result_val = value_from_ulongest (address_type, result);
2090 break;
2091 case DW_OP_gt:
2092 /* A > B is B < A. */
2093 result = value_less (second, first);
2094 result_val = value_from_ulongest (address_type, result);
2095 break;
2096 case DW_OP_ne:
2097 result = ! value_equal (first, second);
2098 result_val = value_from_ulongest (address_type, result);
2099 break;
2100 default:
2101 internal_error (__FILE__, __LINE__,
2102 _("Can't be reached."));
2105 break;
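/* DW_OP_call_frame_cfa pushes the canonical frame address (CFA) of
   the current frame, as computed from the call frame information. */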
2107 case DW_OP_call_frame_cfa:
2108 ensure_have_frame (this->m_frame, "DW_OP_call_frame_cfa");
2110 result = dwarf2_frame_cfa (this->m_frame);
2111 result_val = value_from_ulongest (address_type, result);
2112 in_stack_memory = true;
2113 break;
2115 case DW_OP_GNU_push_tls_address:
2116 case DW_OP_form_tls_address:
2117 /* The variable is at a constant offset in the thread-local
2118 storage block of the objfile for the current thread and
2119 the dynamic linker module containing this expression. Here
2120 we return the offset from that base. The top of the
2121 stack has the offset from the beginning of the thread
2122 control block at which the variable is located. Nothing
2123 should follow this operator, so the top of stack is
2124 returned. */
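/* Compilers typically emit this as something like
   DW_OP_addr <offset within the TLS block>, DW_OP_form_tls_address
   (or its GNU predecessor). */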
2125 result = value_as_long (fetch (0));
2126 pop ();
2127 result = target_translate_tls_address (this->m_per_objfile->objfile,
2128 result);
2129 result_val = value_from_ulongest (address_type, result);
2130 break;
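/* DW_OP_skip and DW_OP_bra branch by a signed 2-byte offset counted
   from the first byte after the operand; DW_OP_bra branches only if
   the value popped from the stack is non-zero. */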
2132 case DW_OP_skip:
2133 offset = extract_signed_integer (op_ptr, 2, byte_order);
2134 op_ptr += 2;
2135 op_ptr += offset;
2136 goto no_push;
2138 case DW_OP_bra:
2140 struct value *val;
2142 offset = extract_signed_integer (op_ptr, 2, byte_order);
2143 op_ptr += 2;
2144 val = fetch (0);
2145 dwarf_require_integral (value_type (val));
2146 if (value_as_long (val) != 0)
2147 op_ptr += offset;
2148 pop ();
2150 goto no_push;
2152 case DW_OP_nop:
2153 goto no_push;
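/* DW_OP_piece composes a value from several locations. For example,
   DW_OP_reg3, DW_OP_piece 4, DW_OP_fbreg -12, DW_OP_piece 4 describes
   an 8-byte object whose first four bytes are in register 3 and whose
   remaining four bytes are at frame base - 12. */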
2155 case DW_OP_piece:
2157 uint64_t size;
2159 /* Record the piece. */
2160 op_ptr = safe_read_uleb128 (op_ptr, op_end, &size);
2161 add_piece (8 * size, 0);
2163 /* Pop off the address/regnum, and reset the location
2164 type. */
2165 if (this->m_location != DWARF_VALUE_LITERAL
2166 && this->m_location != DWARF_VALUE_OPTIMIZED_OUT)
2167 pop ();
2168 this->m_location = DWARF_VALUE_MEMORY;
2170 goto no_push;
2172 case DW_OP_bit_piece:
2174 uint64_t size, uleb_offset;
2176 /* Record the piece. */
2177 op_ptr = safe_read_uleb128 (op_ptr, op_end, &size);
2178 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uleb_offset);
2179 add_piece (size, uleb_offset);
2181 /* Pop off the address/regnum, and reset the location
2182 type. */
2183 if (this->m_location != DWARF_VALUE_LITERAL
2184 && this->m_location != DWARF_VALUE_OPTIMIZED_OUT)
2185 pop ();
2186 this->m_location = DWARF_VALUE_MEMORY;
2188 goto no_push;
2190 case DW_OP_GNU_uninit:
2191 if (op_ptr != op_end)
2192 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
2193 "be the very last op."));
2195 this->m_initialized = 0;
2196 goto no_push;
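/* DW_OP_call2 and DW_OP_call4 evaluate the DW_AT_location of another
   DIE, identified by a 2- or 4-byte CU-relative offset, as a
   subexpression of this one. */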
2198 case DW_OP_call2:
2200 cu_offset cu_off
2201 = (cu_offset) extract_unsigned_integer (op_ptr, 2, byte_order);
2202 op_ptr += 2;
2203 this->dwarf_call (cu_off);
2205 goto no_push;
2207 case DW_OP_call4:
2209 cu_offset cu_off
2210 = (cu_offset) extract_unsigned_integer (op_ptr, 4, byte_order);
2211 op_ptr += 4;
2212 this->dwarf_call (cu_off);
2214 goto no_push;
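/* DW_OP_GNU_variable_value (a GNU extension) pushes the value of the
   variable whose DIE is at the given offset in .debug_info. */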
2216 case DW_OP_GNU_variable_value:
2218 ensure_have_per_cu (this->m_per_cu, "DW_OP_GNU_variable_value");
2219 int ref_addr_size = this->m_per_cu->ref_addr_size ();
2221 sect_offset sect_off
2222 = (sect_offset) extract_unsigned_integer (op_ptr,
2223 ref_addr_size,
2224 byte_order);
2225 op_ptr += ref_addr_size;
2226 result_val = sect_variable_value (sect_off, this->m_per_cu,
2227 this->m_per_objfile);
2228 result_val = value_cast (address_type, result_val);
2230 break;
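/* DW_OP_entry_value evaluates its sub-expression as of the moment the
   current function was called. Only a block consisting of a single
   DW_OP_reg<n>, or of DW_OP_breg<n> 0 followed by a deref, is
   supported here, as the error below states. */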
2232 case DW_OP_entry_value:
2233 case DW_OP_GNU_entry_value:
2235 uint64_t len;
2236 CORE_ADDR deref_size;
2237 union call_site_parameter_u kind_u;
2239 op_ptr = safe_read_uleb128 (op_ptr, op_end, &len);
2240 if (op_ptr + len > op_end)
2241 error (_("DW_OP_entry_value: too few bytes available."));
2243 kind_u.dwarf_reg = dwarf_block_to_dwarf_reg (op_ptr, op_ptr + len);
2244 if (kind_u.dwarf_reg != -1)
2246 op_ptr += len;
2247 this->push_dwarf_reg_entry_value (CALL_SITE_PARAMETER_DWARF_REG,
2248 kind_u,
2249 -1 /* deref_size */);
2250 goto no_push;
2253 kind_u.dwarf_reg = dwarf_block_to_dwarf_reg_deref (op_ptr,
2254 op_ptr + len,
2255 &deref_size);
2256 if (kind_u.dwarf_reg != -1)
2258 if (deref_size == -1)
2259 deref_size = this->m_addr_size;
2260 op_ptr += len;
2261 this->push_dwarf_reg_entry_value (CALL_SITE_PARAMETER_DWARF_REG,
2262 kind_u, deref_size);
2263 goto no_push;
2266 error (_("DWARF-2 expression error: DW_OP_entry_value is "
2267 "supported only for single DW_OP_reg* "
2268 "or for DW_OP_breg*(0)+DW_OP_deref*"));
2271 case DW_OP_GNU_parameter_ref:
2273 union call_site_parameter_u kind_u;
2275 kind_u.param_cu_off
2276 = (cu_offset) extract_unsigned_integer (op_ptr, 4, byte_order);
2277 op_ptr += 4;
2278 this->push_dwarf_reg_entry_value (CALL_SITE_PARAMETER_PARAM_OFFSET,
2279 kind_u,
2280 -1 /* deref_size */);
2282 goto no_push;
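/* DW_OP_const_type pushes a constant of the base type whose DIE is at
   the ULEB128 CU-relative offset; the constant itself follows as a
   1-byte size plus that many bytes of data. */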
2284 case DW_OP_const_type:
2285 case DW_OP_GNU_const_type:
2287 int n;
2288 const gdb_byte *data;
2289 struct type *type;
2291 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
2292 cu_offset type_die_cu_off = (cu_offset) uoffset;
2294 n = *op_ptr++;
2295 data = op_ptr;
2296 op_ptr += n;
2298 type = get_base_type (type_die_cu_off);
2300 if (TYPE_LENGTH (type) != n)
2301 error (_("DW_OP_const_type has different sizes for type and data"));
2303 result_val = value_from_contents (type, data);
2305 break;
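/* DW_OP_regval_type reads the given register as a value of the given
   base type instead of the generic address-sized type. */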
2307 case DW_OP_regval_type:
2308 case DW_OP_GNU_regval_type:
2310 op_ptr = safe_read_uleb128 (op_ptr, op_end, &reg);
2311 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
2312 cu_offset type_die_cu_off = (cu_offset) uoffset;
2314 ensure_have_frame (this->m_frame, "DW_OP_regval_type");
2316 struct type *type = get_base_type (type_die_cu_off);
2317 int regnum
2318 = dwarf_reg_to_regnum_or_error (get_frame_arch (this->m_frame),
2319 reg);
2320 result_val = value_from_register (type, regnum, this->m_frame);
2322 break;
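/* DW_OP_convert casts the top of the stack to the base type named by
   the ULEB128 operand (the generic type if the operand is 0), while
   DW_OP_reinterpret keeps the bit pattern and only changes the type,
   so the two sizes must match. */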
2324 case DW_OP_convert:
2325 case DW_OP_GNU_convert:
2326 case DW_OP_reinterpret:
2327 case DW_OP_GNU_reinterpret:
2329 struct type *type;
2331 op_ptr = safe_read_uleb128 (op_ptr, op_end, &uoffset);
2332 cu_offset type_die_cu_off = (cu_offset) uoffset;
2334 if (to_underlying (type_die_cu_off) == 0)
2335 type = address_type;
2336 else
2337 type = get_base_type (type_die_cu_off);
2339 result_val = fetch (0);
2340 pop ();
2342 if (op == DW_OP_convert || op == DW_OP_GNU_convert)
2343 result_val = value_cast (type, result_val);
2344 else if (type == value_type (result_val))
2346 /* Nothing. */
2348 else if (TYPE_LENGTH (type)
2349 != TYPE_LENGTH (value_type (result_val)))
2350 error (_("DW_OP_reinterpret has wrong size"));
2351 else
2352 result_val
2353 = value_from_contents (type,
2354 value_contents_all (result_val).data ());
2356 break;
2358 case DW_OP_push_object_address:
2359 /* Return the address of the object we are currently observing. */
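/* This is used, for example, by DW_AT_data_location expressions for
   dynamically allocated (e.g. Fortran) arrays, where the expression
   starts from the address of the array descriptor being observed. */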
2360 if (this->m_addr_info == nullptr
2361 || (this->m_addr_info->valaddr.data () == nullptr
2362 && this->m_addr_info->addr == 0))
2363 error (_("Location address is not set."));
2365 result_val
2366 = value_from_ulongest (address_type, this->m_addr_info->addr);
2367 break;
2369 default:
2370 error (_("Unhandled dwarf expression opcode 0x%x"), op);
2373 /* Most things push a result value. */
2374 gdb_assert (result_val != NULL);
2375 push (result_val, in_stack_memory);
2376 no_push:
2380 /* To simplify our main caller, if the result is an implicit
2381 pointer, then make a pieced value. This is ok because we can't
2382 have implicit pointers in contexts where pieces are invalid. */
2383 if (this->m_location == DWARF_VALUE_IMPLICIT_POINTER)
2384 add_piece (8 * this->m_addr_size, 0);
2386 this->m_recursion_depth--;
2387 gdb_assert (this->m_recursion_depth >= 0);