4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License, Version 1.0 only
6 * (the "License"). You may not use this file except in compliance
9 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10 * or http://www.opensolaris.org/os/licensing.
11 * See the License for the specific language governing permissions
12 * and limitations under the License.
14 * When distributing Covered Code, include this CDDL HEADER in each
15 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16 * If applicable, add the following below this CDDL HEADER, with the
17 * fields enclosed by brackets "[]" replaced with your own identifying
18 * information: Portions Copyright [yyyy] [name of copyright owner]
24 * Copyright 2005 Sun Microsystems, Inc. All rights reserved.
25 * Use is subject to license terms.
29 * Copyright (c) 2012 by Delphix. All rights reserved.
32 #include <sys/types.h>
33 #include <sys/sysmacros.h>
34 #include <sys/isa_defs.h>
43 #include <dt_grammar.h>
44 #include <dt_parser.h>
45 #include <dt_provider.h>
47 static void dt_cg_node(dt_node_t
*, dt_irlist_t
*, dt_regset_t
*);
50 dt_cg_node_alloc(uint_t label
, dif_instr_t instr
)
52 dt_irnode_t
*dip
= malloc(sizeof (dt_irnode_t
));
55 longjmp(yypcb
->pcb_jmpbuf
, EDT_NOMEM
);
57 dip
->di_label
= label
;
58 dip
->di_instr
= instr
;
59 dip
->di_extern
= NULL
;
66 * Code generator wrapper function for ctf_member_info. If we are given a
67 * reference to a forward declaration tag, search the entire type space for
68 * the actual definition and then call ctf_member_info on the result.
71 dt_cg_membinfo(ctf_file_t
*fp
, ctf_id_t type
, const char *s
, ctf_membinfo_t
*mp
)
73 while (ctf_type_kind(fp
, type
) == CTF_K_FORWARD
) {
74 char n
[DT_TYPE_NAMELEN
];
75 dtrace_typeinfo_t dtt
;
77 if (ctf_type_name(fp
, type
, n
, sizeof (n
)) == NULL
||
78 dt_type_lookup(n
, &dtt
) == -1 || (
79 dtt
.dtt_ctfp
== fp
&& dtt
.dtt_type
== type
))
80 break; /* unable to improve our position */
83 type
= ctf_type_resolve(fp
, dtt
.dtt_type
);
86 if (ctf_member_info(fp
, type
, s
, mp
) == CTF_ERR
)
87 return (NULL
); /* ctf_errno is set for us */
93 dt_cg_xsetx(dt_irlist_t
*dlp
, dt_ident_t
*idp
, uint_t lbl
, int reg
, uint64_t x
)
95 int flag
= idp
!= NULL
? DT_INT_PRIVATE
: DT_INT_SHARED
;
96 int intoff
= dt_inttab_insert(yypcb
->pcb_inttab
, x
, flag
);
97 dif_instr_t instr
= DIF_INSTR_SETX((uint_t
)intoff
, reg
);
100 longjmp(yypcb
->pcb_jmpbuf
, EDT_NOMEM
);
102 if (intoff
> DIF_INTOFF_MAX
)
103 longjmp(yypcb
->pcb_jmpbuf
, EDT_INT2BIG
);
105 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl
, instr
));
108 dlp
->dl_last
->di_extern
= idp
;
112 dt_cg_setx(dt_irlist_t
*dlp
, int reg
, uint64_t x
)
114 dt_cg_xsetx(dlp
, NULL
, DT_LBL_NONE
, reg
, x
);
118 * When loading bit-fields, we want to convert a byte count in the range
119 * 1-8 to the closest power of 2 (e.g. 3->4, 5->8, etc). The clp2() function
120 * is a clever implementation from "Hacker's Delight" by Henry Warren, Jr.
137 * Lookup the correct load opcode to use for the specified node and CTF type.
138 * We determine the size and convert it to a 3-bit index. Our lookup table
139 * is constructed to use a 5-bit index, consisting of the 3-bit size 0-7, a
140 * bit for the sign, and a bit for userland address. For example, a 4-byte
141 * signed load from userland would be at the following table index:
142 * user=1 sign=1 size=4 => binary index 11011 = decimal index 27
145 dt_cg_load(dt_node_t
*dnp
, ctf_file_t
*ctfp
, ctf_id_t type
)
147 static const uint_t ops
[] = {
148 DIF_OP_LDUB
, DIF_OP_LDUH
, 0, DIF_OP_LDUW
,
150 DIF_OP_LDSB
, DIF_OP_LDSH
, 0, DIF_OP_LDSW
,
152 DIF_OP_ULDUB
, DIF_OP_ULDUH
, 0, DIF_OP_ULDUW
,
153 0, 0, 0, DIF_OP_ULDX
,
154 DIF_OP_ULDSB
, DIF_OP_ULDSH
, 0, DIF_OP_ULDSW
,
155 0, 0, 0, DIF_OP_ULDX
,
162 * If we're loading a bit-field, the size of our load is found by
163 * rounding cte_bits up to a byte boundary and then finding the
164 * nearest power of two to this value (see clp2(), above).
166 if ((dnp
->dn_flags
& DT_NF_BITFIELD
) &&
167 ctf_type_encoding(ctfp
, type
, &e
) != CTF_ERR
)
168 size
= clp2(P2ROUNDUP(e
.cte_bits
, NBBY
) / NBBY
);
170 size
= ctf_type_size(ctfp
, type
);
172 if (size
< 1 || size
> 8 || (size
& (size
- 1)) != 0) {
173 xyerror(D_UNKNOWN
, "internal error -- cg cannot load "
174 "size %ld when passed by value\n", (long)size
);
177 size
--; /* convert size to 3-bit index */
179 if (dnp
->dn_flags
& DT_NF_SIGNED
)
181 if (dnp
->dn_flags
& DT_NF_USERLAND
)
188 dt_cg_ptrsize(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
,
191 ctf_file_t
*ctfp
= dnp
->dn_ctfp
;
199 type
= ctf_type_resolve(ctfp
, dnp
->dn_type
);
200 kind
= ctf_type_kind(ctfp
, type
);
201 assert(kind
== CTF_K_POINTER
|| kind
== CTF_K_ARRAY
);
203 if (kind
== CTF_K_ARRAY
) {
204 if (ctf_array_info(ctfp
, type
, &r
) != 0) {
205 yypcb
->pcb_hdl
->dt_ctferr
= ctf_errno(ctfp
);
206 longjmp(yypcb
->pcb_jmpbuf
, EDT_CTF
);
208 type
= r
.ctr_contents
;
210 type
= ctf_type_reference(ctfp
, type
);
212 if ((size
= ctf_type_size(ctfp
, type
)) == 1)
213 return; /* multiply or divide by one can be omitted */
215 sreg
= dt_regset_alloc(drp
);
216 dt_cg_setx(dlp
, sreg
, size
);
217 instr
= DIF_INSTR_FMT(op
, dreg
, sreg
, dreg
);
218 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
219 dt_regset_free(drp
, sreg
);
223 * If the result of a "." or "->" operation is a bit-field, we use this routine
224 * to generate an epilogue to the load instruction that extracts the value. In
225 * the diagrams below the "ld??" is the load instruction that is generated to
226 * load the containing word that is generating prior to calling this function.
228 * Epilogue for unsigned fields: Epilogue for signed fields:
230 * ldu? [r1], r1 lds? [r1], r1
231 * setx USHIFT, r2 setx 64 - SSHIFT, r2
232 * srl r1, r2, r1 sll r1, r2, r1
233 * setx (1 << bits) - 1, r2 setx 64 - bits, r2
234 * and r1, r2, r1 sra r1, r2, r1
236 * The *SHIFT constants above changes value depending on the endian-ness of our
237 * target architecture. Refer to the comments below for more details.
240 dt_cg_field_get(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
,
241 ctf_file_t
*fp
, const ctf_membinfo_t
*mp
)
248 if (ctf_type_encoding(fp
, mp
->ctm_type
, &e
) != 0 || e
.cte_bits
> 64) {
249 xyerror(D_UNKNOWN
, "cg: bad field: off %lu type <%ld> "
250 "bits %u\n", mp
->ctm_offset
, mp
->ctm_type
, e
.cte_bits
);
253 assert(dnp
->dn_op
== DT_TOK_PTR
|| dnp
->dn_op
== DT_TOK_DOT
);
254 r1
= dnp
->dn_left
->dn_reg
;
255 r2
= dt_regset_alloc(drp
);
258 * On little-endian architectures, ctm_offset counts from the right so
259 * ctm_offset % NBBY itself is the amount we want to shift right to
260 * move the value bits to the little end of the register to mask them.
261 * On big-endian architectures, ctm_offset counts from the left so we
262 * must subtract (ctm_offset % NBBY + cte_bits) from the size in bits
263 * we used for the load. The size of our load in turn is found by
264 * rounding cte_bits up to a byte boundary and then finding the
265 * nearest power of two to this value (see clp2(), above). These
266 * properties are used to compute shift as USHIFT or SSHIFT, below.
268 if (dnp
->dn_flags
& DT_NF_SIGNED
) {
270 shift
= clp2(P2ROUNDUP(e
.cte_bits
, NBBY
) / NBBY
) * NBBY
-
271 mp
->ctm_offset
% NBBY
;
273 shift
= mp
->ctm_offset
% NBBY
+ e
.cte_bits
;
275 dt_cg_setx(dlp
, r2
, 64 - shift
);
276 instr
= DIF_INSTR_FMT(DIF_OP_SLL
, r1
, r2
, r1
);
277 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
279 dt_cg_setx(dlp
, r2
, 64 - e
.cte_bits
);
280 instr
= DIF_INSTR_FMT(DIF_OP_SRA
, r1
, r2
, r1
);
281 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
284 shift
= clp2(P2ROUNDUP(e
.cte_bits
, NBBY
) / NBBY
) * NBBY
-
285 (mp
->ctm_offset
% NBBY
+ e
.cte_bits
);
287 shift
= mp
->ctm_offset
% NBBY
;
289 dt_cg_setx(dlp
, r2
, shift
);
290 instr
= DIF_INSTR_FMT(DIF_OP_SRL
, r1
, r2
, r1
);
291 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
293 dt_cg_setx(dlp
, r2
, (1ULL << e
.cte_bits
) - 1);
294 instr
= DIF_INSTR_FMT(DIF_OP_AND
, r1
, r2
, r1
);
295 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
298 dt_regset_free(drp
, r2
);
302 * If the destination of a store operation is a bit-field, we use this routine
303 * to generate a prologue to the store instruction that loads the surrounding
304 * bits, clears the destination field, and ORs in the new value of the field.
305 * In the diagram below the "st?" is the store instruction that is generated to
306 * store the containing word that is generating after calling this function.
308 * ld [dst->dn_reg], r1
309 * setx ~(((1 << cte_bits) - 1) << (ctm_offset % NBBY)), r2
312 * setx (1 << cte_bits) - 1, r2
313 * and src->dn_reg, r2, r2
314 * setx ctm_offset % NBBY, r3
318 * st? r1, [dst->dn_reg]
320 * This routine allocates a new register to hold the value to be stored and
321 * returns it. The caller is responsible for freeing this register later.
324 dt_cg_field_set(dt_node_t
*src
, dt_irlist_t
*dlp
,
325 dt_regset_t
*drp
, dt_node_t
*dst
)
327 uint64_t cmask
, fmask
, shift
;
333 ctf_file_t
*fp
, *ofp
;
336 assert(dst
->dn_op
== DT_TOK_PTR
|| dst
->dn_op
== DT_TOK_DOT
);
337 assert(dst
->dn_right
->dn_kind
== DT_NODE_IDENT
);
339 fp
= dst
->dn_left
->dn_ctfp
;
340 type
= ctf_type_resolve(fp
, dst
->dn_left
->dn_type
);
342 if (dst
->dn_op
== DT_TOK_PTR
) {
343 type
= ctf_type_reference(fp
, type
);
344 type
= ctf_type_resolve(fp
, type
);
347 if ((fp
= dt_cg_membinfo(ofp
= fp
, type
,
348 dst
->dn_right
->dn_string
, &m
)) == NULL
) {
349 yypcb
->pcb_hdl
->dt_ctferr
= ctf_errno(ofp
);
350 longjmp(yypcb
->pcb_jmpbuf
, EDT_CTF
);
353 if (ctf_type_encoding(fp
, m
.ctm_type
, &e
) != 0 || e
.cte_bits
> 64) {
354 xyerror(D_UNKNOWN
, "cg: bad field: off %lu type <%ld> "
355 "bits %u\n", m
.ctm_offset
, m
.ctm_type
, e
.cte_bits
);
358 r1
= dt_regset_alloc(drp
);
359 r2
= dt_regset_alloc(drp
);
360 r3
= dt_regset_alloc(drp
);
363 * Compute shifts and masks. We need to compute "shift" as the amount
364 * we need to shift left to position our field in the containing word.
365 * Refer to the comments in dt_cg_field_get(), above, for more info.
366 * We then compute fmask as the mask that truncates the value in the
367 * input register to width cte_bits, and cmask as the mask used to
368 * pass through the containing bits and zero the field bits.
371 shift
= clp2(P2ROUNDUP(e
.cte_bits
, NBBY
) / NBBY
) * NBBY
-
372 (m
.ctm_offset
% NBBY
+ e
.cte_bits
);
374 shift
= m
.ctm_offset
% NBBY
;
376 fmask
= (1ULL << e
.cte_bits
) - 1;
377 cmask
= ~(fmask
<< shift
);
379 instr
= DIF_INSTR_LOAD(
380 dt_cg_load(dst
, fp
, m
.ctm_type
), dst
->dn_reg
, r1
);
381 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
383 dt_cg_setx(dlp
, r2
, cmask
);
384 instr
= DIF_INSTR_FMT(DIF_OP_AND
, r1
, r2
, r1
);
385 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
387 dt_cg_setx(dlp
, r2
, fmask
);
388 instr
= DIF_INSTR_FMT(DIF_OP_AND
, src
->dn_reg
, r2
, r2
);
389 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
391 dt_cg_setx(dlp
, r3
, shift
);
392 instr
= DIF_INSTR_FMT(DIF_OP_SLL
, r2
, r3
, r2
);
393 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
395 instr
= DIF_INSTR_FMT(DIF_OP_OR
, r1
, r2
, r1
);
396 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
398 dt_regset_free(drp
, r3
);
399 dt_regset_free(drp
, r2
);
405 dt_cg_store(dt_node_t
*src
, dt_irlist_t
*dlp
, dt_regset_t
*drp
, dt_node_t
*dst
)
413 * If we're loading a bit-field, the size of our store is found by
414 * rounding dst's cte_bits up to a byte boundary and then finding the
415 * nearest power of two to this value (see clp2(), above).
417 if ((dst
->dn_flags
& DT_NF_BITFIELD
) &&
418 ctf_type_encoding(dst
->dn_ctfp
, dst
->dn_type
, &e
) != CTF_ERR
)
419 size
= clp2(P2ROUNDUP(e
.cte_bits
, NBBY
) / NBBY
);
421 size
= dt_node_type_size(src
);
423 if (src
->dn_flags
& DT_NF_REF
) {
424 reg
= dt_regset_alloc(drp
);
425 dt_cg_setx(dlp
, reg
, size
);
426 instr
= DIF_INSTR_COPYS(src
->dn_reg
, reg
, dst
->dn_reg
);
427 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
428 dt_regset_free(drp
, reg
);
430 if (dst
->dn_flags
& DT_NF_BITFIELD
)
431 reg
= dt_cg_field_set(src
, dlp
, drp
, dst
);
437 instr
= DIF_INSTR_STORE(DIF_OP_STB
, reg
, dst
->dn_reg
);
440 instr
= DIF_INSTR_STORE(DIF_OP_STH
, reg
, dst
->dn_reg
);
443 instr
= DIF_INSTR_STORE(DIF_OP_STW
, reg
, dst
->dn_reg
);
446 instr
= DIF_INSTR_STORE(DIF_OP_STX
, reg
, dst
->dn_reg
);
449 xyerror(D_UNKNOWN
, "internal error -- cg cannot store "
450 "size %lu when passed by value\n", (ulong_t
)size
);
452 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
454 if (dst
->dn_flags
& DT_NF_BITFIELD
)
455 dt_regset_free(drp
, reg
);
460 * Generate code for a typecast or for argument promotion from the type of the
461 * actual to the type of the formal. We need to generate code for casts when
462 * a scalar type is being narrowed or changing signed-ness. We first shift the
463 * desired bits high (losing excess bits if narrowing) and then shift them down
464 * using logical shift (unsigned result) or arithmetic shift (signed result).
467 dt_cg_typecast(const dt_node_t
*src
, const dt_node_t
*dst
,
468 dt_irlist_t
*dlp
, dt_regset_t
*drp
)
470 size_t srcsize
= dt_node_type_size(src
);
471 size_t dstsize
= dt_node_type_size(dst
);
476 if (!dt_node_is_scalar(dst
))
477 return; /* not a scalar */
478 if (dstsize
== srcsize
&&
479 ((src
->dn_flags
^ dst
->dn_flags
) & DT_NF_SIGNED
) == 0)
480 return; /* not narrowing or changing signed-ness */
481 if (dstsize
> srcsize
&& (src
->dn_flags
& DT_NF_SIGNED
) == 0)
482 return; /* nothing to do in this case */
484 rg
= dt_regset_alloc(drp
);
486 if (dstsize
> srcsize
) {
487 int n
= sizeof (uint64_t) * NBBY
- srcsize
* NBBY
;
488 int s
= (dstsize
- srcsize
) * NBBY
;
490 dt_cg_setx(dlp
, rg
, n
);
492 instr
= DIF_INSTR_FMT(DIF_OP_SLL
, src
->dn_reg
, rg
, dst
->dn_reg
);
493 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
495 if ((dst
->dn_flags
& DT_NF_SIGNED
) || n
== s
) {
496 instr
= DIF_INSTR_FMT(DIF_OP_SRA
,
497 dst
->dn_reg
, rg
, dst
->dn_reg
);
498 dt_irlist_append(dlp
,
499 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
501 dt_cg_setx(dlp
, rg
, s
);
502 instr
= DIF_INSTR_FMT(DIF_OP_SRA
,
503 dst
->dn_reg
, rg
, dst
->dn_reg
);
504 dt_irlist_append(dlp
,
505 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
506 dt_cg_setx(dlp
, rg
, n
- s
);
507 instr
= DIF_INSTR_FMT(DIF_OP_SRL
,
508 dst
->dn_reg
, rg
, dst
->dn_reg
);
509 dt_irlist_append(dlp
,
510 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
512 } else if (dstsize
!= sizeof (uint64_t)) {
513 int n
= sizeof (uint64_t) * NBBY
- dstsize
* NBBY
;
515 dt_cg_setx(dlp
, rg
, n
);
517 instr
= DIF_INSTR_FMT(DIF_OP_SLL
, src
->dn_reg
, rg
, dst
->dn_reg
);
518 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
520 instr
= DIF_INSTR_FMT((dst
->dn_flags
& DT_NF_SIGNED
) ?
521 DIF_OP_SRA
: DIF_OP_SRL
, dst
->dn_reg
, rg
, dst
->dn_reg
);
522 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
525 dt_regset_free(drp
, rg
);
529 * Generate code to push the specified argument list on to the tuple stack.
530 * We use this routine for handling subroutine calls and associative arrays.
531 * We must first generate code for all subexpressions before loading the stack
532 * because any subexpression could itself require the use of the tuple stack.
533 * This holds a number of registers equal to the number of arguments, but this
534 * is not a huge problem because the number of arguments can't exceed the
535 * number of tuple register stack elements anyway. At most one extra register
536 * is required (either by dt_cg_typecast() or for dtdt_size, below). This
537 * implies that a DIF implementation should offer a number of general purpose
538 * registers at least one greater than the number of tuple registers.
541 dt_cg_arglist(dt_ident_t
*idp
, dt_node_t
*args
,
542 dt_irlist_t
*dlp
, dt_regset_t
*drp
)
544 const dt_idsig_t
*isp
= idp
->di_data
;
548 for (dnp
= args
; dnp
!= NULL
; dnp
= dnp
->dn_list
)
549 dt_cg_node(dnp
, dlp
, drp
);
551 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, DIF_INSTR_FLUSHTS
));
553 for (dnp
= args
; dnp
!= NULL
; dnp
= dnp
->dn_list
, i
++) {
559 dt_node_diftype(yypcb
->pcb_hdl
, dnp
, &t
);
561 isp
->dis_args
[i
].dn_reg
= dnp
->dn_reg
; /* re-use register */
562 dt_cg_typecast(dnp
, &isp
->dis_args
[i
], dlp
, drp
);
563 isp
->dis_args
[i
].dn_reg
= -1;
565 if (t
.dtdt_flags
& DIF_TF_BYREF
) {
567 if (t
.dtdt_size
!= 0) {
568 reg
= dt_regset_alloc(drp
);
569 dt_cg_setx(dlp
, reg
, t
.dtdt_size
);
578 instr
= DIF_INSTR_PUSHTS(op
, t
.dtdt_kind
, reg
, dnp
->dn_reg
);
579 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
580 dt_regset_free(drp
, dnp
->dn_reg
);
582 if (reg
!= DIF_REG_R0
)
583 dt_regset_free(drp
, reg
);
586 if (i
> yypcb
->pcb_hdl
->dt_conf
.dtc_diftupregs
)
587 longjmp(yypcb
->pcb_jmpbuf
, EDT_NOTUPREG
);
591 dt_cg_arithmetic_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
,
592 dt_regset_t
*drp
, uint_t op
)
594 int is_ptr_op
= (dnp
->dn_op
== DT_TOK_ADD
|| dnp
->dn_op
== DT_TOK_SUB
||
595 dnp
->dn_op
== DT_TOK_ADD_EQ
|| dnp
->dn_op
== DT_TOK_SUB_EQ
);
597 int lp_is_ptr
= dt_node_is_pointer(dnp
->dn_left
);
598 int rp_is_ptr
= dt_node_is_pointer(dnp
->dn_right
);
602 if (lp_is_ptr
&& rp_is_ptr
) {
603 assert(dnp
->dn_op
== DT_TOK_SUB
);
607 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
608 if (is_ptr_op
&& rp_is_ptr
)
609 dt_cg_ptrsize(dnp
, dlp
, drp
, DIF_OP_MUL
, dnp
->dn_left
->dn_reg
);
611 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
612 if (is_ptr_op
&& lp_is_ptr
)
613 dt_cg_ptrsize(dnp
, dlp
, drp
, DIF_OP_MUL
, dnp
->dn_right
->dn_reg
);
615 instr
= DIF_INSTR_FMT(op
, dnp
->dn_left
->dn_reg
,
616 dnp
->dn_right
->dn_reg
, dnp
->dn_left
->dn_reg
);
618 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
619 dt_regset_free(drp
, dnp
->dn_right
->dn_reg
);
620 dnp
->dn_reg
= dnp
->dn_left
->dn_reg
;
622 if (lp_is_ptr
&& rp_is_ptr
)
623 dt_cg_ptrsize(dnp
->dn_right
,
624 dlp
, drp
, DIF_OP_UDIV
, dnp
->dn_reg
);
628 dt_cg_stvar(const dt_ident_t
*idp
)
630 static const uint_t aops
[] = { DIF_OP_STGAA
, DIF_OP_STTAA
, DIF_OP_NOP
};
631 static const uint_t sops
[] = { DIF_OP_STGS
, DIF_OP_STTS
, DIF_OP_STLS
};
633 uint_t i
= (((idp
->di_flags
& DT_IDFLG_LOCAL
) != 0) << 1) |
634 ((idp
->di_flags
& DT_IDFLG_TLS
) != 0);
636 return (idp
->di_kind
== DT_IDENT_ARRAY
? aops
[i
] : sops
[i
]);
640 dt_cg_prearith_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
, uint_t op
)
642 ctf_file_t
*ctfp
= dnp
->dn_ctfp
;
648 if (dt_node_is_pointer(dnp
)) {
649 type
= ctf_type_resolve(ctfp
, dnp
->dn_type
);
650 assert(ctf_type_kind(ctfp
, type
) == CTF_K_POINTER
);
651 size
= ctf_type_size(ctfp
, ctf_type_reference(ctfp
, type
));
654 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
655 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
657 reg
= dt_regset_alloc(drp
);
658 dt_cg_setx(dlp
, reg
, size
);
660 instr
= DIF_INSTR_FMT(op
, dnp
->dn_reg
, reg
, dnp
->dn_reg
);
661 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
662 dt_regset_free(drp
, reg
);
665 * If we are modifying a variable, generate an stv instruction from
666 * the variable specified by the identifier. If we are storing to a
667 * memory address, generate code again for the left-hand side using
668 * DT_NF_REF to get the address, and then generate a store to it.
669 * In both paths, we store the value in dnp->dn_reg (the new value).
671 if (dnp
->dn_child
->dn_kind
== DT_NODE_VAR
) {
672 dt_ident_t
*idp
= dt_ident_resolve(dnp
->dn_child
->dn_ident
);
674 idp
->di_flags
|= DT_IDFLG_DIFW
;
675 instr
= DIF_INSTR_STV(dt_cg_stvar(idp
),
676 idp
->di_id
, dnp
->dn_reg
);
677 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
679 uint_t rbit
= dnp
->dn_child
->dn_flags
& DT_NF_REF
;
681 assert(dnp
->dn_child
->dn_flags
& DT_NF_WRITABLE
);
682 assert(dnp
->dn_child
->dn_flags
& DT_NF_LVALUE
);
684 dnp
->dn_child
->dn_flags
|= DT_NF_REF
; /* force pass-by-ref */
685 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
687 dt_cg_store(dnp
, dlp
, drp
, dnp
->dn_child
);
688 dt_regset_free(drp
, dnp
->dn_child
->dn_reg
);
690 dnp
->dn_left
->dn_flags
&= ~DT_NF_REF
;
691 dnp
->dn_left
->dn_flags
|= rbit
;
696 dt_cg_postarith_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
,
697 dt_regset_t
*drp
, uint_t op
)
699 ctf_file_t
*ctfp
= dnp
->dn_ctfp
;
705 if (dt_node_is_pointer(dnp
)) {
706 type
= ctf_type_resolve(ctfp
, dnp
->dn_type
);
707 assert(ctf_type_kind(ctfp
, type
) == CTF_K_POINTER
);
708 size
= ctf_type_size(ctfp
, ctf_type_reference(ctfp
, type
));
711 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
712 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
714 nreg
= dt_regset_alloc(drp
);
715 dt_cg_setx(dlp
, nreg
, size
);
716 instr
= DIF_INSTR_FMT(op
, dnp
->dn_reg
, nreg
, nreg
);
717 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
720 * If we are modifying a variable, generate an stv instruction from
721 * the variable specified by the identifier. If we are storing to a
722 * memory address, generate code again for the left-hand side using
723 * DT_NF_REF to get the address, and then generate a store to it.
724 * In both paths, we store the value from 'nreg' (the new value).
726 if (dnp
->dn_child
->dn_kind
== DT_NODE_VAR
) {
727 dt_ident_t
*idp
= dt_ident_resolve(dnp
->dn_child
->dn_ident
);
729 idp
->di_flags
|= DT_IDFLG_DIFW
;
730 instr
= DIF_INSTR_STV(dt_cg_stvar(idp
), idp
->di_id
, nreg
);
731 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
733 uint_t rbit
= dnp
->dn_child
->dn_flags
& DT_NF_REF
;
734 int oreg
= dnp
->dn_reg
;
736 assert(dnp
->dn_child
->dn_flags
& DT_NF_WRITABLE
);
737 assert(dnp
->dn_child
->dn_flags
& DT_NF_LVALUE
);
739 dnp
->dn_child
->dn_flags
|= DT_NF_REF
; /* force pass-by-ref */
740 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
743 dt_cg_store(dnp
, dlp
, drp
, dnp
->dn_child
);
746 dt_regset_free(drp
, dnp
->dn_child
->dn_reg
);
747 dnp
->dn_left
->dn_flags
&= ~DT_NF_REF
;
748 dnp
->dn_left
->dn_flags
|= rbit
;
751 dt_regset_free(drp
, nreg
);
755 * Determine if we should perform signed or unsigned comparison for an OP2.
756 * If both operands are of arithmetic type, perform the usual arithmetic
757 * conversions to determine the common real type for comparison [ISOC 6.5.8.3].
760 dt_cg_compare_signed(dt_node_t
*dnp
)
764 if (dt_node_is_string(dnp
->dn_left
) ||
765 dt_node_is_string(dnp
->dn_right
))
766 return (1); /* strings always compare signed */
767 else if (!dt_node_is_arith(dnp
->dn_left
) ||
768 !dt_node_is_arith(dnp
->dn_right
))
769 return (0); /* non-arithmetic types always compare unsigned */
771 bzero(&dn
, sizeof (dn
));
772 dt_node_promote(dnp
->dn_left
, dnp
->dn_right
, &dn
);
773 return (dn
.dn_flags
& DT_NF_SIGNED
);
777 dt_cg_compare_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
, uint_t op
)
779 uint_t lbl_true
= dt_irlist_label(dlp
);
780 uint_t lbl_post
= dt_irlist_label(dlp
);
785 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
786 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
788 if (dt_node_is_string(dnp
->dn_left
) || dt_node_is_string(dnp
->dn_right
))
793 instr
= DIF_INSTR_CMP(opc
, dnp
->dn_left
->dn_reg
, dnp
->dn_right
->dn_reg
);
794 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
795 dt_regset_free(drp
, dnp
->dn_right
->dn_reg
);
796 dnp
->dn_reg
= dnp
->dn_left
->dn_reg
;
798 instr
= DIF_INSTR_BRANCH(op
, lbl_true
);
799 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
801 instr
= DIF_INSTR_MOV(DIF_REG_R0
, dnp
->dn_reg
);
802 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
804 instr
= DIF_INSTR_BRANCH(DIF_OP_BA
, lbl_post
);
805 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
807 dt_cg_xsetx(dlp
, NULL
, lbl_true
, dnp
->dn_reg
, 1);
808 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_post
, DIF_INSTR_NOP
));
812 * Code generation for the ternary op requires some trickery with the assembler
813 * in order to conserve registers. We generate code for dn_expr and dn_left
814 * and free their registers so they do not have be consumed across codegen for
815 * dn_right. We insert a dummy MOV at the end of dn_left into the destination
816 * register, which is not yet known because we haven't done dn_right yet, and
817 * save the pointer to this instruction node. We then generate code for
818 * dn_right and use its register as our output. Finally, we reach back and
819 * patch the instruction for dn_left to move its output into this register.
822 dt_cg_ternary_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
824 uint_t lbl_false
= dt_irlist_label(dlp
);
825 uint_t lbl_post
= dt_irlist_label(dlp
);
830 dt_cg_node(dnp
->dn_expr
, dlp
, drp
);
831 instr
= DIF_INSTR_TST(dnp
->dn_expr
->dn_reg
);
832 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
833 dt_regset_free(drp
, dnp
->dn_expr
->dn_reg
);
835 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_false
);
836 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
838 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
839 instr
= DIF_INSTR_MOV(dnp
->dn_left
->dn_reg
, DIF_REG_R0
);
840 dip
= dt_cg_node_alloc(DT_LBL_NONE
, instr
); /* save dip for below */
841 dt_irlist_append(dlp
, dip
);
842 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
844 instr
= DIF_INSTR_BRANCH(DIF_OP_BA
, lbl_post
);
845 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
847 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_false
, DIF_INSTR_NOP
));
848 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
849 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
852 * Now that dn_reg is assigned, reach back and patch the correct MOV
853 * instruction into the tail of dn_left. We know dn_reg was unused
854 * at that point because otherwise dn_right couldn't have allocated it.
856 dip
->di_instr
= DIF_INSTR_MOV(dnp
->dn_left
->dn_reg
, dnp
->dn_reg
);
857 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_post
, DIF_INSTR_NOP
));
861 dt_cg_logical_and(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
863 uint_t lbl_false
= dt_irlist_label(dlp
);
864 uint_t lbl_post
= dt_irlist_label(dlp
);
868 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
869 instr
= DIF_INSTR_TST(dnp
->dn_left
->dn_reg
);
870 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
871 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
873 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_false
);
874 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
876 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
877 instr
= DIF_INSTR_TST(dnp
->dn_right
->dn_reg
);
878 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
879 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
881 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_false
);
882 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
884 dt_cg_setx(dlp
, dnp
->dn_reg
, 1);
886 instr
= DIF_INSTR_BRANCH(DIF_OP_BA
, lbl_post
);
887 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
889 instr
= DIF_INSTR_MOV(DIF_REG_R0
, dnp
->dn_reg
);
890 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_false
, instr
));
892 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_post
, DIF_INSTR_NOP
));
896 dt_cg_logical_xor(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
898 uint_t lbl_next
= dt_irlist_label(dlp
);
899 uint_t lbl_tail
= dt_irlist_label(dlp
);
903 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
904 instr
= DIF_INSTR_TST(dnp
->dn_left
->dn_reg
);
905 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
907 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_next
);
908 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
909 dt_cg_setx(dlp
, dnp
->dn_left
->dn_reg
, 1);
911 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_next
, DIF_INSTR_NOP
));
912 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
914 instr
= DIF_INSTR_TST(dnp
->dn_right
->dn_reg
);
915 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
917 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_tail
);
918 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
919 dt_cg_setx(dlp
, dnp
->dn_right
->dn_reg
, 1);
921 instr
= DIF_INSTR_FMT(DIF_OP_XOR
, dnp
->dn_left
->dn_reg
,
922 dnp
->dn_right
->dn_reg
, dnp
->dn_left
->dn_reg
);
924 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_tail
, instr
));
926 dt_regset_free(drp
, dnp
->dn_right
->dn_reg
);
927 dnp
->dn_reg
= dnp
->dn_left
->dn_reg
;
931 dt_cg_logical_or(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
933 uint_t lbl_true
= dt_irlist_label(dlp
);
934 uint_t lbl_false
= dt_irlist_label(dlp
);
935 uint_t lbl_post
= dt_irlist_label(dlp
);
939 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
940 instr
= DIF_INSTR_TST(dnp
->dn_left
->dn_reg
);
941 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
942 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
944 instr
= DIF_INSTR_BRANCH(DIF_OP_BNE
, lbl_true
);
945 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
947 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
948 instr
= DIF_INSTR_TST(dnp
->dn_right
->dn_reg
);
949 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
950 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
952 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_false
);
953 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
955 dt_cg_xsetx(dlp
, NULL
, lbl_true
, dnp
->dn_reg
, 1);
957 instr
= DIF_INSTR_BRANCH(DIF_OP_BA
, lbl_post
);
958 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
960 instr
= DIF_INSTR_MOV(DIF_REG_R0
, dnp
->dn_reg
);
961 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_false
, instr
));
963 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_post
, DIF_INSTR_NOP
));
967 dt_cg_logical_neg(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
969 uint_t lbl_zero
= dt_irlist_label(dlp
);
970 uint_t lbl_post
= dt_irlist_label(dlp
);
974 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
975 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
977 instr
= DIF_INSTR_TST(dnp
->dn_reg
);
978 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
980 instr
= DIF_INSTR_BRANCH(DIF_OP_BE
, lbl_zero
);
981 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
983 instr
= DIF_INSTR_MOV(DIF_REG_R0
, dnp
->dn_reg
);
984 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
986 instr
= DIF_INSTR_BRANCH(DIF_OP_BA
, lbl_post
);
987 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
989 dt_cg_xsetx(dlp
, NULL
, lbl_zero
, dnp
->dn_reg
, 1);
990 dt_irlist_append(dlp
, dt_cg_node_alloc(lbl_post
, DIF_INSTR_NOP
));
994 dt_cg_asgn_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
1000 * If we are performing a structure assignment of a translated type,
1001 * we must instantiate all members and create a snapshot of the object
1002 * in scratch space. We allocs a chunk of memory, generate code for
1003 * each member, and then set dnp->dn_reg to the scratch object address.
1005 if ((idp
= dt_node_resolve(dnp
->dn_right
, DT_IDENT_XLSOU
)) != NULL
) {
1007 dt_xlator_t
*dxp
= idp
->di_data
;
1008 dt_node_t
*mnp
, dn
, mn
;
1012 * Create two fake dt_node_t's representing operator "." and a
1013 * right-hand identifier child node. These will be repeatedly
1014 * modified according to each instantiated member so that we
1015 * can pass them to dt_cg_store() and effect a member store.
1017 bzero(&dn
, sizeof (dt_node_t
));
1018 dn
.dn_kind
= DT_NODE_OP2
;
1019 dn
.dn_op
= DT_TOK_DOT
;
1023 bzero(&mn
, sizeof (dt_node_t
));
1024 mn
.dn_kind
= DT_NODE_IDENT
;
1025 mn
.dn_op
= DT_TOK_IDENT
;
1028 * Allocate a register for our scratch data pointer. First we
1029 * set it to the size of our data structure, and then replace
1030 * it with the result of an allocs of the specified size.
1032 r1
= dt_regset_alloc(drp
);
1034 ctf_type_size(dxp
->dx_dst_ctfp
, dxp
->dx_dst_base
));
1036 instr
= DIF_INSTR_ALLOCS(r1
, r1
);
1037 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1040 * When dt_cg_asgn_op() is called, we have already generated
1041 * code for dnp->dn_right, which is the translator input. We
1042 * now associate this register with the translator's input
1043 * identifier so it can be referenced during our member loop.
1045 dxp
->dx_ident
->di_flags
|= DT_IDFLG_CGREG
;
1046 dxp
->dx_ident
->di_id
= dnp
->dn_right
->dn_reg
;
1048 for (mnp
= dxp
->dx_members
; mnp
!= NULL
; mnp
= mnp
->dn_list
) {
1050 * Generate code for the translator member expression,
1051 * and then cast the result to the member type.
1053 dt_cg_node(mnp
->dn_membexpr
, dlp
, drp
);
1054 mnp
->dn_reg
= mnp
->dn_membexpr
->dn_reg
;
1055 dt_cg_typecast(mnp
->dn_membexpr
, mnp
, dlp
, drp
);
1058 * Ask CTF for the offset of the member so we can store
1059 * to the appropriate offset. This call has already
1060 * been done once by the parser, so it should succeed.
1062 if (ctf_member_info(dxp
->dx_dst_ctfp
, dxp
->dx_dst_base
,
1063 mnp
->dn_membname
, &ctm
) == CTF_ERR
) {
1064 yypcb
->pcb_hdl
->dt_ctferr
=
1065 ctf_errno(dxp
->dx_dst_ctfp
);
1066 longjmp(yypcb
->pcb_jmpbuf
, EDT_CTF
);
1070 * If the destination member is at offset 0, store the
1071 * result directly to r1 (the scratch buffer address).
1072 * Otherwise allocate another temporary for the offset
1073 * and add r1 to it before storing the result.
1075 if (ctm
.ctm_offset
!= 0) {
1076 r2
= dt_regset_alloc(drp
);
1079 * Add the member offset rounded down to the
1080 * nearest byte. If the offset was not aligned
1081 * on a byte boundary, this member is a bit-
1082 * field and dt_cg_store() will handle masking.
1084 dt_cg_setx(dlp
, r2
, ctm
.ctm_offset
/ NBBY
);
1085 instr
= DIF_INSTR_FMT(DIF_OP_ADD
, r1
, r2
, r2
);
1086 dt_irlist_append(dlp
,
1087 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1089 dt_node_type_propagate(mnp
, &dn
);
1090 dn
.dn_right
->dn_string
= mnp
->dn_membname
;
1093 dt_cg_store(mnp
, dlp
, drp
, &dn
);
1094 dt_regset_free(drp
, r2
);
1097 dt_node_type_propagate(mnp
, &dn
);
1098 dn
.dn_right
->dn_string
= mnp
->dn_membname
;
1101 dt_cg_store(mnp
, dlp
, drp
, &dn
);
1104 dt_regset_free(drp
, mnp
->dn_reg
);
1107 dxp
->dx_ident
->di_flags
&= ~DT_IDFLG_CGREG
;
1108 dxp
->dx_ident
->di_id
= 0;
1110 if (dnp
->dn_right
->dn_reg
!= -1)
1111 dt_regset_free(drp
, dnp
->dn_right
->dn_reg
);
1113 assert(dnp
->dn_reg
== dnp
->dn_right
->dn_reg
);
1118 * If we are storing to a variable, generate an stv instruction from
1119 * the variable specified by the identifier. If we are storing to a
1120 * memory address, generate code again for the left-hand side using
1121 * DT_NF_REF to get the address, and then generate a store to it.
1122 * In both paths, we assume dnp->dn_reg already has the new value.
1124 if (dnp
->dn_left
->dn_kind
== DT_NODE_VAR
) {
1125 idp
= dt_ident_resolve(dnp
->dn_left
->dn_ident
);
1127 if (idp
->di_kind
== DT_IDENT_ARRAY
)
1128 dt_cg_arglist(idp
, dnp
->dn_left
->dn_args
, dlp
, drp
);
1130 idp
->di_flags
|= DT_IDFLG_DIFW
;
1131 instr
= DIF_INSTR_STV(dt_cg_stvar(idp
),
1132 idp
->di_id
, dnp
->dn_reg
);
1133 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1135 uint_t rbit
= dnp
->dn_left
->dn_flags
& DT_NF_REF
;
1137 assert(dnp
->dn_left
->dn_flags
& DT_NF_WRITABLE
);
1138 assert(dnp
->dn_left
->dn_flags
& DT_NF_LVALUE
);
1140 dnp
->dn_left
->dn_flags
|= DT_NF_REF
; /* force pass-by-ref */
1142 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
1143 dt_cg_store(dnp
, dlp
, drp
, dnp
->dn_left
);
1144 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
1146 dnp
->dn_left
->dn_flags
&= ~DT_NF_REF
;
1147 dnp
->dn_left
->dn_flags
|= rbit
;
1152 dt_cg_assoc_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
1157 assert(dnp
->dn_kind
== DT_NODE_VAR
);
1158 assert(!(dnp
->dn_ident
->di_flags
& DT_IDFLG_LOCAL
));
1159 assert(dnp
->dn_args
!= NULL
);
1161 dt_cg_arglist(dnp
->dn_ident
, dnp
->dn_args
, dlp
, drp
);
1163 dnp
->dn_reg
= dt_regset_alloc(drp
);
1165 if (dnp
->dn_ident
->di_flags
& DT_IDFLG_TLS
)
1170 dnp
->dn_ident
->di_flags
|= DT_IDFLG_DIFR
;
1171 instr
= DIF_INSTR_LDV(op
, dnp
->dn_ident
->di_id
, dnp
->dn_reg
);
1172 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1175 * If the associative array is a pass-by-reference type, then we are
1176 * loading its value as a pointer to either load or store through it.
1177 * The array element in question may not have been faulted in yet, in
1178 * which case DIF_OP_LD*AA will return zero. We append an epilogue
1179 * of instructions similar to the following:
1181 * ld?aa id, %r1 ! base ld?aa instruction above
1182 * tst %r1 ! start of epilogue
1189 * label: < rest of code >
1191 * The idea is that we allocs a zero-filled chunk of scratch space and
1192 * do a DIF_OP_ST*AA to fault in and initialize the array element, and
1193 * then reload it to get the faulted-in address of the new variable
1194 * storage. This isn't cheap, but pass-by-ref associative array values
1195 * are (thus far) uncommon and the allocs cost only occurs once. If
1196 * this path becomes important to DTrace users, we can improve things
1197 * by adding a new DIF opcode to fault in associative array elements.
1199 if (dnp
->dn_flags
& DT_NF_REF
) {
1200 uint_t stvop
= op
== DIF_OP_LDTAA
? DIF_OP_STTAA
: DIF_OP_STGAA
;
1201 uint_t label
= dt_irlist_label(dlp
);
1203 instr
= DIF_INSTR_TST(dnp
->dn_reg
);
1204 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1206 instr
= DIF_INSTR_BRANCH(DIF_OP_BNE
, label
);
1207 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1209 dt_cg_setx(dlp
, dnp
->dn_reg
, dt_node_type_size(dnp
));
1210 instr
= DIF_INSTR_ALLOCS(dnp
->dn_reg
, dnp
->dn_reg
);
1211 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1213 dnp
->dn_ident
->di_flags
|= DT_IDFLG_DIFW
;
1214 instr
= DIF_INSTR_STV(stvop
, dnp
->dn_ident
->di_id
, dnp
->dn_reg
);
1215 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1217 instr
= DIF_INSTR_LDV(op
, dnp
->dn_ident
->di_id
, dnp
->dn_reg
);
1218 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1220 dt_irlist_append(dlp
, dt_cg_node_alloc(label
, DIF_INSTR_NOP
));
1225 dt_cg_array_op(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
1227 dt_probe_t
*prp
= yypcb
->pcb_probe
;
1228 uintmax_t saved
= dnp
->dn_args
->dn_value
;
1229 dt_ident_t
*idp
= dnp
->dn_ident
;
1236 assert(dnp
->dn_kind
== DT_NODE_VAR
);
1237 assert(!(idp
->di_flags
& DT_IDFLG_LOCAL
));
1239 assert(dnp
->dn_args
->dn_kind
== DT_NODE_INT
);
1240 assert(dnp
->dn_args
->dn_list
== NULL
);
1243 * If this is a reference in the args[] array, temporarily modify the
1244 * array index according to the static argument mapping (if any),
1245 * unless the argument reference is provided by a dynamic translator.
1246 * If we're using a dynamic translator for args[], then just set dn_reg
1247 * to an invalid reg and return: DIF_OP_XLARG will fetch the arg later.
1249 if (idp
->di_id
== DIF_VAR_ARGS
) {
1250 if ((idp
->di_kind
== DT_IDENT_XLPTR
||
1251 idp
->di_kind
== DT_IDENT_XLSOU
) &&
1252 dt_xlator_dynamic(idp
->di_data
)) {
1256 dnp
->dn_args
->dn_value
= prp
->pr_mapping
[saved
];
1259 dt_cg_node(dnp
->dn_args
, dlp
, drp
);
1260 dnp
->dn_args
->dn_value
= saved
;
1262 dnp
->dn_reg
= dnp
->dn_args
->dn_reg
;
1264 if (idp
->di_flags
& DT_IDFLG_TLS
)
1269 idp
->di_flags
|= DT_IDFLG_DIFR
;
1271 instr
= DIF_INSTR_LDA(op
, idp
->di_id
,
1272 dnp
->dn_args
->dn_reg
, dnp
->dn_reg
);
1274 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1277 * If this is a reference to the args[] array, we need to take the
1278 * additional step of explicitly eliminating any bits larger than the
1279 * type size: the DIF interpreter in the kernel will always give us
1280 * the raw (64-bit) argument value, and any bits larger than the type
1281 * size may be junk. As a practical matter, this arises only on 64-bit
1282 * architectures and only when the argument index is larger than the
1283 * number of arguments passed directly to DTrace: if a 8-, 16- or
1284 * 32-bit argument must be retrieved from the stack, it is possible
1285 * (and it some cases, likely) that the upper bits will be garbage.
1287 if (idp
->di_id
!= DIF_VAR_ARGS
|| !dt_node_is_scalar(dnp
))
1290 if ((size
= dt_node_type_size(dnp
)) == sizeof (uint64_t))
1293 reg
= dt_regset_alloc(drp
);
1294 assert(size
< sizeof (uint64_t));
1295 n
= sizeof (uint64_t) * NBBY
- size
* NBBY
;
1297 dt_cg_setx(dlp
, reg
, n
);
1299 instr
= DIF_INSTR_FMT(DIF_OP_SLL
, dnp
->dn_reg
, reg
, dnp
->dn_reg
);
1300 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1302 instr
= DIF_INSTR_FMT((dnp
->dn_flags
& DT_NF_SIGNED
) ?
1303 DIF_OP_SRA
: DIF_OP_SRL
, dnp
->dn_reg
, reg
, dnp
->dn_reg
);
1305 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1306 dt_regset_free(drp
, reg
);
/*
 * Generate code for an inlined variable reference.  Inlines can be used to
 * define either scalar or associative array substitutions.  For scalars, we
 * simply generate code for the parse tree saved in the identifier's din_root,
 * and then cast the resulting expression to the inline's declaration type.
 * For arrays, we take the input parameter subtrees from dnp->dn_args and
 * temporarily store them in the din_root of each din_argv[i] identifier,
 * which are themselves inlines and were set up for us by the parser.  The
 * result is that any reference to the inlined parameter inside the top-level
 * din_root will turn into a recursive call to dt_cg_inline() for a scalar
 * inline whose din_root will refer to the subtree pointed to by the argument.
 */
1322 dt_cg_inline(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
1324 dt_ident_t
*idp
= dnp
->dn_ident
;
1325 dt_idnode_t
*inp
= idp
->di_iarg
;
1331 assert(idp
->di_flags
& DT_IDFLG_INLINE
);
1332 assert(idp
->di_ops
== &dt_idops_inline
);
1334 if (idp
->di_kind
== DT_IDENT_ARRAY
) {
1335 for (i
= 0, pnp
= dnp
->dn_args
;
1336 pnp
!= NULL
; pnp
= pnp
->dn_list
, i
++) {
1337 if (inp
->din_argv
[i
] != NULL
) {
1338 pinp
= inp
->din_argv
[i
]->di_iarg
;
1339 pinp
->din_root
= pnp
;
1344 dt_cg_node(inp
->din_root
, dlp
, drp
);
1345 dnp
->dn_reg
= inp
->din_root
->dn_reg
;
1346 dt_cg_typecast(inp
->din_root
, dnp
, dlp
, drp
);
1348 if (idp
->di_kind
== DT_IDENT_ARRAY
) {
1349 for (i
= 0; i
< inp
->din_argc
; i
++) {
1350 pinp
= inp
->din_argv
[i
]->di_iarg
;
1351 pinp
->din_root
= NULL
;
1356 typedef struct dt_xlmemb
{
1357 dt_ident_t
*dtxl_idp
; /* translated ident */
1358 dt_irlist_t
*dtxl_dlp
; /* instruction list */
1359 dt_regset_t
*dtxl_drp
; /* register set */
1360 int dtxl_sreg
; /* location of the translation input */
1361 int dtxl_dreg
; /* location of our allocated buffer */
1366 dt_cg_xlate_member(const char *name
, ctf_id_t type
, ulong_t off
, void *arg
)
1368 dt_xlmemb_t
*dx
= arg
;
1369 dt_ident_t
*idp
= dx
->dtxl_idp
;
1370 dt_irlist_t
*dlp
= dx
->dtxl_dlp
;
1371 dt_regset_t
*drp
= dx
->dtxl_drp
;
1380 /* Generate code for the translation. */
1382 mnp
= dt_xlator_member(dxp
, name
);
1384 /* If there's no translator for the given member, skip it. */
1388 dxp
->dx_ident
->di_flags
|= DT_IDFLG_CGREG
;
1389 dxp
->dx_ident
->di_id
= dx
->dtxl_sreg
;
1391 dt_cg_node(mnp
->dn_membexpr
, dlp
, drp
);
1393 dxp
->dx_ident
->di_flags
&= ~DT_IDFLG_CGREG
;
1394 dxp
->dx_ident
->di_id
= 0;
1396 treg
= mnp
->dn_membexpr
->dn_reg
;
1398 /* Compute the offset into our buffer and store the result there. */
1399 reg
= dt_regset_alloc(drp
);
1401 dt_cg_setx(dlp
, reg
, off
/ NBBY
);
1402 instr
= DIF_INSTR_FMT(DIF_OP_ADD
, dx
->dtxl_dreg
, reg
, reg
);
1403 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1405 size
= ctf_type_size(mnp
->dn_membexpr
->dn_ctfp
,
1406 mnp
->dn_membexpr
->dn_type
);
1407 if (dt_node_is_scalar(mnp
->dn_membexpr
)) {
1409 * Copying scalars is simple.
1413 instr
= DIF_INSTR_STORE(DIF_OP_STB
, treg
, reg
);
1416 instr
= DIF_INSTR_STORE(DIF_OP_STH
, treg
, reg
);
1419 instr
= DIF_INSTR_STORE(DIF_OP_STW
, treg
, reg
);
1422 instr
= DIF_INSTR_STORE(DIF_OP_STX
, treg
, reg
);
1425 xyerror(D_UNKNOWN
, "internal error -- unexpected "
1426 "size: %lu\n", (ulong_t
)size
);
1429 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1431 } else if (dt_node_is_string(mnp
->dn_membexpr
)) {
1435 * Use the copys instruction for strings.
1437 szreg
= dt_regset_alloc(drp
);
1438 dt_cg_setx(dlp
, szreg
, size
);
1439 instr
= DIF_INSTR_COPYS(treg
, szreg
, reg
);
1440 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1441 dt_regset_free(drp
, szreg
);
1446 * If it's anything else then we'll just bcopy it.
1448 szreg
= dt_regset_alloc(drp
);
1449 dt_cg_setx(dlp
, szreg
, size
);
1450 dt_irlist_append(dlp
,
1451 dt_cg_node_alloc(DT_LBL_NONE
, DIF_INSTR_FLUSHTS
));
1452 instr
= DIF_INSTR_PUSHTS(DIF_OP_PUSHTV
, DIF_TYPE_CTF
,
1454 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1455 instr
= DIF_INSTR_PUSHTS(DIF_OP_PUSHTV
, DIF_TYPE_CTF
,
1457 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1458 instr
= DIF_INSTR_PUSHTS(DIF_OP_PUSHTV
, DIF_TYPE_CTF
,
1460 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1461 instr
= DIF_INSTR_CALL(DIF_SUBR_BCOPY
, szreg
);
1462 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1463 dt_regset_free(drp
, szreg
);
1466 dt_regset_free(drp
, reg
);
1467 dt_regset_free(drp
, treg
);
/*
 * If we're expanding a translated type, we create an appropriately sized
 * buffer with alloca() and then translate each member into it.
 */
1477 dt_cg_xlate_expand(dt_node_t
*dnp
, dt_ident_t
*idp
, dt_irlist_t
*dlp
,
1485 dreg
= dt_regset_alloc(drp
);
1486 size
= ctf_type_size(dnp
->dn_ident
->di_ctfp
, dnp
->dn_ident
->di_type
);
1488 /* Call alloca() to create the buffer. */
1489 dt_cg_setx(dlp
, dreg
, size
);
1491 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, DIF_INSTR_FLUSHTS
));
1493 instr
= DIF_INSTR_PUSHTS(DIF_OP_PUSHTV
, DIF_TYPE_CTF
, DIF_REG_R0
, dreg
);
1494 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1496 instr
= DIF_INSTR_CALL(DIF_SUBR_ALLOCA
, dreg
);
1497 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1499 /* Generate the translation for each member. */
1503 dlm
.dtxl_sreg
= dnp
->dn_reg
;
1504 dlm
.dtxl_dreg
= dreg
;
1505 (void) ctf_member_iter(dnp
->dn_ident
->di_ctfp
,
1506 dnp
->dn_ident
->di_type
, dt_cg_xlate_member
,
1513 dt_cg_node(dt_node_t
*dnp
, dt_irlist_t
*dlp
, dt_regset_t
*drp
)
1515 ctf_file_t
*ctfp
= dnp
->dn_ctfp
;
1525 switch (dnp
->dn_op
) {
1527 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
1528 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
1529 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
1530 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
1534 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
1535 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
1536 dt_cg_asgn_op(dnp
, dlp
, drp
);
1540 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_ADD
);
1541 dt_cg_asgn_op(dnp
, dlp
, drp
);
1545 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_SUB
);
1546 dt_cg_asgn_op(dnp
, dlp
, drp
);
1550 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_MUL
);
1551 dt_cg_asgn_op(dnp
, dlp
, drp
);
1555 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1556 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SDIV
: DIF_OP_UDIV
);
1557 dt_cg_asgn_op(dnp
, dlp
, drp
);
1561 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1562 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SREM
: DIF_OP_UREM
);
1563 dt_cg_asgn_op(dnp
, dlp
, drp
);
1567 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_AND
);
1568 dt_cg_asgn_op(dnp
, dlp
, drp
);
1572 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_XOR
);
1573 dt_cg_asgn_op(dnp
, dlp
, drp
);
1577 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_OR
);
1578 dt_cg_asgn_op(dnp
, dlp
, drp
);
1582 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_SLL
);
1583 dt_cg_asgn_op(dnp
, dlp
, drp
);
1587 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1588 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SRA
: DIF_OP_SRL
);
1589 dt_cg_asgn_op(dnp
, dlp
, drp
);
1592 case DT_TOK_QUESTION
:
1593 dt_cg_ternary_op(dnp
, dlp
, drp
);
1597 dt_cg_logical_or(dnp
, dlp
, drp
);
1601 dt_cg_logical_xor(dnp
, dlp
, drp
);
1605 dt_cg_logical_and(dnp
, dlp
, drp
);
1609 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_OR
);
1613 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_XOR
);
1617 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_AND
);
1621 dt_cg_compare_op(dnp
, dlp
, drp
, DIF_OP_BE
);
1625 dt_cg_compare_op(dnp
, dlp
, drp
, DIF_OP_BNE
);
1629 dt_cg_compare_op(dnp
, dlp
, drp
,
1630 dt_cg_compare_signed(dnp
) ? DIF_OP_BL
: DIF_OP_BLU
);
1634 dt_cg_compare_op(dnp
, dlp
, drp
,
1635 dt_cg_compare_signed(dnp
) ? DIF_OP_BLE
: DIF_OP_BLEU
);
1639 dt_cg_compare_op(dnp
, dlp
, drp
,
1640 dt_cg_compare_signed(dnp
) ? DIF_OP_BG
: DIF_OP_BGU
);
1644 dt_cg_compare_op(dnp
, dlp
, drp
,
1645 dt_cg_compare_signed(dnp
) ? DIF_OP_BGE
: DIF_OP_BGEU
);
1649 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_SLL
);
1653 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1654 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SRA
: DIF_OP_SRL
);
1658 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_ADD
);
1662 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_SUB
);
1666 dt_cg_arithmetic_op(dnp
, dlp
, drp
, DIF_OP_MUL
);
1670 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1671 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SDIV
: DIF_OP_UDIV
);
1675 dt_cg_arithmetic_op(dnp
, dlp
, drp
,
1676 (dnp
->dn_flags
& DT_NF_SIGNED
) ? DIF_OP_SREM
: DIF_OP_UREM
);
1680 dt_cg_logical_neg(dnp
, dlp
, drp
);
1684 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1685 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1686 instr
= DIF_INSTR_NOT(dnp
->dn_reg
, dnp
->dn_reg
);
1687 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1691 dt_cg_prearith_op(dnp
, dlp
, drp
, DIF_OP_ADD
);
1694 case DT_TOK_POSTINC
:
1695 dt_cg_postarith_op(dnp
, dlp
, drp
, DIF_OP_ADD
);
1699 dt_cg_prearith_op(dnp
, dlp
, drp
, DIF_OP_SUB
);
1702 case DT_TOK_POSTDEC
:
1703 dt_cg_postarith_op(dnp
, dlp
, drp
, DIF_OP_SUB
);
1707 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1708 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1712 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1713 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1715 instr
= DIF_INSTR_FMT(DIF_OP_SUB
, DIF_REG_R0
,
1716 dnp
->dn_reg
, dnp
->dn_reg
);
1718 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1722 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1723 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1725 if (dt_node_is_dynamic(dnp
->dn_child
)) {
1727 idp
= dt_node_resolve(dnp
->dn_child
, DT_IDENT_XLPTR
);
1728 assert(idp
!= NULL
);
1729 reg
= dt_cg_xlate_expand(dnp
, idp
, dlp
, drp
);
1731 dt_regset_free(drp
, dnp
->dn_child
->dn_reg
);
1734 } else if (!(dnp
->dn_flags
& DT_NF_REF
)) {
1735 uint_t ubit
= dnp
->dn_flags
& DT_NF_USERLAND
;
1738 * Save and restore DT_NF_USERLAND across dt_cg_load():
1739 * we need the sign bit from dnp and the user bit from
1740 * dnp->dn_child in order to get the proper opcode.
1743 (dnp
->dn_child
->dn_flags
& DT_NF_USERLAND
);
1745 instr
= DIF_INSTR_LOAD(dt_cg_load(dnp
, ctfp
,
1746 dnp
->dn_type
), dnp
->dn_reg
, dnp
->dn_reg
);
1748 dnp
->dn_flags
&= ~DT_NF_USERLAND
;
1749 dnp
->dn_flags
|= ubit
;
1751 dt_irlist_append(dlp
,
1752 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1756 case DT_TOK_ADDROF
: {
1757 uint_t rbit
= dnp
->dn_child
->dn_flags
& DT_NF_REF
;
1759 dnp
->dn_child
->dn_flags
|= DT_NF_REF
; /* force pass-by-ref */
1760 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1761 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1763 dnp
->dn_child
->dn_flags
&= ~DT_NF_REF
;
1764 dnp
->dn_child
->dn_flags
|= rbit
;
1768 case DT_TOK_SIZEOF
: {
1769 size_t size
= dt_node_sizeof(dnp
->dn_child
);
1770 dnp
->dn_reg
= dt_regset_alloc(drp
);
1772 dt_cg_setx(dlp
, dnp
->dn_reg
, size
);
1776 case DT_TOK_STRINGOF
:
1777 dt_cg_node(dnp
->dn_child
, dlp
, drp
);
1778 dnp
->dn_reg
= dnp
->dn_child
->dn_reg
;
1783 * An xlate operator appears in either an XLATOR, indicating a
1784 * reference to a dynamic translator, or an OP2, indicating
1785 * use of the xlate operator in the user's program. For the
1786 * dynamic case, generate an xlate opcode with a reference to
1787 * the corresponding member, pre-computed for us in dn_members.
1789 if (dnp
->dn_kind
== DT_NODE_XLATOR
) {
1790 dt_xlator_t
*dxp
= dnp
->dn_xlator
;
1792 assert(dxp
->dx_ident
->di_flags
& DT_IDFLG_CGREG
);
1793 assert(dxp
->dx_ident
->di_id
!= 0);
1795 dnp
->dn_reg
= dt_regset_alloc(drp
);
1797 if (dxp
->dx_arg
== -1) {
1798 instr
= DIF_INSTR_MOV(
1799 dxp
->dx_ident
->di_id
, dnp
->dn_reg
);
1800 dt_irlist_append(dlp
,
1801 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1806 instr
= DIF_INSTR_XLATE(op
, 0, dnp
->dn_reg
);
1807 dt_irlist_append(dlp
,
1808 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1810 dlp
->dl_last
->di_extern
= dnp
->dn_xmember
;
1814 assert(dnp
->dn_kind
== DT_NODE_OP2
);
1815 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
1816 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
1820 dt_cg_node(dnp
->dn_right
, dlp
, drp
);
1821 dnp
->dn_reg
= dnp
->dn_right
->dn_reg
;
1822 dt_cg_typecast(dnp
->dn_right
, dnp
, dlp
, drp
);
1827 assert(dnp
->dn_right
->dn_kind
== DT_NODE_IDENT
);
1828 dt_cg_node(dnp
->dn_left
, dlp
, drp
);
1831 * If the left-hand side of PTR or DOT is a dynamic variable,
1832 * we expect it to be the output of a D translator. In this
1833 * case, we look up the parse tree corresponding to the member
1834 * that is being accessed and run the code generator over it.
1835 * We then cast the result as if by the assignment operator.
1837 if ((idp
= dt_node_resolve(
1838 dnp
->dn_left
, DT_IDENT_XLSOU
)) != NULL
||
1839 (idp
= dt_node_resolve(
1840 dnp
->dn_left
, DT_IDENT_XLPTR
)) != NULL
) {
1846 mnp
= dt_xlator_member(dxp
, dnp
->dn_right
->dn_string
);
1847 assert(mnp
!= NULL
);
1849 dxp
->dx_ident
->di_flags
|= DT_IDFLG_CGREG
;
1850 dxp
->dx_ident
->di_id
= dnp
->dn_left
->dn_reg
;
1852 dt_cg_node(mnp
->dn_membexpr
, dlp
, drp
);
1853 dnp
->dn_reg
= mnp
->dn_membexpr
->dn_reg
;
1854 dt_cg_typecast(mnp
->dn_membexpr
, dnp
, dlp
, drp
);
1856 dxp
->dx_ident
->di_flags
&= ~DT_IDFLG_CGREG
;
1857 dxp
->dx_ident
->di_id
= 0;
1859 if (dnp
->dn_left
->dn_reg
!= -1)
1860 dt_regset_free(drp
, dnp
->dn_left
->dn_reg
);
1864 ctfp
= dnp
->dn_left
->dn_ctfp
;
1865 type
= ctf_type_resolve(ctfp
, dnp
->dn_left
->dn_type
);
1867 if (dnp
->dn_op
== DT_TOK_PTR
) {
1868 type
= ctf_type_reference(ctfp
, type
);
1869 type
= ctf_type_resolve(ctfp
, type
);
1872 if ((ctfp
= dt_cg_membinfo(octfp
= ctfp
, type
,
1873 dnp
->dn_right
->dn_string
, &m
)) == NULL
) {
1874 yypcb
->pcb_hdl
->dt_ctferr
= ctf_errno(octfp
);
1875 longjmp(yypcb
->pcb_jmpbuf
, EDT_CTF
);
1878 if (m
.ctm_offset
!= 0) {
1881 reg
= dt_regset_alloc(drp
);
1884 * If the offset is not aligned on a byte boundary, it
1885 * is a bit-field member and we will extract the value
1886 * bits below after we generate the appropriate load.
1888 dt_cg_setx(dlp
, reg
, m
.ctm_offset
/ NBBY
);
1890 instr
= DIF_INSTR_FMT(DIF_OP_ADD
,
1891 dnp
->dn_left
->dn_reg
, reg
, dnp
->dn_left
->dn_reg
);
1893 dt_irlist_append(dlp
,
1894 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1895 dt_regset_free(drp
, reg
);
1898 if (!(dnp
->dn_flags
& DT_NF_REF
)) {
1899 uint_t ubit
= dnp
->dn_flags
& DT_NF_USERLAND
;
1902 * Save and restore DT_NF_USERLAND across dt_cg_load():
1903 * we need the sign bit from dnp and the user bit from
1904 * dnp->dn_left in order to get the proper opcode.
1907 (dnp
->dn_left
->dn_flags
& DT_NF_USERLAND
);
1909 instr
= DIF_INSTR_LOAD(dt_cg_load(dnp
,
1910 ctfp
, m
.ctm_type
), dnp
->dn_left
->dn_reg
,
1911 dnp
->dn_left
->dn_reg
);
1913 dnp
->dn_flags
&= ~DT_NF_USERLAND
;
1914 dnp
->dn_flags
|= ubit
;
1916 dt_irlist_append(dlp
,
1917 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1919 if (dnp
->dn_flags
& DT_NF_BITFIELD
)
1920 dt_cg_field_get(dnp
, dlp
, drp
, ctfp
, &m
);
1923 dnp
->dn_reg
= dnp
->dn_left
->dn_reg
;
1927 dnp
->dn_reg
= dt_regset_alloc(drp
);
1929 assert(dnp
->dn_kind
== DT_NODE_STRING
);
1930 stroff
= dt_strtab_insert(yypcb
->pcb_strtab
, dnp
->dn_string
);
1933 longjmp(yypcb
->pcb_jmpbuf
, EDT_NOMEM
);
1934 if (stroff
> DIF_STROFF_MAX
)
1935 longjmp(yypcb
->pcb_jmpbuf
, EDT_STR2BIG
);
1937 instr
= DIF_INSTR_SETS((ulong_t
)stroff
, dnp
->dn_reg
);
1938 dt_irlist_append(dlp
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1943 * If the specified identifier is a variable on which we have
1944 * set the code generator register flag, then this variable
1945 * has already had code generated for it and saved in di_id.
1946 * Allocate a new register and copy the existing value to it.
1948 if (dnp
->dn_kind
== DT_NODE_VAR
&&
1949 (dnp
->dn_ident
->di_flags
& DT_IDFLG_CGREG
)) {
1950 dnp
->dn_reg
= dt_regset_alloc(drp
);
1951 instr
= DIF_INSTR_MOV(dnp
->dn_ident
->di_id
,
1953 dt_irlist_append(dlp
,
1954 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1959 * Identifiers can represent function calls, variable refs, or
1960 * symbols. First we check for inlined variables, and handle
1961 * them by generating code for the inline parse tree.
1963 if (dnp
->dn_kind
== DT_NODE_VAR
&&
1964 (dnp
->dn_ident
->di_flags
& DT_IDFLG_INLINE
)) {
1965 dt_cg_inline(dnp
, dlp
, drp
);
1969 switch (dnp
->dn_kind
) {
1971 if ((idp
= dnp
->dn_ident
)->di_kind
!= DT_IDENT_FUNC
) {
1972 dnerror(dnp
, D_CG_EXPR
, "%s %s( ) may not be "
1973 "called from a D expression (D program "
1974 "context required)\n",
1975 dt_idkind_name(idp
->di_kind
), idp
->di_name
);
1978 dt_cg_arglist(dnp
->dn_ident
, dnp
->dn_args
, dlp
, drp
);
1980 dnp
->dn_reg
= dt_regset_alloc(drp
);
1981 instr
= DIF_INSTR_CALL(dnp
->dn_ident
->di_id
,
1984 dt_irlist_append(dlp
,
1985 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
1990 if (dnp
->dn_ident
->di_kind
== DT_IDENT_XLSOU
||
1991 dnp
->dn_ident
->di_kind
== DT_IDENT_XLPTR
) {
1993 * This can only happen if we have translated
1994 * args[]. See dt_idcook_args() for details.
1996 assert(dnp
->dn_ident
->di_id
== DIF_VAR_ARGS
);
1997 dt_cg_array_op(dnp
, dlp
, drp
);
2001 if (dnp
->dn_ident
->di_kind
== DT_IDENT_ARRAY
) {
2002 if (dnp
->dn_ident
->di_id
> DIF_VAR_ARRAY_MAX
)
2003 dt_cg_assoc_op(dnp
, dlp
, drp
);
2005 dt_cg_array_op(dnp
, dlp
, drp
);
2009 dnp
->dn_reg
= dt_regset_alloc(drp
);
2011 if (dnp
->dn_ident
->di_flags
& DT_IDFLG_LOCAL
)
2013 else if (dnp
->dn_ident
->di_flags
& DT_IDFLG_TLS
)
2018 dnp
->dn_ident
->di_flags
|= DT_IDFLG_DIFR
;
2020 instr
= DIF_INSTR_LDV(op
,
2021 dnp
->dn_ident
->di_id
, dnp
->dn_reg
);
2023 dt_irlist_append(dlp
,
2024 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
2028 dtrace_hdl_t
*dtp
= yypcb
->pcb_hdl
;
2029 dtrace_syminfo_t
*sip
= dnp
->dn_ident
->di_data
;
2032 if (dtrace_lookup_by_name(dtp
,
2033 sip
->dts_object
, sip
->dts_name
, &sym
, NULL
) == -1) {
2034 xyerror(D_UNKNOWN
, "cg failed for symbol %s`%s:"
2035 " %s\n", sip
->dts_object
, sip
->dts_name
,
2036 dtrace_errmsg(dtp
, dtrace_errno(dtp
)));
2039 dnp
->dn_reg
= dt_regset_alloc(drp
);
2040 dt_cg_xsetx(dlp
, dnp
->dn_ident
,
2041 DT_LBL_NONE
, dnp
->dn_reg
, sym
.st_value
);
2043 if (!(dnp
->dn_flags
& DT_NF_REF
)) {
2044 instr
= DIF_INSTR_LOAD(dt_cg_load(dnp
, ctfp
,
2045 dnp
->dn_type
), dnp
->dn_reg
, dnp
->dn_reg
);
2046 dt_irlist_append(dlp
,
2047 dt_cg_node_alloc(DT_LBL_NONE
, instr
));
2053 xyerror(D_UNKNOWN
, "internal error -- node type %u is "
2054 "not valid for an identifier\n", dnp
->dn_kind
);
2059 dnp
->dn_reg
= dt_regset_alloc(drp
);
2060 dt_cg_setx(dlp
, dnp
->dn_reg
, dnp
->dn_value
);
2064 xyerror(D_UNKNOWN
, "internal error -- token type %u is not a "
2065 "valid D compilation token\n", dnp
->dn_op
);
2070 dt_cg(dt_pcb_t
*pcb
, dt_node_t
*dnp
)
2076 if (pcb
->pcb_regs
== NULL
&& (pcb
->pcb_regs
=
2077 dt_regset_create(pcb
->pcb_hdl
->dt_conf
.dtc_difintregs
)) == NULL
)
2078 longjmp(pcb
->pcb_jmpbuf
, EDT_NOMEM
);
2080 dt_regset_reset(pcb
->pcb_regs
);
2081 (void) dt_regset_alloc(pcb
->pcb_regs
); /* allocate %r0 */
2083 if (pcb
->pcb_inttab
!= NULL
)
2084 dt_inttab_destroy(pcb
->pcb_inttab
);
2086 if ((pcb
->pcb_inttab
= dt_inttab_create(yypcb
->pcb_hdl
)) == NULL
)
2087 longjmp(pcb
->pcb_jmpbuf
, EDT_NOMEM
);
2089 if (pcb
->pcb_strtab
!= NULL
)
2090 dt_strtab_destroy(pcb
->pcb_strtab
);
2092 if ((pcb
->pcb_strtab
= dt_strtab_create(BUFSIZ
)) == NULL
)
2093 longjmp(pcb
->pcb_jmpbuf
, EDT_NOMEM
);
2095 dt_irlist_destroy(&pcb
->pcb_ir
);
2096 dt_irlist_create(&pcb
->pcb_ir
);
2098 assert(pcb
->pcb_dret
== NULL
);
2099 pcb
->pcb_dret
= dnp
;
2101 if (dt_node_resolve(dnp
, DT_IDENT_XLPTR
) != NULL
) {
2102 dnerror(dnp
, D_CG_DYN
, "expression cannot evaluate to result "
2103 "of a translated pointer\n");
2107 * If we're generating code for a translator body, assign the input
2108 * parameter to the first available register (i.e. caller passes %r1).
2110 if (dnp
->dn_kind
== DT_NODE_MEMBER
) {
2111 dxp
= dnp
->dn_membxlator
;
2112 dnp
= dnp
->dn_membexpr
;
2114 dxp
->dx_ident
->di_flags
|= DT_IDFLG_CGREG
;
2115 dxp
->dx_ident
->di_id
= dt_regset_alloc(pcb
->pcb_regs
);
2118 dt_cg_node(dnp
, &pcb
->pcb_ir
, pcb
->pcb_regs
);
2120 if ((idp
= dt_node_resolve(dnp
, DT_IDENT_XLSOU
)) != NULL
) {
2121 int reg
= dt_cg_xlate_expand(dnp
, idp
,
2122 &pcb
->pcb_ir
, pcb
->pcb_regs
);
2123 dt_regset_free(pcb
->pcb_regs
, dnp
->dn_reg
);
2127 instr
= DIF_INSTR_RET(dnp
->dn_reg
);
2128 dt_regset_free(pcb
->pcb_regs
, dnp
->dn_reg
);
2129 dt_irlist_append(&pcb
->pcb_ir
, dt_cg_node_alloc(DT_LBL_NONE
, instr
));
2131 if (dnp
->dn_kind
== DT_NODE_MEMBER
) {
2132 dt_regset_free(pcb
->pcb_regs
, dxp
->dx_ident
->di_id
);
2133 dxp
->dx_ident
->di_id
= 0;
2134 dxp
->dx_ident
->di_flags
&= ~DT_IDFLG_CGREG
;
2137 dt_regset_free(pcb
->pcb_regs
, 0);
2138 dt_regset_assert_free(pcb
->pcb_regs
);