/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * Copyright (c) 2012 by Delphix. All rights reserved.
 */

#include <sys/types.h>
#include <sys/sysmacros.h>
#include <sys/isa_defs.h>

#include <strings.h>
#include <stdlib.h>
#include <setjmp.h>
#include <assert.h>
#include <errno.h>

#include <dt_impl.h>
#include <dt_grammar.h>
#include <dt_module.h>
#include <dt_parser.h>
#include <dt_provider.h>

static void dt_cg_node(dt_node_t *, dt_irlist_t *, dt_regset_t *);

static dt_irnode_t *
dt_cg_node_alloc(uint_t label, dif_instr_t instr)
{
	dt_irnode_t *dip = malloc(sizeof (dt_irnode_t));

	if (dip == NULL)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);

	dip->di_label = label;
	dip->di_instr = instr;
	dip->di_extern = NULL;
	dip->di_next = NULL;

	return (dip);
}

/*
 * Code generator wrapper function for ctf_member_info.  If we are given a
 * reference to a forward declaration tag, search the entire type space for
 * the actual definition and then call ctf_member_info on the result.
 */
static ctf_file_t *
dt_cg_membinfo(ctf_file_t *fp, ctf_id_t type, const char *s, ctf_membinfo_t *mp)
{
	dt_resolve_forward_decl(&fp, &type);

	if (ctf_member_info(fp, type, s, mp) == CTF_ERR)
		return (NULL); /* ctf_errno is set for us */

	return (fp);
}

static void
dt_cg_xsetx(dt_irlist_t *dlp, dt_ident_t *idp, uint_t lbl, int reg, uint64_t x)
{
	int flag = idp != NULL ? DT_INT_PRIVATE : DT_INT_SHARED;
	int intoff = dt_inttab_insert(yypcb->pcb_inttab, x, flag);
	dif_instr_t instr = DIF_INSTR_SETX((uint_t)intoff, reg);

	if (intoff == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);

	if (intoff > DIF_INTOFF_MAX)
		longjmp(yypcb->pcb_jmpbuf, EDT_INT2BIG);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl, instr));

	if (idp != NULL)
		dlp->dl_last->di_extern = idp;
}

static void
dt_cg_setx(dt_irlist_t *dlp, int reg, uint64_t x)
{
	dt_cg_xsetx(dlp, NULL, DT_LBL_NONE, reg, x);
}

/*
 * When loading bit-fields, we want to convert a byte count in the range
 * 1-8 to the closest power of 2 (e.g. 3->4, 5->8, etc).  The clp2() function
 * is a clever implementation from "Hacker's Delight" by Henry Warren, Jr.
 */
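/*
 * The body of clp2() did not survive extraction; what follows is a
 * reconstruction of the standard "Hacker's Delight" round-up-to-power-of-2
 * routine that the comment above describes and that the callers below assume.
 */
static size_t
clp2(size_t x)
{
	x--;

	x |= (x >> 1);	/* smear the highest set bit into all lower bits... */
	x |= (x >> 2);
	x |= (x >> 4);
	x |= (x >> 8);
	x |= (x >> 16);

	return (x + 1);	/* ...then add one to reach the next power of 2 */
}
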
/*
 * Lookup the correct load opcode to use for the specified node and CTF type.
 * We determine the size and convert it to a 3-bit index.  Our lookup table
 * is constructed to use a 5-bit index, consisting of the 3-bit size 0-7, a
 * bit for the sign, and a bit for userland address.  For example, a 4-byte
 * signed load from userland would be at the following table index:
 * user=1 sign=1 size=4 => binary index 11011 = decimal index 27
 */
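/*
 * (Illustrative note, not in the original source: the decimal index 27 above
 * is (user << 4) | (sign << 3) | (size - 1) = 16 + 8 + 3, matching the
 * "size--" conversion performed in dt_cg_load() below.)
 */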
static uint_t
dt_cg_load(dt_node_t *dnp, ctf_file_t *ctfp, ctf_id_t type)
{
	static const uint_t ops[] = {
		DIF_OP_LDUB,	DIF_OP_LDUH,	0,	DIF_OP_LDUW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_LDSB,	DIF_OP_LDSH,	0,	DIF_OP_LDSW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_ULDUB,	DIF_OP_ULDUH,	0,	DIF_OP_ULDUW,
		0,		0,		0,	DIF_OP_ULDX,
		DIF_OP_ULDSB,	DIF_OP_ULDSH,	0,	DIF_OP_ULDSW,
		0,		0,		0,	DIF_OP_ULDX,
	};

	ctf_encoding_t e;
	ssize_t size;

	/*
	 * If we're loading a bit-field, the size of our load is found by
	 * rounding cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).
	 */
	if ((dnp->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(ctfp, type, &e) != CTF_ERR)
		size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
	else
		size = ctf_type_size(ctfp, type);

	if (size < 1 || size > 8 || (size & (size - 1)) != 0) {
		xyerror(D_UNKNOWN, "internal error -- cg cannot load "
		    "size %ld when passed by value\n", (long)size);
	}

	size--; /* convert size to 3-bit index */

	if (dnp->dn_flags & DT_NF_SIGNED)
		size |= 0x08;
	if (dnp->dn_flags & DT_NF_USERLAND)
		size |= 0x10;

	return (ops[size]);
}

static void
dt_cg_ptrsize(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
    uint_t op, int dreg)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	ctf_arinfo_t r;
	dif_instr_t instr;
	ctf_id_t type;
	uint_t kind;
	ssize_t size;
	int sreg;

	type = ctf_type_resolve(ctfp, dnp->dn_type);
	kind = ctf_type_kind(ctfp, type);
	assert(kind == CTF_K_POINTER || kind == CTF_K_ARRAY);

	if (kind == CTF_K_ARRAY) {
		if (ctf_array_info(ctfp, type, &r) != 0) {
			yypcb->pcb_hdl->dt_ctferr = ctf_errno(ctfp);
			longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
		}
		type = r.ctr_contents;
	} else
		type = ctf_type_reference(ctfp, type);

	if ((size = ctf_type_size(ctfp, type)) == 1)
		return; /* multiply or divide by one can be omitted */

	sreg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, sreg, size);
	instr = DIF_INSTR_FMT(op, dreg, sreg, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, sreg);
}

/*
 * If the result of a "." or "->" operation is a bit-field, we use this routine
 * to generate an epilogue to the load instruction that extracts the value.  In
 * the diagrams below the "ld??" is the load instruction that is generated to
 * load the containing word prior to calling this function.
 *
 * Epilogue for unsigned fields:	Epilogue for signed fields:
 *
 * ldu?	[r1], r1			lds?	[r1], r1
 * setx	USHIFT, r2			setx	64 - SSHIFT, r2
 * srl	r1, r2, r1			sll	r1, r2, r1
 * setx	(1 << bits) - 1, r2		setx	64 - bits, r2
 * and	r1, r2, r1			sra	r1, r2, r1
 *
 * The *SHIFT constants above change value depending on the endian-ness of our
 * target architecture.  Refer to the comments below for more details.
 */
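/*
 * Worked example (an addition, not from the original source): on a
 * little-endian machine, an unsigned 5-bit field at ctm_offset = 11 is
 * fetched with a 2-byte load, so USHIFT = 11 % NBBY = 3; the epilogue then
 * shifts right by 3 and masks with (1 << 5) - 1 = 0x1f, leaving only the
 * field's value bits in the register.
 */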
static void
dt_cg_field_get(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
    ctf_file_t *fp, const ctf_membinfo_t *mp)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	uint64_t shift;
	int r1, r2;

	if (ctf_type_encoding(fp, mp->ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", mp->ctm_offset, mp->ctm_type, e.cte_bits);
	}

	assert(dnp->dn_op == DT_TOK_PTR || dnp->dn_op == DT_TOK_DOT);
	r1 = dnp->dn_left->dn_reg;
	r2 = dt_regset_alloc(drp);

	/*
	 * On little-endian architectures, ctm_offset counts from the right so
	 * ctm_offset % NBBY itself is the amount we want to shift right to
	 * move the value bits to the little end of the register to mask them.
	 * On big-endian architectures, ctm_offset counts from the left so we
	 * must subtract (ctm_offset % NBBY + cte_bits) from the size in bits
	 * we used for the load.  The size of our load in turn is found by
	 * rounding cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).  These
	 * properties are used to compute shift as USHIFT or SSHIFT, below.
	 */
	if (dnp->dn_flags & DT_NF_SIGNED) {
#ifdef _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    mp->ctm_offset % NBBY;
#else
		shift = mp->ctm_offset % NBBY + e.cte_bits;
#endif
		dt_cg_setx(dlp, r2, 64 - shift);
		instr = DIF_INSTR_FMT(DIF_OP_SLL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, r2, 64 - e.cte_bits);
		instr = DIF_INSTR_FMT(DIF_OP_SRA, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
#ifdef _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    (mp->ctm_offset % NBBY + e.cte_bits);
#else
		shift = mp->ctm_offset % NBBY;
#endif
		dt_cg_setx(dlp, r2, shift);
		instr = DIF_INSTR_FMT(DIF_OP_SRL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, r2, (1ULL << e.cte_bits) - 1);
		instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	}

	dt_regset_free(drp, r2);
}

/*
 * If the destination of a store operation is a bit-field, we use this routine
 * to generate a prologue to the store instruction that loads the surrounding
 * bits, clears the destination field, and ORs in the new value of the field.
 * In the diagram below the "st?" is the store instruction that is generated to
 * store the containing word after calling this function.
 *
 * ld	[dst->dn_reg], r1
 * setx	~(((1 << cte_bits) - 1) << (ctm_offset % NBBY)), r2
 * and	r1, r2, r1
 *
 * setx	(1 << cte_bits) - 1, r2
 * and	src->dn_reg, r2, r2
 * setx	ctm_offset % NBBY, r3
 * sll	r2, r3, r2
 *
 * or	r1, r2, r1
 * st?	r1, [dst->dn_reg]
 *
 * This routine allocates a new register to hold the value to be stored and
 * returns it.  The caller is responsible for freeing this register later.
 */
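/*
 * Worked example (an addition, not from the original source): for a 5-bit
 * field at ctm_offset = 11 on a little-endian machine, shift = 11 % NBBY = 3,
 * fmask = (1 << 5) - 1 = 0x1f, and cmask = ~(0x1f << 3), so the prologue
 * clears bits 3-7 of the containing word before ORing in the shifted value.
 */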
static int
dt_cg_field_set(dt_node_t *src, dt_irlist_t *dlp,
    dt_regset_t *drp, dt_node_t *dst)
{
	uint64_t cmask, fmask, shift;
	dif_instr_t instr;
	int r1, r2, r3;

	ctf_membinfo_t m;
	ctf_encoding_t e;
	ctf_file_t *fp, *ofp;
	ctf_id_t type;

	assert(dst->dn_op == DT_TOK_PTR || dst->dn_op == DT_TOK_DOT);
	assert(dst->dn_right->dn_kind == DT_NODE_IDENT);

	fp = dst->dn_left->dn_ctfp;
	type = ctf_type_resolve(fp, dst->dn_left->dn_type);

	if (dst->dn_op == DT_TOK_PTR) {
		type = ctf_type_reference(fp, type);
		type = ctf_type_resolve(fp, type);
	}

	if ((fp = dt_cg_membinfo(ofp = fp, type,
	    dst->dn_right->dn_string, &m)) == NULL) {
		yypcb->pcb_hdl->dt_ctferr = ctf_errno(ofp);
		longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
	}

	if (ctf_type_encoding(fp, m.ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", m.ctm_offset, m.ctm_type, e.cte_bits);
	}

	r1 = dt_regset_alloc(drp);
	r2 = dt_regset_alloc(drp);
	r3 = dt_regset_alloc(drp);

	/*
	 * Compute shifts and masks.  We need to compute "shift" as the amount
	 * we need to shift left to position our field in the containing word.
	 * Refer to the comments in dt_cg_field_get(), above, for more info.
	 * We then compute fmask as the mask that truncates the value in the
	 * input register to width cte_bits, and cmask as the mask used to
	 * pass through the containing bits and zero the field bits.
	 */
#ifdef _BIG_ENDIAN
	shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
	    (m.ctm_offset % NBBY + e.cte_bits);
#else
	shift = m.ctm_offset % NBBY;
#endif
	fmask = (1ULL << e.cte_bits) - 1;
	cmask = ~(fmask << shift);

	instr = DIF_INSTR_LOAD(
	    dt_cg_load(dst, fp, m.ctm_type), dst->dn_reg, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r2, cmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r2, fmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, src->dn_reg, r2, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r3, shift);
	instr = DIF_INSTR_FMT(DIF_OP_SLL, r2, r3, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_FMT(DIF_OP_OR, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_regset_free(drp, r3);
	dt_regset_free(drp, r2);

	return (r1);
}

static void
dt_cg_store(dt_node_t *src, dt_irlist_t *dlp, dt_regset_t *drp, dt_node_t *dst)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	size_t size;
	int reg;

	/*
	 * If we're storing a bit-field, the size of our store is found by
	 * rounding dst's cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).
	 */
	if ((dst->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(dst->dn_ctfp, dst->dn_type, &e) != CTF_ERR)
		size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
	else
		size = dt_node_type_size(src);

	if (src->dn_flags & DT_NF_REF) {
		reg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, reg, size);
		instr = DIF_INSTR_COPYS(src->dn_reg, reg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, reg);
	} else {
		if (dst->dn_flags & DT_NF_BITFIELD)
			reg = dt_cg_field_set(src, dlp, drp, dst);
		else
			reg = src->dn_reg;

		switch (size) {
		case 1:
			instr = DIF_INSTR_STORE(DIF_OP_STB, reg, dst->dn_reg);
			break;
		case 2:
			instr = DIF_INSTR_STORE(DIF_OP_STH, reg, dst->dn_reg);
			break;
		case 4:
			instr = DIF_INSTR_STORE(DIF_OP_STW, reg, dst->dn_reg);
			break;
		case 8:
			instr = DIF_INSTR_STORE(DIF_OP_STX, reg, dst->dn_reg);
			break;
		default:
			xyerror(D_UNKNOWN, "internal error -- cg cannot store "
			    "size %lu when passed by value\n", (ulong_t)size);
		}

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		if (dst->dn_flags & DT_NF_BITFIELD)
			dt_regset_free(drp, reg);
	}
}

/*
 * Generate code for a typecast or for argument promotion from the type of the
 * actual to the type of the formal.  We need to generate code for casts when
 * a scalar type is being narrowed or changing signed-ness.  We first shift the
 * desired bits high (losing excess bits if narrowing) and then shift them down
 * using logical shift (unsigned result) or arithmetic shift (signed result).
 */
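/*
 * Worked example (an addition, not from the original source): narrowing a
 * 4-byte value such as 0xffffff80 to a signed 1-byte type shifts left by
 * 64 - 8 = 56 bits so only the low byte survives, then arithmetic-shifts
 * right by 56 bits, yielding the sign-extended value -128.
 */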
static void
dt_cg_typecast(const dt_node_t *src, const dt_node_t *dst,
    dt_irlist_t *dlp, dt_regset_t *drp)
{
	size_t srcsize = dt_node_type_size(src);
	size_t dstsize = dt_node_type_size(dst);

	dif_instr_t instr;
	int rg;

	if (!dt_node_is_scalar(dst))
		return; /* not a scalar */
	if (dstsize == srcsize &&
	    ((src->dn_flags ^ dst->dn_flags) & DT_NF_SIGNED) == 0)
		return; /* not narrowing or changing signed-ness */
	if (dstsize > srcsize && (src->dn_flags & DT_NF_SIGNED) == 0)
		return; /* nothing to do in this case */

	rg = dt_regset_alloc(drp);

	if (dstsize > srcsize) {
		int n = sizeof (uint64_t) * NBBY - srcsize * NBBY;
		int s = (dstsize - srcsize) * NBBY;

		dt_cg_setx(dlp, rg, n);

		instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		if ((dst->dn_flags & DT_NF_SIGNED) || n == s) {
			instr = DIF_INSTR_FMT(DIF_OP_SRA,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		} else {
			dt_cg_setx(dlp, rg, s);
			instr = DIF_INSTR_FMT(DIF_OP_SRA,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			dt_cg_setx(dlp, rg, n - s);
			instr = DIF_INSTR_FMT(DIF_OP_SRL,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		}
	} else if (dstsize != sizeof (uint64_t)) {
		int n = sizeof (uint64_t) * NBBY - dstsize * NBBY;

		dt_cg_setx(dlp, rg, n);

		instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_FMT((dst->dn_flags & DT_NF_SIGNED) ?
		    DIF_OP_SRA : DIF_OP_SRL, dst->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	}

	dt_regset_free(drp, rg);
}

/*
 * Generate code to push the specified argument list on to the tuple stack.
 * We use this routine for handling subroutine calls and associative arrays.
 * We must first generate code for all subexpressions before loading the stack
 * because any subexpression could itself require the use of the tuple stack.
 * This holds a number of registers equal to the number of arguments, but this
 * is not a huge problem because the number of arguments can't exceed the
 * number of tuple register stack elements anyway.  At most one extra register
 * is required (either by dt_cg_typecast() or for dtdt_size, below).  This
 * implies that a DIF implementation should offer a number of general purpose
 * registers at least one greater than the number of tuple registers.
 */
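/*
 * For instance (an illustrative note, not from the original source), a D
 * associative array reference such as a[pid, execname] generates code for
 * both index expressions first, then flushes the tuple stack and pushes each
 * value (pushtv) or reference plus size (pushtr) before the array access.
 */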
static void
dt_cg_arglist(dt_ident_t *idp, dt_node_t *args,
    dt_irlist_t *dlp, dt_regset_t *drp)
{
	const dt_idsig_t *isp = idp->di_data;
	dt_node_t *dnp;
	int i = 0;

	for (dnp = args; dnp != NULL; dnp = dnp->dn_list)
		dt_cg_node(dnp, dlp, drp);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));

	for (dnp = args; dnp != NULL; dnp = dnp->dn_list, i++) {
		dtrace_diftype_t t;
		dif_instr_t instr;
		uint_t op;
		int reg;

		dt_node_diftype(yypcb->pcb_hdl, dnp, &t);

		isp->dis_args[i].dn_reg = dnp->dn_reg; /* re-use register */
		dt_cg_typecast(dnp, &isp->dis_args[i], dlp, drp);
		isp->dis_args[i].dn_reg = -1;

		if (t.dtdt_flags & DIF_TF_BYREF) {
			op = DIF_OP_PUSHTR;
			if (t.dtdt_size != 0) {
				reg = dt_regset_alloc(drp);
				dt_cg_setx(dlp, reg, t.dtdt_size);
			} else {
				reg = DIF_REG_R0;
			}
		} else {
			op = DIF_OP_PUSHTV;
			reg = DIF_REG_R0;
		}

		instr = DIF_INSTR_PUSHTS(op, t.dtdt_kind, reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, dnp->dn_reg);

		if (reg != DIF_REG_R0)
			dt_regset_free(drp, reg);
	}

	if (i > yypcb->pcb_hdl->dt_conf.dtc_diftupregs)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOTUPREG);
}

static void
dt_cg_arithmetic_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	int is_ptr_op = (dnp->dn_op == DT_TOK_ADD || dnp->dn_op == DT_TOK_SUB ||
	    dnp->dn_op == DT_TOK_ADD_EQ || dnp->dn_op == DT_TOK_SUB_EQ);

	int lp_is_ptr = dt_node_is_pointer(dnp->dn_left);
	int rp_is_ptr = dt_node_is_pointer(dnp->dn_right);

	dif_instr_t instr;

	if (lp_is_ptr && rp_is_ptr) {
		assert(dnp->dn_op == DT_TOK_SUB);
		is_ptr_op = 0;
	}

	dt_cg_node(dnp->dn_left, dlp, drp);
	if (is_ptr_op && rp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_left->dn_reg);

	dt_cg_node(dnp->dn_right, dlp, drp);
	if (is_ptr_op && lp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_right->dn_reg);

	instr = DIF_INSTR_FMT(op, dnp->dn_left->dn_reg,
	    dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	if (lp_is_ptr && rp_is_ptr)
		dt_cg_ptrsize(dnp->dn_right,
		    dlp, drp, DIF_OP_UDIV, dnp->dn_reg);
}

static uint_t
dt_cg_stvar(const dt_ident_t *idp)
{
	static const uint_t aops[] = { DIF_OP_STGAA, DIF_OP_STTAA, DIF_OP_NOP };
	static const uint_t sops[] = { DIF_OP_STGS, DIF_OP_STTS, DIF_OP_STLS };

	uint_t i = (((idp->di_flags & DT_IDFLG_LOCAL) != 0) << 1) |
	    ((idp->di_flags & DT_IDFLG_TLS) != 0);

	return (idp->di_kind == DT_IDENT_ARRAY ? aops[i] : sops[i]);
}

static void
dt_cg_prearith_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int reg;

	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	reg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, reg, size);

	instr = DIF_INSTR_FMT(op, dnp->dn_reg, reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, reg);

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value in dnp->dn_reg (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp),
		    idp->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dt_regset_free(drp, dnp->dn_child->dn_reg);

		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}
}

static void
dt_cg_postarith_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int nreg;

	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	nreg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, nreg, size);
	instr = DIF_INSTR_FMT(op, dnp->dn_reg, nreg, nreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value from 'nreg' (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp), idp->di_id, nreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
		int oreg = dnp->dn_reg;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		dnp->dn_reg = nreg;
		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dnp->dn_reg = oreg;

		dt_regset_free(drp, dnp->dn_child->dn_reg);
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}

	dt_regset_free(drp, nreg);
}

/*
 * Determine if we should perform signed or unsigned comparison for an OP2.
 * If both operands are of arithmetic type, perform the usual arithmetic
 * conversions to determine the common real type for comparison [ISOC 6.5.8.3].
 */
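/*
 * For example (an addition, not from the original source), comparing an int
 * against a uint_t promotes both operands to unsigned int under the usual
 * arithmetic conversions, so (int)-1 < (uint_t)1 compares unsigned and is
 * false; only when the common real type is signed do we emit BL/BLE/BG/BGE.
 */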
static int
dt_cg_compare_signed(dt_node_t *dnp)
{
	dt_node_t dn;

	if (dt_node_is_string(dnp->dn_left) ||
	    dt_node_is_string(dnp->dn_right))
		return (1); /* strings always compare signed */
	else if (!dt_node_is_arith(dnp->dn_left) ||
	    !dt_node_is_arith(dnp->dn_right))
		return (0); /* non-arithmetic types always compare unsigned */

	bzero(&dn, sizeof (dn));
	dt_node_promote(dnp->dn_left, dnp->dn_right, &dn);
	return (dn.dn_flags & DT_NF_SIGNED);
}

static void
dt_cg_compare_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	uint_t lbl_true = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;
	uint_t opc;

	dt_cg_node(dnp->dn_left, dlp, drp);
	dt_cg_node(dnp->dn_right, dlp, drp);

	if (dt_node_is_string(dnp->dn_left) || dt_node_is_string(dnp->dn_right))
		opc = DIF_OP_SCMP;
	else
		opc = DIF_OP_CMP;

	instr = DIF_INSTR_CMP(opc, dnp->dn_left->dn_reg, dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	instr = DIF_INSTR_BRANCH(op, lbl_true);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

/*
 * Code generation for the ternary op requires some trickery with the assembler
 * in order to conserve registers.  We generate code for dn_expr and dn_left
 * and free their registers so they do not have to be consumed across codegen
 * for dn_right.  We insert a dummy MOV at the end of dn_left into the
 * destination register, which is not yet known because we haven't done
 * dn_right yet, and save the pointer to this instruction node.  We then
 * generate code for dn_right and use its register as our output.  Finally,
 * we reach back and patch the instruction for dn_left to move its output
 * into this register.
 */
static void
dt_cg_ternary_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_false = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;
	dt_irnode_t *dip;

	dt_cg_node(dnp->dn_expr, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_expr->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_expr->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, DIF_REG_R0);
	dip = dt_cg_node_alloc(DT_LBL_NONE, instr); /* save dip for below */
	dt_irlist_append(dlp, dip);
	dt_regset_free(drp, dnp->dn_left->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, DIF_INSTR_NOP));
	dt_cg_node(dnp->dn_right, dlp, drp);
	dnp->dn_reg = dnp->dn_right->dn_reg;

	/*
	 * Now that dn_reg is assigned, reach back and patch the correct MOV
	 * instruction into the tail of dn_left.  We know dn_reg was unused
	 * at that point because otherwise dn_right couldn't have allocated it.
	 */
	dip->di_instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

static void
dt_cg_logical_and(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_false = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_left->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_node(dnp->dn_right, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dnp->dn_reg = dnp->dn_right->dn_reg;

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, dnp->dn_reg, 1);

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

static void
dt_cg_logical_xor(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_next = dt_irlist_label(dlp);
	uint_t lbl_tail = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_next);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_cg_setx(dlp, dnp->dn_left->dn_reg, 1);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_next, DIF_INSTR_NOP));
	dt_cg_node(dnp->dn_right, dlp, drp);

	instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_tail);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_cg_setx(dlp, dnp->dn_right->dn_reg, 1);

	instr = DIF_INSTR_FMT(DIF_OP_XOR, dnp->dn_left->dn_reg,
	    dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_tail, instr));

	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;
}

static void
dt_cg_logical_or(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_true = dt_irlist_label(dlp);
	uint_t lbl_false = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_left->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BNE, lbl_true);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_node(dnp->dn_right, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dnp->dn_reg = dnp->dn_right->dn_reg;

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

static void
dt_cg_logical_neg(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_zero = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	instr = DIF_INSTR_TST(dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_zero);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_xsetx(dlp, NULL, lbl_zero, dnp->dn_reg, 1);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

static void
dt_cg_asgn_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dif_instr_t instr;
	dt_ident_t *idp;

	/*
	 * If we are performing a structure assignment of a translated type,
	 * we must instantiate all members and create a snapshot of the object
	 * in scratch space.  We allocs a chunk of memory, generate code for
	 * each member, and then set dnp->dn_reg to the scratch object address.
	 */
	if ((idp = dt_node_resolve(dnp->dn_right, DT_IDENT_XLSOU)) != NULL) {
		ctf_membinfo_t ctm;
		dt_xlator_t *dxp = idp->di_data;
		dt_node_t *mnp, dn, mn;
		int r1, r2;

		/*
		 * Create two fake dt_node_t's representing operator "." and a
		 * right-hand identifier child node.  These will be repeatedly
		 * modified according to each instantiated member so that we
		 * can pass them to dt_cg_store() and effect a member store.
		 */
		bzero(&dn, sizeof (dt_node_t));
		dn.dn_kind = DT_NODE_OP2;
		dn.dn_op = DT_TOK_DOT;
		dn.dn_left = dnp;
		dn.dn_right = &mn;

		bzero(&mn, sizeof (dt_node_t));
		mn.dn_kind = DT_NODE_IDENT;
		mn.dn_op = DT_TOK_IDENT;

		/*
		 * Allocate a register for our scratch data pointer.  First we
		 * set it to the size of our data structure, and then replace
		 * it with the result of an allocs of the specified size.
		 */
		r1 = dt_regset_alloc(drp);
		dt_cg_setx(dlp, r1,
		    ctf_type_size(dxp->dx_dst_ctfp, dxp->dx_dst_base));

		instr = DIF_INSTR_ALLOCS(r1, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		/*
		 * When dt_cg_asgn_op() is called, we have already generated
		 * code for dnp->dn_right, which is the translator input.  We
		 * now associate this register with the translator's input
		 * identifier so it can be referenced during our member loop.
		 */
		dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = dnp->dn_right->dn_reg;

		for (mnp = dxp->dx_members; mnp != NULL; mnp = mnp->dn_list) {
			/*
			 * Generate code for the translator member expression,
			 * and then cast the result to the member type.
			 */
			dt_cg_node(mnp->dn_membexpr, dlp, drp);
			mnp->dn_reg = mnp->dn_membexpr->dn_reg;
			dt_cg_typecast(mnp->dn_membexpr, mnp, dlp, drp);

			/*
			 * Ask CTF for the offset of the member so we can store
			 * to the appropriate offset.  This call has already
			 * been done once by the parser, so it should succeed.
			 */
			if (ctf_member_info(dxp->dx_dst_ctfp, dxp->dx_dst_base,
			    mnp->dn_membname, &ctm) == CTF_ERR) {
				yypcb->pcb_hdl->dt_ctferr =
				    ctf_errno(dxp->dx_dst_ctfp);
				longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
			}

			/*
			 * If the destination member is at offset 0, store the
			 * result directly to r1 (the scratch buffer address).
			 * Otherwise allocate another temporary for the offset
			 * and add r1 to it before storing the result.
			 */
			if (ctm.ctm_offset != 0) {
				r2 = dt_regset_alloc(drp);

				/*
				 * Add the member offset rounded down to the
				 * nearest byte.  If the offset was not aligned
				 * on a byte boundary, this member is a bit-
				 * field and dt_cg_store() will handle masking.
				 */
				dt_cg_setx(dlp, r2, ctm.ctm_offset / NBBY);
				instr = DIF_INSTR_FMT(DIF_OP_ADD, r1, r2, r2);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));

				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r2;

				dt_cg_store(mnp, dlp, drp, &dn);
				dt_regset_free(drp, r2);

			} else {
				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r1;

				dt_cg_store(mnp, dlp, drp, &dn);
			}

			dt_regset_free(drp, mnp->dn_reg);
		}

		dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = 0;

		if (dnp->dn_right->dn_reg != -1)
			dt_regset_free(drp, dnp->dn_right->dn_reg);

		assert(dnp->dn_reg == dnp->dn_right->dn_reg);
		dnp->dn_reg = r1;
	}

	/*
	 * If we are storing to a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we assume dnp->dn_reg already has the new value.
	 */
	if (dnp->dn_left->dn_kind == DT_NODE_VAR) {
		idp = dt_ident_resolve(dnp->dn_left->dn_ident);

		if (idp->di_kind == DT_IDENT_ARRAY)
			dt_cg_arglist(idp, dnp->dn_left->dn_args, dlp, drp);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp),
		    idp->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_left->dn_flags & DT_NF_REF;

		assert(dnp->dn_left->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_left->dn_flags & DT_NF_LVALUE);

		dnp->dn_left->dn_flags |= DT_NF_REF; /* force pass-by-ref */

		dt_cg_node(dnp->dn_left, dlp, drp);
		dt_cg_store(dnp, dlp, drp, dnp->dn_left);
		dt_regset_free(drp, dnp->dn_left->dn_reg);

		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}
}

static void
dt_cg_assoc_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dif_instr_t instr;
	uint_t op;

	assert(dnp->dn_kind == DT_NODE_VAR);
	assert(!(dnp->dn_ident->di_flags & DT_IDFLG_LOCAL));
	assert(dnp->dn_args != NULL);

	dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);

	dnp->dn_reg = dt_regset_alloc(drp);

	if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
		op = DIF_OP_LDTAA;
	else
		op = DIF_OP_LDGAA;

	dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
	instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If the associative array is a pass-by-reference type, then we are
	 * loading its value as a pointer to either load or store through it.
	 * The array element in question may not have been faulted in yet, in
	 * which case DIF_OP_LD*AA will return zero.  We append an epilogue
	 * of instructions similar to the following:
	 *
	 *	  ld?aa	 id, %r1	! base ld?aa instruction above
	 *	  tst	 %r1		! start of epilogue
	 *	  +--- bne label
	 *	  |	setx	size, %r1
	 *	  |	allocs	%r1, %r1
	 *	  |	st?aa	id, %r1
	 *	  |	ld?aa	id, %r1
	 *	  v
	 *	  label: < rest of code >
	 *
	 * The idea is that we allocs a zero-filled chunk of scratch space and
	 * do a DIF_OP_ST*AA to fault in and initialize the array element, and
	 * then reload it to get the faulted-in address of the new variable
	 * storage.  This isn't cheap, but pass-by-ref associative array values
	 * are (thus far) uncommon and the allocs cost only occurs once.  If
	 * this path becomes important to DTrace users, we can improve things
	 * by adding a new DIF opcode to fault in associative array elements.
	 */
	if (dnp->dn_flags & DT_NF_REF) {
		uint_t stvop = op == DIF_OP_LDTAA ? DIF_OP_STTAA : DIF_OP_STGAA;
		uint_t label = dt_irlist_label(dlp);

		instr = DIF_INSTR_TST(dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_BRANCH(DIF_OP_BNE, label);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, dnp->dn_reg, dt_node_type_size(dnp));
		instr = DIF_INSTR_ALLOCS(dnp->dn_reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dnp->dn_ident->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(stvop, dnp->dn_ident->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_irlist_append(dlp, dt_cg_node_alloc(label, DIF_INSTR_NOP));
	}
}

static void
dt_cg_array_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dt_probe_t *prp = yypcb->pcb_probe;
	uintmax_t saved = dnp->dn_args->dn_value;
	dt_ident_t *idp = dnp->dn_ident;

	dif_instr_t instr;
	uint_t op;
	ssize_t size;
	int reg, n;

	assert(dnp->dn_kind == DT_NODE_VAR);
	assert(!(idp->di_flags & DT_IDFLG_LOCAL));

	assert(dnp->dn_args->dn_kind == DT_NODE_INT);
	assert(dnp->dn_args->dn_list == NULL);

	/*
	 * If this is a reference in the args[] array, temporarily modify the
	 * array index according to the static argument mapping (if any),
	 * unless the argument reference is provided by a dynamic translator.
	 * If we're using a dynamic translator for args[], then just set dn_reg
	 * to an invalid reg and return: DIF_OP_XLARG will fetch the arg later.
	 */
	if (idp->di_id == DIF_VAR_ARGS) {
		if ((idp->di_kind == DT_IDENT_XLPTR ||
		    idp->di_kind == DT_IDENT_XLSOU) &&
		    dt_xlator_dynamic(idp->di_data)) {
			dnp->dn_reg = -1;
			return;
		}
		dnp->dn_args->dn_value = prp->pr_mapping[saved];
	}

	dt_cg_node(dnp->dn_args, dlp, drp);
	dnp->dn_args->dn_value = saved;

	dnp->dn_reg = dnp->dn_args->dn_reg;

	if (idp->di_flags & DT_IDFLG_TLS)
		op = DIF_OP_LDTA;
	else
		op = DIF_OP_LDGA;

	idp->di_flags |= DT_IDFLG_DIFR;

	instr = DIF_INSTR_LDA(op, idp->di_id,
	    dnp->dn_args->dn_reg, dnp->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If this is a reference to the args[] array, we need to take the
	 * additional step of explicitly eliminating any bits larger than the
	 * type size: the DIF interpreter in the kernel will always give us
	 * the raw (64-bit) argument value, and any bits larger than the type
	 * size may be junk.  As a practical matter, this arises only on 64-bit
	 * architectures and only when the argument index is larger than the
	 * number of arguments passed directly to DTrace: if an 8-, 16- or
	 * 32-bit argument must be retrieved from the stack, it is possible
	 * (and in some cases, likely) that the upper bits will be garbage.
	 */
	if (idp->di_id != DIF_VAR_ARGS || !dt_node_is_scalar(dnp))
		return;

	if ((size = dt_node_type_size(dnp)) == sizeof (uint64_t))
		return;

	reg = dt_regset_alloc(drp);
	assert(size < sizeof (uint64_t));
	n = sizeof (uint64_t) * NBBY - size * NBBY;

	dt_cg_setx(dlp, reg, n);

	instr = DIF_INSTR_FMT(DIF_OP_SLL, dnp->dn_reg, reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_FMT((dnp->dn_flags & DT_NF_SIGNED) ?
	    DIF_OP_SRA : DIF_OP_SRL, dnp->dn_reg, reg, dnp->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, reg);
}

/*
 * Generate code for an inlined variable reference.  Inlines can be used to
 * define either scalar or associative array substitutions.  For scalars, we
 * simply generate code for the parse tree saved in the identifier's din_root,
 * and then cast the resulting expression to the inline's declaration type.
 * For arrays, we take the input parameter subtrees from dnp->dn_args and
 * temporarily store them in the din_root of each din_argv[i] identifier,
 * which are themselves inlines and were set up for us by the parser.  The
 * result is that any reference to the inlined parameter inside the top-level
 * din_root will turn into a recursive call to dt_cg_inline() for a scalar
 * inline whose din_root will refer to the subtree pointed to by the argument.
 */
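/*
 * For example (an illustrative note, not from the original source), given a
 * hypothetical "inline int pages = bytes / 4096;", a D reference to 'pages'
 * generates code for the saved 'bytes / 4096' parse tree and then casts the
 * result to the inline's declared type via dt_cg_typecast().
 */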
static void
dt_cg_inline(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dt_ident_t *idp = dnp->dn_ident;
	dt_idnode_t *inp = idp->di_iarg;

	dt_idnode_t *pinp;
	dt_node_t *pnp;
	int i;

	assert(idp->di_flags & DT_IDFLG_INLINE);
	assert(idp->di_ops == &dt_idops_inline);

	if (idp->di_kind == DT_IDENT_ARRAY) {
		for (i = 0, pnp = dnp->dn_args;
		    pnp != NULL; pnp = pnp->dn_list, i++) {
			if (inp->din_argv[i] != NULL) {
				pinp = inp->din_argv[i]->di_iarg;
				pinp->din_root = pnp;
			}
		}
	}

	dt_cg_node(inp->din_root, dlp, drp);
	dnp->dn_reg = inp->din_root->dn_reg;
	dt_cg_typecast(inp->din_root, dnp, dlp, drp);

	if (idp->di_kind == DT_IDENT_ARRAY) {
		for (i = 0; i < inp->din_argc; i++) {
			pinp = inp->din_argv[i]->di_iarg;
			pinp->din_root = NULL;
		}
	}
}

typedef struct dt_xlmemb {
	dt_ident_t *dtxl_idp;		/* translated ident */
	dt_irlist_t *dtxl_dlp;		/* instruction list */
	dt_regset_t *dtxl_drp;		/* register set */
	int dtxl_sreg;			/* location of the translation input */
	int dtxl_dreg;			/* location of our allocated buffer */
} dt_xlmemb_t;

static int
dt_cg_xlate_member(const char *name, ctf_id_t type, ulong_t off, void *arg)
{
	dt_xlmemb_t *dx = arg;
	dt_ident_t *idp = dx->dtxl_idp;
	dt_irlist_t *dlp = dx->dtxl_dlp;
	dt_regset_t *drp = dx->dtxl_drp;

	dt_node_t *mnp;
	dt_xlator_t *dxp;

	int reg, treg;
	dif_instr_t instr;
	size_t size;

	/* Generate code for the translation. */
	dxp = idp->di_data;
	mnp = dt_xlator_member(dxp, name);

	/* If there's no translator for the given member, skip it. */
	if (mnp == NULL)
		return (0);

	dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
	dxp->dx_ident->di_id = dx->dtxl_sreg;

	dt_cg_node(mnp->dn_membexpr, dlp, drp);

	dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
	dxp->dx_ident->di_id = 0;

	treg = mnp->dn_membexpr->dn_reg;

	/* Compute the offset into our buffer and store the result there. */
	reg = dt_regset_alloc(drp);

	dt_cg_setx(dlp, reg, off / NBBY);
	instr = DIF_INSTR_FMT(DIF_OP_ADD, dx->dtxl_dreg, reg, reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	size = ctf_type_size(mnp->dn_membexpr->dn_ctfp,
	    mnp->dn_membexpr->dn_type);
	if (dt_node_is_scalar(mnp->dn_membexpr)) {
		/*
		 * Copying scalars is simple.
		 */
		switch (size) {
		case 1:
			instr = DIF_INSTR_STORE(DIF_OP_STB, treg, reg);
			break;
		case 2:
			instr = DIF_INSTR_STORE(DIF_OP_STH, treg, reg);
			break;
		case 4:
			instr = DIF_INSTR_STORE(DIF_OP_STW, treg, reg);
			break;
		case 8:
			instr = DIF_INSTR_STORE(DIF_OP_STX, treg, reg);
			break;
		default:
			xyerror(D_UNKNOWN, "internal error -- unexpected "
			    "size: %lu\n", (ulong_t)size);
		}

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	} else if (dt_node_is_string(mnp->dn_membexpr)) {
		int szreg;

		/*
		 * Use the copys instruction for strings.
		 */
		szreg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, szreg, size);
		instr = DIF_INSTR_COPYS(treg, szreg, reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, szreg);
	} else {
		int szreg;

		/*
		 * If it's anything else then we'll just bcopy it.
		 */
		szreg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, szreg, size);
		dt_irlist_append(dlp,
		    dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, treg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, szreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_CALL(DIF_SUBR_BCOPY, szreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, szreg);
	}

	dt_regset_free(drp, reg);
	dt_regset_free(drp, treg);

	return (0);
}

/*
 * If we're expanding a translated type, we create an appropriately sized
 * buffer with alloca() and then translate each member into it.
 */
static int
dt_cg_xlate_expand(dt_node_t *dnp, dt_ident_t *idp, dt_irlist_t *dlp,
    dt_regset_t *drp)
{
	dt_xlmemb_t dlm;
	dif_instr_t instr;
	size_t size;
	int dreg;

	dreg = dt_regset_alloc(drp);
	size = ctf_type_size(dnp->dn_ident->di_ctfp, dnp->dn_ident->di_type);

	/* Call alloca() to create the buffer. */
	dt_cg_setx(dlp, dreg, size);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));

	instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF, DIF_REG_R0, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_CALL(DIF_SUBR_ALLOCA, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* Generate the translation for each member. */
	dlm.dtxl_idp = idp;
	dlm.dtxl_dlp = dlp;
	dlm.dtxl_drp = drp;
	dlm.dtxl_sreg = dnp->dn_reg;
	dlm.dtxl_dreg = dreg;
	(void) ctf_member_iter(dnp->dn_ident->di_ctfp,
	    dnp->dn_ident->di_type, dt_cg_xlate_member,
	    &dlm);

	return (dreg);
}

static void
dt_cg_node(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	ctf_file_t *octfp;
	ctf_membinfo_t m;
	ctf_id_t type;

	dif_instr_t instr;
	dt_ident_t *idp;
	ssize_t stroff;
	uint_t op;
	int reg;

	switch (dnp->dn_op) {
	case DT_TOK_COMMA:
		dt_cg_node(dnp->dn_left, dlp, drp);
		dt_regset_free(drp, dnp->dn_left->dn_reg);
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		break;

	case DT_TOK_ASGN:
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_ADD_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_SUB_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_MUL_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_DIV_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_MOD_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_AND_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_XOR_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_OR_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_LSH_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_RSH_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_QUESTION:
		dt_cg_ternary_op(dnp, dlp, drp);
		break;

	case DT_TOK_LOR:
		dt_cg_logical_or(dnp, dlp, drp);
		break;

	case DT_TOK_LXOR:
		dt_cg_logical_xor(dnp, dlp, drp);
		break;

	case DT_TOK_LAND:
		dt_cg_logical_and(dnp, dlp, drp);
		break;

	case DT_TOK_BOR:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
		break;

	case DT_TOK_XOR:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
		break;

	case DT_TOK_BAND:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
		break;

	case DT_TOK_EQU:
		dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BE);
		break;

	case DT_TOK_NEQ:
		dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BNE);
		break;

	case DT_TOK_LT:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BL : DIF_OP_BLU);
		break;

	case DT_TOK_LE:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BLE : DIF_OP_BLEU);
		break;

	case DT_TOK_GT:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BG : DIF_OP_BGU);
		break;

	case DT_TOK_GE:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BGE : DIF_OP_BGEU);
		break;

	case DT_TOK_LSH:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
		break;

	case DT_TOK_RSH:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
		break;

	case DT_TOK_ADD:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_SUB:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_MUL:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
		break;

	case DT_TOK_DIV:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
		break;

	case DT_TOK_MOD:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
		break;

	case DT_TOK_LNEG:
		dt_cg_logical_neg(dnp, dlp, drp);
		break;

	case DT_TOK_BNEG:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		instr = DIF_INSTR_NOT(dnp->dn_reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_PREINC:
		dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_POSTINC:
		dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_PREDEC:
		dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_POSTDEC:
		dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_IPOS:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		break;

	case DT_TOK_INEG:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		instr = DIF_INSTR_FMT(DIF_OP_SUB, DIF_REG_R0,
		    dnp->dn_reg, dnp->dn_reg);

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_DEREF:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		if (dt_node_is_dynamic(dnp->dn_child)) {
			idp = dt_node_resolve(dnp->dn_child, DT_IDENT_XLPTR);
			assert(idp != NULL);
			reg = dt_cg_xlate_expand(dnp, idp, dlp, drp);

			dt_regset_free(drp, dnp->dn_child->dn_reg);
			dnp->dn_reg = reg;

		} else if (!(dnp->dn_flags & DT_NF_REF)) {
			uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;

			/*
			 * Save and restore DT_NF_USERLAND across dt_cg_load():
			 * we need the sign bit from dnp and the user bit from
			 * dnp->dn_child in order to get the proper opcode.
			 */
			dnp->dn_flags |=
			    (dnp->dn_child->dn_flags & DT_NF_USERLAND);

			instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
			    dnp->dn_type), dnp->dn_reg, dnp->dn_reg);

			dnp->dn_flags &= ~DT_NF_USERLAND;
			dnp->dn_flags |= ubit;

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		}
		break;

	case DT_TOK_ADDROF: {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		dnp->dn_child->dn_flags &= ~DT_NF_REF;
		dnp->dn_child->dn_flags |= rbit;
		break;
	}

	case DT_TOK_SIZEOF: {
		size_t size = dt_node_sizeof(dnp->dn_child);
		dnp->dn_reg = dt_regset_alloc(drp);
		assert(size != 0);
		dt_cg_setx(dlp, dnp->dn_reg, size);
		break;
	}

	case DT_TOK_STRINGOF:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		break;

	case DT_TOK_XLATE:
		/*
		 * An xlate operator appears in either an XLATOR, indicating a
		 * reference to a dynamic translator, or an OP2, indicating
		 * use of the xlate operator in the user's program.  For the
		 * dynamic case, generate an xlate opcode with a reference to
		 * the corresponding member, pre-computed for us in dn_members.
		 */
		if (dnp->dn_kind == DT_NODE_XLATOR) {
			dt_xlator_t *dxp = dnp->dn_xlator;

			assert(dxp->dx_ident->di_flags & DT_IDFLG_CGREG);
			assert(dxp->dx_ident->di_id != 0);

			dnp->dn_reg = dt_regset_alloc(drp);

			if (dxp->dx_arg == -1) {
				instr = DIF_INSTR_MOV(
				    dxp->dx_ident->di_id, dnp->dn_reg);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));
				op = DIF_OP_XLATE;
			} else
				op = DIF_OP_XLARG;

			instr = DIF_INSTR_XLATE(op, 0, dnp->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			dlp->dl_last->di_extern = dnp->dn_xmember;
			break;
		}

		assert(dnp->dn_kind == DT_NODE_OP2);
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		break;

	case DT_TOK_LPAR:
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		dt_cg_typecast(dnp->dn_right, dnp, dlp, drp);
		break;

	case DT_TOK_PTR:
	case DT_TOK_DOT:
		assert(dnp->dn_right->dn_kind == DT_NODE_IDENT);
		dt_cg_node(dnp->dn_left, dlp, drp);

		/*
		 * If the left-hand side of PTR or DOT is a dynamic variable,
		 * we expect it to be the output of a D translator.  In this
		 * case, we look up the parse tree corresponding to the member
		 * that is being accessed and run the code generator over it.
		 * We then cast the result as if by the assignment operator.
		 */
		if ((idp = dt_node_resolve(
		    dnp->dn_left, DT_IDENT_XLSOU)) != NULL ||
		    (idp = dt_node_resolve(
		    dnp->dn_left, DT_IDENT_XLPTR)) != NULL) {
			dt_xlator_t *dxp;
			dt_node_t *mnp;

			dxp = idp->di_data;
			mnp = dt_xlator_member(dxp, dnp->dn_right->dn_string);
			assert(mnp != NULL);

			dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
			dxp->dx_ident->di_id = dnp->dn_left->dn_reg;

			dt_cg_node(mnp->dn_membexpr, dlp, drp);
			dnp->dn_reg = mnp->dn_membexpr->dn_reg;
			dt_cg_typecast(mnp->dn_membexpr, dnp, dlp, drp);

			dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
			dxp->dx_ident->di_id = 0;

			if (dnp->dn_left->dn_reg != -1)
				dt_regset_free(drp, dnp->dn_left->dn_reg);
			break;
		}

		ctfp = dnp->dn_left->dn_ctfp;
		type = ctf_type_resolve(ctfp, dnp->dn_left->dn_type);

		if (dnp->dn_op == DT_TOK_PTR) {
			type = ctf_type_reference(ctfp, type);
			type = ctf_type_resolve(ctfp, type);
		}

		if ((ctfp = dt_cg_membinfo(octfp = ctfp, type,
		    dnp->dn_right->dn_string, &m)) == NULL) {
			yypcb->pcb_hdl->dt_ctferr = ctf_errno(octfp);
			longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
		}

		if (m.ctm_offset != 0) {
			reg = dt_regset_alloc(drp);

			/*
			 * If the offset is not aligned on a byte boundary, it
			 * is a bit-field member and we will extract the value
			 * bits below after we generate the appropriate load.
			 */
			dt_cg_setx(dlp, reg, m.ctm_offset / NBBY);

			instr = DIF_INSTR_FMT(DIF_OP_ADD,
			    dnp->dn_left->dn_reg, reg, dnp->dn_left->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			dt_regset_free(drp, reg);
		}

		if (!(dnp->dn_flags & DT_NF_REF)) {
			uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;

			/*
			 * Save and restore DT_NF_USERLAND across dt_cg_load():
			 * we need the sign bit from dnp and the user bit from
			 * dnp->dn_left in order to get the proper opcode.
			 */
			dnp->dn_flags |=
			    (dnp->dn_left->dn_flags & DT_NF_USERLAND);

			instr = DIF_INSTR_LOAD(dt_cg_load(dnp,
			    ctfp, m.ctm_type), dnp->dn_left->dn_reg,
			    dnp->dn_left->dn_reg);

			dnp->dn_flags &= ~DT_NF_USERLAND;
			dnp->dn_flags |= ubit;

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			if (dnp->dn_flags & DT_NF_BITFIELD)
				dt_cg_field_get(dnp, dlp, drp, ctfp, &m);
		}

		dnp->dn_reg = dnp->dn_left->dn_reg;
		break;

	case DT_TOK_STRING:
		dnp->dn_reg = dt_regset_alloc(drp);

		assert(dnp->dn_kind == DT_NODE_STRING);
		stroff = dt_strtab_insert(yypcb->pcb_strtab, dnp->dn_string);

		if (stroff == -1L)
			longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
		if (stroff > DIF_STROFF_MAX)
			longjmp(yypcb->pcb_jmpbuf, EDT_STR2BIG);

		instr = DIF_INSTR_SETS((ulong_t)stroff, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_IDENT:
		/*
		 * If the specified identifier is a variable on which we have
		 * set the code generator register flag, then this variable
		 * has already had code generated for it and saved in di_id.
		 * Allocate a new register and copy the existing value to it.
		 */
		if (dnp->dn_kind == DT_NODE_VAR &&
		    (dnp->dn_ident->di_flags & DT_IDFLG_CGREG)) {
			dnp->dn_reg = dt_regset_alloc(drp);
			instr = DIF_INSTR_MOV(dnp->dn_ident->di_id,
			    dnp->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			break;
		}

		/*
		 * Identifiers can represent function calls, variable refs, or
		 * symbols.  First we check for inlined variables, and handle
		 * them by generating code for the inline parse tree.
		 */
		if (dnp->dn_kind == DT_NODE_VAR &&
		    (dnp->dn_ident->di_flags & DT_IDFLG_INLINE)) {
			dt_cg_inline(dnp, dlp, drp);
			break;
		}

		switch (dnp->dn_kind) {
		case DT_NODE_FUNC:
			if ((idp = dnp->dn_ident)->di_kind != DT_IDENT_FUNC) {
				dnerror(dnp, D_CG_EXPR, "%s %s( ) may not be "
				    "called from a D expression (D program "
				    "context required)\n",
				    dt_idkind_name(idp->di_kind), idp->di_name);
			}

			dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);

			dnp->dn_reg = dt_regset_alloc(drp);
			instr = DIF_INSTR_CALL(dnp->dn_ident->di_id,
			    dnp->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			break;

		case DT_NODE_VAR:
			if (dnp->dn_ident->di_kind == DT_IDENT_XLSOU ||
			    dnp->dn_ident->di_kind == DT_IDENT_XLPTR) {
				/*
				 * This can only happen if we have translated
				 * args[].  See dt_idcook_args() for details.
				 */
				assert(dnp->dn_ident->di_id == DIF_VAR_ARGS);
				dt_cg_array_op(dnp, dlp, drp);
				break;
			}

			if (dnp->dn_ident->di_kind == DT_IDENT_ARRAY) {
				if (dnp->dn_ident->di_id > DIF_VAR_ARRAY_MAX)
					dt_cg_assoc_op(dnp, dlp, drp);
				else
					dt_cg_array_op(dnp, dlp, drp);
				break;
			}

			dnp->dn_reg = dt_regset_alloc(drp);

			if (dnp->dn_ident->di_flags & DT_IDFLG_LOCAL)
				op = DIF_OP_LDLS;
			else if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
				op = DIF_OP_LDTS;
			else
				op = DIF_OP_LDGS;

			dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;

			instr = DIF_INSTR_LDV(op,
			    dnp->dn_ident->di_id, dnp->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			break;

		case DT_NODE_SYM: {
			dtrace_hdl_t *dtp = yypcb->pcb_hdl;
			dtrace_syminfo_t *sip = dnp->dn_ident->di_data;
			GElf_Sym sym;

			if (dtrace_lookup_by_name(dtp,
			    sip->dts_object, sip->dts_name, &sym, NULL) == -1) {
				xyerror(D_UNKNOWN, "cg failed for symbol %s`%s:"
				    " %s\n", sip->dts_object, sip->dts_name,
				    dtrace_errmsg(dtp, dtrace_errno(dtp)));
			}

			dnp->dn_reg = dt_regset_alloc(drp);
			dt_cg_xsetx(dlp, dnp->dn_ident,
			    DT_LBL_NONE, dnp->dn_reg, sym.st_value);

			if (!(dnp->dn_flags & DT_NF_REF)) {
				instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
				    dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));
			}
			break;
		}

		default:
			xyerror(D_UNKNOWN, "internal error -- node type %u is "
			    "not valid for an identifier\n", dnp->dn_kind);
		}
		break;

	case DT_TOK_INT:
		dnp->dn_reg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, dnp->dn_reg, dnp->dn_value);
		break;

	default:
		xyerror(D_UNKNOWN, "internal error -- token type %u is not a "
		    "valid D compilation token\n", dnp->dn_op);
	}
}

void
dt_cg(dt_pcb_t *pcb, dt_node_t *dnp)
{
	dif_instr_t instr;
	dt_xlator_t *dxp = NULL;
	dt_ident_t *idp;

	if (pcb->pcb_regs == NULL && (pcb->pcb_regs =
	    dt_regset_create(pcb->pcb_hdl->dt_conf.dtc_difintregs)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	dt_regset_reset(pcb->pcb_regs);
	(void) dt_regset_alloc(pcb->pcb_regs); /* allocate %r0 */

	if (pcb->pcb_inttab != NULL)
		dt_inttab_destroy(pcb->pcb_inttab);

	if ((pcb->pcb_inttab = dt_inttab_create(yypcb->pcb_hdl)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	if (pcb->pcb_strtab != NULL)
		dt_strtab_destroy(pcb->pcb_strtab);

	if ((pcb->pcb_strtab = dt_strtab_create(BUFSIZ)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	dt_irlist_destroy(&pcb->pcb_ir);
	dt_irlist_create(&pcb->pcb_ir);

	assert(pcb->pcb_dret == NULL);
	pcb->pcb_dret = dnp;

	if (dt_node_resolve(dnp, DT_IDENT_XLPTR) != NULL) {
		dnerror(dnp, D_CG_DYN, "expression cannot evaluate to result "
		    "of a translated pointer\n");
	}

	/*
	 * If we're generating code for a translator body, assign the input
	 * parameter to the first available register (i.e. caller passes %r1).
	 */
	if (dnp->dn_kind == DT_NODE_MEMBER) {
		dxp = dnp->dn_membxlator;
		dnp = dnp->dn_membexpr;

		dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = dt_regset_alloc(pcb->pcb_regs);
	}

	dt_cg_node(dnp, &pcb->pcb_ir, pcb->pcb_regs);

	if ((idp = dt_node_resolve(dnp, DT_IDENT_XLSOU)) != NULL) {
		int reg = dt_cg_xlate_expand(dnp, idp,
		    &pcb->pcb_ir, pcb->pcb_regs);
		dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
		dnp->dn_reg = reg;
	}

	instr = DIF_INSTR_RET(dnp->dn_reg);
	dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
	dt_irlist_append(&pcb->pcb_ir, dt_cg_node_alloc(DT_LBL_NONE, instr));

	if (dnp->dn_kind == DT_NODE_MEMBER) {
		dt_regset_free(pcb->pcb_regs, dxp->dx_ident->di_id);
		dxp->dx_ident->di_id = 0;
		dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
	}

	dt_regset_free(pcb->pcb_regs, 0);
	dt_regset_assert_free(pcb->pcb_regs);
}