/*-------------------------------------------------------------------------
 *
 * llvmjit_deform.c
 *	  Generate code for deforming a heap tuple.
 *
 * This gains performance benefits over unJITed deforming from compile-time
 * knowledge of the tuple descriptor. Fixed column widths, NOT NULLness, etc
 * can be taken advantage of.
 *
 * Portions Copyright (c) 1996-2024, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * IDENTIFICATION
 *	  src/backend/jit/llvm/llvmjit_deform.c
 *
 *-------------------------------------------------------------------------
 */
#include "postgres.h"

#include <llvm-c/Core.h>

#include "access/htup_details.h"
#include "access/tupdesc_details.h"
#include "executor/tuptable.h"
#include "jit/llvmjit.h"
#include "jit/llvmjit_emit.h"
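
/*
 * Roughly speaking, the function emitted below is a specialization of the
 * generic deforming loop for one particular tuple descriptor.  As a sketch,
 * its C-level signature is
 *
 *		void deform(TupleTableSlot *slot);
 *
 * and it fills slot->tts_values / slot->tts_isnull for the first natts
 * columns, then updates tts_nvalid, the slot's stored tuple offset and
 * tts_flags.
 */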
/*
 * Create a function that deforms a tuple of type desc up to natts columns.
 */
LLVMValueRef
slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
                    const TupleTableSlotOps *ops, int natts)
{
    char       *funcname;

    LLVMModuleRef mod;
    LLVMContextRef lc;
    LLVMBuilderRef b;

    LLVMTypeRef deform_sig;
    LLVMValueRef v_deform_fn;

    LLVMBasicBlockRef b_entry;
    LLVMBasicBlockRef b_adjust_unavail_cols;
    LLVMBasicBlockRef b_find_start;

    LLVMBasicBlockRef b_out;
    LLVMBasicBlockRef b_dead;
    LLVMBasicBlockRef *attcheckattnoblocks;
    LLVMBasicBlockRef *attstartblocks;
    LLVMBasicBlockRef *attisnullblocks;
    LLVMBasicBlockRef *attcheckalignblocks;
    LLVMBasicBlockRef *attalignblocks;
    LLVMBasicBlockRef *attstoreblocks;
    LLVMValueRef v_offp;

    LLVMValueRef v_tupdata_base;
    LLVMValueRef v_tts_values;
    LLVMValueRef v_tts_nulls;
    LLVMValueRef v_slotoffp;
    LLVMValueRef v_flagsp;
    LLVMValueRef v_nvalidp;
    LLVMValueRef v_nvalid;
    LLVMValueRef v_maxatt;

    LLVMValueRef v_slot;

    LLVMValueRef v_tupleheaderp;
    LLVMValueRef v_tuplep;
    LLVMValueRef v_infomask1;
    LLVMValueRef v_infomask2;
    LLVMValueRef v_bits;
    LLVMValueRef v_hoff;

    LLVMValueRef v_hasnulls;
    /* last column (0 indexed) guaranteed to exist */
    int         guaranteed_column_number = -1;

    /* current known alignment */
    int         known_alignment = 0;

    /* if true, known_alignment describes definite offset of column */
    bool        attguaranteedalign = true;

    int         attnum;
    /* virtual tuples never need deforming, so don't generate code */
    if (ops == &TTSOpsVirtual)
        return NULL;

    /* decline to JIT for slot types we don't know to handle */
    if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
        ops != &TTSOpsMinimalTuple)
        return NULL;
    mod = llvm_mutable_module(context);
    lc = LLVMGetModuleContext(mod);

    funcname = llvm_expand_funcname(context, "deform");
    /*
     * Check which columns have to exist, so we don't have to check the row's
     * natts unnecessarily.
     */
    for (attnum = 0; attnum < desc->natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);

        /*
         * If the column is declared NOT NULL then it must be present in
         * every tuple, unless there's a "missing" entry that could provide a
         * non-NULL value for it. That in turn guarantees that the NULL
         * bitmap - if there are any NULLable columns - is at least long
         * enough to cover columns up to attnum.
         *
         * Be paranoid and also check !attisdropped, even though the
         * combination of attisdropped && attnotnull shouldn't exist.
         */
        if (att->attnotnull &&
            !att->atthasmissing &&
            !att->attisdropped)
            guaranteed_column_number = attnum;
    }
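
    /*
     * For example, for (a int4 NOT NULL, b text, c int8 NOT NULL) with no
     * missing values, guaranteed_column_number ends up as 2: every stored
     * tuple must physically contain at least three attributes, so the natts
     * check can be skipped while deforming columns 0..2.
     */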
    /* Create the signature and function */
    {
        LLVMTypeRef param_types[1];

        param_types[0] = l_ptr(StructTupleTableSlot);

        deform_sig = LLVMFunctionType(LLVMVoidTypeInContext(lc),
                                      param_types, lengthof(param_types), 0);
    }
    v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
    LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
    LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
    llvm_copy_attributes(AttributeTemplate, v_deform_fn);
    b_entry =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "entry");
    b_adjust_unavail_cols =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "adjust_unavail_cols");
    b_find_start =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "find_startblock");
    b_out =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "outblock");
    b_dead =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "deadblock");

    b = LLVMCreateBuilderInContext(lc);
    attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    LLVMPositionBuilderAtEnd(b, b_entry);

    /* perform allocas first, llvm only converts those to registers */
    v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");

    v_slot = LLVMGetParam(v_deform_fn, 0);

    v_tts_values =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot,
                          FIELDNO_TUPLETABLESLOT_VALUES, "tts_values");
    v_tts_nulls =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot,
                          FIELDNO_TUPLETABLESLOT_ISNULL, "tts_isnull");
    v_flagsp = l_struct_gep(b, StructTupleTableSlot, v_slot,
                            FIELDNO_TUPLETABLESLOT_FLAGS, "");
    v_nvalidp = l_struct_gep(b, StructTupleTableSlot, v_slot,
                             FIELDNO_TUPLETABLESLOT_NVALID, "");
    if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
    {
        LLVMValueRef v_heapslot;

        v_heapslot =
            LLVMBuildBitCast(b, v_slot,
                             l_ptr(StructHeapTupleTableSlot),
                             "heapslot");
        v_slotoffp = l_struct_gep(b, StructHeapTupleTableSlot, v_heapslot,
                                  FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b, StructHeapTupleTableSlot, v_heapslot,
                              FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else if (ops == &TTSOpsMinimalTuple)
    {
        LLVMValueRef v_minimalslot;

        v_minimalslot =
            LLVMBuildBitCast(b, v_slot,
                             l_ptr(StructMinimalTupleTableSlot),
                             "minimalslot");
        v_slotoffp = l_struct_gep(b,
                                  StructMinimalTupleTableSlot,
                                  v_minimalslot,
                                  FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b,
                              StructMinimalTupleTableSlot,
                              v_minimalslot,
                              FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else
    {
        /* should've returned at the start of the function */
        pg_unreachable();
    }

    v_tuplep =
        l_load_struct_gep(b,
                          StructHeapTupleData,
                          v_tupleheaderp,
                          FIELDNO_HEAPTUPLEDATA_DATA,
                          "tuple");
    v_bits =
        LLVMBuildBitCast(b,
                         l_struct_gep(b,
                                      StructHeapTupleHeaderData,
                                      v_tuplep,
                                      FIELDNO_HEAPTUPLEHEADERDATA_BITS,
                                      ""),
                         l_ptr(LLVMInt8TypeInContext(lc)),
                         "t_bits");
    v_infomask1 =
        l_load_struct_gep(b,
                          StructHeapTupleHeaderData,
                          v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK,
                          "infomask1");
    v_infomask2 =
        l_load_struct_gep(b,
                          StructHeapTupleHeaderData,
                          v_tuplep, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2,
                          "infomask2");
    /* t_infomask & HEAP_HASNULL */
    v_hasnulls =
        LLVMBuildICmp(b, LLVMIntNE,
                      LLVMBuildAnd(b,
                                   l_int16_const(lc, HEAP_HASNULL),
                                   v_infomask1, ""),
                      l_int16_const(lc, 0),
                      "hasnulls");

    /* t_infomask2 & HEAP_NATTS_MASK */
    v_maxatt = LLVMBuildAnd(b,
                            l_int16_const(lc, HEAP_NATTS_MASK),
                            v_infomask2,
                            "maxatt");
    /*
     * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
     * integer, which'd yield a negative offset for t_hoff > 127.
     */
    v_hoff =
        LLVMBuildZExt(b,
                      l_load_struct_gep(b,
                                        StructHeapTupleHeaderData,
                                        v_tuplep,
                                        FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
                                        ""),
                      LLVMInt32TypeInContext(lc), "t_hoff");

    v_tupdata_base = l_gep(b,
                           LLVMInt8TypeInContext(lc),
                           LLVMBuildBitCast(b,
                                            v_tuplep,
                                            l_ptr(LLVMInt8TypeInContext(lc)),
                                            ""),
                           &v_hoff, 1,
                           "v_tupdata_base");
    /*
     * Load tuple start offset from slot. Will be reset below in case there's
     * no existing deformed columns in slot.
     */
    {
        LLVMValueRef v_off_start;

        v_off_start = l_load(b, LLVMInt32TypeInContext(lc), v_slotoffp,
                             "v_slot_off");
        v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
        LLVMBuildStore(b, v_off_start, v_offp);
    }
    /* build the basic block for each attribute, need them as jump target */
    for (attnum = 0; attnum < natts; attnum++)
    {
        attcheckattnoblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
        attstartblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
        attisnullblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
        attcheckalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
        attalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
        attstoreblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
    }
    /*
     * Check if it is guaranteed that all the desired attributes are
     * available in the tuple (but still possibly NULL), by dint of either
     * the last to-be-deformed column being NOT NULL, or subsequent ones not
     * accessed here being NOT NULL. If that's not guaranteed the tuple
     * header's natts has to be checked, and missing attributes potentially
     * have to be fetched (using slot_getmissingattrs()).
     */
    if ((natts - 1) <= guaranteed_column_number)
    {
        /* just skip through unnecessary blocks */
        LLVMBuildBr(b, b_adjust_unavail_cols);
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
        LLVMBuildBr(b, b_find_start);
    }
    else
    {
        LLVMValueRef v_params[3];
        LLVMValueRef f;

        /* branch if not all columns available */
        LLVMBuildCondBr(b,
                        LLVMBuildICmp(b, LLVMIntULT,
                                      v_maxatt,
                                      l_int16_const(lc, natts),
                                      ""),
                        b_adjust_unavail_cols,
                        b_find_start);

        /* if not, memset tts_isnull of relevant cols to true */
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);

        v_params[0] = v_slot;
        v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32TypeInContext(lc), "");
        v_params[2] = l_int32_const(lc, natts);
        f = llvm_pg_func(mod, "slot_getmissingattrs");
        l_call(b,
               LLVMGetFunctionType(f), f,
               v_params, lengthof(v_params), "");
        LLVMBuildBr(b, b_find_start);
    }
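
    /*
     * In plain C the prologue built above behaves roughly like
     *
     *		if (HeapTupleHeaderGetNatts(tuple) < natts)
     *			slot_getmissingattrs(slot, HeapTupleHeaderGetNatts(tuple), natts);
     *
     * i.e. columns physically absent from the tuple get their default/NULL
     * values before the per-column deforming code runs.
     */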
    LLVMPositionBuilderAtEnd(b, b_find_start);

    v_nvalid = l_load(b, LLVMInt16TypeInContext(lc), v_nvalidp, "");

    /*
     * Build switch to go from nvalid to the right startblock. Callers
     * currently don't have the knowledge, but it'd be good for performance
     * to avoid this check when it's known that the slot is empty (e.g. in
     * scan nodes).
     */
    if (true)
    {
        LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
                                                b_dead, natts);

        for (attnum = 0; attnum < natts; attnum++)
        {
            LLVMValueRef v_attno = l_int16_const(lc, attnum);

            LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
        }
    }
    else
    {
        /* jump from entry block to first block */
        LLVMBuildBr(b, attcheckattnoblocks[0]);
    }

    LLVMPositionBuilderAtEnd(b, b_dead);
    LLVMBuildUnreachable(b);
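
    /*
     * The switch built above dispatches on slot->tts_nvalid so that
     * deforming can resume where a previous call left off; roughly:
     *
     *		switch (slot->tts_nvalid)
     *		{
     *			case 0: goto check_attr_0;
     *			case 1: goto check_attr_1;
     *			...
     *			default: unreachable;
     *		}
     *
     * (label names are illustrative only).
     */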
    /*
     * Iterate over each attribute that needs to be deformed, build code to
     * deform it.
     */
    for (attnum = 0; attnum < natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);
        LLVMValueRef v_incby;
        int         alignto;
        LLVMValueRef l_attno = l_int16_const(lc, attnum);
        LLVMValueRef v_attdatap;
        LLVMValueRef v_resultp;

        /* build block checking whether we did all the necessary attributes */
        LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);

        /*
         * If this is the first attribute, slot->tts_nvalid was 0. Therefore
         * also reset offset to 0, it may be from a previous execution.
         */
        if (attnum == 0)
        {
            LLVMBuildStore(b, l_sizet_const(0), v_offp);
        }
        /*
         * Build check whether column is available (i.e. whether the tuple
         * has that many columns stored). We can avoid the branch if we know
         * there's a subsequent NOT NULL column.
         */
        if (attnum <= guaranteed_column_number)
        {
            LLVMBuildBr(b, attstartblocks[attnum]);
        }
        else
        {
            LLVMValueRef v_islast;

            v_islast = LLVMBuildICmp(b, LLVMIntUGE,
                                     l_attno,
                                     v_maxatt,
                                     "heap_natts");
            LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);
        /*
         * Check for nulls if necessary. No need to take missing attributes
         * into account, because if they're present the heaptuple's natts
         * would have indicated that a slot_getmissingattrs() is needed.
         */
        if (!att->attnotnull)
        {
            LLVMBasicBlockRef b_ifnotnull;
            LLVMBasicBlockRef b_ifnull;
            LLVMBasicBlockRef b_next;
            LLVMValueRef v_attisnull;
            LLVMValueRef v_nullbyteno;
            LLVMValueRef v_nullbytemask;
            LLVMValueRef v_nullbyte;
            LLVMValueRef v_nullbit;

            b_ifnotnull = attcheckalignblocks[attnum];
            b_ifnull = attisnullblocks[attnum];

            if (attnum + 1 == natts)
                b_next = b_out;
            else
                b_next = attcheckattnoblocks[attnum + 1];

            v_nullbyteno = l_int32_const(lc, attnum >> 3);
            v_nullbytemask = l_int8_const(lc, 1 << ((attnum) & 0x07));
            v_nullbyte = l_load_gep1(b, LLVMInt8TypeInContext(lc), v_bits,
                                     v_nullbyteno, "attnullbyte");

            v_nullbit = LLVMBuildICmp(b,
                                      LLVMIntEQ,
                                      LLVMBuildAnd(b, v_nullbyte, v_nullbytemask, ""),
                                      l_int8_const(lc, 0),
                                      "attisnull");

            v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");

            LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);

            LLVMPositionBuilderAtEnd(b, b_ifnull);

            /* store null-byte */
            LLVMBuildStore(b, l_int8_const(lc, 1),
                           l_gep(b, LLVMInt8TypeInContext(lc), v_tts_nulls,
                                 &l_attno, 1, ""));
            /* store zero datum */
            LLVMBuildStore(b, l_sizet_const(0),
                           l_gep(b, TypeSizeT, v_tts_values,
                                 &l_attno, 1, ""));

            LLVMBuildBr(b, b_next);
            attguaranteedalign = false;
        }
        else
        {
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
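
        /*
         * The null test emitted above is the JITed equivalent of
         * att_isnull(): the attribute is NULL iff the tuple has a null
         * bitmap and bit attnum is clear, i.e. roughly
         *
         *		hasnulls && !(t_bits[attnum >> 3] & (1 << (attnum & 0x07)))
         */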
        /* determine required alignment */
        if (att->attalign == TYPALIGN_INT)
            alignto = ALIGNOF_INT;
        else if (att->attalign == TYPALIGN_CHAR)
            alignto = 1;
        else if (att->attalign == TYPALIGN_DOUBLE)
            alignto = ALIGNOF_DOUBLE;
        else if (att->attalign == TYPALIGN_SHORT)
            alignto = ALIGNOF_SHORT;
        else
        {
            elog(ERROR, "unknown alignment");
            alignto = 0;
        }
        /*
         * Even if alignment is required, we can skip doing it if provably
         * unnecessary:
         * - first column is guaranteed to be aligned
         * - columns following a NOT NULL fixed width datum have known
         *   alignment, and can skip the alignment computation if that known
         *   alignment is compatible with the current column.
         */
        if (alignto > 1 &&
            (known_alignment < 0 ||
             known_alignment != TYPEALIGN(alignto, known_alignment)))
        {
            /*
             * When accessing a varlena field, we have to "peek" to see if we
             * are looking at a pad byte or the first byte of a 1-byte-header
             * datum. A zero byte must be either a pad byte, or the first
             * byte of a correctly aligned 4-byte length word; in either
             * case, we can align safely. A non-zero byte must be either a
             * 1-byte length word, or the first byte of a correctly aligned
             * 4-byte length word; in either case, we need not align.
             */
            if (att->attlen == -1)
            {
                LLVMValueRef v_possible_padbyte;
                LLVMValueRef v_ispad;
                LLVMValueRef v_off;

                /* don't know if short varlena or not */
                attguaranteedalign = false;

                v_off = l_load(b, TypeSizeT, v_offp, "");

                v_possible_padbyte =
                    l_load_gep1(b, LLVMInt8TypeInContext(lc), v_tupdata_base,
                                v_off, "padbyte");
                v_ispad =
                    LLVMBuildICmp(b, LLVMIntEQ,
                                  v_possible_padbyte, l_int8_const(lc, 0),
                                  "ispadbyte");
                LLVMBuildCondBr(b, v_ispad,
                                attalignblocks[attnum],
                                attstoreblocks[attnum]);
            }
            else
            {
                LLVMBuildBr(b, attalignblocks[attnum]);
            }
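
            /*
             * This byte-peek mirrors what att_align_pointer() (see
             * src/include/access/tupmacs.h) does in the interpreted
             * deforming code, for the same reason.
             */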
            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);

            /* translation of alignment code (cf TYPEALIGN()) */
            {
                LLVMValueRef v_off_aligned;
                LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

                /* ((ALIGNVAL) - 1) */
                LLVMValueRef v_alignval = l_sizet_const(alignto - 1);

                /* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
                LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");

                /* ~((uintptr_t) ((ALIGNVAL) - 1)) */
                LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));

                v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");

                LLVMBuildStore(b, v_off_aligned, v_offp);
            }
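
            /*
             * Worked example of the block above: with alignto = 4 and a
             * current offset of 5, the emitted code computes
             * (5 + 3) & ~3 = 8, i.e. TYPEALIGN(4, 5).
             */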
            /*
             * As alignment either was unnecessary or has been performed, we
             * now know the current alignment. This is only safe because this
             * value isn't used for varlena and nullable columns.
             */
            if (known_alignment >= 0)
            {
                Assert(known_alignment != 0);
                known_alignment = TYPEALIGN(alignto, known_alignment);
            }

            LLVMBuildBr(b, attstoreblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
        }
        else
        {
            LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
            LLVMBuildBr(b, attalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
            LLVMBuildBr(b, attstoreblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
        /*
         * Store the current offset if known to be constant. That allows LLVM
         * to generate better code. Without that LLVM can't figure out that
         * the offset might be constant due to the jumps for previously
         * decided columns.
         */
        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }
        /* compute what following columns are aligned to */
        if (att->attlen < 0)
        {
            /* can't guarantee any alignment after variable length field */
            known_alignment = -1;
            attguaranteedalign = false;
        }
        else if (att->attnotnull && attguaranteedalign && known_alignment >= 0)
        {
            /*
             * If the offset to the column was previously known, a NOT NULL &
             * fixed-width column guarantees that alignment is just the
             * previous alignment plus column width.
             */
            Assert(att->attlen > 0);
            known_alignment += att->attlen;
        }
        else if (att->attnotnull && (att->attlen % alignto) == 0)
        {
            /*
             * After a NOT NULL fixed-width column with a length that is a
             * multiple of its alignment requirement, we know the following
             * column is aligned to at least the current column's alignment.
             */
            Assert(att->attlen > 0);
            known_alignment = alignto;
            Assert(known_alignment > 0);
            attguaranteedalign = false;
        }
        else
        {
            known_alignment = -1;
            attguaranteedalign = false;
        }
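
        /*
         * For instance, after an int4 NOT NULL column deformed at a known
         * offset of 8, known_alignment becomes 12; an immediately following
         * int4 column then needs no runtime alignment code, since 12 is
         * already a multiple of 4.
         */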
        /* compute address to load data from */
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_attdatap =
                l_gep(b, LLVMInt8TypeInContext(lc), v_tupdata_base, &v_off, 1, "");
        }

        /* compute address to store value at */
        v_resultp = l_gep(b, TypeSizeT, v_tts_values, &l_attno, 1, "");

        /* store null-byte (false) */
        LLVMBuildStore(b, l_int8_const(lc, 0),
                       l_gep(b, TypeStorageBool, v_tts_nulls, &l_attno, 1, ""));
        /*
         * Store datum. For byval datums: copy the value, extend to Datum's
         * width, and store. For byref types: store pointer to data.
         */
        if (att->attbyval)
        {
            LLVMValueRef v_tmp_loaddata;
            LLVMTypeRef vartype = LLVMIntTypeInContext(lc, att->attlen * 8);
            LLVMTypeRef vartypep = LLVMPointerType(vartype, 0);

            v_tmp_loaddata =
                LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
            v_tmp_loaddata = l_load(b, vartype, v_tmp_loaddata, "attr_byval");
            v_tmp_loaddata = LLVMBuildZExt(b, v_tmp_loaddata, TypeSizeT, "");

            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
        }
        else
        {
            LLVMValueRef v_tmp_loaddata;

            /* store the pointer to the attribute's data */
            v_tmp_loaddata =
                LLVMBuildPtrToInt(b, v_attdatap, TypeSizeT, "attr_ptr");
            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
        }
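
        /*
         * In plain C the two branches above amount to roughly: for a 4-byte
         * byval column, values[attnum] = (Datum) *(uint32 *) (tupdata + off),
         * zero-extended to full Datum width; for a byref column,
         * values[attnum] = PointerGetDatum(tupdata + off).
         */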
        /* increment data pointer */
        if (att->attlen > 0)
        {
            v_incby = l_sizet_const(att->attlen);
        }
        else if (att->attlen == -1)
        {
            v_incby = l_call(b,
                             llvm_pg_var_func_type("varsize_any"),
                             llvm_pg_func(mod, "varsize_any"),
                             &v_attdatap, 1,
                             "varsize_any");
            l_callsite_ro(v_incby);
            l_callsite_alwaysinline(v_incby);
        }
        else if (att->attlen == -2)
        {
            v_incby = l_call(b,
                             llvm_pg_var_func_type("strlen"),
                             llvm_pg_func(mod, "strlen"),
                             &v_attdatap, 1, "strlen");

            l_callsite_ro(v_incby);

            /* add 1 for NUL byte */
            v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
        }
        else
        {
            Assert(false);
            v_incby = NULL;     /* silence compiler */
        }
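
        /*
         * The increment chosen above corresponds to att_addlength_pointer():
         * the fixed attlen for fixed-width types, VARSIZE_ANY() for varlena
         * (attlen == -1), and strlen() + 1 for cstring (attlen == -2).
         */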
        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }
        else
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
            LLVMBuildStore(b, v_off, v_offp);
        }
        /*
         * jump to next block, unless last possible column, or all desired
         * (available) attributes have been fetched.
         */
        if (attnum + 1 == natts)
        {
            /* jump out */
            LLVMBuildBr(b, b_out);
        }
        else
        {
            LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
        }
    }
    /* build block that returns */
    LLVMPositionBuilderAtEnd(b, b_out);

    {
        LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
        LLVMValueRef v_flags;

        LLVMBuildStore(b, l_int16_const(lc, natts), v_nvalidp);
        v_off = LLVMBuildTrunc(b, v_off, LLVMInt32TypeInContext(lc), "");
        LLVMBuildStore(b, v_off, v_slotoffp);
        v_flags = l_load(b, LLVMInt16TypeInContext(lc), v_flagsp, "tts_flags");
        v_flags = LLVMBuildOr(b, v_flags, l_int16_const(lc, TTS_FLAG_SLOW), "");
        LLVMBuildStore(b, v_flags, v_flagsp);
        LLVMBuildRetVoid(b);
    }

    LLVMDisposeBuilder(b);

    return v_deform_fn;
}