/*-------------------------------------------------------------------------
 *
 * llvmjit_deform.c
 *    Generate code for deforming a heap tuple.
 *
 * This gains performance benefits over unJITed deforming from compile-time
 * knowledge of the tuple descriptor. Fixed column widths, NOT NULLness, etc
 * can be taken advantage of.
 *
 * Portions Copyright (c) 1996-2024, PostgreSQL Global Development Group
 * Portions Copyright (c) 1994, Regents of the University of California
 *
 * IDENTIFICATION
 *    src/backend/jit/llvm/llvmjit_deform.c
 *
 *-------------------------------------------------------------------------
 */

#include "postgres.h"

#include <llvm-c/Core.h>

#include "access/htup_details.h"
#include "access/tupdesc_details.h"
#include "executor/tuptable.h"
#include "jit/llvmjit.h"
#include "jit/llvmjit_emit.h"

/*
 * Create a function that deforms a tuple of type desc up to natts columns.
 */
LLVMValueRef
slot_compile_deform(LLVMJitContext *context, TupleDesc desc,
                    const TupleTableSlotOps *ops, int natts)
{
    char       *funcname;

    LLVMModuleRef mod;
    LLVMContextRef lc;
    LLVMBuilderRef b;

    LLVMTypeRef deform_sig;
    LLVMValueRef v_deform_fn;

    LLVMBasicBlockRef b_entry;
    LLVMBasicBlockRef b_adjust_unavail_cols;
    LLVMBasicBlockRef b_find_start;

    LLVMBasicBlockRef b_out;
    LLVMBasicBlockRef b_dead;
    LLVMBasicBlockRef *attcheckattnoblocks;
    LLVMBasicBlockRef *attstartblocks;
    LLVMBasicBlockRef *attisnullblocks;
    LLVMBasicBlockRef *attcheckalignblocks;
    LLVMBasicBlockRef *attalignblocks;
    LLVMBasicBlockRef *attstoreblocks;

    LLVMValueRef v_offp;

    LLVMValueRef v_tupdata_base;
    LLVMValueRef v_tts_values;
    LLVMValueRef v_tts_nulls;
    LLVMValueRef v_slotoffp;
    LLVMValueRef v_flagsp;
    LLVMValueRef v_nvalidp;
    LLVMValueRef v_nvalid;
    LLVMValueRef v_maxatt;

    LLVMValueRef v_slot;

    LLVMValueRef v_tupleheaderp;
    LLVMValueRef v_tuplep;
    LLVMValueRef v_infomask1;
    LLVMValueRef v_infomask2;
    LLVMValueRef v_bits;

    LLVMValueRef v_hoff;

    LLVMValueRef v_hasnulls;

    /* last column (0 indexed) guaranteed to exist */
    int         guaranteed_column_number = -1;

    /* current known alignment */
    int         known_alignment = 0;

    /* if true, known_alignment describes definite offset of column */
    bool        attguaranteedalign = true;

    int         attnum;

    /* virtual tuples never need deforming, so don't generate code */
    if (ops == &TTSOpsVirtual)
        return NULL;

    /* decline to JIT for slot types we don't know to handle */
    if (ops != &TTSOpsHeapTuple && ops != &TTSOpsBufferHeapTuple &&
        ops != &TTSOpsMinimalTuple)
        return NULL;

    mod = llvm_mutable_module(context);
    lc = LLVMGetModuleContext(mod);

    funcname = llvm_expand_funcname(context, "deform");

    /*
     * Check which columns have to exist, so we don't have to check the row's
     * natts unnecessarily.
     */
    for (attnum = 0; attnum < desc->natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);

        /*
         * If the column is declared NOT NULL then it must be present in
         * every tuple, unless there's a "missing" entry that could provide
         * a non-NULL value for it. That in turn guarantees that the NULL
         * bitmap - if there are any NULLable columns - is at least long
         * enough to cover columns up to attnum.
         *
         * Be paranoid and also check !attisdropped, even though the
         * combination of attisdropped && attnotnull shouldn't exist.
         */
        if (att->attnotnull &&
            !att->atthasmissing &&
            !att->attisdropped)
            guaranteed_column_number = attnum;
    }

    /* Create the signature and function */
    {
        LLVMTypeRef param_types[1];

        param_types[0] = l_ptr(StructTupleTableSlot);

        deform_sig = LLVMFunctionType(LLVMVoidTypeInContext(lc),
                                      param_types, lengthof(param_types), 0);
    }
    v_deform_fn = LLVMAddFunction(mod, funcname, deform_sig);
    LLVMSetLinkage(v_deform_fn, LLVMInternalLinkage);
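
    /*
     * Slots are palloc'd and thus MAXALIGNed; promising that alignment for
     * the parameter presumably lets LLVM emit aligned loads and stores when
     * accessing the slot's fields.
     */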
    LLVMSetParamAlignment(LLVMGetParam(v_deform_fn, 0), MAXIMUM_ALIGNOF);
    llvm_copy_attributes(AttributeTemplate, v_deform_fn);

    b_entry =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "entry");
    b_adjust_unavail_cols =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "adjust_unavail_cols");
    b_find_start =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "find_startblock");
    b_out =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "outblock");
    b_dead =
        LLVMAppendBasicBlockInContext(lc, v_deform_fn, "deadblock");

    b = LLVMCreateBuilderInContext(lc);

    attcheckattnoblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstartblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attisnullblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attcheckalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attalignblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);
    attstoreblocks = palloc(sizeof(LLVMBasicBlockRef) * natts);

    known_alignment = 0;

    LLVMPositionBuilderAtEnd(b, b_entry);

    /* perform allocas first, llvm only converts those to registers */
    v_offp = LLVMBuildAlloca(b, TypeSizeT, "v_offp");

    v_slot = LLVMGetParam(v_deform_fn, 0);

    v_tts_values =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_VALUES,
                          "tts_values");
    v_tts_nulls =
        l_load_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_ISNULL,
                          "tts_ISNULL");
    v_flagsp = l_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_FLAGS, "");
    v_nvalidp = l_struct_gep(b, StructTupleTableSlot, v_slot, FIELDNO_TUPLETABLESLOT_NVALID, "");

    if (ops == &TTSOpsHeapTuple || ops == &TTSOpsBufferHeapTuple)
    {
        LLVMValueRef v_heapslot;

        v_heapslot =
            LLVMBuildBitCast(b,
                             v_slot,
                             l_ptr(StructHeapTupleTableSlot),
                             "heapslot");
        v_slotoffp = l_struct_gep(b, StructHeapTupleTableSlot, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b, StructHeapTupleTableSlot, v_heapslot, FIELDNO_HEAPTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else if (ops == &TTSOpsMinimalTuple)
    {
        LLVMValueRef v_minimalslot;

        v_minimalslot =
            LLVMBuildBitCast(b,
                             v_slot,
                             l_ptr(StructMinimalTupleTableSlot),
                             "minimalslot");
        v_slotoffp = l_struct_gep(b,
                                  StructMinimalTupleTableSlot,
                                  v_minimalslot,
                                  FIELDNO_MINIMALTUPLETABLESLOT_OFF, "");
        v_tupleheaderp =
            l_load_struct_gep(b,
                              StructMinimalTupleTableSlot,
                              v_minimalslot,
                              FIELDNO_MINIMALTUPLETABLESLOT_TUPLE,
                              "tupleheader");
    }
    else
    {
        /* should've returned at the start of the function */
        pg_unreachable();
    }

    v_tuplep =
        l_load_struct_gep(b,
                          StructHeapTupleData,
                          v_tupleheaderp,
                          FIELDNO_HEAPTUPLEDATA_DATA,
                          "tuple");
    v_bits =
        LLVMBuildBitCast(b,
                         l_struct_gep(b,
                                      StructHeapTupleHeaderData,
                                      v_tuplep,
                                      FIELDNO_HEAPTUPLEHEADERDATA_BITS,
                                      ""),
                         l_ptr(LLVMInt8TypeInContext(lc)),
                         "t_bits");
    v_infomask1 =
        l_load_struct_gep(b,
                          StructHeapTupleHeaderData,
                          v_tuplep,
                          FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK,
                          "infomask1");
    v_infomask2 =
        l_load_struct_gep(b,
                          StructHeapTupleHeaderData,
                          v_tuplep, FIELDNO_HEAPTUPLEHEADERDATA_INFOMASK2,
                          "infomask2");

    /* t_infomask & HEAP_HASNULL */
    v_hasnulls =
        LLVMBuildICmp(b, LLVMIntNE,
                      LLVMBuildAnd(b,
                                   l_int16_const(lc, HEAP_HASNULL),
                                   v_infomask1, ""),
                      l_int16_const(lc, 0),
                      "hasnulls");

    /* t_infomask2 & HEAP_NATTS_MASK */
    v_maxatt = LLVMBuildAnd(b,
                            l_int16_const(lc, HEAP_NATTS_MASK),
                            v_infomask2,
                            "maxatt");

    /*
     * Need to zext, as getelementptr otherwise treats hoff as a signed 8bit
     * integer, which'd yield a negative offset for t_hoff > 127.
     */
    v_hoff =
        LLVMBuildZExt(b,
                      l_load_struct_gep(b,
                                        StructHeapTupleHeaderData,
                                        v_tuplep,
                                        FIELDNO_HEAPTUPLEHEADERDATA_HOFF,
                                        ""),
                      LLVMInt32TypeInContext(lc), "t_hoff");

    v_tupdata_base = l_gep(b,
                           LLVMInt8TypeInContext(lc),
                           LLVMBuildBitCast(b,
                                            v_tuplep,
                                            l_ptr(LLVMInt8TypeInContext(lc)),
                                            ""),
                           &v_hoff, 1,
                           "v_tupdata_base");

    /*
     * Load tuple start offset from slot. Will be reset below in case there's
     * no existing deformed columns in slot.
     */
    {
        LLVMValueRef v_off_start;

        v_off_start = l_load(b, LLVMInt32TypeInContext(lc), v_slotoffp, "v_slot_off");
        v_off_start = LLVMBuildZExt(b, v_off_start, TypeSizeT, "");
        LLVMBuildStore(b, v_off_start, v_offp);
    }

    /* build the basic block for each attribute, need them as jump target */
    for (attnum = 0; attnum < natts; attnum++)
    {
        attcheckattnoblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckattno", attnum);
        attstartblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.start", attnum);
        attisnullblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attisnull", attnum);
        attcheckalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.attcheckalign", attnum);
        attalignblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.align", attnum);
        attstoreblocks[attnum] =
            l_bb_append_v(v_deform_fn, "block.attr.%d.store", attnum);
    }

    /*
     * Check if it is guaranteed that all the desired attributes are
     * available in the tuple (but still possibly NULL), by dint of either
     * the last to-be-deformed column being NOT NULL, or subsequent ones not
     * accessed here being NOT NULL. If that's not guaranteed, the tuple
     * header's natts has to be checked, and missing attributes potentially
     * have to be fetched (using slot_getmissingattrs()).
     */
    if ((natts - 1) <= guaranteed_column_number)
    {
        /* just skip through unnecessary blocks */
        LLVMBuildBr(b, b_adjust_unavail_cols);
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
        LLVMBuildBr(b, b_find_start);
    }
    else
    {
        LLVMValueRef v_params[3];
        LLVMValueRef f;

        /* branch if not all columns available */
        LLVMBuildCondBr(b,
                        LLVMBuildICmp(b, LLVMIntULT,
                                      v_maxatt,
                                      l_int16_const(lc, natts),
                                      ""),
                        b_adjust_unavail_cols,
                        b_find_start);

        /* if not, memset tts_isnull of relevant cols to true */
        LLVMPositionBuilderAtEnd(b, b_adjust_unavail_cols);
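
        /*
         * slot_getmissingattrs(slot, maxatt, natts) fills tts_values /
         * tts_isnull for columns maxatt .. natts - 1 from the tuple
         * descriptor's "missing" defaults, or with NULLs if there are none.
         */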
        v_params[0] = v_slot;
        v_params[1] = LLVMBuildZExt(b, v_maxatt, LLVMInt32TypeInContext(lc), "");
        v_params[2] = l_int32_const(lc, natts);
        f = llvm_pg_func(mod, "slot_getmissingattrs");
        l_call(b,
               LLVMGetFunctionType(f), f,
               v_params, lengthof(v_params), "");
        LLVMBuildBr(b, b_find_start);
    }

    LLVMPositionBuilderAtEnd(b, b_find_start);

    v_nvalid = l_load(b, LLVMInt16TypeInContext(lc), v_nvalidp, "");

    /*
     * Build switch to go from nvalid to the right startblock. Callers
     * currently don't have the knowledge, but it'd be good for performance
     * to avoid this check when it's known that the slot is empty (e.g. in
     * scan nodes).
     */
    if (true)
    {
        LLVMValueRef v_switch = LLVMBuildSwitch(b, v_nvalid,
                                                b_dead, natts);

        for (attnum = 0; attnum < natts; attnum++)
        {
            LLVMValueRef v_attno = l_int16_const(lc, attnum);

            LLVMAddCase(v_switch, v_attno, attcheckattnoblocks[attnum]);
        }
    }
    else
    {
        /* jump from entry block to first block */
        LLVMBuildBr(b, attcheckattnoblocks[0]);
    }

    LLVMPositionBuilderAtEnd(b, b_dead);
    LLVMBuildUnreachable(b);
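
    /*
     * b_dead is the switch's default target; tts_nvalid is expected to
     * always be within [0, natts) here, so telling LLVM the default is
     * unreachable allows better optimization of the dispatch.
     */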

    /*
     * Iterate over each attribute that needs to be deformed, build code to
     * deform it.
     */
    for (attnum = 0; attnum < natts; attnum++)
    {
        Form_pg_attribute att = TupleDescAttr(desc, attnum);
        LLVMValueRef v_incby;
        int         alignto;
        LLVMValueRef l_attno = l_int16_const(lc, attnum);
        LLVMValueRef v_attdatap;
        LLVMValueRef v_resultp;

        /* build block checking whether we did all the necessary attributes */
        LLVMPositionBuilderAtEnd(b, attcheckattnoblocks[attnum]);

        /*
         * If this is the first attribute, slot->tts_nvalid was 0. Therefore
         * also reset offset to 0, it may be from a previous execution.
         */
        if (attnum == 0)
        {
            LLVMBuildStore(b, l_sizet_const(0), v_offp);
        }

        /*
         * Build check whether column is available (i.e. whether the tuple
         * has that many columns stored). We can avoid the branch if we know
         * there's a subsequent NOT NULL column.
         */
        if (attnum <= guaranteed_column_number)
        {
            LLVMBuildBr(b, attstartblocks[attnum]);
        }
        else
        {
            LLVMValueRef v_islast;

            v_islast = LLVMBuildICmp(b, LLVMIntUGE,
                                     l_attno,
                                     v_maxatt,
                                     "heap_natts");
            LLVMBuildCondBr(b, v_islast, b_out, attstartblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstartblocks[attnum]);

        /*
         * Check for nulls if necessary. No need to take missing attributes
         * into account, because if they're present the heaptuple's natts
         * would have indicated that a slot_getmissingattrs() is needed.
         */
        if (!att->attnotnull)
        {
            LLVMBasicBlockRef b_ifnotnull;
            LLVMBasicBlockRef b_ifnull;
            LLVMBasicBlockRef b_next;
            LLVMValueRef v_attisnull;
            LLVMValueRef v_nullbyteno;
            LLVMValueRef v_nullbytemask;
            LLVMValueRef v_nullbyte;
            LLVMValueRef v_nullbit;

            b_ifnotnull = attcheckalignblocks[attnum];
            b_ifnull = attisnullblocks[attnum];

            if (attnum + 1 == natts)
                b_next = b_out;
            else
                b_next = attcheckattnoblocks[attnum + 1];
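
            /*
             * Locate this column's bit in the NULL bitmap: byte attnum / 8,
             * bit attnum % 8. In t_bits a set bit means the attribute is
             * present (not NULL), so a clear bit, together with
             * HEAP_HASNULL, signals a NULL.
             */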
            v_nullbyteno = l_int32_const(lc, attnum >> 3);
            v_nullbytemask = l_int8_const(lc, 1 << ((attnum) & 0x07));
            v_nullbyte = l_load_gep1(b, LLVMInt8TypeInContext(lc), v_bits, v_nullbyteno, "attnullbyte");

            v_nullbit = LLVMBuildICmp(b,
                                      LLVMIntEQ,
                                      LLVMBuildAnd(b, v_nullbyte, v_nullbytemask, ""),
                                      l_int8_const(lc, 0),
                                      "attisnull");

            v_attisnull = LLVMBuildAnd(b, v_hasnulls, v_nullbit, "");

            LLVMBuildCondBr(b, v_attisnull, b_ifnull, b_ifnotnull);

            LLVMPositionBuilderAtEnd(b, b_ifnull);

            /* store null-byte */
            LLVMBuildStore(b,
                           l_int8_const(lc, 1),
                           l_gep(b, LLVMInt8TypeInContext(lc), v_tts_nulls, &l_attno, 1, ""));
            /* store zero datum */
            LLVMBuildStore(b,
                           l_sizet_const(0),
                           l_gep(b, TypeSizeT, v_tts_values, &l_attno, 1, ""));

            LLVMBuildBr(b, b_next);
            attguaranteedalign = false;
        }
        else
        {
            /* nothing to do */
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attisnullblocks[attnum]);
            LLVMBuildBr(b, attcheckalignblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);

        /* determine required alignment */
        if (att->attalign == TYPALIGN_INT)
            alignto = ALIGNOF_INT;
        else if (att->attalign == TYPALIGN_CHAR)
            alignto = 1;
        else if (att->attalign == TYPALIGN_DOUBLE)
            alignto = ALIGNOF_DOUBLE;
        else if (att->attalign == TYPALIGN_SHORT)
            alignto = ALIGNOF_SHORT;
        else
        {
            elog(ERROR, "unknown alignment");
            alignto = 0;
        }

        /* ------
         * Even if alignment is required, we can skip doing it if provably
         * unnecessary:
         * - first column is guaranteed to be aligned
         * - columns following a NOT NULL fixed width datum have known
         *   alignment, can skip alignment computation if that known
         *   alignment is compatible with current column.
         * ------
         */
        if (alignto > 1 &&
            (known_alignment < 0 || known_alignment != TYPEALIGN(alignto, known_alignment)))
        {
            /*
             * When accessing a varlena field, we have to "peek" to see if we
             * are looking at a pad byte or the first byte of a 1-byte-header
             * datum. A zero byte must be either a pad byte, or the first
             * byte of a correctly aligned 4-byte length word; in either
             * case, we can align safely. A non-zero byte must be either a
             * 1-byte length word, or the first byte of a correctly aligned
             * 4-byte length word; in either case, we need not align.
             */
            if (att->attlen == -1)
            {
                LLVMValueRef v_possible_padbyte;
                LLVMValueRef v_ispad;
                LLVMValueRef v_off;

                /* don't know if short varlena or not */
                attguaranteedalign = false;

                v_off = l_load(b, TypeSizeT, v_offp, "");

                v_possible_padbyte =
                    l_load_gep1(b, LLVMInt8TypeInContext(lc), v_tupdata_base, v_off, "padbyte");
                v_ispad =
                    LLVMBuildICmp(b, LLVMIntEQ,
                                  v_possible_padbyte, l_int8_const(lc, 0),
                                  "ispadbyte");
                LLVMBuildCondBr(b, v_ispad,
                                attalignblocks[attnum],
                                attstoreblocks[attnum]);
            }
            else
            {
                LLVMBuildBr(b, attalignblocks[attnum]);
            }

            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);

            /* translation of alignment code (cf TYPEALIGN()) */
            {
                LLVMValueRef v_off_aligned;
                LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

                /* ((ALIGNVAL) - 1) */
                LLVMValueRef v_alignval = l_sizet_const(alignto - 1);

                /* ((uintptr_t) (LEN) + ((ALIGNVAL) - 1)) */
                LLVMValueRef v_lh = LLVMBuildAdd(b, v_off, v_alignval, "");

                /* ~((uintptr_t) ((ALIGNVAL) - 1)) */
                LLVMValueRef v_rh = l_sizet_const(~(alignto - 1));

                v_off_aligned = LLVMBuildAnd(b, v_lh, v_rh, "aligned_offset");

                LLVMBuildStore(b, v_off_aligned, v_offp);
            }
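
            /*
             * I.e. off = (off + (alignto - 1)) & ~(alignto - 1); e.g. with
             * alignto = 4 an offset of 5 becomes (5 + 3) & ~3 = 8.
             */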

            /*
             * As alignment either was unnecessary or has been performed, we
             * now know the current alignment. This is only safe because this
             * value isn't used for varlena and nullable columns.
             */
            if (known_alignment >= 0)
            {
                Assert(known_alignment != 0);
                known_alignment = TYPEALIGN(alignto, known_alignment);
            }

            LLVMBuildBr(b, attstoreblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);
        }
        else
        {
            LLVMPositionBuilderAtEnd(b, attcheckalignblocks[attnum]);
            LLVMBuildBr(b, attalignblocks[attnum]);
            LLVMPositionBuilderAtEnd(b, attalignblocks[attnum]);
            LLVMBuildBr(b, attstoreblocks[attnum]);
        }
        LLVMPositionBuilderAtEnd(b, attstoreblocks[attnum]);

        /*
         * Store the current offset if known to be constant. That allows LLVM
         * to generate better code. Without that LLVM can't figure out that
         * the offset might be constant due to the jumps for previously
         * decoded columns.
         */
        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }

        /* compute what following columns are aligned to */
        if (att->attlen < 0)
        {
            /* can't guarantee any alignment after variable length field */
            known_alignment = -1;
            attguaranteedalign = false;
        }
        else if (att->attnotnull && attguaranteedalign && known_alignment >= 0)
        {
            /*
             * If the offset to the column was previously known, a NOT NULL &
             * fixed-width column guarantees that alignment is just the
             * previous alignment plus column width.
             */
            Assert(att->attlen > 0);
            known_alignment += att->attlen;
        }
        else if (att->attnotnull && (att->attlen % alignto) == 0)
        {
            /*
             * After a NOT NULL fixed-width column with a length that is a
             * multiple of its alignment requirement, we know the following
             * column is aligned to at least the current column's alignment.
             */
            Assert(att->attlen > 0);
            known_alignment = alignto;
            Assert(known_alignment > 0);
            attguaranteedalign = false;
        }
        else
        {
            known_alignment = -1;
            attguaranteedalign = false;
        }

        /* compute address to load data from */
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_attdatap =
                l_gep(b, LLVMInt8TypeInContext(lc), v_tupdata_base, &v_off, 1, "");
        }

        /* compute address to store value at */
        v_resultp = l_gep(b, TypeSizeT, v_tts_values, &l_attno, 1, "");

        /* store null-byte (false) */
        LLVMBuildStore(b, l_int8_const(lc, 0),
                       l_gep(b, TypeStorageBool, v_tts_nulls, &l_attno, 1, ""));

        /*
         * Store datum. For byval: datums copy the value, extend to Datum's
         * width, and store. For byref types: store pointer to data.
         */
        if (att->attbyval)
        {
            LLVMValueRef v_tmp_loaddata;
            LLVMTypeRef vartype = LLVMIntTypeInContext(lc, att->attlen * 8);
            LLVMTypeRef vartypep = LLVMPointerType(vartype, 0);

            v_tmp_loaddata =
                LLVMBuildPointerCast(b, v_attdatap, vartypep, "");
            v_tmp_loaddata = l_load(b, vartype, v_tmp_loaddata, "attr_byval");
            v_tmp_loaddata = LLVMBuildZExt(b, v_tmp_loaddata, TypeSizeT, "");

            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
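
            /*
             * E.g. a 4-byte integer is loaded as an i32 and zero-extended
             * above to the full Datum width (i64 on 64-bit platforms).
             */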
        }
        else
        {
            LLVMValueRef v_tmp_loaddata;

            /* store pointer */
            v_tmp_loaddata =
                LLVMBuildPtrToInt(b,
                                  v_attdatap,
                                  TypeSizeT,
                                  "attr_ptr");
            LLVMBuildStore(b, v_tmp_loaddata, v_resultp);
        }

        /* increment data pointer */
        if (att->attlen > 0)
        {
            v_incby = l_sizet_const(att->attlen);
        }
        else if (att->attlen == -1)
        {
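            /*
             * Variable-length datum: varsize_any() returns the total
             * on-disk size, coping with both 1-byte and 4-byte varlena
             * headers.
             */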
            v_incby = l_call(b,
                             llvm_pg_var_func_type("varsize_any"),
                             llvm_pg_func(mod, "varsize_any"),
                             &v_attdatap, 1,
                             "varsize_any");
            l_callsite_ro(v_incby);
            l_callsite_alwaysinline(v_incby);
        }
        else if (att->attlen == -2)
        {
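            /* attlen == -2 denotes a NUL-terminated cstring attribute */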
            v_incby = l_call(b,
                             llvm_pg_var_func_type("strlen"),
                             llvm_pg_func(mod, "strlen"),
                             &v_attdatap, 1, "strlen");

            l_callsite_ro(v_incby);

            /* add 1 for NUL byte */
            v_incby = LLVMBuildAdd(b, v_incby, l_sizet_const(1), "");
        }
        else
        {
            Assert(false);
            v_incby = NULL;     /* silence compiler */
        }

        if (attguaranteedalign)
        {
            Assert(known_alignment >= 0);
            LLVMBuildStore(b, l_sizet_const(known_alignment), v_offp);
        }
        else
        {
            LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");

            v_off = LLVMBuildAdd(b, v_off, v_incby, "increment_offset");
            LLVMBuildStore(b, v_off, v_offp);
        }

        /*
         * jump to next block, unless last possible column, or all desired
         * (available) attributes have been fetched.
         */
        if (attnum + 1 == natts)
        {
            /* jump out */
            LLVMBuildBr(b, b_out);
        }
        else
        {
            LLVMBuildBr(b, attcheckattnoblocks[attnum + 1]);
        }
    }

    /* build block that returns */
    LLVMPositionBuilderAtEnd(b, b_out);

    {
        LLVMValueRef v_off = l_load(b, TypeSizeT, v_offp, "");
        LLVMValueRef v_flags;

        LLVMBuildStore(b, l_int16_const(lc, natts), v_nvalidp);
        v_off = LLVMBuildTrunc(b, v_off, LLVMInt32TypeInContext(lc), "");
        LLVMBuildStore(b, v_off, v_slotoffp);
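
        /*
         * Set TTS_FLAG_SLOW to indicate that tts_off now contains saved
         * deforming state, mirroring slot_deform_heap_tuple()'s
         * bookkeeping.
         */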
        v_flags = l_load(b, LLVMInt16TypeInContext(lc), v_flagsp, "tts_flags");
        v_flags = LLVMBuildOr(b, v_flags, l_int16_const(lc, TTS_FLAG_SLOW), "");
        LLVMBuildStore(b, v_flags, v_flagsp);
        LLVMBuildRetVoid(b);
    }

    LLVMDisposeBuilder(b);

    return v_deform_fn;
}