/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */

#if defined(POINTER_COMPRESSION)
#define POINTER_THUNK_BIT	0
#elif defined(POINTER_IGNORE_START)
#define POINTER_THUNK_BIT	POINTER_IGNORE_TOP_BIT
#elif defined(POINTER_TAG)
#define POINTER_THUNK_BIT	POINTER_TAG_BIT
#else
unsupported pointer mode
#endif
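
/*
 * Test the thunk bit of the pointer in "reg" and jump to "label" when the
 * bit is set ("jnz" true) or clear ("jnz" false). If the bit lies within
 * the low byte of the register, a one-byte TEST is enough.
 */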
static bool attr_w gen_ptr_is_thunk(struct codegen_context *ctx, unsigned reg, bool jnz, uint32_t label)
{
	if (POINTER_THUNK_BIT < 8
#if defined(ARCH_X86_32)
		&& reg < 4
#endif
	    ) {
		g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_1, reg, (uint64_t)1 << POINTER_THUNK_BIT, jnz ? COND_NE : COND_E, label));
	} else {
		g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_SLOT, reg, (uint64_t)1 << POINTER_THUNK_BIT, jnz ? COND_NE : COND_E, label));
	}
	return true;
}
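
/*
 * Emit a memory-barrier instruction on architectures that need one.
 */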
static bool attr_w gen_barrier(struct codegen_context *ctx)
{
	if (ARCH_NEEDS_BARRIER)
		gen_insn(INSN_MB, 0, 0, 0);
	return true;
}
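
/*
 * Compare the refcount_ field of the object that "ptr" points to against the
 * immediate "val" and jump to "label" if "cond" holds. x86 compares memory
 * with the immediate directly; other targets load the refcount into
 * R_SCRATCH_2 first.
 */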
static bool attr_w gen_compare_refcount(struct codegen_context *ctx, unsigned ptr, unsigned val, unsigned cond, uint32_t label)
{
	unsigned op_size = log_2(sizeof(refcount_int_t));
#if defined(ARCH_X86)
	bool logical = COND_IS_LOGICAL(cond);
	g(gen_address(ctx, ptr, offsetof(struct data, refcount_), IMM_PURPOSE_LDR_OFFSET, op_size));
	g(gen_imm(ctx, val, IMM_PURPOSE_CMP, op_size));
	gen_insn(INSN_CMP, op_size, 0, 1 + logical);
	gen_address_offset();
	gen_imm_offset();

	gen_insn(!logical ? INSN_JMP_COND : INSN_JMP_COND_LOGICAL, op_size, cond, 0);
	gen_four(label);
#else
	g(gen_address(ctx, ptr, offsetof(struct data, refcount_), IMM_PURPOSE_LDR_OFFSET, op_size));
	gen_insn(INSN_MOV, op_size, 0, 0);
	gen_one(R_SCRATCH_2);
	gen_address_offset();

	g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, op_size, R_SCRATCH_2, val, cond, label));
#endif
	return true;
}
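
/*
 * Compare the tag of the object that "reg" points to against "tag" and jump
 * to "label" if "cond" holds. The tag is read from the pointer itself
 * (DATA_TAG_AT_ALLOC), from the low bits of the refcount (REFCOUNT_TAG), or
 * from the separate tag field; "tmp_reg" is used as scratch.
 */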
static bool attr_w gen_compare_ptr_tag(struct codegen_context *ctx, unsigned reg, unsigned tag, unsigned cond, uint32_t label, unsigned tmp_reg)
{
#if defined(DATA_TAG_AT_ALLOC)
	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHR, tmp_reg, reg, POINTER_IGNORE_START, false));
#elif defined(REFCOUNT_TAG)
#if REFCOUNT_STEP == 256 && defined(C_LITTLE_ENDIAN) && !defined(ARCH_ALPHA)
#if defined(ARCH_X86)
	g(gen_imm(ctx, tag, IMM_PURPOSE_CMP, OP_SIZE_4));
	gen_insn(INSN_CMP, OP_SIZE_1, 0, 1);
	gen_one(ARG_ADDRESS_1);
	gen_one(reg);
	gen_eight(offsetof(struct data, refcount_));
	gen_imm_offset();

	gen_insn(INSN_JMP_COND, OP_SIZE_1, cond, 0);
	gen_four(label);
	return true;
#else
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_one(tmp_reg);
	gen_one(ARG_ADDRESS_1);
	gen_one(reg);
	gen_eight(offsetof(struct data, refcount_));
#endif
#else
	gen_insn(INSN_MOV, log_2(sizeof(refcount_int_t)), 0, 0);
	gen_one(tmp_reg);
	gen_one(ARG_ADDRESS_1);
	gen_one(reg);
	gen_eight(offsetof(struct data, refcount_));
	g(gen_3address_alu_imm(ctx, log_2(sizeof(refcount_int_t)), ALU_AND, tmp_reg, tmp_reg, REFCOUNT_STEP - 1, 0));
#endif
#else
#if defined(ARCH_S390)
	if (sizeof(tag_t) == 1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
		g(gen_address(ctx, reg, offsetof(struct data, tag), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(tag_t))));
		gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
		gen_one(tmp_reg);
		gen_one(tmp_reg);
		gen_address_offset();
		g(gen_extend(ctx, log_2(sizeof(tag_t)), zero_x, tmp_reg, tmp_reg));
	} else
#endif
	{
		g(gen_address(ctx, reg, offsetof(struct data, tag), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(tag_t))));
		gen_insn(INSN_MOV, log_2(sizeof(tag_t)), 0, 0);
		gen_one(tmp_reg);
		gen_address_offset();
	}
#endif
	g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(OP_SIZE_4), tmp_reg, tag, cond, label));
	return true;
}
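
/*
 * Like gen_compare_ptr_tag, but "reg" holds a compressed pointer: it is
 * decompressed into "tmp_reg" first. On x86 the tag byte can instead be
 * read directly through a scaled addressing mode.
 */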
static bool attr_w gen_compare_da_tag(struct codegen_context *ctx, unsigned reg, unsigned tag, unsigned cond, uint32_t label, unsigned tmp_reg)
{
#if defined(POINTER_COMPRESSION)
#if defined(ARCH_X86) && POINTER_COMPRESSION <= 3 && defined(REFCOUNT_TAG) && REFCOUNT_STEP == 256 && defined(C_LITTLE_ENDIAN)
	g(gen_imm(ctx, tag, IMM_PURPOSE_CMP, log_2(sizeof(tag_t))));
	gen_insn(INSN_CMP, log_2(sizeof(tag_t)), 0, 0);
	gen_one(ARG_ADDRESS_1 + POINTER_COMPRESSION);
	gen_one(reg);
	gen_eight(offsetof(struct data, refcount_));
	gen_imm_offset();

	gen_insn(INSN_JMP_COND, OP_SIZE_4, cond, 0);
	gen_four(label);
	return true;
#endif
	if (ARCH_PREFERS_SX(OP_SIZE_4)) {
		g(gen_extend(ctx, OP_SIZE_4, zero_x, tmp_reg, reg));
		g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, tmp_reg, tmp_reg, POINTER_COMPRESSION, false));
	} else {
		g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, tmp_reg, reg, POINTER_COMPRESSION, false));
	}
	g(gen_compare_ptr_tag(ctx, tmp_reg, tag, cond, label, tmp_reg));
	return true;
#else
	g(gen_compare_ptr_tag(ctx, reg, tag, cond, label, tmp_reg));
	return true;
#endif
}
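
/*
 * Jump to "label" if the object's tag is not "tag" or if the object may be
 * shared (refcount >= REFCOUNT_STEP). With REFCOUNT_TAG, one comparison of
 * the refcount word covers both conditions.
 */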
static bool attr_w gen_compare_tag_and_refcount(struct codegen_context *ctx, unsigned reg, unsigned tag, uint32_t label, unsigned attr_unused tmp_reg)
{
#if defined(REFCOUNT_TAG)
	g(gen_compare_refcount(ctx, reg, tag, COND_NE, label));
#else
	g(gen_compare_ptr_tag(ctx, reg, tag, COND_NE, label, tmp_reg));
	g(gen_compare_refcount(ctx, reg, REFCOUNT_STEP, COND_AE, label));
#endif
	return true;
}
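
/*
 * Convert the compressed pointer in "reg" to a real address and add
 * "offset". On x86 the shift and the addition fold into one instruction via
 * a shifted-register operand; elsewhere the pointer is zero-extended if "zx"
 * is set, shifted, and then adjusted by "offset".
 */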
static bool attr_w gen_decompress_pointer(struct codegen_context *ctx, bool attr_unused zx, unsigned reg, int64_t offset)
{
#ifdef POINTER_COMPRESSION
#if defined(ARCH_X86) && POINTER_COMPRESSION <= 3
	g(gen_imm(ctx, offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
	gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
	gen_one(reg);
	gen_one(ARG_SHIFTED_REGISTER);
	gen_one(ARG_SHIFT_LSL | POINTER_COMPRESSION);
	gen_one(reg);
	gen_imm_offset();
	return true;
#endif
	if (zx)
		g(gen_extend(ctx, OP_SIZE_4, zero_x, reg, reg));
	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg, reg, POINTER_COMPRESSION, false));
#endif
	if (offset)
		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg, reg, offset, 0));
	return true;
}
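
/*
 * The inverse of gen_decompress_pointer: shift the address in "reg" right by
 * POINTER_COMPRESSION bits. A no-op when pointer compression is disabled.
 */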
static bool attr_w gen_compress_pointer(struct codegen_context attr_unused *ctx, unsigned attr_unused reg)
{
#ifdef POINTER_COMPRESSION
	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHR, reg, reg, POINTER_COMPRESSION, false));
#endif
	return true;
}
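
/*
 * Load the pointer stored in frame slot "slot" into "dest" so that the
 * caller ends up with an owned reference: a borrowed pointer is converted
 * with the cg_upcall_pointer_reference_owned upcall, and the run-time flag
 * test is elided when the flag's state is known at compile time.
 */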
static bool attr_w gen_frame_get_pointer(struct codegen_context *ctx, frame_t slot, bool deref, unsigned dest)
{
	if (deref) {
		g(gen_upcall_start(ctx, 1));
		g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot, 0, R_ARG0));
		g(gen_upcall_argument(ctx, 0));
		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
		g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot, 0, dest));
	} else if (!da(ctx->fn,function)->local_variables_flags[slot].may_be_borrowed) {
		g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot, 0, dest));
		g(gen_set_1(ctx, R_FRAME, slot, 0, false));
		flag_set(ctx, slot, false);
	} else {
		uint32_t skip_label;
		skip_label = alloc_label(ctx);
		if (unlikely(!skip_label))
			return false;
		if (flag_is_set(ctx, slot)) {
			g(gen_set_1(ctx, R_FRAME, slot, 0, false));
			goto move_it;
		}
		if (flag_is_clear(ctx, slot))
			goto do_reference;
		g(gen_test_1(ctx, R_FRAME, slot, 0, skip_label, false, TEST_CLEAR));
do_reference:
		g(gen_upcall_start(ctx, 1));
		g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot, 0, R_ARG0));
		g(gen_upcall_argument(ctx, 0));
		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
move_it:
		gen_label(skip_label);
		g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot, 0, dest));
		g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot));
		flag_set(ctx, slot, false);
	}
	return true;
}
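
/*
 * Store the pointer in "src" into frame slot "slot" and set the slot's flag
 * bit, both in the frame and in the compile-time flag cache.
 */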
static bool attr_w gen_frame_set_pointer(struct codegen_context *ctx, frame_t slot, unsigned src)
{
	g(gen_set_1(ctx, R_FRAME, slot, 0, true));
	flag_set(ctx, slot, true);
	g(gen_frame_store(ctx, OP_SIZE_SLOT, slot, 0, src));
	return true;
}