/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
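
/*
 * Machine description for the IA-64 (Itanium) code generator. The native
 * operation size is 8 bytes. IA-64 has no flags register: compares set
 * predicate registers instead, so ARCH_HAS_FLAGS is 0 and no ALU or shift
 * operation is reported as writing flags. Long branches (brl) are used for
 * jump targets that do not fit the short IP-relative form when the CPU
 * supports the instruction.
 */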

#define OP_SIZE_NATIVE OP_SIZE_8
#define OP_SIZE_ADDRESS OP_SIZE_NATIVE

#define JMP_LIMIT (cpu_test_feature(CPU_FEATURE_brl) ? JMP_SHORT : JMP_SHORTEST)

#define UNALIGNED_TRAP 1

#define ALU_WRITES_FLAGS(alu, im) 0
#define ALU1_WRITES_FLAGS(alu) 0
#define ROT_WRITES_FLAGS(alu) 0
#define COND_IS_LOGICAL(cond) 0

#define ARCH_PARTIAL_ALU(size) 0
#define ARCH_IS_3ADDRESS 1
#define ARCH_HAS_FLAGS 0
#define ARCH_PREFERS_SX(size) 0
#define ARCH_HAS_BWX 1
#define ARCH_HAS_MUL 0
#define ARCH_HAS_DIV 0
#define ARCH_HAS_ANDN 1
#define ARCH_HAS_SHIFTED_ADD(bits) ((bits) <= 4)
#define ARCH_HAS_BTX(btx, size, cnst) (((btx) == BTX_BTS || (btx) == BTX_BTR) && (cnst))
#define ARCH_SHIFT_SIZE 32
#define ARCH_NEEDS_BARRIER 0

#define i_size(size) OP_SIZE_NATIVE
#define i_size_rot(size) OP_SIZE_NATIVE
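
/*
 * Register usage: the stacked registers r33 and r34 hold the upcall vector
 * pointer and the tick timestamp for the whole duration of the generated
 * code; r35 and r36 preserve the return address (b0) and ar.pfs saved by
 * gen_entry. Scratch values live in the static registers r2, r3 and r14-r20;
 * compare results go to the predicate register p6, and indirect calls and
 * jumps go through the branch register b6.
 */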

#define R_UPCALL R_33
#define R_TIMESTAMP R_34
#define R_SAVED_B0 R_35
#define R_SAVED_AR_PFS R_36
#define R_SAVED_1 R_37
#define R_SAVED_2 R_38

#define R_SCRATCH_NA_1 R_14
#define R_SCRATCH_NA_2 R_15
#define R_SCRATCH_NA_3 R_16
#define R_SCRATCH_1 R_17
#define R_SCRATCH_2 R_18
#define R_SCRATCH_3 R_19
#define R_SCRATCH_4 R_20

#define R_OFFSET_IMM R_2
#define R_CONST_IMM R_3
#define R_CMP_RESULT P_6

#define R_SCRATCH_B B_6

#define FR_SCRATCH_1 FR_6
#define FR_SCRATCH_2 FR_7

#define SUPPORTED_FP 0xe
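
/*
 * Registers are numbered in one flat space: 0x00-0x7f are the general
 * registers, 0x80-0x8f the floating-point registers, 0xa0-0xa7 the predicate
 * registers and 0xb0-0xb7 the branch registers. The reg_is_* predicates
 * classify a register number and the bits_* helpers extract the bits that go
 * into the instruction encoding.
 */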

static inline bool reg_is_gr(unsigned reg)
{
	return reg < 0x80;
}

static inline bool reg_is_fp(unsigned reg)
{
	return reg >= 0x80 && reg < 0x90;
}

static inline bool reg_is_p(unsigned reg)
{
	return reg >= 0xa0 && reg < 0xa8;
}

static inline bool reg_is_b(unsigned reg)
{
	return reg >= 0xb0 && reg < 0xb8;
}

static inline uint64_t bits_gr(unsigned reg)
{
	ajla_assert_lo(reg_is_gr(reg), (file_line, "bits_gr: register %x", reg));
	return reg & 0x7f;
}

static inline uint64_t bits_fp(unsigned reg)
{
	ajla_assert_lo(reg_is_fp(reg), (file_line, "bits_fp: register %x", reg));
	return reg & 0xf;
}

static inline uint64_t bits_p(unsigned reg)
{
	ajla_assert_lo(reg_is_p(reg), (file_line, "bits_p: register %x", reg));
	return reg & 0x7;
}

static inline uint64_t bits_b(unsigned reg)
{
	ajla_assert_lo(reg_is_b(reg), (file_line, "bits_b: register %x", reg));
	return reg & 0x7;
}
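
/*
 * On IA-64 a function pointer does not point at code; it points at a
 * function descriptor: two 8-byte words holding the entry address and the gp
 * value. The stub allocated here is such a descriptor for the generated
 * code:
 *
 *	stub[0] = entry address of the code
 *	stub[1] = gp
 */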

static char *codegen_stub_alloc(struct codegen_context *ctx, char *code)
{
	uintptr_t *stub = mem_alloc_mayfail(uintptr_t *, sizeof(uintptr_t) * 2, &ctx->err);
	if (unlikely(!stub))
		return NULL;
	stub[0] = ptr_to_num(code);
	stub[1] = 0;	/* the gp slot; the generated code does not use gp */
	return cast_ptr(char *, stub);
}
#define codegen_stub_alloc codegen_stub_alloc
#define codegen_stub_free(stub) mem_free(stub)
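
/*
 * The movl pseudo-instruction occupies two slots of a bundle and can
 * materialize an arbitrary 64-bit immediate, so any constant is loaded with
 * a single INSN_MOV.
 */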

static bool attr_w gen_load_constant(struct codegen_context *ctx, unsigned reg, uint64_t c)
{
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(reg);
	gen_one(ARG_IMM);
	gen_eight(c);

	return true;
}

/*static bool attr_w gen_upcall(struct codegen_context *ctx, unsigned offset, unsigned attr_unused n_args);*/

static bool attr_w gen_imm(struct codegen_context *ctx, int64_t imm, unsigned purpose, unsigned size);
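
/*
 * IA-64 loads and stores support only register-indirect addressing; there is
 * no base+displacement form. A nonzero offset must therefore be added to the
 * base register first; the sum is built in R_OFFSET_IMM.
 */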

static bool attr_w gen_address(struct codegen_context *ctx, unsigned base, int64_t imm, unsigned attr_unused purpose, unsigned attr_unused size)
{
	if (!imm) {
		ctx->offset_imm = imm;
		ctx->offset_reg = false;
		ctx->base_reg = base;
		return true;
	}
	g(gen_imm(ctx, imm, IMM_PURPOSE_ADD, OP_SIZE_NATIVE));
	gen_insn(INSN_ALU, OP_SIZE_NATIVE, ALU_ADD, 0);
	gen_one(R_OFFSET_IMM);
	gen_one(base);
	gen_imm_offset();

	ctx->offset_imm = 0;
	ctx->offset_reg = false;
	ctx->base_reg = R_OFFSET_IMM;
	return true;
}
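
/*
 * Immediate ranges of the IA-64 ALU instructions: add takes a 14-bit signed
 * immediate ("adds r1 = imm14, r3"), the logical instructions and compares
 * take an 8-bit signed immediate ("and r1 = imm8, r3"), and a store can
 * encode the value 0 for free because r0 always reads as zero. Subtraction
 * is performed by adding the negated immediate, which shifts the accepted
 * range by one.
 */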

static bool is_direct_const(int64_t imm, unsigned purpose, unsigned size)
{
	switch (purpose) {
		case IMM_PURPOSE_STORE_VALUE:
			if (imm == 0)
				return true;
			break;
		case IMM_PURPOSE_ADD:
		case IMM_PURPOSE_MOVR:
			if (imm >= -0x2000 && imm < 0x2000)
				return true;
			break;
		case IMM_PURPOSE_SUB:
			if (imm > -0x2000 && imm <= 0x2000)
				return true;
			break;
		case IMM_PURPOSE_AND:
		case IMM_PURPOSE_OR:
		case IMM_PURPOSE_XOR:
			if (imm >= -0x80 && imm < 0x80)
				return true;
			break;
		case IMM_PURPOSE_CMP:
			if (imm > -0x80 && imm < 0x80)
				return true;
			break;
		case IMM_PURPOSE_ANDN:
			break;
		case IMM_PURPOSE_TEST:
			break;
		case IMM_PURPOSE_BITWISE:
			break;
		default:
			internal(file_line, "is_direct_const: invalid purpose %u (imm %"PRIxMAX", size %u)", purpose, (uintmax_t)imm, size);
	}
	return false;
}
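
/*
 * Either pass the immediate through directly when the instruction can encode
 * it, or load it into R_CONST_IMM first.
 */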

static bool attr_w gen_imm(struct codegen_context *ctx, int64_t imm, unsigned purpose, unsigned size)
{
	if (is_direct_const(imm, purpose, size)) {
		ctx->const_imm = imm;
		ctx->const_reg = false;
	} else {
		g(gen_load_constant(ctx, R_CONST_IMM, imm));
		ctx->const_reg = true;
	}
	return true;
}
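
/*
 * Function entry: the alloc instruction allocates a register stack frame and
 * saves the previous function state (ar.pfs); the return address in b0 is
 * saved as well, so that gen_escape can restore both and return.
 */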

static bool attr_w gen_entry(struct codegen_context *ctx)
{
	gen_insn(INSN_IA64_ALLOC, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_SAVED_AR_PFS);

	/* save the return address from b0 */
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_SAVED_B0);
	gen_one(B_0);

	/*gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_ARG0);
	gen_one(R_FRAME);
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_ARG1);
	gen_one(R_UPCALL);
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_ARG2);
	gen_one(R_TIMESTAMP);
	g(gen_load_constant(ctx, R_ARG3, 0x123));
	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_debug), 4));*/

	return true;
}
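
/*
 * Escape from generated code: put the address of the escaping instruction
 * into the second result register and jump to the common escape label.
 */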

static bool attr_w gen_escape_arg(struct codegen_context *ctx, ip_t ip, uint32_t escape_label)
{
	g(gen_load_constant(ctx, R_RET1, ip));

	gen_insn(INSN_JMP, 0, 0, 0);
	gen_four(escape_label);

	return true;
}

static bool attr_w gen_escape(struct codegen_context *ctx)
{
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_RET0);
	gen_one(R_FRAME);

	/* restore the return address saved by gen_entry */
	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(B_0);
	gen_one(R_SAVED_B0);

	gen_insn(INSN_IA64_DEALLOC, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_SAVED_AR_PFS);

	gen_insn(INSN_RET, 0, 0, 0);

	return true;
}
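
/*
 * An upcall is a call from generated code back into the Ajla runtime. The
 * entry in the upcall vector is a function descriptor, so the emitted code
 * loads the descriptor address, then the entry point and the gp value from
 * it, and calls through a branch register, roughly:
 *
 *	ld8 r15 = [r14]		// entry address
 *	adds r2 = 8, r14
 *	ld8 r1 = [r2]		// gp
 *	mov b6 = r15
 *	br.call b0 = b6
 */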

static bool attr_w gen_upcall_argument(struct codegen_context attr_unused *ctx, unsigned attr_unused arg)
{
	return true;
}

static bool attr_w gen_upcall(struct codegen_context *ctx, unsigned offset, unsigned attr_unused n_args)
{
	g(gen_address(ctx, R_UPCALL, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_NATIVE));
	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
	gen_one(R_SCRATCH_NA_1);
	gen_address_offset();

	g(gen_address(ctx, R_SCRATCH_NA_1, 0, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_NATIVE));
	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
	gen_one(R_SCRATCH_NA_2);
	gen_address_offset();

	g(gen_address(ctx, R_SCRATCH_NA_1, 8, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_NATIVE));
	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
	gen_one(R_GP);	/* the global pointer r1; the register name is assumed */
	gen_address_offset();

	gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
	gen_one(R_SCRATCH_B);
	gen_one(R_SCRATCH_NA_2);

	gen_insn(INSN_CALL_INDIRECT, OP_SIZE_8, 0, 0);
	gen_one(R_SCRATCH_B);

	return true;
}

static bool attr_w gen_cmp_test_jmp(struct codegen_context *ctx, unsigned insn, unsigned op_size, unsigned reg1, unsigned reg2, unsigned cond, uint32_t label);
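
/*
 * Load the current 32-bit tick counter from the upcall vector and compare it
 * with the value cached in R_TIMESTAMP, branching to the label when the two
 * are equal.
 */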

static bool attr_w gen_timestamp_test(struct codegen_context *ctx, uint32_t label_id)
{
	g(gen_address(ctx, R_UPCALL, offsetof(struct cg_upcall_vector_s, ts), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_NATIVE));
	gen_insn(INSN_MOV, OP_SIZE_4, 0, 0);
	gen_one(R_SCRATCH_1);
	gen_address_offset();

	g(gen_cmp_test_jmp(ctx, INSN_CMP, OP_SIZE_4, R_SCRATCH_1, R_TIMESTAMP, COND_E, label_id));

	return true;
}