/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */

#ifndef AJLA_ARITHM_H
#define AJLA_ARITHM_H

#include "asm.h"
#include "arithm-b.h"

#if (defined(__HP_cc) && EFFICIENT_WORD_SIZE >= 64) ^ defined(UNUSUAL_ARITHMETICS)
#define add_subtract_overflow_test_mode 1
#define neg_overflow_test_mode 1
#else
#define add_subtract_overflow_test_mode 0
#define neg_overflow_test_mode 0
#endif
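
/*
 * When the compiler does not provide __builtin_*_overflow, the gen_generic_*
 * macros below detect signed overflow by doing the arithmetic in the unsigned
 * type, so the wrap-around itself is well defined, and checking the result
 * afterwards. The default mode uses a sign-bit test, the alternate mode an
 * ordering test. Worked example (illustrative, 8-bit operands): 100 + 100
 * wraps to -56; ~(100 ^ 100) & (100 ^ -56) has the sign bit set, and
 * (-56 >= 100) != (100 >= 0), so either test reports the overflow.
 */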

#if defined(HAVE_BUILTIN_ADD_SUB_OVERFLOW) && !defined(UNUSUAL)

#define gen_generic_addsub(fn, type, utype, mode) \
static maybe_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	if (!mode) { \
		if (unlikely(__builtin_add_overflow(*op1, *op2, &r))) \
			return false; \
	} else { \
		if (unlikely(__builtin_sub_overflow(*op1, *op2, &r))) \
			return false; \
	} \
	*res = r; \
	return true; \
}

#define gen_generic_inc_dec(type, utype) \
static maybe_inline bool attr_unused cat(INT_unary_inc_,type)(const type *op, type *res)\
{ \
	type r; \
	if (unlikely(__builtin_add_overflow(*op, 1, &r))) \
		return false; \
	*res = r; \
	return true; \
} \
static maybe_inline bool attr_unused cat(INT_unary_dec_,type)(const type *op, type *res)\
{ \
	type r; \
	if (unlikely(__builtin_sub_overflow(*op, 1, &r))) \
		return false; \
	*res = r; \
	return true; \
}

#else

#define gen_generic_addsub(fn, type, utype, mode) \
static maybe_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1 = *op1, o2 = *op2; \
	type r; \
	if (!mode) { \
		if (sizeof(type) < sizeof(int_efficient_t)) { \
			int_efficient_t lr = (int_efficient_t)o1 + (int_efficient_t)o2;\
			r = (type)lr; \
			if (unlikely(r != lr)) \
				return false; \
		} else { \
			r = (utype)o1 + (utype)o2; \
			if (!(add_subtract_overflow_test_mode)) { \
				if (unlikely((~(o1 ^ o2) & (o2 ^ r) & sign_bit(utype)) != 0))\
					return false; \
			} else { \
				if ((r >= o1) != (o2 >= 0)) \
					return false; \
			} \
		} \
	} else { \
		if (sizeof(type) < sizeof(int_efficient_t)) { \
			int_efficient_t lr = (int_efficient_t)o1 - (int_efficient_t)o2;\
			r = (type)lr; \
			if (unlikely(r != lr)) \
				return false; \
		} else { \
			r = (utype)o1 - (utype)o2; \
			if (!(add_subtract_overflow_test_mode)) { \
				if (unlikely((~(o2 ^ r) & (o1 ^ r) & sign_bit(utype)) != 0))\
					return false; \
			} else { \
				if ((r <= o1) != (o2 >= 0)) \
					return false; \
			} \
		} \
	} \
	*res = r; \
	return true; \
}

#define gen_generic_inc_dec(type, utype) \
static maybe_inline bool attr_unused cat(INT_unary_inc_,type)(const type *op, type *res)\
{ \
	type o = *op; \
	if (unlikely(o == signed_maximum(type))) \
		return false; \
	*res = (utype)o + 1; \
	return true; \
} \
static maybe_inline bool attr_unused cat(INT_unary_dec_,type)(const type *op, type *res)\
{ \
	type o = *op; \
	if (unlikely(o == sign_bit(type))) \
		return false; \
	*res = (utype)o - 1; \
	return true; \
}

#endif
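
/*
 * These macros are instantiated per integer type by for-int.inc (which is
 * expected to include arithm-i.inc, see file_inc at the end of this file).
 * As a purely illustrative, hypothetical expansion,
 * gen_generic_addsub(add, int32_t, uint32_t, 0) would define
 *
 *	static bool INT_binary_add_int32_t(const int32_t *op1,
 *		const int32_t *op2, int32_t *res);
 *
 * returning true and storing the sum, or returning false on overflow
 * (mode 0 selects addition, mode 1 subtraction).
 */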

#if defined(HAVE_BUILTIN_MUL_OVERFLOW) && !defined(UNUSUAL)

#define gen_generic_multiply(type, utype) \
static maybe_inline bool attr_unused cat(INT_binary_multiply_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	if (unlikely(__builtin_mul_overflow(*op1, *op2, &r))) \
		return false; \
	*res = r; \
	return true; \
}

#else

#define generic_multiply_(n, s, u, sz, bits) \
	if (sz >= sizeof(unsigned) && sizeof(type) * 2 <= sz) { \
		u lres = (u)o1 * (u)o2; \
		if (unlikely(lres != (u)(type)lres)) \
			return false; \
		*res = (type)(s)lres; \
		return true; \
	}

#define gen_generic_multiply(type, utype) \
static maybe_inline bool attr_unused cat(INT_binary_multiply_,type)(const type *op1, const type *op2, type *res)\
{ \
	const utype half_sign = (utype)1 << (sizeof(type) * 4); \
	type o1 = *op1, o2 = *op2; \
	type r; \
	for_all_fixed(generic_multiply_) \
	r = (utype)o1 * (utype)o2; \
	if (likely(!(((utype)(o1 + half_sign / 2) | (utype)(o2 + half_sign / 2)) & -half_sign)))\
		goto succeed; \
	if (likely(o1 != 0)) { \
		if (unlikely(o1 == -1) && unlikely(r == sign_bit(type)))\
			return false; \
		if (r / o1 != o2) \
			return false; \
	} \
succeed: \
	*res = r; \
	return true; \
}
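
/*
 * Portable multiplication overflow check: for_all_fixed() first tries
 * generic_multiply_ with a fixed type at least twice as wide, where the
 * full product can be checked directly. Failing that, if both operands
 * fit in half of the type's width (the biased test against half_sign),
 * the product cannot overflow; otherwise the product is verified by
 * dividing it back (r / o1 == o2), with o1 == 0 and o1 == -1 handled
 * separately because that division would be meaningless or could trap.
 */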

#endif

#define gen_generic_divmod(fn, type, utype, operator) \
static maybe_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1 = *op1, o2 = *op2; \
	if (unlikely(!o2)) \
		return false; \
	if (unlikely(o2 == -1) && unlikely(o1 == sign_bit(type))) \
		return false; \
	if (DIVIDE_ROUNDS_TO_ZERO) \
		*res = o1 operator o2; \
	else \
		cat4(FIXED_binary_,fn,_,type)(cast_ptr(const utype *, op1), cast_ptr(const utype *, op2), cast_ptr(utype *, res));\
	return true; \
}

#define gen_generic_divmod_alt1(fn, type, utype) \
static maybe_inline bool attr_unused cat4(INT_binary_,fn,_alt1_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1 = *op1, o2 = *op2; \
	if (unlikely(!o2)) \
		return false; \
	if (unlikely(o2 == -1) && unlikely(o1 == sign_bit(type))) \
		return false; \
	cat4(FIXED_binary_,fn,_alt1_,type)(cast_ptr(const utype *, op1), cast_ptr(const utype *, op2), cast_ptr(utype *, res));\
	return true; \
}
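
/*
 * Signed division and modulo fail in exactly two cases: division by zero
 * and signed_minimum / -1 (e.g. INT32_MIN / -1 does not fit in int32_t).
 * Both are rejected up front; gen_generic_divmod then either uses the C
 * operator directly (when DIVIDE_ROUNDS_TO_ZERO) or delegates to the
 * FIXED_* routine, and the _alt1 variant always delegates to the
 * corresponding FIXED_*_alt1 routine.
 */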

#define gen_generic_int_power(type, utype) \
static bool attr_unused cat(INT_binary_power_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r = 1; \
	type o1 = *op1; \
	type o2 = *op2; \
	if (unlikely(o2 < 0)) \
		return false; \
	while (1) { \
		if (o2 & 1) { \
			if (unlikely(!cat(INT_binary_multiply_,type)(&r, &o1, &r)))\
				return false; \
		} \
		o2 >>= 1; \
		if (!o2) \
			break; \
		if (unlikely(!cat(INT_binary_multiply_,type)(&o1, &o1, &o1))) \
			return false; \
	} \
	*res = r; \
	return true; \
}
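
/*
 * Exponentiation by squaring: the exponent is consumed bit by bit and the
 * base is squared at each step; every multiplication goes through
 * INT_binary_multiply_*, so overflow propagates as failure. For example,
 * with o2 = 10 (binary 1010), the 2nd and 8th powers of the base are
 * multiplied to give the 10th power in four loop iterations. Negative
 * exponents are rejected up front.
 */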

#define gen_generic_shr(type, utype) \
static maybe_inline bool attr_unused cat(INT_binary_shr_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1 = *op1, o2 = *op2; \
	type r; \
	if (unlikely((utype)o2 >= (int)sizeof(type) * 8)) \
		return false; \
	if (!RIGHT_SHIFT_KEEPS_SIGN) \
		if (unlikely(o1 < 0)) \
			return false; \
	r = o1 >> o2; \
	*res = r; \
	return true; \
}

#define gen_generic_shl(type, utype) \
static maybe_inline bool attr_unused cat(INT_binary_shl_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1 = *op1, o2 = *op2; \
	if (unlikely((utype)o2 >= (int)sizeof(type) * 8)) \
		return false; \
	if (sizeof(type) <= sizeof(int_efficient_t) / 2) { \
		int_efficient_t r = (int_efficient_t)o1 << o2; \
		if (unlikely(r != (type)r)) \
			return false; \
		*res = (type)r; \
		return true; \
	} else { \
		type r = (utype)o1 << o2; \
		if (!RIGHT_SHIFT_KEEPS_SIGN) \
			if (unlikely(r < 0)) \
				return false; \
		if (unlikely(r >> o2 != o1)) \
			return false; \
		*res = r; \
		return true; \
	} \
}
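
/*
 * Left shift with overflow detection: shift counts >= the bit width fail.
 * Types narrower than int_efficient_t are shifted in the wider type and
 * the result must survive the round trip through the narrow type;
 * otherwise the shift is done in the unsigned type and checked by
 * shifting back (r >> o2 == o1). E.g. for an 8-bit type, 100 << 1 = 200
 * does not fit and the function returns false.
 */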

#define gen_generic_btx(fn, type, utype, mode) \
static maybe_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	if (unlikely((utype)*op2 >= sizeof(type) * 8 - 1)) { \
		if (unlikely(*op2 < 0)) \
			return false; \
		if (mode == 0 && *op1 < 0) { \
			*res = *op1; \
			return true; \
		} \
		if (mode == 1 && *op1 >= 0) { \
			*res = *op1; \
			return true; \
		} \
		return false; \
	} \
	cat4(FIXED_binary_,fn,_,type)(cast_ptr(utype *, op1), cast_ptr(const utype *, op2), cast_ptr(utype *, res));\
	return true; \
}
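
/*
 * Bit set/reset on a signed integer conceptually operates on an
 * infinitely sign-extended value. An index at or beyond the sign bit is
 * therefore a no-op when the addressed bit already has the requested
 * value (mode 0 with a negative operand, mode 1 with a non-negative one)
 * and an unrepresentable result otherwise; in-range indices are handled
 * by the corresponding FIXED_* routine.
 */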

#define gen_generic_bt(type, utype) \
static maybe_inline bool attr_unused cat(INT_binary_bt_,type)(const type *op1, const type *op2, ajla_flat_option_t *res)\
{ \
	type o1 = *cast_ptr(type *, op1); \
	type o2 = *cast_ptr(type *, op2); \
	if (unlikely((utype)o2 >= sizeof(type) * 8)) { \
		if (unlikely(o2 < 0)) \
			return false; \
		*res = o1 < 0; \
	} else { \
		*res = (ajla_flat_option_t)(((utype)o1 >> o2) & 1); \
	} \
	return true; \
}

#define gen_generic_not(type, utype) \
static ipret_inline bool attr_unused cat(INT_unary_not_,type)(const type *op, type *res)\
{ \
	*res = ~(utype)*op; \
	return true; \
}

#define gen_generic_neg(type, utype) \
static maybe_inline bool attr_unused cat(INT_unary_neg_,type)(const type *op, type *res)\
{ \
	type o = *op; \
	type neg; \
	if (!(neg_overflow_test_mode)) { \
		if (unlikely(o == sign_bit(type))) \
			return false; \
		neg = -(utype)o; \
	} else { \
		neg = -(utype)o; \
		if (unlikely((o & neg) < 0)) \
			return false; \
	} \
	*res = neg; \
	return true; \
}
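
/*
 * Negation overflows only for the most negative value. The default mode
 * tests for that value directly; the alternate mode negates first and
 * relies on the fact that o and -o both have the sign bit set only when
 * o == sign_bit(type) (e.g. for 8 bits, -(-128) wraps back to -128, so
 * (o & neg) < 0).
 */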

#define gen_generic_int_bsfr(fn, type, utype, bits, mode) \
static maybe_inline bool attr_unused cat4(INT_unary_,fn,_,type)(const type *op, type *res)\
{ \
	if (!(mode) && unlikely(!*op)) \
		return false; \
	if ((mode) && unlikely(*op <= 0)) \
		return false; \
	if (!(mode)) \
		cat(FIXED_unary_bsf_,type)(cast_ptr(const utype *, op), cast_ptr(utype *, res));\
	else \
		cat(FIXED_unary_bsr_,type)(cast_ptr(const utype *, op), cast_ptr(utype *, res));\
	return true; \
}

#define gen_generic_int_popcnt(type, utype, bits) \
static maybe_inline bool attr_unused cat(INT_unary_popcnt_,type)(const type *op, type *res)\
{ \
	if (unlikely(*op < 0)) \
		return false; \
	cat(FIXED_unary_popcnt_,type)(cast_ptr(const utype *, op), cast_ptr(utype *, res));\
	return true; \
}

#define gen_generic_int_popcnt_alt1(type, utype, bits) \
static ipret_inline bool attr_unused cat(INT_unary_popcnt_alt1_,type)(const type *op, type *res)\
{ \
	if (unlikely(*op < 0)) \
		return false; \
	cat(FIXED_unary_popcnt_alt1_,type)(cast_ptr(const utype *, op), cast_ptr(utype *, res));\
	return true; \
}

/*
 * X86
 */

#if defined(INLINE_ASM_GCC_X86)

#if defined(INLINE_ASM_GCC_LABELS)

/*
 * This is a trick. The asm goto syntax doesn't allow us to
 * specify that the %0 register changed.
 *
 * We copy the variable op1 to o1 using an asm statement,
 * so that the compiler doesn't know that *op1 == o1. We
 * never ever reference o1 again, so the compiler won't
 * reuse the value in the register %0.
 */
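
/*
 * All asm goto variants follow the same pattern: perform the operation on
 * the copied operand, branch to the C label "overflow" if the overflow
 * flag is set, and otherwise store the result to *res from inside the asm
 * block (hence the "memory" clobber).
 */
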
#define gen_x86_binary(fn, type, utype, instr, suffix, c1, c2, c3) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1; \
	asm_copy(o1, *op1); \
	__asm__ goto (" \n\
		"#instr #suffix" %1, %0 \n\
		jo %l[overflow] \n\
		mov"#suffix" %0, %2 \n\
	" : : c2(o1), c3(*op2), "m"(*res) : "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#define gen_x86_binary_2reg(fn, type, utype, instr1, instr2, suffix, reg)\
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1; \
	asm_copy(o1, *op1); \
	__asm__ goto (" \n\
		"#instr1 #suffix" %1, %%"#reg"ax \n\
		"#instr2 #suffix" %2, %%"#reg"dx \n\
		jo %l[overflow] \n\
		mov"#suffix" %%"#reg"ax, %3 \n\
		mov"#suffix" %%"#reg"dx, %4 \n\
	" : : "A"(o1), \
		"m"(*op2), "m"(*(cast_ptr(char *, op2) + sizeof(type) / 2)),\
		"m"(*res), "m"(*(cast_ptr(char *, res) + sizeof(type) / 2))\
		: "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#else

#define gen_x86_binary(fn, type, utype, instr, suffix, c1, c2, c3) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	uint8_t overflow; \
	__asm__ (" \n\
		"#instr #suffix" %2, %1 \n\
		setob %0 \n\
	" : "=q"X86_ASM_M(overflow), c1(r) : c3(*op2), "1"(*op1) : "cc");\
	if (unlikely(overflow)) \
		return false; \
	*res = r; \
	return true; \
}

#define gen_x86_binary_2reg(fn, type, utype, instr1, instr2, suffix, reg)\
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	uint8_t overflow; \
	__asm__ (" \n\
		"#instr1 #suffix" %2, %%"#reg"ax \n\
		"#instr2 #suffix" %3, %%"#reg"dx \n\
		setob %0 \n\
	" : "=q"X86_ASM_M(overflow), "=A"(r) \
		: "m"(*op2), "m"(*(cast_ptr(char *, op2) + sizeof(type) / 2)),\
		"1"(*op1) \
		: "memory", "cc"); \
	if (unlikely(overflow)) \
		return false; \
	*res = r; \
	return true; \
}

#endif

#if defined(INLINE_ASM_GCC_LABELS)

#define gen_x86_neg(type, utype, suffix, constr) \
static ipret_inline bool attr_unused cat(INT_unary_neg_,type)(const type *op, type *res)\
{ \
	type o; \
	asm_copy(o, *op); \
	__asm__ goto (" \n\
		neg"#suffix" %0 \n\
		jo %l[overflow] \n\
		mov"#suffix" %0, %1 \n\
	" : : constr(o), "m"(*res) : "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#define gen_x86_neg_2reg(type, utype, suffix, reg) \
static ipret_inline bool attr_unused cat(INT_unary_neg_,type)(const type *op, type *res)\
{ \
	type o; \
	asm_copy(o, *op); \
	__asm__ goto (" \n\
		neg"#suffix" %%"#reg"ax \n\
		not"#suffix" %%"#reg"dx \n\
		sbb"#suffix" $-1, %%"#reg"dx \n\
		jo %l[overflow] \n\
		mov"#suffix" %%"#reg"ax, %1 \n\
		mov"#suffix" %%"#reg"dx, %2 \n\
	" : : "A"(o), \
		"m"(*res), "m"(*(cast_ptr(char *, res) + sizeof(type) / 2))\
		: "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#define gen_x86_inc_dec(fn, type, utype, suffix, constr) \
static ipret_inline bool attr_unused cat4(INT_unary_,fn,_,type)(const type *op, type *res)\
{ \
	type o; \
	asm_copy(o, *op); \
	__asm__ goto (" \n\
		"#fn""#suffix" %0 \n\
		jo %l[overflow] \n\
		mov"#suffix" %0, %1 \n\
	" : : constr(o), "m"(*res) : "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#endif

#endif

/*
 * ARM
 */

#if defined(INLINE_ASM_GCC_ARM) || defined(INLINE_ASM_GCC_ARM64)

#if defined(INLINE_ASM_GCC_LABELS)

#define gen_arm_addsub(fn, type, utype, instr, s) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1; \
	asm_copy(o1, *op1); \
	__asm__ goto (ARM_ASM_PREFIX " \n\
		"#instr" %"s"0, %"s"0, %"s"1 \n\
		bvs %l[overflow] \n\
		str %"s"0, %2 \n\
	" : : "r"(o1), "r"(*op2), "m"(*res) : "memory", "cc" : overflow);\
	return true; \
overflow: \
	return false; \
}

#else
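
/*
 * Without asm goto, the status register is read instead of branching on
 * it: mrs copies APSR/CPSR into a general register and the C code tests
 * bit 28, the overflow (V) flag.
 */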

#define gen_arm_addsub(fn, type, utype, instr, s) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	unsigned long overflow; \
	__asm__ (ARM_ASM_PREFIX " \n\
		"#instr" %"s"1, %"s"2, %"s"3 \n\
		mrs %0, "ARM_ASM_APSR" \n\
	" : "=r"(overflow), "=r"(r) : "r"(*op1), "r"(*op2) : "cc"); \
	if (unlikely(overflow & (1 << 28))) \
		return false; \
	*res = r; \
	return true; \
}

#endif

#if defined(INLINE_ASM_GCC_LABELS) && defined(ARM_ASM_STRD)

#define gen_arm_addsub_2reg(fn, type, utype, instr, instr2) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1; \
	asm_copy(o1, *op1); \
	__asm__ goto (ARM_ASM_PREFIX " \n\
		"#instr" %"ARM_ASM_LO"0, %"ARM_ASM_LO"0, %"ARM_ASM_LO"1 \n\
		"#instr2" %"ARM_ASM_HI"0, %"ARM_ASM_HI"0, %"ARM_ASM_HI"1 \n\
		bvs %l[overflow] \n\
		"ARM_ASM_STRD" %"ARM_ASM_LO"0, %"ARM_ASM_HI"0, [ %2 ] \n\
	" : : "r"(o1), "r"(*op2), "r"(res) : "memory", "cc" : overflow);\
	return true; \
overflow: \
	return false; \
}

#else

#define gen_arm_addsub_2reg(fn, type, utype, instr, instr2) \
static ipret_inline bool attr_unused cat4(INT_binary_,fn,_,type)(const type *op1, const type *op2, type *res)\
{ \
	type r; \
	unsigned long overflow; \
	__asm__ (ARM_ASM_PREFIX " \n\
		"#instr" %"ARM_ASM_LO"1, %"ARM_ASM_LO"2, %"ARM_ASM_LO"3 \n\
		"#instr2" %"ARM_ASM_HI"1, %"ARM_ASM_HI"2, %"ARM_ASM_HI"3 \n\
		mrs %0, "ARM_ASM_APSR" \n\
	" : "=r"(overflow), "=r"(r) : "1"(*op1), "r"(*op2) : "cc"); \
	if (unlikely(overflow & (1 << 28))) \
		return false; \
	*res = r; \
	return true; \
}

#endif

#if defined(INLINE_ASM_GCC_LABELS) && ARM_VERSION >= 6
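
/*
 * smull computes the full 64-bit product; it fits in 32 bits exactly when
 * the high half equals the sign extension of the low half (low >> 31),
 * which is what the cmp/asr sequence here (and the eor/asr sequence in
 * the fallback below) tests.
 */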

#define gen_arm_multiply(type, utype) \
static ipret_inline bool attr_unused cat(INT_binary_multiply_,type)(const type *op1, const type *op2, type *res)\
{ \
	type o1, o2; \
	asm_copy(o1, *op1); \
	asm_copy(o2, *op2); \
	__asm__ goto (ARM_ASM_PREFIX " \n\
		smull %0, %1, %0, %1 \n\
		cmp %1, %0, asr #31 \n\
		bne %l[overflow] \n\
		str %0, %2 \n\
	" : : "r"(o1), "r"(o2), "m"(*res) : "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#else

#define gen_arm_multiply(type, utype) \
static ipret_inline bool attr_unused cat(INT_binary_multiply_,type)(const type *op1, const type *op2, type *res)\
{ \
	uint32_t r, overflow; \
	__asm__ (ARM_ASM_PREFIX " \n\
		smull %0, %1, %2, %3 \n\
		eor %1, %1, %0, asr #31 \n\
	" : "=&r"(r), "=&r"(overflow) : "r"(*op1), "r"(*op2)); \
	if (unlikely(overflow != 0)) \
		return false; \
	*res = r; \
	return true; \
}

#endif

#if defined(INLINE_ASM_GCC_LABELS)

#define gen_arm_neg(type, utype, s) \
static ipret_inline bool attr_unused cat(INT_unary_neg_,type)(const type *op, type *res)\
{ \
	type o; \
	asm_copy(o, *op); \
	__asm__ goto (ARM_ASM_PREFIX " \n\
		negs %"s"0, %"s"0 \n\
		bvs %l[overflow] \n\
		str %"s"0, %1 \n\
	" : : "r"(o), "m"(*res) : "memory", "cc" : overflow); \
	return true; \
overflow: \
	return false; \
}

#if defined(INLINE_ASM_GCC_ARM64)
#define arm_neg_2nd "ngcs %"ARM_ASM_HI"0, %"ARM_ASM_HI"0"
#define arm_neg_zreg
#elif defined(INLINE_ASM_GCC_ARM_THUMB2)
#define arm_neg_2nd "sbcs %"ARM_ASM_HI"0, %2, %"ARM_ASM_HI"0"
#define arm_neg_zreg , "r"(0L)
#else
#define arm_neg_2nd "rscs %"ARM_ASM_HI"0, %"ARM_ASM_HI"0, #0"
#define arm_neg_zreg
#endif

#define gen_arm_neg_2reg(type, utype) \
static ipret_inline bool attr_unused cat(INT_unary_neg_,type)(const type *op, type *res)\
{ \
	type o; \
	asm_copy(o, *op); \
	__asm__ goto (ARM_ASM_PREFIX " \n\
		negs %"ARM_ASM_LO"0, %"ARM_ASM_LO"0 \n\
		"arm_neg_2nd" \n\
		bvs %l[overflow] \n\
		"ARM_ASM_STRD" %"ARM_ASM_LO"0, %"ARM_ASM_HI"0, [ %1 ] \n\
	" : : "r"(o), "r"(res) arm_neg_zreg : "memory", "cc" : overflow);\
	return true; \
overflow: \
	return false; \
}

#endif

#endif

#ifdef FIXED_DIVIDE_ALT1_TYPES
#define INT_DIVIDE_ALT1_TYPES FIXED_DIVIDE_ALT1_TYPES
#define INT_DIVIDE_ALT1_FEATURES FIXED_DIVIDE_ALT1_FEATURES
#endif
#ifdef FIXED_MODULO_ALT1_TYPES
#define INT_MODULO_ALT1_TYPES FIXED_MODULO_ALT1_TYPES
#define INT_MODULO_ALT1_FEATURES FIXED_MODULO_ALT1_FEATURES
#endif
#ifdef FIXED_POPCNT_ALT1_TYPES
#define INT_POPCNT_ALT1_TYPES FIXED_POPCNT_ALT1_TYPES
#define INT_POPCNT_ALT1_FEATURES FIXED_POPCNT_ALT1_FEATURES
#endif

#define file_inc "arithm-i.inc"
#include "for-int.inc"

#endif