;; Machine description for RISC-V atomic operations.
;; Copyright (C) 2011-2024 Free Software Foundation, Inc.
;; Contributed by Andrew Waterman (andrew@sifive.com).
;; Based on MIPS target for GNU compiler.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.

;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_c_enum "unspec" [
  UNSPEC_COMPARE_AND_SWAP
  UNSPEC_COMPARE_AND_SWAP_SUBWORD
  UNSPEC_SYNC_OLD_OP
  UNSPEC_SYNC_OLD_OP_SUBWORD
  UNSPEC_SYNC_OLD_OP_ZABHA
  UNSPEC_SYNC_EXCHANGE
  UNSPEC_SYNC_EXCHANGE_SUBWORD
  UNSPEC_SYNC_EXCHANGE_ZABHA
])

(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand" "")] ;; model
  ""
{
  enum memmodel model = memmodel_base (INTVAL (operands[0]));

  if (TARGET_ZTSO && model == MEMMODEL_SEQ_CST)
    {
      rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
      MEM_VOLATILE_P (mem) = 1;
      emit_insn (gen_mem_thread_fence_ztso (mem, operands[0]));
    }
  else if (!TARGET_ZTSO && model != MEMMODEL_RELAXED)
    {
      rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
      MEM_VOLATILE_P (mem) = 1;
      emit_insn (gen_mem_thread_fence_rvwmo (mem, operands[0]));
    }
  DONE;
})

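;; As a hedged illustration (not generated from this file; the fence insns
;; themselves live in sync-rvwmo.md/sync-ztso.md): a call such as
;;   __atomic_thread_fence (__ATOMIC_SEQ_CST);
;; is expected to become a "fence rw,rw" under RVWMO, while under Ztso only
;; the seq_cst fence is emitted and weaker models expand to no code at all.
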
;; Atomic memory operations.

(define_expand "atomic_load<mode>"
  [(match_operand:ANYI 0 "register_operand")
   (match_operand:ANYI 1 "memory_operand")
   (match_operand:SI 2 "const_int_operand")] ;; model
  ""
{
  if (TARGET_ZTSO)
    emit_insn (gen_atomic_load_ztso<mode> (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_atomic_load_rvwmo<mode> (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "atomic_store<mode>"
  [(match_operand:ANYI 0 "memory_operand")
   (match_operand:ANYI 1 "reg_or_0_operand")
   (match_operand:SI 2 "const_int_operand")] ;; model
  ""
{
  if (TARGET_ZTSO)
    emit_insn (gen_atomic_store_ztso<mode> (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_atomic_store_rvwmo<mode> (operands[0], operands[1], operands[2]));
  DONE;
})

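;; A rough sketch of the intended mappings under RVWMO (the actual templates
;; are in sync-rvwmo.md, so treat these as illustrative only):
;;   acquire load:   lw  a0,0(a1)   followed by   fence r,rw
;;   release store:  fence rw,w     followed by   sw  a1,0(a0)
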
(define_insn "atomic_<atomic_optab><mode>"
  [(set (match_operand:SHORT 0 "memory_operand" "+A")
        (unspec_volatile:SHORT
          [(any_atomic:SHORT (match_dup 0)
                             (match_operand:SHORT 1 "reg_or_0_operand" "rJ"))
           (match_operand:SI 2 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP_ZABHA))]
  "TARGET_ZABHA"
  "amo<insn>.<amobh>%A2\tzero,%z1,%0"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

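;; For example (a sketch, assuming Zabha): a relaxed __atomic_fetch_add on a
;; uint8_t whose result is unused can match this pattern and emit a single
;;   amoadd.b  zero,a1,0(a0)
;; with %A2 appending .aq/.rl/.aqrl for stronger memory models.
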
(define_expand "atomic_<atomic_optab><mode>"
  [(any_atomic:GPR (match_operand:GPR 0 "memory_operand")    ;; mem location
                   (match_operand:GPR 1 "reg_or_0_operand")) ;; value for op
   (match_operand:SI 2 "const_int_operand")]                 ;; model
  "TARGET_ZAAMO || TARGET_ZALRSC"
{
  if (TARGET_ZAAMO)
    emit_insn (gen_amo_atomic_<atomic_optab><mode> (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_lrsc_atomic_<atomic_optab><mode> (operands[0], operands[1], operands[2]));
  DONE;
})

(define_insn "amo_atomic_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "memory_operand" "+A")
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 0)
                           (match_operand:GPR 1 "reg_or_0_operand" "rJ"))
           (match_operand:SI 2 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP))]
  "TARGET_ZAAMO"
  "amo<insn>.<amo>%A2\tzero,%z1,%0"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

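;; Illustrative only: with Zaamo, a relaxed __atomic_fetch_and on a uint32_t
;; whose result is discarded is expected to become roughly
;;   amoand.w  zero,a1,0(a0)
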
(define_insn "lrsc_atomic_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "memory_operand" "+A")
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 0)
                           (match_operand:GPR 1 "reg_or_0_operand" "rJ"))
           (match_operand:SI 2 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP))
   (clobber (match_scratch:GPR 3 "=&r"))] ;; tmp_1
  "!TARGET_ZAAMO && TARGET_ZALRSC"
{
  return "1:\;"
         "lr.<amo>%I2\t%3, %0\;"
         "<insn>\t%3, %3, %1\;"
         "sc.<amo>%J2\t%3, %3, %0\;"
         "bnez\t%3, 1b";
}
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 16))])

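;; As a non-authoritative sketch, on a Zalrsc-only target a seq_cst add whose
;; result is unused is expected to expand to a loop along the lines of:
;;   1: lr.w.aqrl  t0, 0(a0)
;;      add        t0, t0, a1
;;      sc.w.rl    t0, t0, 0(a0)
;;      bnez       t0, 1b
;; where the suffix choice comes from the %I2/%J2 modifiers.
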
(define_expand "atomic_fetch_<atomic_optab><mode>"
  [(match_operand:GPR 0 "register_operand")                  ;; old value at mem
   (any_atomic:GPR (match_operand:GPR 1 "memory_operand")    ;; mem location
                   (match_operand:GPR 2 "reg_or_0_operand")) ;; value for op
   (match_operand:SI 3 "const_int_operand")]                 ;; model
  "TARGET_ZAAMO || TARGET_ZALRSC"
{
  if (TARGET_ZAAMO)
    emit_insn (gen_amo_atomic_fetch_<atomic_optab><mode> (operands[0], operands[1],
                                                          operands[2], operands[3]));
  else
    emit_insn (gen_lrsc_atomic_fetch_<atomic_optab><mode> (operands[0], operands[1],
                                                           operands[2], operands[3]));
  DONE;
})

(define_insn "amo_atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+A"))
   (set (match_dup 1)
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 1)
                           (match_operand:GPR 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP))]
  "TARGET_ZAAMO"
  "amo<insn>.<amo>%A3\t%0,%z2,%1"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

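;; E.g. (a sketch): __atomic_fetch_add (&x, y, __ATOMIC_SEQ_CST) on a uint32_t
;; with Zaamo is expected to emit roughly
;;   amoadd.w.aqrl  a0,a1,0(a2)
;; returning the old value of x in a0.
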
(define_insn "lrsc_atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+A"))
   (set (match_dup 1)
        (unspec_volatile:GPR
          [(any_atomic:GPR (match_dup 1)
                           (match_operand:GPR 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP))
   (clobber (match_scratch:GPR 4 "=&r"))] ;; tmp_1
  "!TARGET_ZAAMO && TARGET_ZALRSC"
{
  return "1:\;"
         "lr.<amo>%I3\t%0, %1\;"
         "<insn>\t%4, %0, %2\;"
         "sc.<amo>%J3\t%4, %4, %1\;"
         "bnez\t%4, 1b";
}
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 16))])

(define_insn "subword_atomic_fetch_strong_<atomic_optab>"
  [(set (match_operand:SI 0 "register_operand" "=&r")              ;; old value at mem
        (match_operand:SI 1 "memory_operand" "+A"))                ;; mem location
   (set (match_dup 1)
        (unspec_volatile:SI
          [(any_atomic:SI (match_dup 1)
                          (match_operand:SI 2 "register_operand" "rI")) ;; value for op
           (match_operand:SI 3 "const_int_operand")]               ;; model
         UNSPEC_SYNC_OLD_OP_SUBWORD))
    (match_operand:SI 4 "register_operand" "rI")                   ;; mask
    (match_operand:SI 5 "register_operand" "rI")                   ;; not_mask
    (clobber (match_scratch:SI 6 "=&r"))                           ;; tmp_1
    (clobber (match_scratch:SI 7 "=&r"))]                          ;; tmp_2
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  return "1:\;"
         "lr.w%I3\t%0, %1\;"
         "<insn>\t%6, %0, %2\;"
         "and\t%6, %6, %4\;"
         "and\t%7, %0, %5\;"
         "or\t%7, %7, %6\;"
         "sc.w%J3\t%6, %7, %1\;"
         "bnez\t%6, 1b";
}
  [(set_attr "type" "multi")
   (set (attr "length") (const_int 28))])

(define_expand "atomic_fetch_nand<mode>"
  [(match_operand:SHORT 0 "register_operand")                         ;; old value at mem
   (not:SHORT (and:SHORT (match_operand:SHORT 1 "memory_operand")     ;; mem location
                         (match_operand:SHORT 2 "reg_or_0_operand"))) ;; value for op
   (match_operand:SI 3 "const_int_operand")]                          ;; model
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  /* We have no QImode/HImode atomics, so form a mask, then use
     subword_atomic_fetch_strong_nand to implement an LR/SC version of the
     operation.  */

  /* Logic duplicated in gcc/libgcc/config/riscv/atomic.c for use when inlining
     is disabled.  */

  rtx old = gen_reg_rtx (SImode);
  rtx mem = operands[1];
  rtx value = operands[2];
  rtx model = operands[3];
  rtx aligned_mem = gen_reg_rtx (SImode);
  rtx shift = gen_reg_rtx (SImode);
  rtx mask = gen_reg_rtx (SImode);
  rtx not_mask = gen_reg_rtx (SImode);

  riscv_subword_address (mem, &aligned_mem, &shift, &mask, &not_mask);

  rtx shifted_value = gen_reg_rtx (SImode);
  riscv_lshift_subword (<MODE>mode, value, shift, &shifted_value);

  emit_insn (gen_subword_atomic_fetch_strong_nand (old, aligned_mem,
                                                   shifted_value, model,
                                                   mask, not_mask));

  emit_move_insn (old, gen_rtx_ASHIFTRT (SImode, old,
                                         gen_lowpart (QImode, shift)));

  emit_move_insn (operands[0], gen_lowpart (<MODE>mode, old));

  DONE;
})

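;; A worked example of the subword trick (a sketch, assuming the little-endian
;; layout produced by riscv_subword_address): for a QImode object at address A,
;;   aligned_mem = A & ~3
;;   shift       = (A & 3) * 8
;;   mask        = 0xff << shift
;;   not_mask    = ~mask
;; and shifted_value = value << shift, so the word-sized LR/SC loop only ever
;; changes the selected byte; the old byte is recovered by shifting right again.
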
(define_insn "subword_atomic_fetch_strong_nand"
  [(set (match_operand:SI 0 "register_operand" "=&r")                    ;; old value at mem
        (match_operand:SI 1 "memory_operand" "+A"))                      ;; mem location
   (set (match_dup 1)
        (unspec_volatile:SI
          [(not:SI (and:SI (match_dup 1)
                           (match_operand:SI 2 "register_operand" "rI"))) ;; value for op
           (match_operand:SI 3 "const_int_operand")]                     ;; model
         UNSPEC_SYNC_OLD_OP_SUBWORD))
    (match_operand:SI 4 "register_operand" "rI")                         ;; mask
    (match_operand:SI 5 "register_operand" "rI")                         ;; not_mask
    (clobber (match_scratch:SI 6 "=&r"))                                 ;; tmp_1
    (clobber (match_scratch:SI 7 "=&r"))]                                ;; tmp_2
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  return "1:\;"
         "lr.w%I3\t%0, %1\;"
         "and\t%6, %0, %2\;"
         "not\t%6, %6\;"
         "and\t%6, %6, %4\;"
         "and\t%7, %0, %5\;"
         "or\t%7, %7, %6\;"
         "sc.w%J3\t%6, %7, %1\;"
         "bnez\t%6, 1b";
}
  [(set_attr "type" "multi")
   (set (attr "length") (const_int 32))])

(define_expand "atomic_fetch_<atomic_optab><mode>"
  [(match_operand:SHORT 0 "register_operand")                    ;; old value at mem
   (any_atomic:SHORT (match_operand:SHORT 1 "memory_operand")    ;; mem location
                     (match_operand:SHORT 2 "reg_or_0_operand")) ;; value for op
   (match_operand:SI 3 "const_int_operand")]                     ;; model
  "(TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC) || TARGET_ZABHA"
{
  if (TARGET_ZABHA)
    emit_insn (gen_zabha_atomic_fetch_<atomic_optab><mode> (operands[0], operands[1],
                                                            operands[2], operands[3]));
  else
    emit_insn (gen_lrsc_atomic_fetch_<atomic_optab><mode> (operands[0], operands[1],
                                                           operands[2], operands[3]));
  DONE;
})

(define_insn "zabha_atomic_fetch_<atomic_optab><mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (match_operand:SHORT 1 "memory_operand" "+A"))
   (set (match_dup 1)
        (unspec_volatile:SHORT
          [(any_atomic:SHORT (match_dup 1)
                             (match_operand:SHORT 2 "reg_or_0_operand" "rJ"))
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_OLD_OP_ZABHA))]
  "TARGET_ZABHA"
  "amo<insn>.<amobh>%A3\t%0,%z2,%1"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

(define_expand "lrsc_atomic_fetch_<atomic_optab><mode>"
  [(match_operand:SHORT 0 "register_operand")                    ;; old value at mem
   (any_atomic:SHORT (match_operand:SHORT 1 "memory_operand")    ;; mem location
                     (match_operand:SHORT 2 "reg_or_0_operand")) ;; value for op
   (match_operand:SI 3 "const_int_operand")]                     ;; model
  "!TARGET_ZABHA && TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  /* We have no QImode/HImode atomics, so form a mask, then use
     subword_atomic_fetch_strong_<atomic_optab> to implement an LR/SC version
     of the operation.  */

  /* Logic duplicated in gcc/libgcc/config/riscv/atomic.c for use when inlining
     is disabled.  */

  rtx old = gen_reg_rtx (SImode);
  rtx mem = operands[1];
  rtx value = operands[2];
  rtx model = operands[3];
  rtx aligned_mem = gen_reg_rtx (SImode);
  rtx shift = gen_reg_rtx (SImode);
  rtx mask = gen_reg_rtx (SImode);
  rtx not_mask = gen_reg_rtx (SImode);

  riscv_subword_address (mem, &aligned_mem, &shift, &mask, &not_mask);

  rtx shifted_value = gen_reg_rtx (SImode);
  riscv_lshift_subword (<MODE>mode, value, shift, &shifted_value);

  emit_insn (gen_subword_atomic_fetch_strong_<atomic_optab> (old, aligned_mem,
                                                             shifted_value, model,
                                                             mask, not_mask));

  emit_move_insn (old, gen_rtx_ASHIFTRT (SImode, old,
                                         gen_lowpart (QImode, shift)));

  emit_move_insn (operands[0], gen_lowpart (<MODE>mode, old));

  DONE;
})

;; Atomic exchange ops

(define_expand "atomic_exchange<mode>"
  [(match_operand:GPR 0 "register_operand")  ;; old value at mem
   (match_operand:GPR 1 "memory_operand")    ;; mem location
   (match_operand:GPR 2 "register_operand")  ;; value for op
   (match_operand:SI 3 "const_int_operand")] ;; model
  "TARGET_ZAAMO || TARGET_ZALRSC"
{
  if (TARGET_ZAAMO)
    emit_insn (gen_amo_atomic_exchange<mode> (operands[0], operands[1],
                                              operands[2], operands[3]));
  else
    emit_insn (gen_lrsc_atomic_exchange<mode> (operands[0], operands[1],
                                               operands[2], operands[3]));
  DONE;
})

(define_insn "amo_atomic_exchange<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (unspec_volatile:GPR
          [(match_operand:GPR 1 "memory_operand" "+A")
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_EXCHANGE))
   (set (match_dup 1)
        (match_operand:GPR 2 "register_operand" "0"))]
  "TARGET_ZAAMO"
  "amoswap.<amo>%A3\t%0,%z2,%1"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

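;; Sketch: __atomic_exchange_n (&x, v, __ATOMIC_SEQ_CST) on a uint32_t with
;; Zaamo is expected to emit roughly
;;   amoswap.w.aqrl  a0,a0,0(a1)
;; with the new value in, and the old value out of, the same register.
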
(define_insn "lrsc_atomic_exchange<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (unspec_volatile:GPR
          [(match_operand:GPR 1 "memory_operand" "+A")
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_EXCHANGE))
   (set (match_dup 1)
        (match_operand:GPR 2 "register_operand" "0"))
   (clobber (match_scratch:GPR 4 "=&r"))] ;; tmp_1
  "!TARGET_ZAAMO && TARGET_ZALRSC"
{
  return "1:\;"
         "lr.<amo>%I3\t%4, %1\;"
         "sc.<amo>%J3\t%0, %0, %1\;"
         "bnez\t%0, 1b\;"
         "mv\t%0, %4";
}
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 16))])

(define_expand "atomic_exchange<mode>"
  [(match_operand:SHORT 0 "register_operand") ;; old value at mem
   (match_operand:SHORT 1 "memory_operand")   ;; mem location
   (match_operand:SHORT 2 "register_operand") ;; value
   (match_operand:SI 3 "const_int_operand")]  ;; model
  "(TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC) || TARGET_ZABHA"
{
  if (TARGET_ZABHA)
    emit_insn (gen_zabha_atomic_exchange<mode> (operands[0], operands[1],
                                                operands[2], operands[3]));
  else
    emit_insn (gen_lrsc_atomic_exchange<mode> (operands[0], operands[1],
                                               operands[2], operands[3]));
  DONE;
})

(define_insn "zabha_atomic_exchange<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")
        (unspec_volatile:SHORT
          [(match_operand:SHORT 1 "memory_operand" "+A")
           (match_operand:SI 3 "const_int_operand")] ;; model
         UNSPEC_SYNC_EXCHANGE_ZABHA))
   (set (match_dup 1)
        (match_operand:SHORT 2 "register_operand" "0"))]
  "TARGET_ZABHA"
  "amoswap.<amobh>%A3\t%0,%z2,%1"
  [(set_attr "type" "atomic")
   (set (attr "length") (const_int 4))])

(define_expand "lrsc_atomic_exchange<mode>"
  [(match_operand:SHORT 0 "register_operand") ;; old value at mem
   (match_operand:SHORT 1 "memory_operand")   ;; mem location
   (match_operand:SHORT 2 "register_operand") ;; value
   (match_operand:SI 3 "const_int_operand")]  ;; model
  "!TARGET_ZABHA && TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  rtx old = gen_reg_rtx (SImode);
  rtx mem = operands[1];
  rtx value = operands[2];
  rtx model = operands[3];
  rtx aligned_mem = gen_reg_rtx (SImode);
  rtx shift = gen_reg_rtx (SImode);
  rtx mask = gen_reg_rtx (SImode);
  rtx not_mask = gen_reg_rtx (SImode);

  riscv_subword_address (mem, &aligned_mem, &shift, &mask, &not_mask);

  rtx shifted_value = gen_reg_rtx (SImode);
  riscv_lshift_subword (<MODE>mode, value, shift, &shifted_value);

  emit_insn (gen_subword_atomic_exchange_strong (old, aligned_mem,
                                                 shifted_value, model,
                                                 not_mask));

  emit_move_insn (old, gen_rtx_ASHIFTRT (SImode, old,
                                         gen_lowpart (QImode, shift)));

  emit_move_insn (operands[0], gen_lowpart (<MODE>mode, old));

  DONE;
})

(define_insn "subword_atomic_exchange_strong"
  [(set (match_operand:SI 0 "register_operand" "=&r")  ;; old value at mem
        (match_operand:SI 1 "memory_operand" "+A"))    ;; mem location
   (set (match_dup 1)
        (unspec_volatile:SI
          [(match_operand:SI 2 "reg_or_0_operand" "rI") ;; value
           (match_operand:SI 3 "const_int_operand")]    ;; model
         UNSPEC_SYNC_EXCHANGE_SUBWORD))
    (match_operand:SI 4 "reg_or_0_operand" "rI")        ;; not_mask
    (clobber (match_scratch:SI 5 "=&r"))]               ;; tmp_1
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  return "1:\;"
         "lr.w%I3\t%0, %1\;"
         "and\t%5, %0, %4\;"
         "or\t%5, %5, %2\;"
         "sc.w%J3\t%5, %5, %1\;"
         "bnez\t%5, 1b";
}
  [(set_attr "type" "multi")
   (set (attr "length") (const_int 20))])

(define_insn "zalrsc_atomic_cas_value_strong<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")
        (match_operand:GPR 1 "memory_operand" "+A"))
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "reg_or_0_operand" "rJ")
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ")
                              (match_operand:SI 4 "const_int_operand")  ;; mod_s
                              (match_operand:SI 5 "const_int_operand")] ;; mod_f
         UNSPEC_COMPARE_AND_SWAP))
   (clobber (match_scratch:GPR 6 "=&r"))]
  "TARGET_ZALRSC"
{
  enum memmodel model_success = (enum memmodel) INTVAL (operands[4]);
  enum memmodel model_failure = (enum memmodel) INTVAL (operands[5]);
  /* Find the union of the two memory models so we can satisfy both success
     and failure memory models.  */
  operands[5] = GEN_INT (riscv_union_memmodels (model_success, model_failure));
  return "1:\;"
         "lr.<amo>%I5\t%0,%1\;"
         "bne\t%0,%z2,1f\;"
         "sc.<amo>%J5\t%6,%z3,%1\;"
         "bnez\t%6,1b\;"
         "1:";
}
  [(set_attr "type" "multi")
   (set (attr "length") (const_int 16))])

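;; Worked example of the model union (an illustration of the intent): with
;; mod_s = acquire and mod_f = seq_cst, riscv_union_memmodels picks seq_cst,
;; so one choice of lr/sc annotations is strong enough for both the success
;; and the failure path.
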
;; Implement compare_exchange with a conservative leading fence when
;; model_failure is seq_cst.
;; This allows us to be compatible with the ISA manual Table A.6 and Table A.7.
;; More details: https://github.com/riscv-non-isa/riscv-elf-psabi-doc/issues/444
(define_insn "zacas_atomic_cas_value_strong<mode>"
  [(set (match_operand:GPR 0 "register_operand" "=&r")              ;; val output
        (match_operand:GPR 1 "memory_operand" "+A"))                ;; memory
   (set (match_dup 1)
        (unspec_volatile:GPR [(match_operand:GPR 2 "register_operand" "0")  ;; expected val
                              (match_operand:GPR 3 "reg_or_0_operand" "rJ") ;; desired val
                              (match_operand:SI 4 "const_int_operand")      ;; mod_s
                              (match_operand:SI 5 "const_int_operand")]     ;; mod_f
         UNSPEC_COMPARE_AND_SWAP))]
  "TARGET_ZACAS"
{
  enum memmodel model_success = (enum memmodel) INTVAL (operands[4]);
  enum memmodel model_failure = (enum memmodel) INTVAL (operands[5]);
  /* Find the union of the two memory models so we can satisfy both success
     and failure memory models.  */
  operands[4] = GEN_INT (riscv_union_memmodels (model_success, model_failure));

  if (model_failure == MEMMODEL_SEQ_CST)
    return "fence\trw,rw\;"
           "amocas.<amo>%A4\t%0,%z3,%1";

  return "amocas.<amo>%A4\t%0,%z3,%1";
}
  [(set_attr "type" "atomic")
   (set (attr "length")
        (symbol_ref "(is_mm_seq_cst (memmodel_from_int (INTVAL (operands[5]))) ? 8
                      : 4)"))])

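;; Sketch of the seq_cst-failure case (assuming Zacas): a strong
;; __atomic_compare_exchange_n with both orders seq_cst is expected to emit
;;   fence rw,rw
;;   amocas.w.aqrl  a1,a2,0(a0)
;; which is why the length above is 8 bytes instead of 4.
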
(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand" "")   ;; bool output
   (match_operand:GPR 1 "register_operand" "")  ;; val output
   (match_operand:GPR 2 "memory_operand" "")    ;; memory
   (match_operand:GPR 3 "register_operand" "")  ;; expected value
   (match_operand:GPR 4 "reg_or_0_operand" "")  ;; desired value
   (match_operand:SI 5 "const_int_operand" "")  ;; is_weak
   (match_operand:SI 6 "const_int_operand" "")  ;; mod_s
   (match_operand:SI 7 "const_int_operand" "")] ;; mod_f
  "TARGET_ZALRSC || TARGET_ZACAS"
{
  if (word_mode != <MODE>mode && operands[3] != const0_rtx)
    {
      /* We don't have SI mode compare on RV64, so we need to make sure expected
         value is sign-extended.  */
      rtx tmp0 = gen_reg_rtx (word_mode);
      emit_insn (gen_extend_insn (tmp0, operands[3], word_mode, <MODE>mode, 0));
      operands[3] = simplify_gen_subreg (<MODE>mode, tmp0, word_mode, 0);
    }

  if (TARGET_ZACAS)
    emit_insn (gen_zacas_atomic_cas_value_strong<mode> (operands[1], operands[2],
                                                        operands[3], operands[4],
                                                        operands[6], operands[7]));
  else
    emit_insn (gen_zalrsc_atomic_cas_value_strong<mode> (operands[1], operands[2],
                                                         operands[3], operands[4],
                                                         operands[6], operands[7]));

  rtx compare = operands[1];
  if (operands[3] != const0_rtx)
    {
      rtx difference = gen_rtx_MINUS (<MODE>mode, operands[1], operands[3]);
      compare = gen_reg_rtx (<MODE>mode);
      emit_insn (gen_rtx_SET (compare, difference));
    }

  if (word_mode != <MODE>mode)
    {
      rtx reg = gen_reg_rtx (word_mode);
      emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare)));
      compare = reg;
    }

  emit_insn (gen_rtx_SET (operands[0], gen_rtx_EQ (SImode, compare, const0_rtx)));
  DONE;
})

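;; Usage sketch: a call such as
;;   __atomic_compare_exchange_n (&x, &expected, desired, 0,
;;                                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
;; reaches this expander; the CAS pattern produces the old value, and the
;; boolean result is typically materialized as a subtract against the expected
;; value followed by a seqz.
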
;; Implement compare_exchange with a conservative leading fence when
;; model_failure is seq_cst.
;; This allows us to be compatible with the ISA manual Table A.6 and Table A.7.
;; More details: https://github.com/riscv-non-isa/riscv-elf-psabi-doc/issues/444
(define_insn "zacas_atomic_cas_value_strong<mode>"
  [(set (match_operand:SHORT 0 "register_operand" "=&r")   ;; val output
        (match_operand:SHORT 1 "memory_operand" "+A"))     ;; memory
   (set (match_dup 1)
        (unspec_volatile:SHORT [(match_operand:SHORT 2 "register_operand" "0")  ;; expected_val
                                (match_operand:SHORT 3 "register_operand" "rJ") ;; desired_val
                                (match_operand:SI 4 "const_int_operand")        ;; mod_s
                                (match_operand:SI 5 "const_int_operand")]       ;; mod_f
         UNSPEC_COMPARE_AND_SWAP))]
  "TARGET_ZACAS && TARGET_ZABHA"
{
  enum memmodel model_success = (enum memmodel) INTVAL (operands[4]);
  enum memmodel model_failure = (enum memmodel) INTVAL (operands[5]);
  /* Find the union of the two memory models so we can satisfy both success
     and failure memory models.  */
  operands[4] = GEN_INT (riscv_union_memmodels (model_success, model_failure));

  if (model_failure == MEMMODEL_SEQ_CST)
    return "fence\trw,rw\;"
           "amocas.<amobh>%A4\t%0,%z3,%1";

  return "amocas.<amobh>%A4\t%0,%z3,%1";
}
  [(set_attr "type" "atomic")
   (set (attr "length")
        (symbol_ref "(is_mm_seq_cst (memmodel_from_int (INTVAL (operands[5]))) ? 8
                      : 4)"))])

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "register_operand")    ;; bool output
   (match_operand:SHORT 1 "register_operand") ;; val output
   (match_operand:SHORT 2 "memory_operand")   ;; memory
   (match_operand:SHORT 3 "register_operand") ;; expected value
   (match_operand:SHORT 4 "reg_or_0_operand") ;; desired value
   (match_operand:SI 5 "const_int_operand")   ;; is_weak
   (match_operand:SI 6 "const_int_operand")   ;; mod_s
   (match_operand:SI 7 "const_int_operand")]  ;; mod_f
  "(TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC) || (TARGET_ZACAS && TARGET_ZABHA)"
{
  if (TARGET_ZACAS && TARGET_ZABHA)
    emit_insn (gen_zacas_atomic_cas_value_strong<mode> (operands[1], operands[2],
                                                        operands[3], operands[4],
                                                        operands[6], operands[7]));
  else
    emit_insn (gen_zalrsc_atomic_cas_value_strong<mode> (operands[1], operands[2],
                                                         operands[3], operands[4],
                                                         operands[6], operands[7]));

  rtx val = gen_reg_rtx (SImode);
  if (operands[1] != const0_rtx)
    emit_move_insn (val, gen_rtx_SIGN_EXTEND (SImode, operands[1]));
  else
    emit_move_insn (val, const0_rtx);

  rtx exp = gen_reg_rtx (SImode);
  if (operands[3] != const0_rtx)
    emit_move_insn (exp, gen_rtx_SIGN_EXTEND (SImode, operands[3]));
  else
    emit_move_insn (exp, const0_rtx);

  rtx compare = val;
  if (exp != const0_rtx)
    {
      rtx difference = gen_rtx_MINUS (SImode, val, exp);
      compare = gen_reg_rtx (SImode);
      emit_move_insn (compare, difference);
    }

  if (word_mode != SImode)
    {
      rtx reg = gen_reg_rtx (word_mode);
      emit_move_insn (reg, gen_rtx_SIGN_EXTEND (word_mode, compare));
      compare = reg;
    }

  emit_move_insn (operands[0], gen_rtx_EQ (SImode, compare, const0_rtx));
  DONE;
})

(define_expand "zalrsc_atomic_cas_value_strong<mode>"
  [(match_operand:SHORT 0 "register_operand") ;; val output
   (match_operand:SHORT 1 "memory_operand")   ;; memory
   (match_operand:SHORT 2 "reg_or_0_operand") ;; expected value
   (match_operand:SHORT 3 "reg_or_0_operand") ;; desired value
   (match_operand:SI 4 "const_int_operand")   ;; mod_s
   (match_operand:SI 5 "const_int_operand")   ;; mod_f
   (match_scratch:SHORT 6)]
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  /* We have no QImode/HImode atomics, so form a mask, then use
     subword_atomic_cas_strong to implement an LR/SC version of the
     operation.  */

  /* Logic duplicated in gcc/libgcc/config/riscv/atomic.c for use when inlining
     is disabled.  */

  rtx old = gen_reg_rtx (SImode);
  rtx mem = operands[1];
  rtx aligned_mem = gen_reg_rtx (SImode);
  rtx shift = gen_reg_rtx (SImode);
  rtx mask = gen_reg_rtx (SImode);
  rtx not_mask = gen_reg_rtx (SImode);

  riscv_subword_address (mem, &aligned_mem, &shift, &mask, &not_mask);

  rtx o = operands[2];
  rtx n = operands[3];
  rtx shifted_o = gen_reg_rtx (SImode);
  rtx shifted_n = gen_reg_rtx (SImode);

  riscv_lshift_subword (<MODE>mode, o, shift, &shifted_o);
  riscv_lshift_subword (<MODE>mode, n, shift, &shifted_n);

  emit_move_insn (shifted_o, gen_rtx_AND (SImode, shifted_o, mask));
  emit_move_insn (shifted_n, gen_rtx_AND (SImode, shifted_n, mask));

  enum memmodel model_success = (enum memmodel) INTVAL (operands[4]);
  enum memmodel model_failure = (enum memmodel) INTVAL (operands[5]);
  /* Find the union of the two memory models so we can satisfy both success
     and failure memory models.  */
  rtx model = GEN_INT (riscv_union_memmodels (model_success, model_failure));

  emit_insn (gen_subword_atomic_cas_strong (old, aligned_mem,
                                            shifted_o, shifted_n,
                                            model, mask, not_mask));

  emit_move_insn (old, gen_rtx_ASHIFTRT (SImode, old,
                                         gen_lowpart (QImode, shift)));

  emit_move_insn (operands[0], gen_lowpart (<MODE>mode, old));

  DONE;
})

(define_insn "subword_atomic_cas_strong"
  [(set (match_operand:SI 0 "register_operand" "=&r")  ;; old value at mem
        (match_operand:SI 1 "memory_operand" "+A"))    ;; mem location
   (set (match_dup 1)
        (unspec_volatile:SI [(match_operand:SI 2 "reg_or_0_operand" "rJ")  ;; expected value
                             (match_operand:SI 3 "reg_or_0_operand" "rJ")] ;; desired value
         UNSPEC_COMPARE_AND_SWAP_SUBWORD))
    (match_operand:SI 4 "const_int_operand")     ;; model
    (match_operand:SI 5 "register_operand" "rI") ;; mask
    (match_operand:SI 6 "register_operand" "rI") ;; not_mask
    (clobber (match_scratch:SI 7 "=&r"))]        ;; tmp_1
  "TARGET_ZALRSC && TARGET_INLINE_SUBWORD_ATOMIC"
{
  return "1:\;"
         "lr.w%I4\t%0, %1\;"
         "and\t%7, %0, %5\;"
         "bne\t%7, %z2, 1f\;"
         "and\t%7, %0, %6\;"
         "or\t%7, %7, %3\;"
         "sc.w%J4\t%7, %7, %1\;"
         "bnez\t%7, 1b\;"
         "1:";
}
  [(set_attr "type" "multi")
   (set (attr "length") (const_int 28))])

(define_expand "atomic_test_and_set"
  [(match_operand:QI 0 "register_operand" "")   ;; bool output
   (match_operand:QI 1 "memory_operand" "+A")   ;; memory
   (match_operand:SI 2 "const_int_operand" "")] ;; model
  "TARGET_ZAAMO || TARGET_ZALRSC"
{
  /* We have no QImode atomics, so use the address LSBs to form a mask,
     then use an aligned SImode atomic.  */
  rtx old = gen_reg_rtx (SImode);
  rtx mem = operands[1];
  rtx model = operands[2];
  rtx set = gen_reg_rtx (QImode);
  rtx aligned_mem = gen_reg_rtx (SImode);
  rtx shift = gen_reg_rtx (SImode);

  /* Unused.  */
  rtx _mask = gen_reg_rtx (SImode);
  rtx _not_mask = gen_reg_rtx (SImode);

  riscv_subword_address (mem, &aligned_mem, &shift, &_mask, &_not_mask);

  emit_move_insn (set, GEN_INT (1));
  rtx shifted_set = gen_reg_rtx (SImode);
  riscv_lshift_subword (QImode, set, shift, &shifted_set);

  if (TARGET_ZAAMO)
    emit_insn (gen_amo_atomic_fetch_orsi (old, aligned_mem, shifted_set, model));
  else if (TARGET_ZALRSC)
    emit_insn (gen_lrsc_atomic_fetch_orsi (old, aligned_mem, shifted_set, model));

  emit_move_insn (old, gen_rtx_ASHIFTRT (SImode, old,
                                         gen_lowpart (QImode, shift)));

  emit_move_insn (operands[0], gen_lowpart (QImode, old));

  DONE;
})

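;; Usage sketch: __atomic_test_and_set (&flag, __ATOMIC_ACQUIRE) ends up as an
;; aligned word-sized fetch-or of a shifted 1 (amoor.w.aq under Zaamo, or the
;; equivalent LR/SC loop under Zalrsc-only), with the original flag byte then
;; shifted back down to form the boolean result.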