//===-- FLATInstructions.td - FLAT Instruction Definitions ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

def FLATAtomic : ComplexPattern<i64, 3, "SelectFlatAtomic", [], [SDNPWantRoot], -10>;
def FLATOffset : ComplexPattern<i64, 3, "SelectFlatOffset<false>", [], [SDNPWantRoot], -10>;

def FLATOffsetSigned : ComplexPattern<i64, 3, "SelectFlatOffset<true>", [], [SDNPWantRoot], -10>;
def FLATSignedAtomic : ComplexPattern<i64, 3, "SelectFlatAtomicSigned", [], [SDNPWantRoot], -10>;
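
// Note: each of these ComplexPatterns selects a (vaddr, offset, slc) triple;
// the (FLAT* i64:$vaddr, i16:$offset, i1:$slc) operands in the patterns later
// in this file are matched against these results.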

//===----------------------------------------------------------------------===//
// FLAT classes
//===----------------------------------------------------------------------===//

class FLAT_Pseudo<string opName, dag outs, dag ins,
                  string asmOps, list<dag> pattern=[]> :
  InstSI<outs, ins, "", pattern>,
  SIMCInstr<opName, SIEncodingFamily.NONE> {

  let isPseudo = 1;
  let isCodeGenOnly = 1;

  let UseNamedOperandTable = 1;
  let hasSideEffects = 0;
  let SchedRW = [WriteVMEM];

  string Mnemonic = opName;
  string AsmOperands = asmOps;

  bits<1> is_flat_global = 0;
  bits<1> is_flat_scratch = 0;

  bits<1> has_vdst = 1;

  // We need to distinguish having saddr and enabling saddr because
  // saddr is only valid for scratch and global instructions. Pre-gfx9
  // these bits were reserved, so we also don't necessarily want to
  // set these bits to the disabled value for the original flat
  // segment instructions.
  bits<1> has_saddr = 0;
  bits<1> enabled_saddr = 0;
  bits<7> saddr_value = 0;
  bits<1> has_vaddr = 1;

  bits<1> has_data = 1;
  bits<1> has_glc  = 1;
  bits<1> glcValue = 0;
  bits<1> has_dlc  = 1;
  bits<1> dlcValue = 0;

  let SubtargetPredicate = !if(is_flat_global, HasFlatGlobalInsts,
    !if(is_flat_scratch, HasFlatScratchInsts, HasFlatAddressSpace));

  // TODO: M0 if it could possibly access LDS (before gfx9? only)?
  let Uses = !if(is_flat_global, [EXEC], [EXEC, FLAT_SCR]);

  // Internally, FLAT instructions are executed as both an LDS and a
  // Buffer instruction; so, they increment both VM_CNT and LGKM_CNT
  // and are not considered done until both have been decremented.
  let VM_CNT = 1;
  let LGKM_CNT = !if(!or(is_flat_global, is_flat_scratch), 0, 1);

  let IsNonFlatSeg = !if(!or(is_flat_global, is_flat_scratch), 1, 0);
}

class FLAT_Real <bits<7> op, FLAT_Pseudo ps> :
  InstSI <ps.OutOperandList, ps.InOperandList, ps.Mnemonic # ps.AsmOperands, []>,
  Enc64 {

  let isPseudo = 0;
  let isCodeGenOnly = 0;

  // copy relevant pseudo op flags
  let SubtargetPredicate = ps.SubtargetPredicate;
  let AsmMatchConverter  = ps.AsmMatchConverter;
  let TSFlags = ps.TSFlags;
  let UseNamedOperandTable = ps.UseNamedOperandTable;

  // encoding fields
  bits<8> vaddr;
  bits<8> vdata;
  bits<7> saddr;
  bits<8> vdst;

  bits<1> slc;
  bits<1> glc;
  bits<1> dlc;

  bits<1> lds = 0; // XXX - What does this actually do?

  // Segment, 00=flat, 01=scratch, 10=global, 11=reserved
  bits<2> seg = !if(ps.is_flat_global, 0b10,
                  !if(ps.is_flat_scratch, 0b01, 0));

  // Signed offset. Highest bit ignored for flat and treated as 12-bit
  // unsigned for flat accesses.
  bits<13> offset;
  bits<1> nv = 0; // XXX - What does this actually do?

  // We don't use tfe right now, and it was removed in gfx9.
  bits<1> tfe = 0;

  // Only valid on GFX9+
  let Inst{12-0} = offset;
  let Inst{13} = lds;
  let Inst{15-14} = seg;

  let Inst{16} = !if(ps.has_glc, glc, ps.glcValue);
  let Inst{17} = slc;
  let Inst{24-18} = op;
  let Inst{31-26} = 0x37; // Encoding.
  let Inst{39-32} = !if(ps.has_vaddr, vaddr, ?);
  let Inst{47-40} = !if(ps.has_data, vdata, ?);
  let Inst{54-48} = !if(ps.has_saddr, !if(ps.enabled_saddr, saddr, 0x7f), 0);

  // 54-48 is reserved.
  let Inst{55} = nv; // nv on GFX9+, TFE before.
  let Inst{63-56} = !if(ps.has_vdst, vdst, ?);
}

class GlobalSaddrTable <bit is_saddr, string Name = ""> {
  bit IsSaddr = is_saddr;
  string SaddrOp = Name;
}
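
// GlobalSaddrTable relates the VADDR-only and SADDR forms of a global/scratch
// instruction: both variants record the same base name in SaddrOp, and IsSaddr
// marks which form this is (see the *_SADDR defs in the multiclasses below).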

// TODO: Is exec allowed for saddr? The disabled value 0x7f is the
// same encoding value as exec_hi, so it isn't possible to use that if
// saddr is 32-bit (which isn't handled here yet).
class FLAT_Load_Pseudo <string opName, RegisterClass regClass,
  bit HasTiedOutput = 0,
  bit HasSaddr = 0, bit EnableSaddr = 0> : FLAT_Pseudo<
  opName,
  (outs regClass:$vdst),
  !con(
    !con(
      !con((ins VReg_64:$vaddr),
        !if(EnableSaddr, (ins SReg_64:$saddr), (ins))),
        (ins flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc)),
        !if(HasTiedOutput, (ins regClass:$vdst_in), (ins))),
  " $vdst, $vaddr"#!if(HasSaddr, !if(EnableSaddr, ", $saddr", ", off"), "")#"$offset$glc$slc$dlc"> {
  let has_data = 0;
  let mayLoad = 1;
  let has_saddr = HasSaddr;
  let enabled_saddr = EnableSaddr;
  let PseudoInstr = opName#!if(!and(HasSaddr, EnableSaddr), "_SADDR", "");

  let Constraints = !if(HasTiedOutput, "$vdst = $vdst_in", "");
  let DisableEncoding = !if(HasTiedOutput, "$vdst_in", "");
}
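
// Illustrative only: for a load defined with HasSaddr = 1 and EnableSaddr = 0,
// the operand string above prints something like "v0, v[1:2], off offset:16 glc"
// after the mnemonic (register numbers here are made up for the example).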

class FLAT_Store_Pseudo <string opName, RegisterClass vdataClass,
  bit HasSaddr = 0, bit EnableSaddr = 0> : FLAT_Pseudo<
  opName,
  (outs),
  !con(
    !con((ins VReg_64:$vaddr, vdataClass:$vdata),
      !if(EnableSaddr, (ins SReg_64:$saddr), (ins))),
      (ins flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc)),
  " $vaddr, $vdata"#!if(HasSaddr, !if(EnableSaddr, ", $saddr", ", off"), "")#"$offset$glc$slc$dlc"> {
  let mayLoad  = 0;
  let mayStore = 1;
  let has_vdst = 0;
  let has_saddr = HasSaddr;
  let enabled_saddr = EnableSaddr;
  let PseudoInstr = opName#!if(!and(HasSaddr, EnableSaddr), "_SADDR", "");
}

multiclass FLAT_Global_Load_Pseudo<string opName, RegisterClass regClass, bit HasTiedInput = 0> {
  let is_flat_global = 1 in {
    def "" : FLAT_Load_Pseudo<opName, regClass, HasTiedInput, 1>,
      GlobalSaddrTable<0, opName>;
    def _SADDR : FLAT_Load_Pseudo<opName, regClass, HasTiedInput, 1, 1>,
      GlobalSaddrTable<1, opName>;
  }
}

multiclass FLAT_Global_Store_Pseudo<string opName, RegisterClass regClass> {
  let is_flat_global = 1 in {
    def "" : FLAT_Store_Pseudo<opName, regClass, 1>,
      GlobalSaddrTable<0, opName>;
    def _SADDR : FLAT_Store_Pseudo<opName, regClass, 1, 1>,
      GlobalSaddrTable<1, opName>;
  }
}

class FLAT_Scratch_Load_Pseudo <string opName, RegisterClass regClass,
  bit EnableSaddr = 0>: FLAT_Pseudo<
  opName,
  (outs regClass:$vdst),
  !if(EnableSaddr,
      (ins SReg_32_XEXEC_HI:$saddr, flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc),
      (ins VGPR_32:$vaddr, flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc)),
  " $vdst, "#!if(EnableSaddr, "off", "$vaddr")#!if(EnableSaddr, ", $saddr", ", off")#"$offset$glc$slc$dlc"> {
  let has_data = 0;
  let mayLoad = 1;
  let has_saddr = 1;
  let enabled_saddr = EnableSaddr;
  let has_vaddr = !if(EnableSaddr, 0, 1);
  let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", "");
}

class FLAT_Scratch_Store_Pseudo <string opName, RegisterClass vdataClass, bit EnableSaddr = 0> : FLAT_Pseudo<
  opName,
  (outs),
  !if(EnableSaddr,
    (ins vdataClass:$vdata, SReg_32_XEXEC_HI:$saddr, flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc),
    (ins vdataClass:$vdata, VGPR_32:$vaddr, flat_offset:$offset, GLC:$glc, SLC:$slc, DLC:$dlc)),
  " "#!if(EnableSaddr, "off", "$vaddr")#", $vdata, "#!if(EnableSaddr, "$saddr", "off")#"$offset$glc$slc$dlc"> {
  let mayLoad  = 0;
  let mayStore = 1;
  let has_vdst = 0;
  let has_saddr = 1;
  let enabled_saddr = EnableSaddr;
  let has_vaddr = !if(EnableSaddr, 0, 1);
  let PseudoInstr = opName#!if(EnableSaddr, "_SADDR", "");
}

multiclass FLAT_Scratch_Load_Pseudo<string opName, RegisterClass regClass> {
  let is_flat_scratch = 1 in {
    def "" : FLAT_Scratch_Load_Pseudo<opName, regClass>;
    def _SADDR : FLAT_Scratch_Load_Pseudo<opName, regClass, 1>;
  }
}

multiclass FLAT_Scratch_Store_Pseudo<string opName, RegisterClass regClass> {
  let is_flat_scratch = 1 in {
    def "" : FLAT_Scratch_Store_Pseudo<opName, regClass>;
    def _SADDR : FLAT_Scratch_Store_Pseudo<opName, regClass, 1>;
  }
}
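
// Scratch accesses use either a VGPR address ($vaddr) or an SGPR address
// ($saddr), never both: the EnableSaddr variants above drop vaddr entirely
// (has_vaddr = 0) and print "off" in its place.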

class FLAT_AtomicNoRet_Pseudo<string opName, dag outs, dag ins,
                              string asm, list<dag> pattern = []> :
  FLAT_Pseudo<opName, outs, ins, asm, pattern> {
    let mayLoad  = 1;
    let mayStore = 1;
    let has_glc  = 0;
    let glcValue = 0;
    let has_vdst = 0;
}

class FLAT_AtomicRet_Pseudo<string opName, dag outs, dag ins,
                            string asm, list<dag> pattern = []>
  : FLAT_AtomicNoRet_Pseudo<opName, outs, ins, asm, pattern> {
  let hasPostISelHook = 1;
  let has_vdst = 1;
  let glcValue = 1;
  let PseudoInstr = NAME # "_RTN";
}

multiclass FLAT_Atomic_Pseudo<
  string opName,
  RegisterClass vdst_rc,
  ValueType vt,
  SDPatternOperator atomic = null_frag,
  ValueType data_vt = vt,
  RegisterClass data_rc = vdst_rc,
  bit isFP = isFloatType<data_vt>.ret> {
  def "" : FLAT_AtomicNoRet_Pseudo <opName,
    (outs),
    (ins VReg_64:$vaddr, data_rc:$vdata, flat_offset:$offset, SLC:$slc),
    " $vaddr, $vdata$offset$slc">,
    GlobalSaddrTable<0, opName>,
    AtomicNoRet <opName, 0> {
    let PseudoInstr = NAME;
    let FPAtomic = isFP;
  }

  def _RTN : FLAT_AtomicRet_Pseudo <opName,
    (outs vdst_rc:$vdst),
    (ins VReg_64:$vaddr, data_rc:$vdata, flat_offset:$offset, SLC:$slc),
    " $vdst, $vaddr, $vdata$offset glc$slc",
    [(set vt:$vdst,
      (atomic (FLATAtomic i64:$vaddr, i16:$offset, i1:$slc), data_vt:$vdata))]>,
    GlobalSaddrTable<0, opName#"_rtn">,
    AtomicNoRet <opName, 1> {
    let FPAtomic = isFP;
  }
}
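
// The no-return form above has no vdst and leaves glc clear, while the _RTN
// form returns the pre-op memory value in $vdst and prints " glc" in its
// operand string; only the _RTN form carries a selection pattern.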

multiclass FLAT_Global_Atomic_Pseudo_NO_RTN<
  string opName,
  RegisterClass vdst_rc,
  ValueType vt,
  SDPatternOperator atomic = null_frag,
  ValueType data_vt = vt,
  RegisterClass data_rc = vdst_rc,
  bit isFP = isFloatType<data_vt>.ret> {

  def "" : FLAT_AtomicNoRet_Pseudo <opName,
    (outs),
    (ins VReg_64:$vaddr, data_rc:$vdata, flat_offset:$offset, SLC:$slc),
    " $vaddr, $vdata, off$offset$slc">,
    GlobalSaddrTable<0, opName>,
    AtomicNoRet <opName, 0> {
    let has_saddr = 1;
    let PseudoInstr = NAME;
    let FPAtomic = isFP;
  }

  def _SADDR : FLAT_AtomicNoRet_Pseudo <opName,
    (outs),
    (ins VReg_64:$vaddr, data_rc:$vdata, SReg_64:$saddr, flat_offset:$offset, SLC:$slc),
    " $vaddr, $vdata, $saddr$offset$slc">,
    GlobalSaddrTable<1, opName>,
    AtomicNoRet <opName#"_saddr", 0> {
    let has_saddr = 1;
    let enabled_saddr = 1;
    let PseudoInstr = NAME#"_SADDR";
    let FPAtomic = isFP;
  }
}

multiclass FLAT_Global_Atomic_Pseudo_RTN<
  string opName,
  RegisterClass vdst_rc,
  ValueType vt,
  SDPatternOperator atomic = null_frag,
  ValueType data_vt = vt,
  RegisterClass data_rc = vdst_rc,
  bit isFP = isFloatType<data_vt>.ret> {

  def _RTN : FLAT_AtomicRet_Pseudo <opName,
    (outs vdst_rc:$vdst),
    (ins VReg_64:$vaddr, data_rc:$vdata, flat_offset:$offset, SLC:$slc),
    " $vdst, $vaddr, $vdata, off$offset glc$slc",
    [(set vt:$vdst,
      (atomic (FLATSignedAtomic i64:$vaddr, i16:$offset, i1:$slc), data_vt:$vdata))]>,
    GlobalSaddrTable<0, opName#"_rtn">,
    AtomicNoRet <opName, 1> {
    let has_saddr = 1;
    let FPAtomic = isFP;
  }

  def _SADDR_RTN : FLAT_AtomicRet_Pseudo <opName,
    (outs vdst_rc:$vdst),
    (ins VReg_64:$vaddr, data_rc:$vdata, SReg_64:$saddr, flat_offset:$offset, SLC:$slc),
    " $vdst, $vaddr, $vdata, $saddr$offset glc$slc">,
    GlobalSaddrTable<1, opName#"_rtn">,
    AtomicNoRet <opName#"_saddr", 1> {
    let has_saddr = 1;
    let enabled_saddr = 1;
    let PseudoInstr = NAME#"_SADDR_RTN";
    let FPAtomic = isFP;
  }
}

multiclass FLAT_Global_Atomic_Pseudo<
  string opName,
  RegisterClass vdst_rc,
  ValueType vt,
  SDPatternOperator atomic = null_frag,
  ValueType data_vt = vt,
  RegisterClass data_rc = vdst_rc> :
  FLAT_Global_Atomic_Pseudo_NO_RTN<opName, vdst_rc, vt, atomic, data_vt, data_rc>,
  FLAT_Global_Atomic_Pseudo_RTN<opName, vdst_rc, vt, atomic, data_vt, data_rc>;

class flat_binary_atomic_op<SDNode atomic_op> : PatFrag<
  (ops node:$ptr, node:$value),
  (atomic_op node:$ptr, node:$value),
  [{return cast<MemSDNode>(N)->getAddressSpace() == AMDGPUAS::FLAT_ADDRESS;}]
>;

def atomic_cmp_swap_flat : flat_binary_atomic_op<AMDGPUatomic_cmp_swap>;
def atomic_swap_flat     : flat_binary_atomic_op<atomic_swap>;
def atomic_add_flat      : flat_binary_atomic_op<atomic_load_add>;
def atomic_and_flat      : flat_binary_atomic_op<atomic_load_and>;
def atomic_max_flat      : flat_binary_atomic_op<atomic_load_max>;
def atomic_min_flat      : flat_binary_atomic_op<atomic_load_min>;
def atomic_or_flat       : flat_binary_atomic_op<atomic_load_or>;
def atomic_sub_flat      : flat_binary_atomic_op<atomic_load_sub>;
def atomic_umax_flat     : flat_binary_atomic_op<atomic_load_umax>;
def atomic_umin_flat     : flat_binary_atomic_op<atomic_load_umin>;
def atomic_xor_flat      : flat_binary_atomic_op<atomic_load_xor>;
def atomic_inc_flat      : flat_binary_atomic_op<SIatomic_inc>;
def atomic_dec_flat      : flat_binary_atomic_op<SIatomic_dec>;
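
// These PatFrags wrap the generic atomic nodes with the address-space check
// from flat_binary_atomic_op above, so they only match atomic operations on
// FLAT_ADDRESS pointers.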

//===----------------------------------------------------------------------===//
// Flat Instructions
//===----------------------------------------------------------------------===//

def FLAT_LOAD_UBYTE    : FLAT_Load_Pseudo <"flat_load_ubyte", VGPR_32>;
def FLAT_LOAD_SBYTE    : FLAT_Load_Pseudo <"flat_load_sbyte", VGPR_32>;
def FLAT_LOAD_USHORT   : FLAT_Load_Pseudo <"flat_load_ushort", VGPR_32>;
def FLAT_LOAD_SSHORT   : FLAT_Load_Pseudo <"flat_load_sshort", VGPR_32>;
def FLAT_LOAD_DWORD    : FLAT_Load_Pseudo <"flat_load_dword", VGPR_32>;
def FLAT_LOAD_DWORDX2  : FLAT_Load_Pseudo <"flat_load_dwordx2", VReg_64>;
def FLAT_LOAD_DWORDX4  : FLAT_Load_Pseudo <"flat_load_dwordx4", VReg_128>;
def FLAT_LOAD_DWORDX3  : FLAT_Load_Pseudo <"flat_load_dwordx3", VReg_96>;

def FLAT_STORE_BYTE    : FLAT_Store_Pseudo <"flat_store_byte", VGPR_32>;
def FLAT_STORE_SHORT   : FLAT_Store_Pseudo <"flat_store_short", VGPR_32>;
def FLAT_STORE_DWORD   : FLAT_Store_Pseudo <"flat_store_dword", VGPR_32>;
def FLAT_STORE_DWORDX2 : FLAT_Store_Pseudo <"flat_store_dwordx2", VReg_64>;
def FLAT_STORE_DWORDX4 : FLAT_Store_Pseudo <"flat_store_dwordx4", VReg_128>;
def FLAT_STORE_DWORDX3 : FLAT_Store_Pseudo <"flat_store_dwordx3", VReg_96>;

let SubtargetPredicate = HasD16LoadStore in {
def FLAT_LOAD_UBYTE_D16     : FLAT_Load_Pseudo <"flat_load_ubyte_d16", VGPR_32, 1>;
def FLAT_LOAD_UBYTE_D16_HI  : FLAT_Load_Pseudo <"flat_load_ubyte_d16_hi", VGPR_32, 1>;
def FLAT_LOAD_SBYTE_D16     : FLAT_Load_Pseudo <"flat_load_sbyte_d16", VGPR_32, 1>;
def FLAT_LOAD_SBYTE_D16_HI  : FLAT_Load_Pseudo <"flat_load_sbyte_d16_hi", VGPR_32, 1>;
def FLAT_LOAD_SHORT_D16     : FLAT_Load_Pseudo <"flat_load_short_d16", VGPR_32, 1>;
def FLAT_LOAD_SHORT_D16_HI  : FLAT_Load_Pseudo <"flat_load_short_d16_hi", VGPR_32, 1>;

def FLAT_STORE_BYTE_D16_HI  : FLAT_Store_Pseudo <"flat_store_byte_d16_hi", VGPR_32>;
def FLAT_STORE_SHORT_D16_HI : FLAT_Store_Pseudo <"flat_store_short_d16_hi", VGPR_32>;
} // End SubtargetPredicate = HasD16LoadStore

defm FLAT_ATOMIC_CMPSWAP    : FLAT_Atomic_Pseudo <"flat_atomic_cmpswap",
                                VGPR_32, i32, atomic_cmp_swap_flat,
                                v2i32, VReg_64>;

defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Atomic_Pseudo <"flat_atomic_cmpswap_x2",
                                VReg_64, i64, atomic_cmp_swap_flat,
                                v2i64, VReg_128>;

defm FLAT_ATOMIC_SWAP       : FLAT_Atomic_Pseudo <"flat_atomic_swap",
                                VGPR_32, i32, atomic_swap_flat>;

defm FLAT_ATOMIC_SWAP_X2    : FLAT_Atomic_Pseudo <"flat_atomic_swap_x2",
                                VReg_64, i64, atomic_swap_flat>;

defm FLAT_ATOMIC_ADD        : FLAT_Atomic_Pseudo <"flat_atomic_add",
                                VGPR_32, i32, atomic_add_flat>;

defm FLAT_ATOMIC_SUB        : FLAT_Atomic_Pseudo <"flat_atomic_sub",
                                VGPR_32, i32, atomic_sub_flat>;

defm FLAT_ATOMIC_SMIN       : FLAT_Atomic_Pseudo <"flat_atomic_smin",
                                VGPR_32, i32, atomic_min_flat>;

defm FLAT_ATOMIC_UMIN       : FLAT_Atomic_Pseudo <"flat_atomic_umin",
                                VGPR_32, i32, atomic_umin_flat>;

defm FLAT_ATOMIC_SMAX       : FLAT_Atomic_Pseudo <"flat_atomic_smax",
                                VGPR_32, i32, atomic_max_flat>;

defm FLAT_ATOMIC_UMAX       : FLAT_Atomic_Pseudo <"flat_atomic_umax",
                                VGPR_32, i32, atomic_umax_flat>;

defm FLAT_ATOMIC_AND        : FLAT_Atomic_Pseudo <"flat_atomic_and",
                                VGPR_32, i32, atomic_and_flat>;

defm FLAT_ATOMIC_OR         : FLAT_Atomic_Pseudo <"flat_atomic_or",
                                VGPR_32, i32, atomic_or_flat>;

defm FLAT_ATOMIC_XOR        : FLAT_Atomic_Pseudo <"flat_atomic_xor",
                                VGPR_32, i32, atomic_xor_flat>;

defm FLAT_ATOMIC_INC        : FLAT_Atomic_Pseudo <"flat_atomic_inc",
                                VGPR_32, i32, atomic_inc_flat>;

defm FLAT_ATOMIC_DEC        : FLAT_Atomic_Pseudo <"flat_atomic_dec",
                                VGPR_32, i32, atomic_dec_flat>;

defm FLAT_ATOMIC_ADD_X2     : FLAT_Atomic_Pseudo <"flat_atomic_add_x2",
                                VReg_64, i64, atomic_add_flat>;

defm FLAT_ATOMIC_SUB_X2     : FLAT_Atomic_Pseudo <"flat_atomic_sub_x2",
                                VReg_64, i64, atomic_sub_flat>;

defm FLAT_ATOMIC_SMIN_X2    : FLAT_Atomic_Pseudo <"flat_atomic_smin_x2",
                                VReg_64, i64, atomic_min_flat>;

defm FLAT_ATOMIC_UMIN_X2    : FLAT_Atomic_Pseudo <"flat_atomic_umin_x2",
                                VReg_64, i64, atomic_umin_flat>;

defm FLAT_ATOMIC_SMAX_X2    : FLAT_Atomic_Pseudo <"flat_atomic_smax_x2",
                                VReg_64, i64, atomic_max_flat>;

defm FLAT_ATOMIC_UMAX_X2    : FLAT_Atomic_Pseudo <"flat_atomic_umax_x2",
                                VReg_64, i64, atomic_umax_flat>;

defm FLAT_ATOMIC_AND_X2     : FLAT_Atomic_Pseudo <"flat_atomic_and_x2",
                                VReg_64, i64, atomic_and_flat>;

defm FLAT_ATOMIC_OR_X2      : FLAT_Atomic_Pseudo <"flat_atomic_or_x2",
                                VReg_64, i64, atomic_or_flat>;

defm FLAT_ATOMIC_XOR_X2     : FLAT_Atomic_Pseudo <"flat_atomic_xor_x2",
                                VReg_64, i64, atomic_xor_flat>;

defm FLAT_ATOMIC_INC_X2     : FLAT_Atomic_Pseudo <"flat_atomic_inc_x2",
                                VReg_64, i64, atomic_inc_flat>;

defm FLAT_ATOMIC_DEC_X2     : FLAT_Atomic_Pseudo <"flat_atomic_dec_x2",
                                VReg_64, i64, atomic_dec_flat>;

// GFX7-, GFX10-only flat instructions.
let SubtargetPredicate = isGFX7GFX10 in {

defm FLAT_ATOMIC_FCMPSWAP    : FLAT_Atomic_Pseudo <"flat_atomic_fcmpswap",
                                 VGPR_32, f32, null_frag, v2f32, VReg_64>;

defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Atomic_Pseudo <"flat_atomic_fcmpswap_x2",
                                 VReg_64, f64, null_frag, v2f64, VReg_128>;

defm FLAT_ATOMIC_FMIN        : FLAT_Atomic_Pseudo <"flat_atomic_fmin",
                                 VGPR_32, f32>;

defm FLAT_ATOMIC_FMAX        : FLAT_Atomic_Pseudo <"flat_atomic_fmax",
                                 VGPR_32, f32>;

defm FLAT_ATOMIC_FMIN_X2     : FLAT_Atomic_Pseudo <"flat_atomic_fmin_x2",
                                 VReg_64, f64>;

defm FLAT_ATOMIC_FMAX_X2     : FLAT_Atomic_Pseudo <"flat_atomic_fmax_x2",
                                 VReg_64, f64>;

} // End SubtargetPredicate = isGFX7GFX10

let SubtargetPredicate = HasFlatGlobalInsts in {
defm GLOBAL_LOAD_UBYTE    : FLAT_Global_Load_Pseudo <"global_load_ubyte", VGPR_32>;
defm GLOBAL_LOAD_SBYTE    : FLAT_Global_Load_Pseudo <"global_load_sbyte", VGPR_32>;
defm GLOBAL_LOAD_USHORT   : FLAT_Global_Load_Pseudo <"global_load_ushort", VGPR_32>;
defm GLOBAL_LOAD_SSHORT   : FLAT_Global_Load_Pseudo <"global_load_sshort", VGPR_32>;
defm GLOBAL_LOAD_DWORD    : FLAT_Global_Load_Pseudo <"global_load_dword", VGPR_32>;
defm GLOBAL_LOAD_DWORDX2  : FLAT_Global_Load_Pseudo <"global_load_dwordx2", VReg_64>;
defm GLOBAL_LOAD_DWORDX3  : FLAT_Global_Load_Pseudo <"global_load_dwordx3", VReg_96>;
defm GLOBAL_LOAD_DWORDX4  : FLAT_Global_Load_Pseudo <"global_load_dwordx4", VReg_128>;

defm GLOBAL_LOAD_UBYTE_D16    : FLAT_Global_Load_Pseudo <"global_load_ubyte_d16", VGPR_32, 1>;
defm GLOBAL_LOAD_UBYTE_D16_HI : FLAT_Global_Load_Pseudo <"global_load_ubyte_d16_hi", VGPR_32, 1>;
defm GLOBAL_LOAD_SBYTE_D16    : FLAT_Global_Load_Pseudo <"global_load_sbyte_d16", VGPR_32, 1>;
defm GLOBAL_LOAD_SBYTE_D16_HI : FLAT_Global_Load_Pseudo <"global_load_sbyte_d16_hi", VGPR_32, 1>;
defm GLOBAL_LOAD_SHORT_D16    : FLAT_Global_Load_Pseudo <"global_load_short_d16", VGPR_32, 1>;
defm GLOBAL_LOAD_SHORT_D16_HI : FLAT_Global_Load_Pseudo <"global_load_short_d16_hi", VGPR_32, 1>;

defm GLOBAL_STORE_BYTE    : FLAT_Global_Store_Pseudo <"global_store_byte", VGPR_32>;
defm GLOBAL_STORE_SHORT   : FLAT_Global_Store_Pseudo <"global_store_short", VGPR_32>;
defm GLOBAL_STORE_DWORD   : FLAT_Global_Store_Pseudo <"global_store_dword", VGPR_32>;
defm GLOBAL_STORE_DWORDX2 : FLAT_Global_Store_Pseudo <"global_store_dwordx2", VReg_64>;
defm GLOBAL_STORE_DWORDX3 : FLAT_Global_Store_Pseudo <"global_store_dwordx3", VReg_96>;
defm GLOBAL_STORE_DWORDX4 : FLAT_Global_Store_Pseudo <"global_store_dwordx4", VReg_128>;

defm GLOBAL_STORE_BYTE_D16_HI  : FLAT_Global_Store_Pseudo <"global_store_byte_d16_hi", VGPR_32>;
defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Global_Store_Pseudo <"global_store_short_d16_hi", VGPR_32>;

let is_flat_global = 1 in {
defm GLOBAL_ATOMIC_CMPSWAP : FLAT_Global_Atomic_Pseudo <"global_atomic_cmpswap",
                               VGPR_32, i32, AMDGPUatomic_cmp_swap_global,
                               v2i32, VReg_64>;

defm GLOBAL_ATOMIC_CMPSWAP_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_cmpswap_x2",
                                  VReg_64, i64, AMDGPUatomic_cmp_swap_global,
                                  v2i64, VReg_128>;

defm GLOBAL_ATOMIC_SWAP : FLAT_Global_Atomic_Pseudo <"global_atomic_swap",
                            VGPR_32, i32, atomic_swap_global_32>;

defm GLOBAL_ATOMIC_SWAP_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_swap_x2",
                               VReg_64, i64, atomic_swap_global_64>;

defm GLOBAL_ATOMIC_ADD : FLAT_Global_Atomic_Pseudo <"global_atomic_add",
                           VGPR_32, i32, atomic_load_add_global_32>;

defm GLOBAL_ATOMIC_SUB : FLAT_Global_Atomic_Pseudo <"global_atomic_sub",
                           VGPR_32, i32, atomic_load_sub_global_32>;

defm GLOBAL_ATOMIC_SMIN : FLAT_Global_Atomic_Pseudo <"global_atomic_smin",
                            VGPR_32, i32, atomic_load_min_global_32>;

defm GLOBAL_ATOMIC_UMIN : FLAT_Global_Atomic_Pseudo <"global_atomic_umin",
                            VGPR_32, i32, atomic_load_umin_global_32>;

defm GLOBAL_ATOMIC_SMAX : FLAT_Global_Atomic_Pseudo <"global_atomic_smax",
                            VGPR_32, i32, atomic_load_max_global_32>;

defm GLOBAL_ATOMIC_UMAX : FLAT_Global_Atomic_Pseudo <"global_atomic_umax",
                            VGPR_32, i32, atomic_load_umax_global_32>;

defm GLOBAL_ATOMIC_AND : FLAT_Global_Atomic_Pseudo <"global_atomic_and",
                           VGPR_32, i32, atomic_load_and_global_32>;

defm GLOBAL_ATOMIC_OR : FLAT_Global_Atomic_Pseudo <"global_atomic_or",
                          VGPR_32, i32, atomic_load_or_global_32>;

defm GLOBAL_ATOMIC_XOR : FLAT_Global_Atomic_Pseudo <"global_atomic_xor",
                           VGPR_32, i32, atomic_load_xor_global_32>;

defm GLOBAL_ATOMIC_INC : FLAT_Global_Atomic_Pseudo <"global_atomic_inc",
                           VGPR_32, i32, atomic_inc_global_32>;

defm GLOBAL_ATOMIC_DEC : FLAT_Global_Atomic_Pseudo <"global_atomic_dec",
                           VGPR_32, i32, atomic_dec_global_32>;

defm GLOBAL_ATOMIC_ADD_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_add_x2",
                              VReg_64, i64, atomic_load_add_global_64>;

defm GLOBAL_ATOMIC_SUB_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_sub_x2",
                              VReg_64, i64, atomic_load_sub_global_64>;

defm GLOBAL_ATOMIC_SMIN_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_smin_x2",
                               VReg_64, i64, atomic_load_min_global_64>;

defm GLOBAL_ATOMIC_UMIN_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_umin_x2",
                               VReg_64, i64, atomic_load_umin_global_64>;

defm GLOBAL_ATOMIC_SMAX_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_smax_x2",
                               VReg_64, i64, atomic_load_max_global_64>;

defm GLOBAL_ATOMIC_UMAX_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_umax_x2",
                               VReg_64, i64, atomic_load_umax_global_64>;

defm GLOBAL_ATOMIC_AND_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_and_x2",
                              VReg_64, i64, atomic_load_and_global_64>;

defm GLOBAL_ATOMIC_OR_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_or_x2",
                             VReg_64, i64, atomic_load_or_global_64>;

defm GLOBAL_ATOMIC_XOR_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_xor_x2",
                              VReg_64, i64, atomic_load_xor_global_64>;

defm GLOBAL_ATOMIC_INC_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_inc_x2",
                              VReg_64, i64, atomic_inc_global_64>;

defm GLOBAL_ATOMIC_DEC_X2 : FLAT_Global_Atomic_Pseudo <"global_atomic_dec_x2",
                              VReg_64, i64, atomic_dec_global_64>;
} // End is_flat_global = 1

} // End SubtargetPredicate = HasFlatGlobalInsts

let SubtargetPredicate = HasFlatScratchInsts in {
defm SCRATCH_LOAD_UBYTE    : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte", VGPR_32>;
defm SCRATCH_LOAD_SBYTE    : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte", VGPR_32>;
defm SCRATCH_LOAD_USHORT   : FLAT_Scratch_Load_Pseudo <"scratch_load_ushort", VGPR_32>;
defm SCRATCH_LOAD_SSHORT   : FLAT_Scratch_Load_Pseudo <"scratch_load_sshort", VGPR_32>;
defm SCRATCH_LOAD_DWORD    : FLAT_Scratch_Load_Pseudo <"scratch_load_dword", VGPR_32>;
defm SCRATCH_LOAD_DWORDX2  : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx2", VReg_64>;
defm SCRATCH_LOAD_DWORDX3  : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx3", VReg_96>;
defm SCRATCH_LOAD_DWORDX4  : FLAT_Scratch_Load_Pseudo <"scratch_load_dwordx4", VReg_128>;

defm SCRATCH_LOAD_UBYTE_D16    : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte_d16", VGPR_32>;
defm SCRATCH_LOAD_UBYTE_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_ubyte_d16_hi", VGPR_32>;
defm SCRATCH_LOAD_SBYTE_D16    : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte_d16", VGPR_32>;
defm SCRATCH_LOAD_SBYTE_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_sbyte_d16_hi", VGPR_32>;
defm SCRATCH_LOAD_SHORT_D16    : FLAT_Scratch_Load_Pseudo <"scratch_load_short_d16", VGPR_32>;
defm SCRATCH_LOAD_SHORT_D16_HI : FLAT_Scratch_Load_Pseudo <"scratch_load_short_d16_hi", VGPR_32>;

defm SCRATCH_STORE_BYTE    : FLAT_Scratch_Store_Pseudo <"scratch_store_byte", VGPR_32>;
defm SCRATCH_STORE_SHORT   : FLAT_Scratch_Store_Pseudo <"scratch_store_short", VGPR_32>;
defm SCRATCH_STORE_DWORD   : FLAT_Scratch_Store_Pseudo <"scratch_store_dword", VGPR_32>;
defm SCRATCH_STORE_DWORDX2 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx2", VReg_64>;
defm SCRATCH_STORE_DWORDX3 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx3", VReg_96>;
defm SCRATCH_STORE_DWORDX4 : FLAT_Scratch_Store_Pseudo <"scratch_store_dwordx4", VReg_128>;

defm SCRATCH_STORE_BYTE_D16_HI  : FLAT_Scratch_Store_Pseudo <"scratch_store_byte_d16_hi", VGPR_32>;
defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Scratch_Store_Pseudo <"scratch_store_short_d16_hi", VGPR_32>;

} // End SubtargetPredicate = HasFlatScratchInsts

let SubtargetPredicate = isGFX10Plus, is_flat_global = 1 in {
  defm GLOBAL_ATOMIC_FCMPSWAP :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fcmpswap", VGPR_32, f32>;
  defm GLOBAL_ATOMIC_FMIN :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fmin", VGPR_32, f32>;
  defm GLOBAL_ATOMIC_FMAX :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fmax", VGPR_32, f32>;
  defm GLOBAL_ATOMIC_FCMPSWAP_X2 :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fcmpswap_x2", VReg_64, f64>;
  defm GLOBAL_ATOMIC_FMIN_X2 :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fmin_x2", VReg_64, f64>;
  defm GLOBAL_ATOMIC_FMAX_X2 :
    FLAT_Global_Atomic_Pseudo<"global_atomic_fmax_x2", VReg_64, f64>;
} // End SubtargetPredicate = isGFX10Plus, is_flat_global = 1

let SubtargetPredicate = HasAtomicFaddInsts, is_flat_global = 1 in {

defm GLOBAL_ATOMIC_ADD_F32 : FLAT_Global_Atomic_Pseudo_NO_RTN <
  "global_atomic_add_f32", VGPR_32, f32, atomic_fadd_global_noret
>;
defm GLOBAL_ATOMIC_PK_ADD_F16 : FLAT_Global_Atomic_Pseudo_NO_RTN <
  "global_atomic_pk_add_f16", VGPR_32, v2f16, atomic_pk_fadd_global_noret
>;

} // End SubtargetPredicate = HasAtomicFaddInsts

//===----------------------------------------------------------------------===//
// Flat Patterns
//===----------------------------------------------------------------------===//

// Patterns for global loads with no offset.
class FlatLoadPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (vt (node (FLATOffset i64:$vaddr, i16:$offset, i1:$slc))),
  (inst $vaddr, $offset, 0, 0, $slc)
>;

class FlatLoadPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (node (FLATOffset (i64 VReg_64:$vaddr), i16:$offset, i1:$slc), vt:$in),
  (inst $vaddr, $offset, 0, 0, $slc, $in)
>;

class FlatSignedLoadPat_D16 <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (node (FLATOffsetSigned (i64 VReg_64:$vaddr), i16:$offset, i1:$slc), vt:$in),
  (inst $vaddr, $offset, 0, 0, $slc, $in)
>;

class FlatLoadAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (vt (node (FLATAtomic (i64 VReg_64:$vaddr), i16:$offset, i1:$slc))),
  (inst $vaddr, $offset, 0, 0, $slc)
>;

class FlatLoadSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (vt (node (FLATOffsetSigned (i64 VReg_64:$vaddr), i16:$offset, i1:$slc))),
  (inst $vaddr, $offset, 0, 0, $slc)
>;

class FlatStorePat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt, RegisterClass rc = VGPR_32> : GCNPat <
  (node vt:$data, (FLATOffset i64:$vaddr, i16:$offset, i1:$slc)),
  (inst $vaddr, rc:$data, $offset, 0, 0, $slc)
>;

class FlatStoreSignedPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt, RegisterClass rc = VGPR_32> : GCNPat <
  (node vt:$data, (FLATOffsetSigned i64:$vaddr, i16:$offset, i1:$slc)),
  (inst $vaddr, rc:$data, $offset, 0, 0, $slc)
>;

class FlatStoreAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt, RegisterClass rc = VGPR_32> : GCNPat <
  // atomic store follows atomic binop convention so the address comes
  // first.
  (node (FLATAtomic i64:$vaddr, i16:$offset, i1:$slc), vt:$data),
  (inst $vaddr, rc:$data, $offset, 0, 0, $slc)
>;

class FlatStoreSignedAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt, RegisterClass rc = VGPR_32> : GCNPat <
  // atomic store follows atomic binop convention so the address comes
  // first.
  (node (FLATSignedAtomic i64:$vaddr, i16:$offset, i1:$slc), vt:$data),
  (inst $vaddr, rc:$data, $offset, 0, 0, $slc)
>;

class FlatAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt,
                     ValueType data_vt = vt> : GCNPat <
  (vt (node (FLATAtomic i64:$vaddr, i16:$offset, i1:$slc), data_vt:$data)),
  (inst $vaddr, $data, $offset, $slc)
>;

class FlatAtomicPatNoRtn <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt> : GCNPat <
  (node (FLATAtomic i64:$vaddr, i16:$offset, i1:$slc), vt:$data),
  (inst $vaddr, $data, $offset, $slc)
>;

class FlatSignedAtomicPat <FLAT_Pseudo inst, SDPatternOperator node, ValueType vt,
                           ValueType data_vt = vt> : GCNPat <
  (vt (node (FLATSignedAtomic i64:$vaddr, i16:$offset, i1:$slc), data_vt:$data)),
  (inst $vaddr, $data, $offset, $slc)
>;
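
// Each pattern above forwards the ($vaddr, $offset, $slc) triple produced by
// the ComplexPattern straight into the selected instruction; any remaining
// modifier operands are hard-coded to 0.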

let OtherPredicates = [HasFlatAddressSpace] in {

def : FlatLoadPat <FLAT_LOAD_UBYTE, extloadi8_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_UBYTE, zextloadi8_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_SBYTE, sextloadi8_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_UBYTE, extloadi8_flat, i16>;
def : FlatLoadPat <FLAT_LOAD_UBYTE, zextloadi8_flat, i16>;
def : FlatLoadPat <FLAT_LOAD_SBYTE, sextloadi8_flat, i16>;
def : FlatLoadPat <FLAT_LOAD_USHORT, extloadi16_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_USHORT, zextloadi16_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_USHORT, load_flat, i16>;
def : FlatLoadPat <FLAT_LOAD_SSHORT, sextloadi16_flat, i32>;
def : FlatLoadPat <FLAT_LOAD_DWORDX3, load_flat, v3i32>;
def : FlatLoadPat <FLAT_LOAD_DWORDX4, load_flat, v4i32>;

def : FlatLoadAtomicPat <FLAT_LOAD_DWORD, atomic_load_32_flat, i32>;
def : FlatLoadAtomicPat <FLAT_LOAD_DWORDX2, atomic_load_64_flat, i64>;

def : FlatStorePat <FLAT_STORE_BYTE, truncstorei8_flat, i32>;
def : FlatStorePat <FLAT_STORE_SHORT, truncstorei16_flat, i32>;

foreach vt = Reg32Types.types in {
def : FlatLoadPat <FLAT_LOAD_DWORD, load_flat, vt>;
def : FlatStorePat <FLAT_STORE_DWORD, store_flat, vt>;
}

foreach vt = VReg_64.RegTypes in {
def : FlatStorePat <FLAT_STORE_DWORDX2, store_flat, vt, VReg_64>;
def : FlatLoadPat <FLAT_LOAD_DWORDX2, load_flat, vt>;
}

def : FlatStorePat <FLAT_STORE_DWORDX3, store_flat, v3i32, VReg_96>;
def : FlatStorePat <FLAT_STORE_DWORDX4, store_flat, v4i32, VReg_128>;

def : FlatStoreAtomicPat <FLAT_STORE_DWORD, atomic_store_flat_32, i32>;
def : FlatStoreAtomicPat <FLAT_STORE_DWORDX2, atomic_store_flat_64, i64, VReg_64>;

def : FlatAtomicPat <FLAT_ATOMIC_ADD_RTN, atomic_load_add_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_SUB_RTN, atomic_load_sub_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_INC_RTN, atomic_inc_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_DEC_RTN, atomic_dec_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_AND_RTN, atomic_load_and_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_SMAX_RTN, atomic_load_max_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_UMAX_RTN, atomic_load_umax_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_SMIN_RTN, atomic_load_min_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_UMIN_RTN, atomic_load_umin_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_OR_RTN, atomic_load_or_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_SWAP_RTN, atomic_swap_global_32, i32>;
def : FlatAtomicPat <FLAT_ATOMIC_CMPSWAP_RTN, AMDGPUatomic_cmp_swap_global, i32, v2i32>;
def : FlatAtomicPat <FLAT_ATOMIC_XOR_RTN, atomic_load_xor_global_32, i32>;

def : FlatAtomicPat <FLAT_ATOMIC_ADD_X2_RTN, atomic_load_add_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_SUB_X2_RTN, atomic_load_sub_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_INC_X2_RTN, atomic_inc_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_DEC_X2_RTN, atomic_dec_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_AND_X2_RTN, atomic_load_and_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_SMAX_X2_RTN, atomic_load_max_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_UMAX_X2_RTN, atomic_load_umax_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_SMIN_X2_RTN, atomic_load_min_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_UMIN_X2_RTN, atomic_load_umin_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_OR_X2_RTN, atomic_load_or_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_SWAP_X2_RTN, atomic_swap_global_64, i64>;
def : FlatAtomicPat <FLAT_ATOMIC_CMPSWAP_X2_RTN, AMDGPUatomic_cmp_swap_global, i64, v2i64>;
def : FlatAtomicPat <FLAT_ATOMIC_XOR_X2_RTN, atomic_load_xor_global_64, i64>;

def : FlatStorePat <FLAT_STORE_BYTE, truncstorei8_flat, i16>;
def : FlatStorePat <FLAT_STORE_SHORT, store_flat, i16>;

let OtherPredicates = [D16PreservesUnusedBits] in {
def : FlatStorePat <FLAT_STORE_SHORT_D16_HI, truncstorei16_hi16_flat, i32>;
def : FlatStorePat <FLAT_STORE_BYTE_D16_HI, truncstorei8_hi16_flat, i32>;

def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_flat, v2f16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_flat, v2f16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16_HI, load_d16_hi_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16_HI, load_d16_hi_flat, v2f16>;

def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16, az_extloadi8_d16_lo_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_UBYTE_D16, az_extloadi8_d16_lo_flat, v2f16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16, sextloadi8_d16_lo_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SBYTE_D16, sextloadi8_d16_lo_flat, v2f16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16, load_d16_lo_flat, v2i16>;
def : FlatLoadPat_D16 <FLAT_LOAD_SHORT_D16, load_d16_lo_flat, v2f16>;
}

} // End OtherPredicates = [HasFlatAddressSpace]

let OtherPredicates = [HasFlatGlobalInsts], AddedComplexity = 10 in {

def : FlatLoadSignedPat <GLOBAL_LOAD_UBYTE, extloadi8_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_UBYTE, zextloadi8_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_SBYTE, sextloadi8_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_UBYTE, extloadi8_global, i16>;
def : FlatLoadSignedPat <GLOBAL_LOAD_UBYTE, zextloadi8_global, i16>;
def : FlatLoadSignedPat <GLOBAL_LOAD_SBYTE, sextloadi8_global, i16>;
def : FlatLoadSignedPat <GLOBAL_LOAD_USHORT, extloadi16_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_USHORT, zextloadi16_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_SSHORT, sextloadi16_global, i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_USHORT, load_global, i16>;

foreach vt = Reg32Types.types in {
def : FlatLoadSignedPat <GLOBAL_LOAD_DWORD, load_global, vt>;
def : FlatStoreSignedPat <GLOBAL_STORE_DWORD, store_global, vt, VGPR_32>;
}

foreach vt = VReg_64.RegTypes in {
def : FlatLoadSignedPat <GLOBAL_LOAD_DWORDX2, load_global, vt>;
def : FlatStoreSignedPat <GLOBAL_STORE_DWORDX2, store_global, vt, VReg_64>;
}

def : FlatLoadSignedPat <GLOBAL_LOAD_DWORDX3, load_global, v3i32>;
def : FlatLoadSignedPat <GLOBAL_LOAD_DWORDX4, load_global, v4i32>;

def : FlatLoadAtomicPat <GLOBAL_LOAD_DWORD, atomic_load_32_global, i32>;
def : FlatLoadAtomicPat <GLOBAL_LOAD_DWORDX2, atomic_load_64_global, i64>;

def : FlatStoreSignedPat <GLOBAL_STORE_BYTE, truncstorei8_global, i32, VGPR_32>;
def : FlatStoreSignedPat <GLOBAL_STORE_BYTE, truncstorei8_global, i16, VGPR_32>;
def : FlatStoreSignedPat <GLOBAL_STORE_SHORT, truncstorei16_global, i32, VGPR_32>;
def : FlatStoreSignedPat <GLOBAL_STORE_SHORT, store_global, i16, VGPR_32>;
def : FlatStoreSignedPat <GLOBAL_STORE_DWORDX3, store_global, v3i32, VReg_96>;
def : FlatStoreSignedPat <GLOBAL_STORE_DWORDX4, store_global, v4i32, VReg_128>;

let OtherPredicates = [D16PreservesUnusedBits] in {
def : FlatStoreSignedPat <GLOBAL_STORE_SHORT_D16_HI, truncstorei16_hi16_global, i32>;
def : FlatStoreSignedPat <GLOBAL_STORE_BYTE_D16_HI, truncstorei8_hi16_global, i32>;

def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_UBYTE_D16_HI, az_extloadi8_d16_hi_global, v2f16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SBYTE_D16_HI, sextloadi8_d16_hi_global, v2f16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SHORT_D16_HI, load_d16_hi_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SHORT_D16_HI, load_d16_hi_global, v2f16>;

def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_UBYTE_D16, az_extloadi8_d16_lo_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_UBYTE_D16, az_extloadi8_d16_lo_global, v2f16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SBYTE_D16, sextloadi8_d16_lo_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SBYTE_D16, sextloadi8_d16_lo_global, v2f16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SHORT_D16, load_d16_lo_global, v2i16>;
def : FlatSignedLoadPat_D16 <GLOBAL_LOAD_SHORT_D16, load_d16_lo_global, v2f16>;
}

def : FlatStoreSignedAtomicPat <GLOBAL_STORE_DWORD, store_atomic_global, i32>;
def : FlatStoreSignedAtomicPat <GLOBAL_STORE_DWORDX2, store_atomic_global, i64, VReg_64>;

def : FlatSignedAtomicPat <GLOBAL_ATOMIC_ADD_RTN, atomic_load_add_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SUB_RTN, atomic_load_sub_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_INC_RTN, atomic_inc_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_DEC_RTN, atomic_dec_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_AND_RTN, atomic_load_and_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SMAX_RTN, atomic_load_max_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_UMAX_RTN, atomic_load_umax_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SMIN_RTN, atomic_load_min_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_UMIN_RTN, atomic_load_umin_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_OR_RTN, atomic_load_or_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SWAP_RTN, atomic_swap_global_32, i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_CMPSWAP_RTN, AMDGPUatomic_cmp_swap_global, i32, v2i32>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_XOR_RTN, atomic_load_xor_global_32, i32>;

def : FlatSignedAtomicPat <GLOBAL_ATOMIC_ADD_X2_RTN, atomic_load_add_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SUB_X2_RTN, atomic_load_sub_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_INC_X2_RTN, atomic_inc_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_DEC_X2_RTN, atomic_dec_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_AND_X2_RTN, atomic_load_and_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SMAX_X2_RTN, atomic_load_max_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_UMAX_X2_RTN, atomic_load_umax_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SMIN_X2_RTN, atomic_load_min_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_UMIN_X2_RTN, atomic_load_umin_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_OR_X2_RTN, atomic_load_or_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_SWAP_X2_RTN, atomic_swap_global_64, i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_CMPSWAP_X2_RTN, AMDGPUatomic_cmp_swap_global, i64, v2i64>;
def : FlatSignedAtomicPat <GLOBAL_ATOMIC_XOR_X2_RTN, atomic_load_xor_global_64, i64>;

def : FlatAtomicPatNoRtn <GLOBAL_ATOMIC_ADD_F32, atomic_fadd_global_noret, f32>;
def : FlatAtomicPatNoRtn <GLOBAL_ATOMIC_PK_ADD_F16, atomic_pk_fadd_global_noret, v2f16>;

} // End OtherPredicates = [HasFlatGlobalInsts], AddedComplexity = 10

//===----------------------------------------------------------------------===//
// Target
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// CI
//===----------------------------------------------------------------------===//

class FLAT_Real_ci <bits<7> op, FLAT_Pseudo ps> :
  FLAT_Real <op, ps>,
  SIMCInstr <ps.PseudoInstr, SIEncodingFamily.SI> {
  let AssemblerPredicate = isGFX7Only;
  let DecoderNamespace="GFX7";
}

def FLAT_LOAD_UBYTE_ci    : FLAT_Real_ci <0x8,  FLAT_LOAD_UBYTE>;
def FLAT_LOAD_SBYTE_ci    : FLAT_Real_ci <0x9,  FLAT_LOAD_SBYTE>;
def FLAT_LOAD_USHORT_ci   : FLAT_Real_ci <0xa,  FLAT_LOAD_USHORT>;
def FLAT_LOAD_SSHORT_ci   : FLAT_Real_ci <0xb,  FLAT_LOAD_SSHORT>;
def FLAT_LOAD_DWORD_ci    : FLAT_Real_ci <0xc,  FLAT_LOAD_DWORD>;
def FLAT_LOAD_DWORDX2_ci  : FLAT_Real_ci <0xd,  FLAT_LOAD_DWORDX2>;
def FLAT_LOAD_DWORDX4_ci  : FLAT_Real_ci <0xe,  FLAT_LOAD_DWORDX4>;
def FLAT_LOAD_DWORDX3_ci  : FLAT_Real_ci <0xf,  FLAT_LOAD_DWORDX3>;

def FLAT_STORE_BYTE_ci    : FLAT_Real_ci <0x18, FLAT_STORE_BYTE>;
def FLAT_STORE_SHORT_ci   : FLAT_Real_ci <0x1a, FLAT_STORE_SHORT>;
def FLAT_STORE_DWORD_ci   : FLAT_Real_ci <0x1c, FLAT_STORE_DWORD>;
def FLAT_STORE_DWORDX2_ci : FLAT_Real_ci <0x1d, FLAT_STORE_DWORDX2>;
def FLAT_STORE_DWORDX4_ci : FLAT_Real_ci <0x1e, FLAT_STORE_DWORDX4>;
def FLAT_STORE_DWORDX3_ci : FLAT_Real_ci <0x1f, FLAT_STORE_DWORDX3>;

multiclass FLAT_Real_Atomics_ci <bits<7> op, FLAT_Pseudo ps> {
  def _ci     : FLAT_Real_ci<op, !cast<FLAT_Pseudo>(ps.PseudoInstr)>;
  def _RTN_ci : FLAT_Real_ci<op, !cast<FLAT_Pseudo>(ps.PseudoInstr # "_RTN")>;
}

defm FLAT_ATOMIC_SWAP        : FLAT_Real_Atomics_ci <0x30, FLAT_ATOMIC_SWAP>;
defm FLAT_ATOMIC_CMPSWAP     : FLAT_Real_Atomics_ci <0x31, FLAT_ATOMIC_CMPSWAP>;
defm FLAT_ATOMIC_ADD         : FLAT_Real_Atomics_ci <0x32, FLAT_ATOMIC_ADD>;
defm FLAT_ATOMIC_SUB         : FLAT_Real_Atomics_ci <0x33, FLAT_ATOMIC_SUB>;
defm FLAT_ATOMIC_SMIN        : FLAT_Real_Atomics_ci <0x35, FLAT_ATOMIC_SMIN>;
defm FLAT_ATOMIC_UMIN        : FLAT_Real_Atomics_ci <0x36, FLAT_ATOMIC_UMIN>;
defm FLAT_ATOMIC_SMAX        : FLAT_Real_Atomics_ci <0x37, FLAT_ATOMIC_SMAX>;
defm FLAT_ATOMIC_UMAX        : FLAT_Real_Atomics_ci <0x38, FLAT_ATOMIC_UMAX>;
defm FLAT_ATOMIC_AND         : FLAT_Real_Atomics_ci <0x39, FLAT_ATOMIC_AND>;
defm FLAT_ATOMIC_OR          : FLAT_Real_Atomics_ci <0x3a, FLAT_ATOMIC_OR>;
defm FLAT_ATOMIC_XOR         : FLAT_Real_Atomics_ci <0x3b, FLAT_ATOMIC_XOR>;
defm FLAT_ATOMIC_INC         : FLAT_Real_Atomics_ci <0x3c, FLAT_ATOMIC_INC>;
defm FLAT_ATOMIC_DEC         : FLAT_Real_Atomics_ci <0x3d, FLAT_ATOMIC_DEC>;
defm FLAT_ATOMIC_SWAP_X2     : FLAT_Real_Atomics_ci <0x50, FLAT_ATOMIC_SWAP_X2>;
defm FLAT_ATOMIC_CMPSWAP_X2  : FLAT_Real_Atomics_ci <0x51, FLAT_ATOMIC_CMPSWAP_X2>;
defm FLAT_ATOMIC_ADD_X2      : FLAT_Real_Atomics_ci <0x52, FLAT_ATOMIC_ADD_X2>;
defm FLAT_ATOMIC_SUB_X2      : FLAT_Real_Atomics_ci <0x53, FLAT_ATOMIC_SUB_X2>;
defm FLAT_ATOMIC_SMIN_X2     : FLAT_Real_Atomics_ci <0x55, FLAT_ATOMIC_SMIN_X2>;
defm FLAT_ATOMIC_UMIN_X2     : FLAT_Real_Atomics_ci <0x56, FLAT_ATOMIC_UMIN_X2>;
defm FLAT_ATOMIC_SMAX_X2     : FLAT_Real_Atomics_ci <0x57, FLAT_ATOMIC_SMAX_X2>;
defm FLAT_ATOMIC_UMAX_X2     : FLAT_Real_Atomics_ci <0x58, FLAT_ATOMIC_UMAX_X2>;
defm FLAT_ATOMIC_AND_X2      : FLAT_Real_Atomics_ci <0x59, FLAT_ATOMIC_AND_X2>;
defm FLAT_ATOMIC_OR_X2       : FLAT_Real_Atomics_ci <0x5a, FLAT_ATOMIC_OR_X2>;
defm FLAT_ATOMIC_XOR_X2      : FLAT_Real_Atomics_ci <0x5b, FLAT_ATOMIC_XOR_X2>;
defm FLAT_ATOMIC_INC_X2      : FLAT_Real_Atomics_ci <0x5c, FLAT_ATOMIC_INC_X2>;
defm FLAT_ATOMIC_DEC_X2      : FLAT_Real_Atomics_ci <0x5d, FLAT_ATOMIC_DEC_X2>;

// CI-only flat instructions.
defm FLAT_ATOMIC_FCMPSWAP    : FLAT_Real_Atomics_ci <0x3e, FLAT_ATOMIC_FCMPSWAP>;
defm FLAT_ATOMIC_FMIN        : FLAT_Real_Atomics_ci <0x3f, FLAT_ATOMIC_FMIN>;
defm FLAT_ATOMIC_FMAX        : FLAT_Real_Atomics_ci <0x40, FLAT_ATOMIC_FMAX>;
defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Real_Atomics_ci <0x5e, FLAT_ATOMIC_FCMPSWAP_X2>;
defm FLAT_ATOMIC_FMIN_X2     : FLAT_Real_Atomics_ci <0x5f, FLAT_ATOMIC_FMIN_X2>;
defm FLAT_ATOMIC_FMAX_X2     : FLAT_Real_Atomics_ci <0x60, FLAT_ATOMIC_FMAX_X2>;

//===----------------------------------------------------------------------===//
// VI
//===----------------------------------------------------------------------===//

class FLAT_Real_vi <bits<7> op, FLAT_Pseudo ps> :
  FLAT_Real <op, ps>,
  SIMCInstr <ps.PseudoInstr, SIEncodingFamily.VI> {
  let AssemblerPredicate = isGFX8GFX9;
  let DecoderNamespace = "GFX8";
}

multiclass FLAT_Real_AllAddr_vi<bits<7> op> {
  def _vi       : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(NAME)>;
  def _SADDR_vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(NAME#"_SADDR")>;
}
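
// The same 7-bit opcode values are reused below for the FLAT, GLOBAL and
// SCRATCH forms; the seg field encoded by FLAT_Real (00=flat, 01=scratch,
// 10=global) is what selects the address space.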

def FLAT_LOAD_UBYTE_vi         : FLAT_Real_vi <0x10, FLAT_LOAD_UBYTE>;
def FLAT_LOAD_SBYTE_vi         : FLAT_Real_vi <0x11, FLAT_LOAD_SBYTE>;
def FLAT_LOAD_USHORT_vi        : FLAT_Real_vi <0x12, FLAT_LOAD_USHORT>;
def FLAT_LOAD_SSHORT_vi        : FLAT_Real_vi <0x13, FLAT_LOAD_SSHORT>;
def FLAT_LOAD_DWORD_vi         : FLAT_Real_vi <0x14, FLAT_LOAD_DWORD>;
def FLAT_LOAD_DWORDX2_vi       : FLAT_Real_vi <0x15, FLAT_LOAD_DWORDX2>;
def FLAT_LOAD_DWORDX4_vi       : FLAT_Real_vi <0x17, FLAT_LOAD_DWORDX4>;
def FLAT_LOAD_DWORDX3_vi       : FLAT_Real_vi <0x16, FLAT_LOAD_DWORDX3>;

def FLAT_STORE_BYTE_vi         : FLAT_Real_vi <0x18, FLAT_STORE_BYTE>;
def FLAT_STORE_BYTE_D16_HI_vi  : FLAT_Real_vi <0x19, FLAT_STORE_BYTE_D16_HI>;
def FLAT_STORE_SHORT_vi        : FLAT_Real_vi <0x1a, FLAT_STORE_SHORT>;
def FLAT_STORE_SHORT_D16_HI_vi : FLAT_Real_vi <0x1b, FLAT_STORE_SHORT_D16_HI>;
def FLAT_STORE_DWORD_vi        : FLAT_Real_vi <0x1c, FLAT_STORE_DWORD>;
def FLAT_STORE_DWORDX2_vi      : FLAT_Real_vi <0x1d, FLAT_STORE_DWORDX2>;
def FLAT_STORE_DWORDX4_vi      : FLAT_Real_vi <0x1f, FLAT_STORE_DWORDX4>;
def FLAT_STORE_DWORDX3_vi      : FLAT_Real_vi <0x1e, FLAT_STORE_DWORDX3>;

def FLAT_LOAD_UBYTE_D16_vi     : FLAT_Real_vi <0x20, FLAT_LOAD_UBYTE_D16>;
def FLAT_LOAD_UBYTE_D16_HI_vi  : FLAT_Real_vi <0x21, FLAT_LOAD_UBYTE_D16_HI>;
def FLAT_LOAD_SBYTE_D16_vi     : FLAT_Real_vi <0x22, FLAT_LOAD_SBYTE_D16>;
def FLAT_LOAD_SBYTE_D16_HI_vi  : FLAT_Real_vi <0x23, FLAT_LOAD_SBYTE_D16_HI>;
def FLAT_LOAD_SHORT_D16_vi     : FLAT_Real_vi <0x24, FLAT_LOAD_SHORT_D16>;
def FLAT_LOAD_SHORT_D16_HI_vi  : FLAT_Real_vi <0x25, FLAT_LOAD_SHORT_D16_HI>;

multiclass FLAT_Real_Atomics_vi <bits<7> op, FLAT_Pseudo ps> {
  def _vi     : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(ps.PseudoInstr)>;
  def _RTN_vi : FLAT_Real_vi<op, !cast<FLAT_Pseudo>(ps.PseudoInstr # "_RTN")>;
}

multiclass FLAT_Global_Real_Atomics_vi<bits<7> op> :
  FLAT_Real_AllAddr_vi<op> {
  def _RTN_vi       : FLAT_Real_vi <op, !cast<FLAT_Pseudo>(NAME#"_RTN")>;
  def _SADDR_RTN_vi : FLAT_Real_vi <op, !cast<FLAT_Pseudo>(NAME#"_SADDR_RTN")>;
}

defm FLAT_ATOMIC_SWAP       : FLAT_Real_Atomics_vi <0x40, FLAT_ATOMIC_SWAP>;
defm FLAT_ATOMIC_CMPSWAP    : FLAT_Real_Atomics_vi <0x41, FLAT_ATOMIC_CMPSWAP>;
defm FLAT_ATOMIC_ADD        : FLAT_Real_Atomics_vi <0x42, FLAT_ATOMIC_ADD>;
defm FLAT_ATOMIC_SUB        : FLAT_Real_Atomics_vi <0x43, FLAT_ATOMIC_SUB>;
defm FLAT_ATOMIC_SMIN       : FLAT_Real_Atomics_vi <0x44, FLAT_ATOMIC_SMIN>;
defm FLAT_ATOMIC_UMIN       : FLAT_Real_Atomics_vi <0x45, FLAT_ATOMIC_UMIN>;
defm FLAT_ATOMIC_SMAX       : FLAT_Real_Atomics_vi <0x46, FLAT_ATOMIC_SMAX>;
defm FLAT_ATOMIC_UMAX       : FLAT_Real_Atomics_vi <0x47, FLAT_ATOMIC_UMAX>;
defm FLAT_ATOMIC_AND        : FLAT_Real_Atomics_vi <0x48, FLAT_ATOMIC_AND>;
defm FLAT_ATOMIC_OR         : FLAT_Real_Atomics_vi <0x49, FLAT_ATOMIC_OR>;
defm FLAT_ATOMIC_XOR        : FLAT_Real_Atomics_vi <0x4a, FLAT_ATOMIC_XOR>;
defm FLAT_ATOMIC_INC        : FLAT_Real_Atomics_vi <0x4b, FLAT_ATOMIC_INC>;
defm FLAT_ATOMIC_DEC        : FLAT_Real_Atomics_vi <0x4c, FLAT_ATOMIC_DEC>;
defm FLAT_ATOMIC_SWAP_X2    : FLAT_Real_Atomics_vi <0x60, FLAT_ATOMIC_SWAP_X2>;
defm FLAT_ATOMIC_CMPSWAP_X2 : FLAT_Real_Atomics_vi <0x61, FLAT_ATOMIC_CMPSWAP_X2>;
defm FLAT_ATOMIC_ADD_X2     : FLAT_Real_Atomics_vi <0x62, FLAT_ATOMIC_ADD_X2>;
defm FLAT_ATOMIC_SUB_X2     : FLAT_Real_Atomics_vi <0x63, FLAT_ATOMIC_SUB_X2>;
defm FLAT_ATOMIC_SMIN_X2    : FLAT_Real_Atomics_vi <0x64, FLAT_ATOMIC_SMIN_X2>;
defm FLAT_ATOMIC_UMIN_X2    : FLAT_Real_Atomics_vi <0x65, FLAT_ATOMIC_UMIN_X2>;
defm FLAT_ATOMIC_SMAX_X2    : FLAT_Real_Atomics_vi <0x66, FLAT_ATOMIC_SMAX_X2>;
defm FLAT_ATOMIC_UMAX_X2    : FLAT_Real_Atomics_vi <0x67, FLAT_ATOMIC_UMAX_X2>;
defm FLAT_ATOMIC_AND_X2     : FLAT_Real_Atomics_vi <0x68, FLAT_ATOMIC_AND_X2>;
defm FLAT_ATOMIC_OR_X2      : FLAT_Real_Atomics_vi <0x69, FLAT_ATOMIC_OR_X2>;
defm FLAT_ATOMIC_XOR_X2     : FLAT_Real_Atomics_vi <0x6a, FLAT_ATOMIC_XOR_X2>;
defm FLAT_ATOMIC_INC_X2     : FLAT_Real_Atomics_vi <0x6b, FLAT_ATOMIC_INC_X2>;
defm FLAT_ATOMIC_DEC_X2     : FLAT_Real_Atomics_vi <0x6c, FLAT_ATOMIC_DEC_X2>;

defm GLOBAL_LOAD_UBYTE    : FLAT_Real_AllAddr_vi <0x10>;
defm GLOBAL_LOAD_SBYTE    : FLAT_Real_AllAddr_vi <0x11>;
defm GLOBAL_LOAD_USHORT   : FLAT_Real_AllAddr_vi <0x12>;
defm GLOBAL_LOAD_SSHORT   : FLAT_Real_AllAddr_vi <0x13>;
defm GLOBAL_LOAD_DWORD    : FLAT_Real_AllAddr_vi <0x14>;
defm GLOBAL_LOAD_DWORDX2  : FLAT_Real_AllAddr_vi <0x15>;
defm GLOBAL_LOAD_DWORDX3  : FLAT_Real_AllAddr_vi <0x16>;
defm GLOBAL_LOAD_DWORDX4  : FLAT_Real_AllAddr_vi <0x17>;

defm GLOBAL_LOAD_UBYTE_D16    : FLAT_Real_AllAddr_vi <0x20>;
defm GLOBAL_LOAD_UBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x21>;
defm GLOBAL_LOAD_SBYTE_D16    : FLAT_Real_AllAddr_vi <0x22>;
defm GLOBAL_LOAD_SBYTE_D16_HI : FLAT_Real_AllAddr_vi <0x23>;
defm GLOBAL_LOAD_SHORT_D16    : FLAT_Real_AllAddr_vi <0x24>;
defm GLOBAL_LOAD_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x25>;

defm GLOBAL_STORE_BYTE         : FLAT_Real_AllAddr_vi <0x18>;
defm GLOBAL_STORE_BYTE_D16_HI  : FLAT_Real_AllAddr_vi <0x19>;
defm GLOBAL_STORE_SHORT        : FLAT_Real_AllAddr_vi <0x1a>;
defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x1b>;
defm GLOBAL_STORE_DWORD        : FLAT_Real_AllAddr_vi <0x1c>;
defm GLOBAL_STORE_DWORDX2      : FLAT_Real_AllAddr_vi <0x1d>;
defm GLOBAL_STORE_DWORDX3      : FLAT_Real_AllAddr_vi <0x1e>;
defm GLOBAL_STORE_DWORDX4      : FLAT_Real_AllAddr_vi <0x1f>;

defm GLOBAL_ATOMIC_SWAP       : FLAT_Global_Real_Atomics_vi <0x40>;
defm GLOBAL_ATOMIC_CMPSWAP    : FLAT_Global_Real_Atomics_vi <0x41>;
defm GLOBAL_ATOMIC_ADD        : FLAT_Global_Real_Atomics_vi <0x42>;
defm GLOBAL_ATOMIC_SUB        : FLAT_Global_Real_Atomics_vi <0x43>;
defm GLOBAL_ATOMIC_SMIN       : FLAT_Global_Real_Atomics_vi <0x44>;
defm GLOBAL_ATOMIC_UMIN       : FLAT_Global_Real_Atomics_vi <0x45>;
defm GLOBAL_ATOMIC_SMAX       : FLAT_Global_Real_Atomics_vi <0x46>;
defm GLOBAL_ATOMIC_UMAX       : FLAT_Global_Real_Atomics_vi <0x47>;
defm GLOBAL_ATOMIC_AND        : FLAT_Global_Real_Atomics_vi <0x48>;
defm GLOBAL_ATOMIC_OR         : FLAT_Global_Real_Atomics_vi <0x49>;
defm GLOBAL_ATOMIC_XOR        : FLAT_Global_Real_Atomics_vi <0x4a>;
defm GLOBAL_ATOMIC_INC        : FLAT_Global_Real_Atomics_vi <0x4b>;
defm GLOBAL_ATOMIC_DEC        : FLAT_Global_Real_Atomics_vi <0x4c>;
defm GLOBAL_ATOMIC_SWAP_X2    : FLAT_Global_Real_Atomics_vi <0x60>;
defm GLOBAL_ATOMIC_CMPSWAP_X2 : FLAT_Global_Real_Atomics_vi <0x61>;
defm GLOBAL_ATOMIC_ADD_X2     : FLAT_Global_Real_Atomics_vi <0x62>;
defm GLOBAL_ATOMIC_SUB_X2     : FLAT_Global_Real_Atomics_vi <0x63>;
defm GLOBAL_ATOMIC_SMIN_X2    : FLAT_Global_Real_Atomics_vi <0x64>;
defm GLOBAL_ATOMIC_UMIN_X2    : FLAT_Global_Real_Atomics_vi <0x65>;
defm GLOBAL_ATOMIC_SMAX_X2    : FLAT_Global_Real_Atomics_vi <0x66>;
defm GLOBAL_ATOMIC_UMAX_X2    : FLAT_Global_Real_Atomics_vi <0x67>;
defm GLOBAL_ATOMIC_AND_X2     : FLAT_Global_Real_Atomics_vi <0x68>;
defm GLOBAL_ATOMIC_OR_X2      : FLAT_Global_Real_Atomics_vi <0x69>;
defm GLOBAL_ATOMIC_XOR_X2     : FLAT_Global_Real_Atomics_vi <0x6a>;
defm GLOBAL_ATOMIC_INC_X2     : FLAT_Global_Real_Atomics_vi <0x6b>;
defm GLOBAL_ATOMIC_DEC_X2     : FLAT_Global_Real_Atomics_vi <0x6c>;

defm SCRATCH_LOAD_UBYTE         : FLAT_Real_AllAddr_vi <0x10>;
defm SCRATCH_LOAD_SBYTE         : FLAT_Real_AllAddr_vi <0x11>;
defm SCRATCH_LOAD_USHORT        : FLAT_Real_AllAddr_vi <0x12>;
defm SCRATCH_LOAD_SSHORT        : FLAT_Real_AllAddr_vi <0x13>;
defm SCRATCH_LOAD_DWORD         : FLAT_Real_AllAddr_vi <0x14>;
defm SCRATCH_LOAD_DWORDX2       : FLAT_Real_AllAddr_vi <0x15>;
defm SCRATCH_LOAD_DWORDX3       : FLAT_Real_AllAddr_vi <0x16>;
defm SCRATCH_LOAD_DWORDX4       : FLAT_Real_AllAddr_vi <0x17>;
defm SCRATCH_STORE_BYTE         : FLAT_Real_AllAddr_vi <0x18>;
defm SCRATCH_STORE_BYTE_D16_HI  : FLAT_Real_AllAddr_vi <0x19>;
defm SCRATCH_LOAD_UBYTE_D16     : FLAT_Real_AllAddr_vi <0x20>;
defm SCRATCH_LOAD_UBYTE_D16_HI  : FLAT_Real_AllAddr_vi <0x21>;
defm SCRATCH_LOAD_SBYTE_D16     : FLAT_Real_AllAddr_vi <0x22>;
defm SCRATCH_LOAD_SBYTE_D16_HI  : FLAT_Real_AllAddr_vi <0x23>;
defm SCRATCH_LOAD_SHORT_D16     : FLAT_Real_AllAddr_vi <0x24>;
defm SCRATCH_LOAD_SHORT_D16_HI  : FLAT_Real_AllAddr_vi <0x25>;
defm SCRATCH_STORE_SHORT        : FLAT_Real_AllAddr_vi <0x1a>;
defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_vi <0x1b>;
defm SCRATCH_STORE_DWORD        : FLAT_Real_AllAddr_vi <0x1c>;
defm SCRATCH_STORE_DWORDX2      : FLAT_Real_AllAddr_vi <0x1d>;
defm SCRATCH_STORE_DWORDX3      : FLAT_Real_AllAddr_vi <0x1e>;
defm SCRATCH_STORE_DWORDX4      : FLAT_Real_AllAddr_vi <0x1f>;

//===----------------------------------------------------------------------===//
// GFX10
//===----------------------------------------------------------------------===//

class FLAT_Real_gfx10<bits<7> op, FLAT_Pseudo ps> :
    FLAT_Real<op, ps>, SIMCInstr<ps.PseudoInstr, SIEncodingFamily.GFX10> {
  let AssemblerPredicate = isGFX10Plus;
  let DecoderNamespace = "GFX10";

  let Inst{11-0} = offset{11-0};
  let Inst{12} = !if(ps.has_dlc, dlc, ps.dlcValue);
  let Inst{54-48} = !if(ps.has_saddr, !if(ps.enabled_saddr, saddr, 0x7d), 0x7d);
}
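
// Relative to the pre-GFX10 encoding above, GFX10 narrows the immediate
// offset to bits 11-0 (bit 12 becomes dlc) and uses 0x7d rather than 0x7f as
// the "saddr disabled" sentinel value.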

multiclass FLAT_Real_Base_gfx10<bits<7> op> {
  def _gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME)>;
}

multiclass FLAT_Real_RTN_gfx10<bits<7> op> {
  def _RTN_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_RTN")>;
}

multiclass FLAT_Real_SADDR_gfx10<bits<7> op> {
  def _SADDR_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_SADDR")>;
}

multiclass FLAT_Real_SADDR_RTN_gfx10<bits<7> op> {
  def _SADDR_RTN_gfx10 :
    FLAT_Real_gfx10<op, !cast<FLAT_Pseudo>(NAME#"_SADDR_RTN")>;
}

multiclass FLAT_Real_AllAddr_gfx10<bits<7> op> :
  FLAT_Real_Base_gfx10<op>,
  FLAT_Real_SADDR_gfx10<op>;

multiclass FLAT_Real_Atomics_gfx10<bits<7> op> :
  FLAT_Real_Base_gfx10<op>,
  FLAT_Real_RTN_gfx10<op>;

multiclass FLAT_Real_GlblAtomics_gfx10<bits<7> op> :
  FLAT_Real_AllAddr_gfx10<op>,
  FLAT_Real_RTN_gfx10<op>,
  FLAT_Real_SADDR_RTN_gfx10<op>;

defm FLAT_LOAD_UBYTE         : FLAT_Real_Base_gfx10<0x008>;
defm FLAT_LOAD_SBYTE         : FLAT_Real_Base_gfx10<0x009>;
defm FLAT_LOAD_USHORT        : FLAT_Real_Base_gfx10<0x00a>;
defm FLAT_LOAD_SSHORT        : FLAT_Real_Base_gfx10<0x00b>;
defm FLAT_LOAD_DWORD         : FLAT_Real_Base_gfx10<0x00c>;
defm FLAT_LOAD_DWORDX2       : FLAT_Real_Base_gfx10<0x00d>;
defm FLAT_LOAD_DWORDX4       : FLAT_Real_Base_gfx10<0x00e>;
defm FLAT_LOAD_DWORDX3       : FLAT_Real_Base_gfx10<0x00f>;
defm FLAT_STORE_BYTE         : FLAT_Real_Base_gfx10<0x018>;
defm FLAT_STORE_BYTE_D16_HI  : FLAT_Real_Base_gfx10<0x019>;
defm FLAT_STORE_SHORT        : FLAT_Real_Base_gfx10<0x01a>;
defm FLAT_STORE_SHORT_D16_HI : FLAT_Real_Base_gfx10<0x01b>;
defm FLAT_STORE_DWORD        : FLAT_Real_Base_gfx10<0x01c>;
defm FLAT_STORE_DWORDX2      : FLAT_Real_Base_gfx10<0x01d>;
defm FLAT_STORE_DWORDX4      : FLAT_Real_Base_gfx10<0x01e>;
defm FLAT_STORE_DWORDX3      : FLAT_Real_Base_gfx10<0x01f>;
defm FLAT_LOAD_UBYTE_D16     : FLAT_Real_Base_gfx10<0x020>;
defm FLAT_LOAD_UBYTE_D16_HI  : FLAT_Real_Base_gfx10<0x021>;
defm FLAT_LOAD_SBYTE_D16     : FLAT_Real_Base_gfx10<0x022>;
defm FLAT_LOAD_SBYTE_D16_HI  : FLAT_Real_Base_gfx10<0x023>;
defm FLAT_LOAD_SHORT_D16     : FLAT_Real_Base_gfx10<0x024>;
defm FLAT_LOAD_SHORT_D16_HI  : FLAT_Real_Base_gfx10<0x025>;
defm FLAT_ATOMIC_SWAP        : FLAT_Real_Atomics_gfx10<0x030>;
defm FLAT_ATOMIC_CMPSWAP     : FLAT_Real_Atomics_gfx10<0x031>;
defm FLAT_ATOMIC_ADD         : FLAT_Real_Atomics_gfx10<0x032>;
defm FLAT_ATOMIC_SUB         : FLAT_Real_Atomics_gfx10<0x033>;
defm FLAT_ATOMIC_SMIN        : FLAT_Real_Atomics_gfx10<0x035>;
defm FLAT_ATOMIC_UMIN        : FLAT_Real_Atomics_gfx10<0x036>;
defm FLAT_ATOMIC_SMAX        : FLAT_Real_Atomics_gfx10<0x037>;
defm FLAT_ATOMIC_UMAX        : FLAT_Real_Atomics_gfx10<0x038>;
defm FLAT_ATOMIC_AND         : FLAT_Real_Atomics_gfx10<0x039>;
defm FLAT_ATOMIC_OR          : FLAT_Real_Atomics_gfx10<0x03a>;
defm FLAT_ATOMIC_XOR         : FLAT_Real_Atomics_gfx10<0x03b>;
defm FLAT_ATOMIC_INC         : FLAT_Real_Atomics_gfx10<0x03c>;
defm FLAT_ATOMIC_DEC         : FLAT_Real_Atomics_gfx10<0x03d>;
defm FLAT_ATOMIC_FCMPSWAP    : FLAT_Real_Atomics_gfx10<0x03e>;
defm FLAT_ATOMIC_FMIN        : FLAT_Real_Atomics_gfx10<0x03f>;
defm FLAT_ATOMIC_FMAX        : FLAT_Real_Atomics_gfx10<0x040>;
defm FLAT_ATOMIC_SWAP_X2     : FLAT_Real_Atomics_gfx10<0x050>;
defm FLAT_ATOMIC_CMPSWAP_X2  : FLAT_Real_Atomics_gfx10<0x051>;
defm FLAT_ATOMIC_ADD_X2      : FLAT_Real_Atomics_gfx10<0x052>;
defm FLAT_ATOMIC_SUB_X2      : FLAT_Real_Atomics_gfx10<0x053>;
defm FLAT_ATOMIC_SMIN_X2     : FLAT_Real_Atomics_gfx10<0x055>;
defm FLAT_ATOMIC_UMIN_X2     : FLAT_Real_Atomics_gfx10<0x056>;
defm FLAT_ATOMIC_SMAX_X2     : FLAT_Real_Atomics_gfx10<0x057>;
defm FLAT_ATOMIC_UMAX_X2     : FLAT_Real_Atomics_gfx10<0x058>;
defm FLAT_ATOMIC_AND_X2      : FLAT_Real_Atomics_gfx10<0x059>;
defm FLAT_ATOMIC_OR_X2       : FLAT_Real_Atomics_gfx10<0x05a>;
defm FLAT_ATOMIC_XOR_X2      : FLAT_Real_Atomics_gfx10<0x05b>;
defm FLAT_ATOMIC_INC_X2      : FLAT_Real_Atomics_gfx10<0x05c>;
defm FLAT_ATOMIC_DEC_X2      : FLAT_Real_Atomics_gfx10<0x05d>;
defm FLAT_ATOMIC_FCMPSWAP_X2 : FLAT_Real_Atomics_gfx10<0x05e>;
defm FLAT_ATOMIC_FMIN_X2     : FLAT_Real_Atomics_gfx10<0x05f>;
defm FLAT_ATOMIC_FMAX_X2     : FLAT_Real_Atomics_gfx10<0x060>;

defm GLOBAL_LOAD_UBYTE         : FLAT_Real_AllAddr_gfx10<0x008>;
defm GLOBAL_LOAD_SBYTE         : FLAT_Real_AllAddr_gfx10<0x009>;
defm GLOBAL_LOAD_USHORT        : FLAT_Real_AllAddr_gfx10<0x00a>;
defm GLOBAL_LOAD_SSHORT        : FLAT_Real_AllAddr_gfx10<0x00b>;
defm GLOBAL_LOAD_DWORD         : FLAT_Real_AllAddr_gfx10<0x00c>;
defm GLOBAL_LOAD_DWORDX2       : FLAT_Real_AllAddr_gfx10<0x00d>;
defm GLOBAL_LOAD_DWORDX4       : FLAT_Real_AllAddr_gfx10<0x00e>;
defm GLOBAL_LOAD_DWORDX3       : FLAT_Real_AllAddr_gfx10<0x00f>;
defm GLOBAL_STORE_BYTE         : FLAT_Real_AllAddr_gfx10<0x018>;
defm GLOBAL_STORE_BYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x019>;
defm GLOBAL_STORE_SHORT        : FLAT_Real_AllAddr_gfx10<0x01a>;
defm GLOBAL_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_gfx10<0x01b>;
defm GLOBAL_STORE_DWORD        : FLAT_Real_AllAddr_gfx10<0x01c>;
defm GLOBAL_STORE_DWORDX2      : FLAT_Real_AllAddr_gfx10<0x01d>;
defm GLOBAL_STORE_DWORDX4      : FLAT_Real_AllAddr_gfx10<0x01e>;
defm GLOBAL_STORE_DWORDX3      : FLAT_Real_AllAddr_gfx10<0x01f>;
defm GLOBAL_LOAD_UBYTE_D16     : FLAT_Real_AllAddr_gfx10<0x020>;
defm GLOBAL_LOAD_UBYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x021>;
defm GLOBAL_LOAD_SBYTE_D16     : FLAT_Real_AllAddr_gfx10<0x022>;
defm GLOBAL_LOAD_SBYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x023>;
defm GLOBAL_LOAD_SHORT_D16     : FLAT_Real_AllAddr_gfx10<0x024>;
defm GLOBAL_LOAD_SHORT_D16_HI  : FLAT_Real_AllAddr_gfx10<0x025>;
defm GLOBAL_ATOMIC_SWAP        : FLAT_Real_GlblAtomics_gfx10<0x030>;
defm GLOBAL_ATOMIC_CMPSWAP     : FLAT_Real_GlblAtomics_gfx10<0x031>;
defm GLOBAL_ATOMIC_ADD         : FLAT_Real_GlblAtomics_gfx10<0x032>;
defm GLOBAL_ATOMIC_SUB         : FLAT_Real_GlblAtomics_gfx10<0x033>;
defm GLOBAL_ATOMIC_SMIN        : FLAT_Real_GlblAtomics_gfx10<0x035>;
defm GLOBAL_ATOMIC_UMIN        : FLAT_Real_GlblAtomics_gfx10<0x036>;
defm GLOBAL_ATOMIC_SMAX        : FLAT_Real_GlblAtomics_gfx10<0x037>;
defm GLOBAL_ATOMIC_UMAX        : FLAT_Real_GlblAtomics_gfx10<0x038>;
defm GLOBAL_ATOMIC_AND         : FLAT_Real_GlblAtomics_gfx10<0x039>;
defm GLOBAL_ATOMIC_OR          : FLAT_Real_GlblAtomics_gfx10<0x03a>;
defm GLOBAL_ATOMIC_XOR         : FLAT_Real_GlblAtomics_gfx10<0x03b>;
defm GLOBAL_ATOMIC_INC         : FLAT_Real_GlblAtomics_gfx10<0x03c>;
defm GLOBAL_ATOMIC_DEC         : FLAT_Real_GlblAtomics_gfx10<0x03d>;
defm GLOBAL_ATOMIC_FCMPSWAP    : FLAT_Real_GlblAtomics_gfx10<0x03e>;
defm GLOBAL_ATOMIC_FMIN        : FLAT_Real_GlblAtomics_gfx10<0x03f>;
defm GLOBAL_ATOMIC_FMAX        : FLAT_Real_GlblAtomics_gfx10<0x040>;
defm GLOBAL_ATOMIC_SWAP_X2     : FLAT_Real_GlblAtomics_gfx10<0x050>;
defm GLOBAL_ATOMIC_CMPSWAP_X2  : FLAT_Real_GlblAtomics_gfx10<0x051>;
defm GLOBAL_ATOMIC_ADD_X2      : FLAT_Real_GlblAtomics_gfx10<0x052>;
defm GLOBAL_ATOMIC_SUB_X2      : FLAT_Real_GlblAtomics_gfx10<0x053>;
defm GLOBAL_ATOMIC_SMIN_X2     : FLAT_Real_GlblAtomics_gfx10<0x055>;
defm GLOBAL_ATOMIC_UMIN_X2     : FLAT_Real_GlblAtomics_gfx10<0x056>;
defm GLOBAL_ATOMIC_SMAX_X2     : FLAT_Real_GlblAtomics_gfx10<0x057>;
defm GLOBAL_ATOMIC_UMAX_X2     : FLAT_Real_GlblAtomics_gfx10<0x058>;
defm GLOBAL_ATOMIC_AND_X2      : FLAT_Real_GlblAtomics_gfx10<0x059>;
defm GLOBAL_ATOMIC_OR_X2       : FLAT_Real_GlblAtomics_gfx10<0x05a>;
defm GLOBAL_ATOMIC_XOR_X2      : FLAT_Real_GlblAtomics_gfx10<0x05b>;
defm GLOBAL_ATOMIC_INC_X2      : FLAT_Real_GlblAtomics_gfx10<0x05c>;
defm GLOBAL_ATOMIC_DEC_X2      : FLAT_Real_GlblAtomics_gfx10<0x05d>;
defm GLOBAL_ATOMIC_FCMPSWAP_X2 : FLAT_Real_GlblAtomics_gfx10<0x05e>;
defm GLOBAL_ATOMIC_FMIN_X2     : FLAT_Real_GlblAtomics_gfx10<0x05f>;
defm GLOBAL_ATOMIC_FMAX_X2     : FLAT_Real_GlblAtomics_gfx10<0x060>;

// ENC_FLAT_SCRATCH.
defm SCRATCH_LOAD_UBYTE         : FLAT_Real_AllAddr_gfx10<0x008>;
defm SCRATCH_LOAD_SBYTE         : FLAT_Real_AllAddr_gfx10<0x009>;
defm SCRATCH_LOAD_USHORT        : FLAT_Real_AllAddr_gfx10<0x00a>;
defm SCRATCH_LOAD_SSHORT        : FLAT_Real_AllAddr_gfx10<0x00b>;
defm SCRATCH_LOAD_DWORD         : FLAT_Real_AllAddr_gfx10<0x00c>;
defm SCRATCH_LOAD_DWORDX2       : FLAT_Real_AllAddr_gfx10<0x00d>;
defm SCRATCH_LOAD_DWORDX4       : FLAT_Real_AllAddr_gfx10<0x00e>;
defm SCRATCH_LOAD_DWORDX3       : FLAT_Real_AllAddr_gfx10<0x00f>;
defm SCRATCH_STORE_BYTE         : FLAT_Real_AllAddr_gfx10<0x018>;
defm SCRATCH_STORE_BYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x019>;
defm SCRATCH_STORE_SHORT        : FLAT_Real_AllAddr_gfx10<0x01a>;
defm SCRATCH_STORE_SHORT_D16_HI : FLAT_Real_AllAddr_gfx10<0x01b>;
defm SCRATCH_STORE_DWORD        : FLAT_Real_AllAddr_gfx10<0x01c>;
defm SCRATCH_STORE_DWORDX2      : FLAT_Real_AllAddr_gfx10<0x01d>;
defm SCRATCH_STORE_DWORDX4      : FLAT_Real_AllAddr_gfx10<0x01e>;
defm SCRATCH_STORE_DWORDX3      : FLAT_Real_AllAddr_gfx10<0x01f>;
defm SCRATCH_LOAD_UBYTE_D16     : FLAT_Real_AllAddr_gfx10<0x020>;
defm SCRATCH_LOAD_UBYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x021>;
defm SCRATCH_LOAD_SBYTE_D16     : FLAT_Real_AllAddr_gfx10<0x022>;
defm SCRATCH_LOAD_SBYTE_D16_HI  : FLAT_Real_AllAddr_gfx10<0x023>;
defm SCRATCH_LOAD_SHORT_D16     : FLAT_Real_AllAddr_gfx10<0x024>;
defm SCRATCH_LOAD_SHORT_D16_HI  : FLAT_Real_AllAddr_gfx10<0x025>;

let SubtargetPredicate = HasAtomicFaddInsts in {

defm GLOBAL_ATOMIC_ADD_F32    : FLAT_Real_AllAddr_vi <0x04d>;
defm GLOBAL_ATOMIC_PK_ADD_F16 : FLAT_Real_AllAddr_vi <0x04e>;

} // End SubtargetPredicate = HasAtomicFaddInsts