/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * A small micro-assembler. It is intentionally kept simple: it supports
 * only a subset of instructions and does not try to hide pipeline effects
 * like branch delay slots.
 *
 * Copyright (C) 2004, 2005, 2006, 2008 Thiemo Seufer
 * Copyright (C) 2005, 2007 Maciej W. Rozycki
 * Copyright (C) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>

#include <asm/inst.h>
#include <asm/elf.h>
#include <asm/bugs.h>
#include <asm/uasm.h>
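/*
 * Typical usage, as a minimal sketch (the buffer, register number and
 * label id below are arbitrary examples, not part of the uasm API):
 * callers emit instructions through a moving write pointer, record labels
 * and relocations as they go, and patch branch offsets at the end.  Note
 * that the instruction following a branch is its delay slot; uasm does
 * not insert one for you.
 *
 *	static struct uasm_label labels[4];
 *	static struct uasm_reloc relocs[4];
 *	struct uasm_label *l = labels;
 *	struct uasm_reloc *r = relocs;
 *	u32 buf[8], *p = buf;
 *
 *	uasm_il_beqz(&p, &r, 2, 1);		// if ($2 == 0) goto label 1
 *	uasm_i_addiu(&p, 2, 2, 1);		// delay slot: $2 += 1
 *	uasm_build_label(&l, p, 1);		// label 1 lands here
 *	uasm_resolve_relocs(relocs, labels);	// patch the branch offset
 */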
#define IMM_MASK	0xffff
#define JIMM_MASK	0x3ffffff
#define FUNC_MASK	0x3f
#define SCIMM_MASK	0xfffff
enum opcode {
	insn_invalid,
	insn_addiu, insn_addu, insn_and, insn_andi, insn_bbit0, insn_bbit1,
	insn_beq, insn_beql, insn_bgez, insn_bgezl, insn_bltz, insn_bltzl,
	insn_bne, insn_cache, insn_daddiu, insn_daddu, insn_dins, insn_dinsm,
	insn_dmfc0, insn_dmtc0, insn_drotr, insn_drotr32, insn_dsll,
	insn_dsll32, insn_dsra, insn_dsrl, insn_dsrl32, insn_dsubu, insn_eret,
	insn_j, insn_jal, insn_jr, insn_ld, insn_ldx, insn_ll, insn_lld,
	insn_lui, insn_lw, insn_lwx, insn_mfc0, insn_mtc0, insn_or, insn_ori,
	insn_pref, insn_rfe, insn_rotr, insn_sc, insn_scd, insn_sd, insn_sll,
	insn_sra, insn_srl, insn_subu, insn_sw, insn_syscall, insn_tlbp,
	insn_tlbr, insn_tlbwi, insn_tlbwr, insn_xor, insn_xori,
};

struct insn {
	enum opcode opcode;
	u32 match;
	enum fields fields;
};
/* This macro sets the non-variable bits of an instruction. */
#define M(a, b, c, d, e, f)	\
	((a) << OP_SH		\
	 | (b) << RS_SH		\
	 | (c) << RT_SH		\
	 | (d) << RD_SH		\
	 | (e) << RE_SH		\
	 | (f) << FUNC_SH)
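/*
 * For example, M(addiu_op, 0, 0, 0, 0, 0) fills in only the primary opcode
 * field and evaluates to addiu_op << OP_SH (opcode 0b001001 in bits 31:26);
 * the variable rs/rt/immediate bits are OR-ed in later by build_insn().
 */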
static struct insn insn_table[] __uasminitdata = {
	{ insn_addiu, M(addiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_addu, M(spec_op, 0, 0, 0, 0, addu_op), RS | RT | RD },
	{ insn_andi, M(andi_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_and, M(spec_op, 0, 0, 0, 0, and_op), RS | RT | RD },
	{ insn_bbit0, M(lwc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bbit1, M(swc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_beql, M(beql_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_beq, M(beq_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bgezl, M(bcond_op, 0, bgezl_op, 0, 0, 0), RS | BIMM },
	{ insn_bgez, M(bcond_op, 0, bgez_op, 0, 0, 0), RS | BIMM },
	{ insn_bltzl, M(bcond_op, 0, bltzl_op, 0, 0, 0), RS | BIMM },
	{ insn_bltz, M(bcond_op, 0, bltz_op, 0, 0, 0), RS | BIMM },
	{ insn_bne, M(bne_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_cache, M(cache_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddiu, M(daddiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddu, M(spec_op, 0, 0, 0, 0, daddu_op), RS | RT | RD },
	{ insn_dinsm, M(spec3_op, 0, 0, 0, 0, dinsm_op), RS | RT | RD | RE },
	{ insn_dins, M(spec3_op, 0, 0, 0, 0, dins_op), RS | RT | RD | RE },
	{ insn_dmfc0, M(cop0_op, dmfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_dmtc0, M(cop0_op, dmtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_drotr32, M(spec_op, 1, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_drotr, M(spec_op, 1, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_dsll32, M(spec_op, 0, 0, 0, 0, dsll32_op), RT | RD | RE },
	{ insn_dsll, M(spec_op, 0, 0, 0, 0, dsll_op), RT | RD | RE },
	{ insn_dsra, M(spec_op, 0, 0, 0, 0, dsra_op), RT | RD | RE },
	{ insn_dsrl32, M(spec_op, 0, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_dsrl, M(spec_op, 0, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_dsubu, M(spec_op, 0, 0, 0, 0, dsubu_op), RS | RT | RD },
	{ insn_eret, M(cop0_op, cop_op, 0, 0, 0, eret_op), 0 },
	{ insn_jal, M(jal_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_j, M(j_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_jr, M(spec_op, 0, 0, 0, 0, jr_op), RS },
	{ insn_ld, M(ld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_ldx, M(spec3_op, 0, 0, 0, ldx_op, lx_op), RS | RT | RD },
	{ insn_lld, M(lld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_ll, M(ll_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lui, M(lui_op, 0, 0, 0, 0, 0), RT | SIMM },
	{ insn_lw, M(lw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lwx, M(spec3_op, 0, 0, 0, lwx_op, lx_op), RS | RT | RD },
	{ insn_mfc0, M(cop0_op, mfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_mtc0, M(cop0_op, mtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_ori, M(ori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_or, M(spec_op, 0, 0, 0, 0, or_op), RS | RT | RD },
	{ insn_pref, M(pref_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_rfe, M(cop0_op, cop_op, 0, 0, 0, rfe_op), 0 },
	{ insn_rotr, M(spec_op, 1, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_scd, M(scd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sc, M(sc_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sd, M(sd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sll, M(spec_op, 0, 0, 0, 0, sll_op), RT | RD | RE },
	{ insn_sra, M(spec_op, 0, 0, 0, 0, sra_op), RT | RD | RE },
	{ insn_srl, M(spec_op, 0, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_subu, M(spec_op, 0, 0, 0, 0, subu_op), RS | RT | RD },
	{ insn_sw, M(sw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_syscall, M(spec_op, 0, 0, 0, 0, syscall_op), SCIMM },
	{ insn_tlbp, M(cop0_op, cop_op, 0, 0, 0, tlbp_op), 0 },
	{ insn_tlbr, M(cop0_op, cop_op, 0, 0, 0, tlbr_op), 0 },
	{ insn_tlbwi, M(cop0_op, cop_op, 0, 0, 0, tlbwi_op), 0 },
	{ insn_tlbwr, M(cop0_op, cop_op, 0, 0, 0, tlbwr_op), 0 },
	{ insn_xori, M(xori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_xor, M(spec_op, 0, 0, 0, 0, xor_op), RS | RT | RD },
	{ insn_invalid, 0, 0 }
};
static inline __uasminit u32 build_rs(u32 arg)
{
	WARN(arg & ~RS_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RS_MASK) << RS_SH;
}

static inline __uasminit u32 build_rt(u32 arg)
{
	WARN(arg & ~RT_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RT_MASK) << RT_SH;
}

static inline __uasminit u32 build_rd(u32 arg)
{
	WARN(arg & ~RD_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RD_MASK) << RD_SH;
}

static inline __uasminit u32 build_re(u32 arg)
{
	WARN(arg & ~RE_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RE_MASK) << RE_SH;
}

static inline __uasminit u32 build_simm(s32 arg)
{
	WARN(arg > 0x7fff || arg < -0x8000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return arg & 0xffff;
}

static inline __uasminit u32 build_uimm(u32 arg)
{
	WARN(arg & ~IMM_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & IMM_MASK;
}
static inline __uasminit u32 build_bimm(s32 arg)
{
	WARN(arg > 0x1ffff || arg < -0x20000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	WARN(arg & 0x3, KERN_WARNING "Invalid micro-assembler branch target\n");

	return ((arg < 0) ? (1 << 15) : 0) | ((arg >> 2) & 0x7fff);
}
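/*
 * The branch immediate is a signed word offset relative to the delay slot
 * (the relocation code below computes "laddr - (raddr + 4)").  E.g. a
 * branch to the second instruction after the delay slot has arg == 8 and
 * encodes as 0x0002, while a branch back to the branch instruction itself
 * has arg == -4 and encodes as 0xffff.
 */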
static inline __uasminit u32 build_jimm(u32 arg)
{
	WARN(arg & ~(JIMM_MASK << 2),
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg >> 2) & JIMM_MASK;
}

static inline __uasminit u32 build_scimm(u32 arg)
{
	WARN(arg & ~SCIMM_MASK,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & SCIMM_MASK) << SCIMM_SH;
}

static inline __uasminit u32 build_func(u32 arg)
{
	WARN(arg & ~FUNC_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & FUNC_MASK;
}

static inline __uasminit u32 build_set(u32 arg)
{
	WARN(arg & ~SET_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & SET_MASK;
}
/*
 * The order of opcode arguments is implicitly left to right,
 * starting with RS and ending with FUNC or IMM.
 */
static void __uasminit build_insn(u32 **buf, enum opcode opc, ...)
{
	struct insn *ip = NULL;
	unsigned int i;
	va_list ap;
	u32 op;

	for (i = 0; insn_table[i].opcode != insn_invalid; i++)
		if (insn_table[i].opcode == opc) {
			ip = &insn_table[i];
			break;
		}

	if (!ip || (opc == insn_daddiu && r4k_daddiu_bug()))
		panic("Unsupported Micro-assembler instruction %d", opc);

	op = ip->match;
	va_start(ap, opc);
	if (ip->fields & RS)
		op |= build_rs(va_arg(ap, u32));
	if (ip->fields & RT)
		op |= build_rt(va_arg(ap, u32));
	if (ip->fields & RD)
		op |= build_rd(va_arg(ap, u32));
	if (ip->fields & RE)
		op |= build_re(va_arg(ap, u32));
	if (ip->fields & SIMM)
		op |= build_simm(va_arg(ap, s32));
	if (ip->fields & UIMM)
		op |= build_uimm(va_arg(ap, u32));
	if (ip->fields & BIMM)
		op |= build_bimm(va_arg(ap, s32));
	if (ip->fields & JIMM)
		op |= build_jimm(va_arg(ap, u32));
	if (ip->fields & FUNC)
		op |= build_func(va_arg(ap, u32));
	if (ip->fields & SET)
		op |= build_set(va_arg(ap, u32));
	if (ip->fields & SCIMM)
		op |= build_scimm(va_arg(ap, u32));
	va_end(ap);

	**buf = op;
	(*buf)++;
}
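/*
 * E.g. for ADDIU, whose table entry requests RS | RT | SIMM, the variadic
 * arguments are consumed in exactly that order:
 *
 *	build_insn(buf, insn_addiu, 29, 29, -32);	// addiu $29, $29, -32
 *
 * (the register number and immediate above are only illustrative).
 */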
#define I_u1u2u3(op) \
Ip_u1u2u3(op) \
{ \
	build_insn(buf, insn##op, a, b, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1u3(op) \
Ip_u2u1u3(op) \
{ \
	build_insn(buf, insn##op, b, a, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u3u1u2(op) \
Ip_u3u1u2(op) \
{ \
	build_insn(buf, insn##op, b, c, a); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1u2s3(op) \
Ip_u1u2s3(op) \
{ \
	build_insn(buf, insn##op, a, b, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2s3u1(op) \
Ip_u2s3u1(op) \
{ \
	build_insn(buf, insn##op, c, a, b); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1s3(op) \
Ip_u2u1s3(op) \
{ \
	build_insn(buf, insn##op, b, a, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msbu3(op) \
Ip_u2u1msbu3(op) \
{ \
	build_insn(buf, insn##op, b, a, c+d-1, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msb32u3(op) \
Ip_u2u1msbu3(op) \
{ \
	build_insn(buf, insn##op, b, a, c+d-33, c); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);
#define I_u1u2(op) \
Ip_u1u2(op) \
{ \
	build_insn(buf, insn##op, a, b); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1s2(op) \
Ip_u1s2(op) \
{ \
	build_insn(buf, insn##op, a, b); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1(op) \
Ip_u1(op) \
{ \
	build_insn(buf, insn##op, a); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_0(op) \
Ip_0(op) \
{ \
	build_insn(buf, insn##op); \
} \
UASM_EXPORT_SYMBOL(uasm_i##op);
I_u2u1msb32u3(_dinsm);
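/*
 * Assuming the Ip_u2u1msbu3() prototype from <asm/uasm.h> orders the
 * operands as (rt, rs, pos, size), the instantiation above yields
 * uasm_i_dinsm(): the RD field carries pos + size - 33 (the inserted
 * field's most significant bit minus 32, as DINSM requires) and RE
 * carries pos.  E.g. uasm_i_dinsm(&p, 4, 5, 16, 32) emits
 * "dinsm $4, $5, 16, 32" with RD == 15 and RE == 16.
 */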
#ifdef CONFIG_CPU_CAVIUM_OCTEON
#include <asm/octeon/octeon.h>
void __uasminit uasm_i_pref(u32 **buf, unsigned int a, signed int b,
			    unsigned int c)
{
	if (OCTEON_IS_MODEL(OCTEON_CN63XX_PASS1_X) && a <= 24 && a != 5)
		/*
		 * As per erratum Core-14449, replace prefetches 0-4,
		 * 6-24 with 'pref 28'.
		 */
		build_insn(buf, insn_pref, c, 28, b);
	else
		build_insn(buf, insn_pref, c, a, b);
}
UASM_EXPORT_SYMBOL(uasm_i_pref);
#else
I_u2s3u1(_pref)
#endif
void __uasminit uasm_build_label(struct uasm_label **lab, u32 *addr, int lid)
{
	(*lab)->addr = addr;
	(*lab)->lab = lid;
	(*lab)++;
}
UASM_EXPORT_SYMBOL(uasm_build_label);
int __uasminit uasm_in_compat_space_p(long addr)
{
	/* Is this address in 32bit compat space? */
#ifdef CONFIG_64BIT
	return (((addr) & 0xffffffff00000000L) == 0xffffffff00000000L);
#else
	return 1;
#endif
}
UASM_EXPORT_SYMBOL(uasm_in_compat_space_p);
static int __uasminit uasm_rel_highest(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x800080008000L) >> 48) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}

static int __uasminit uasm_rel_higher(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x80008000L) >> 32) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}

int __uasminit uasm_rel_hi(long val)
{
	return ((((val + 0x8000L) >> 16) & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_hi);

int __uasminit uasm_rel_lo(long val)
{
	return ((val & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_lo);
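/*
 * uasm_rel_hi()/uasm_rel_lo() split a value into a sign-adjusted %hi/%lo
 * pair such that (hi << 16) + lo reconstructs the original value even
 * though the low half is later sign-extended by addiu.  Worked example
 * for val == 0x12348678:
 *
 *	uasm_rel_lo(0x12348678) == -0x7988	(0x8678 read as signed 16 bit)
 *	uasm_rel_hi(0x12348678) ==  0x1235	(rounded up to compensate)
 *	(0x1235 << 16) - 0x7988 ==  0x12348678
 */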
void __uasminit UASM_i_LA_mostly(u32 **buf, unsigned int rs, long addr)
{
	if (!uasm_in_compat_space_p(addr)) {
		uasm_i_lui(buf, rs, uasm_rel_highest(addr));
		if (uasm_rel_higher(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_higher(addr));
		if (uasm_rel_hi(addr)) {
			uasm_i_dsll(buf, rs, rs, 16);
			uasm_i_daddiu(buf, rs, rs, uasm_rel_hi(addr));
			uasm_i_dsll(buf, rs, rs, 16);
		} else
			uasm_i_dsll32(buf, rs, rs, 0);
	} else
		uasm_i_lui(buf, rs, uasm_rel_hi(addr));
}
UASM_EXPORT_SYMBOL(UASM_i_LA_mostly);
void __uasminit UASM_i_LA(u32 **buf, unsigned int rs, long addr)
{
	UASM_i_LA_mostly(buf, rs, addr);
	if (uasm_rel_lo(addr)) {
		if (!uasm_in_compat_space_p(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_lo(addr));
		else
			uasm_i_addiu(buf, rs, rs, uasm_rel_lo(addr));
	}
}
UASM_EXPORT_SYMBOL(UASM_i_LA);
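/*
 * For a CKSEG0 address such as 0xffffffff80001000, which lies in 32 bit
 * compat space, UASM_i_LA() therefore emits just two instructions:
 *
 *	lui	rs, 0x8000	# rs = 0xffffffff80000000 after sign extension
 *	addiu	rs, rs, 0x1000	# rs = 0xffffffff80001000
 *
 * whereas an address outside compat space gets the longer
 * lui/daddiu/dsll/daddiu sequence built above.
 */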
/* Handle relocations. */
void __uasminit
uasm_r_mips_pc16(struct uasm_reloc **rel, u32 *addr, int lid)
{
	(*rel)->addr = addr;
	(*rel)->type = R_MIPS_PC16;
	(*rel)->lab = lid;
	(*rel)++;
}
UASM_EXPORT_SYMBOL(uasm_r_mips_pc16);
static inline void __uasminit
__resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	long laddr = (long)lab->addr;
	long raddr = (long)rel->addr;

	switch (rel->type) {
	case R_MIPS_PC16:
		*rel->addr |= build_bimm(laddr - (raddr + 4));
		break;

	default:
		panic("Unsupported Micro-assembler relocation %d",
		      rel->type);
	}
}
void __uasminit
uasm_resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	struct uasm_label *l;

	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		for (l = lab; l->lab != UASM_LABEL_INVALID; l++)
			if (rel->lab == l->lab)
				__resolve_relocs(rel, l);
}
UASM_EXPORT_SYMBOL(uasm_resolve_relocs);
void __uasminit
uasm_move_relocs(struct uasm_reloc *rel, u32 *first, u32 *end, long off)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		if (rel->addr >= first && rel->addr < end)
			rel->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_relocs);
void __uasminit
uasm_move_labels(struct uasm_label *lab, u32 *first, u32 *end, long off)
{
	for (; lab->lab != UASM_LABEL_INVALID; lab++)
		if (lab->addr >= first && lab->addr < end)
			lab->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_labels);
void __uasminit
uasm_copy_handler(struct uasm_reloc *rel, struct uasm_label *lab, u32 *first,
		  u32 *end, u32 *target)
{
	long off = (long)(target - first);

	memcpy(target, first, (end - first) * sizeof(u32));

	uasm_move_relocs(rel, first, end, off);
	uasm_move_labels(lab, first, end, off);
}
UASM_EXPORT_SYMBOL(uasm_copy_handler);
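/*
 * Typical use, as a sketch (buffer names and sizes below are only
 * illustrative): build a handler into a scratch buffer, copy it to its
 * final location while shifting the recorded labels and relocations by
 * the same offset, then resolve the branches in place:
 *
 *	u32 scratch[64], *p = scratch;
 *	... emit instructions via p, recording labels and relocs ...
 *	uasm_copy_handler(relocs, labels, scratch, p, (u32 *)final_vector);
 *	uasm_resolve_relocs(relocs, labels);
 */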
int __uasminit uasm_insn_has_bdelay(struct uasm_reloc *rel, u32 *addr)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++) {
		if (rel->addr == addr
		    && (rel->type == R_MIPS_PC16
			|| rel->type == R_MIPS_26))
			return 1;
	}

	return 0;
}
UASM_EXPORT_SYMBOL(uasm_insn_has_bdelay);
/* Convenience functions for labeled branches. */
void __uasminit
uasm_il_bltz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bltz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bltz);

void __uasminit
uasm_il_b(u32 **p, struct uasm_reloc **r, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_b(p, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_b);

void __uasminit
uasm_il_beqz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqz);

void __uasminit
uasm_il_beqzl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqzl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqzl);

void __uasminit
uasm_il_bne(u32 **p, struct uasm_reloc **r, unsigned int reg1,
	    unsigned int reg2, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bne(p, reg1, reg2, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bne);

void __uasminit
uasm_il_bnez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bnez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bnez);

void __uasminit
uasm_il_bgezl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgezl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgezl);

void __uasminit
uasm_il_bgez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgez);

void __uasminit
uasm_il_bbit0(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit0(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit0);

void __uasminit
uasm_il_bbit1(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit1(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit1);