/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * A small micro-assembler. It is intentionally kept simple, does only
 * support a subset of instructions, and does not try to hide pipeline
 * effects like branch delay slots.
 *
 * Copyright (C) 2004, 2005, 2006, 2008  Thiemo Seufer
 * Copyright (C) 2005, 2007  Maciej W. Rozycki
 * Copyright (C) 2006  Ralf Baechle (ralf@linux-mips.org)
 */

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>
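/*
 * Illustrative use (a sketch, not taken from this file): a caller points a
 * u32 cursor at its handler buffer and emits instructions through the
 * uasm_i_* wrappers generated below, e.g.
 *
 *	u32 *p = handler_buf;
 *	uasm_i_lui(&p, 26, 0x8000);		lui $26, 0x8000
 *	uasm_i_daddiu(&p, 26, 26, 0x10);	daddiu $26, $26, 0x10
 *
 * Branches are emitted the same way, but the assembler does not hide the
 * architectural delay slot: the caller must emit the delay-slot instruction
 * (typically a nop) itself.
 */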
#define IMM_MASK	0xffff
#define JIMM_MASK	0x3ffffff
#define FUNC_MASK	0x3f
#define SCIMM_MASK	0xfffff
enum opcode {
	insn_invalid,
	insn_addiu, insn_addu, insn_and, insn_andi, insn_bbit0, insn_bbit1,
	insn_beq, insn_beql, insn_bgez, insn_bgezl, insn_bltz, insn_bltzl,
	insn_bne, insn_cache, insn_daddiu, insn_daddu, insn_dins, insn_dinsm,
	insn_dmfc0, insn_dmtc0, insn_drotr, insn_drotr32, insn_dsll,
	insn_dsll32, insn_dsra, insn_dsrl, insn_dsrl32, insn_dsubu, insn_eret,
	insn_ext, insn_ins, insn_j, insn_jal, insn_jr, insn_ld, insn_ldx,
	insn_ll, insn_lld, insn_lui, insn_lw, insn_lwx, insn_mfc0, insn_mtc0,
	insn_or, insn_ori, insn_pref, insn_rfe, insn_rotr, insn_sc, insn_scd,
	insn_sd, insn_sll, insn_sra, insn_srl, insn_subu, insn_sw,
	insn_syscall, insn_tlbp, insn_tlbr, insn_tlbwi, insn_tlbwr, insn_xor,
	insn_xori,
};
/* This macro sets the non-variable bits of an instruction. */
#define M(a, b, c, d, e, f)					\
static struct insn insn_table[] __uasminitdata = {
	{ insn_addiu, M(addiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_addu, M(spec_op, 0, 0, 0, 0, addu_op), RS | RT | RD },
	{ insn_andi, M(andi_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_and, M(spec_op, 0, 0, 0, 0, and_op), RS | RT | RD },
	{ insn_bbit0, M(lwc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bbit1, M(swc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_beql, M(beql_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_beq, M(beq_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bgezl, M(bcond_op, 0, bgezl_op, 0, 0, 0), RS | BIMM },
	{ insn_bgez, M(bcond_op, 0, bgez_op, 0, 0, 0), RS | BIMM },
	{ insn_bltzl, M(bcond_op, 0, bltzl_op, 0, 0, 0), RS | BIMM },
	{ insn_bltz, M(bcond_op, 0, bltz_op, 0, 0, 0), RS | BIMM },
	{ insn_bne, M(bne_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_cache, M(cache_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddiu, M(daddiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddu, M(spec_op, 0, 0, 0, 0, daddu_op), RS | RT | RD },
	{ insn_dinsm, M(spec3_op, 0, 0, 0, 0, dinsm_op), RS | RT | RD | RE },
	{ insn_dins, M(spec3_op, 0, 0, 0, 0, dins_op), RS | RT | RD | RE },
	{ insn_dmfc0, M(cop0_op, dmfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_dmtc0, M(cop0_op, dmtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_drotr32, M(spec_op, 1, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_drotr, M(spec_op, 1, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_dsll32, M(spec_op, 0, 0, 0, 0, dsll32_op), RT | RD | RE },
	{ insn_dsll, M(spec_op, 0, 0, 0, 0, dsll_op), RT | RD | RE },
	{ insn_dsra, M(spec_op, 0, 0, 0, 0, dsra_op), RT | RD | RE },
	{ insn_dsrl32, M(spec_op, 0, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_dsrl, M(spec_op, 0, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_dsubu, M(spec_op, 0, 0, 0, 0, dsubu_op), RS | RT | RD },
	{ insn_eret, M(cop0_op, cop_op, 0, 0, 0, eret_op), 0 },
	{ insn_ext, M(spec3_op, 0, 0, 0, 0, ext_op), RS | RT | RD | RE },
	{ insn_ins, M(spec3_op, 0, 0, 0, 0, ins_op), RS | RT | RD | RE },
	{ insn_j, M(j_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_jal, M(jal_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_j, M(j_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_jr, M(spec_op, 0, 0, 0, 0, jr_op), RS },
	{ insn_ld, M(ld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_ldx, M(spec3_op, 0, 0, 0, ldx_op, lx_op), RS | RT | RD },
	{ insn_lld, M(lld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_ll, M(ll_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lui, M(lui_op, 0, 0, 0, 0, 0), RT | SIMM },
	{ insn_lw, M(lw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lwx, M(spec3_op, 0, 0, 0, lwx_op, lx_op), RS | RT | RD },
	{ insn_mfc0, M(cop0_op, mfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_mtc0, M(cop0_op, mtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_ori, M(ori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_or, M(spec_op, 0, 0, 0, 0, or_op), RS | RT | RD },
	{ insn_pref, M(pref_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_rfe, M(cop0_op, cop_op, 0, 0, 0, rfe_op), 0 },
	{ insn_rotr, M(spec_op, 1, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_scd, M(scd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sc, M(sc_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sd, M(sd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sll, M(spec_op, 0, 0, 0, 0, sll_op), RT | RD | RE },
	{ insn_sra, M(spec_op, 0, 0, 0, 0, sra_op), RT | RD | RE },
	{ insn_srl, M(spec_op, 0, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_subu, M(spec_op, 0, 0, 0, 0, subu_op), RS | RT | RD },
	{ insn_sw, M(sw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_syscall, M(spec_op, 0, 0, 0, 0, syscall_op), SCIMM },
	{ insn_tlbp, M(cop0_op, cop_op, 0, 0, 0, tlbp_op), 0 },
	{ insn_tlbr, M(cop0_op, cop_op, 0, 0, 0, tlbr_op), 0 },
	{ insn_tlbwi, M(cop0_op, cop_op, 0, 0, 0, tlbwi_op), 0 },
	{ insn_tlbwr, M(cop0_op, cop_op, 0, 0, 0, tlbwr_op), 0 },
	{ insn_xori, M(xori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_xor, M(spec_op, 0, 0, 0, 0, xor_op), RS | RT | RD },
	{ insn_invalid, 0, 0 }
};
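/*
 * Worked example (not part of the original file): for insn_addiu the table
 * entry above contributes only the fixed bits, M(addiu_op, 0, 0, 0, 0, 0),
 * and flags RS | RT | SIMM as variable.  build_insn() below then ORs in
 * build_rs(29), build_rt(29) and build_simm(-32), producing 0x27bdffe0,
 * the encoding of "addiu $29, $29, -32".
 */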
static inline __uasminit u32 build_rs(u32 arg)
{
	WARN(arg & ~RS_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RS_MASK) << RS_SH;
}

static inline __uasminit u32 build_rt(u32 arg)
{
	WARN(arg & ~RT_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RT_MASK) << RT_SH;
}

static inline __uasminit u32 build_rd(u32 arg)
{
	WARN(arg & ~RD_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RD_MASK) << RD_SH;
}

static inline __uasminit u32 build_re(u32 arg)
{
	WARN(arg & ~RE_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RE_MASK) << RE_SH;
}
static inline __uasminit u32 build_simm(s32 arg)
{
	WARN(arg > 0x7fff || arg < -0x8000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return arg & 0xffff;
}
static inline __uasminit u32 build_uimm(u32 arg)
{
	WARN(arg & ~IMM_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & IMM_MASK;
}

static inline __uasminit u32 build_bimm(s32 arg)
{
	WARN(arg > 0x1ffff || arg < -0x20000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	WARN(arg & 0x3, KERN_WARNING "Invalid micro-assembler branch target\n");

	return ((arg < 0) ? (1 << 15) : 0) | ((arg >> 2) & 0x7fff);
}
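/*
 * Worked example (not part of the original file): the relocation code below
 * calls build_bimm(laddr - (raddr + 4)).  For a target 8 bytes before the
 * delay slot, arg = -8, so the sign bit (1 << 15) is set and
 * (-8 >> 2) & 0x7fff = 0x7ffe, giving the halfword 0xfffe: a signed offset
 * of -2 instruction words, exactly what the branch hardware expects.
 */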
static inline __uasminit u32 build_jimm(u32 arg)
{
	WARN(arg & ~(JIMM_MASK << 2),
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg >> 2) & JIMM_MASK;
}

static inline __uasminit u32 build_scimm(u32 arg)
{
	WARN(arg & ~SCIMM_MASK,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & SCIMM_MASK) << SCIMM_SH;
}

static inline __uasminit u32 build_func(u32 arg)
{
	WARN(arg & ~FUNC_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & FUNC_MASK;
}

static inline __uasminit u32 build_set(u32 arg)
{
	WARN(arg & ~SET_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & SET_MASK;
}
/*
 * The order of opcode arguments is implicitly left to right,
 * starting with RS and ending with FUNC or IMM.
 */
static void __uasminit build_insn(u32 **buf, enum opcode opc, ...)
{
	struct insn *ip = NULL;
	unsigned int i;
	va_list ap;
	u32 op;

	for (i = 0; insn_table[i].opcode != insn_invalid; i++)
		if (insn_table[i].opcode == opc) {
			ip = &insn_table[i];
			break;
		}

	if (!ip || (opc == insn_daddiu && r4k_daddiu_bug()))
		panic("Unsupported Micro-assembler instruction %d", opc);

	op = ip->match;
	va_start(ap, opc);
	if (ip->fields & RS)
		op |= build_rs(va_arg(ap, u32));
	if (ip->fields & RT)
		op |= build_rt(va_arg(ap, u32));
	if (ip->fields & RD)
		op |= build_rd(va_arg(ap, u32));
	if (ip->fields & RE)
		op |= build_re(va_arg(ap, u32));
	if (ip->fields & SIMM)
		op |= build_simm(va_arg(ap, s32));
	if (ip->fields & UIMM)
		op |= build_uimm(va_arg(ap, u32));
	if (ip->fields & BIMM)
		op |= build_bimm(va_arg(ap, s32));
	if (ip->fields & JIMM)
		op |= build_jimm(va_arg(ap, u32));
	if (ip->fields & FUNC)
		op |= build_func(va_arg(ap, u32));
	if (ip->fields & SET)
		op |= build_set(va_arg(ap, u32));
	if (ip->fields & SCIMM)
		op |= build_scimm(va_arg(ap, u32));
	va_end(ap);

	**buf = op;
	(*buf)++;
}
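/*
 * Illustrative call (not part of the original file): the insn_lw table
 * entry has fields RS | RT | SIMM, so build_insn(buf, insn_lw, 3, 2, 8)
 * consumes its variadic arguments left to right as rs = $3 (the base),
 * rt = $2 and simm = 8, composing 0x8c620008, i.e. "lw $2, 8($3)".
 */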
#define I_u1u2u3(op)					\
	build_insn(buf, insn##op, a, b, c);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1u3(op)					\
	build_insn(buf, insn##op, b, a, c);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u3u1u2(op)					\
	build_insn(buf, insn##op, b, c, a);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1u2s3(op)					\
	build_insn(buf, insn##op, a, b, c);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2s3u1(op)					\
	build_insn(buf, insn##op, c, a, b);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1s3(op)					\
	build_insn(buf, insn##op, b, a, c);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msbu3(op)					\
	build_insn(buf, insn##op, b, a, c+d-1, c);	\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msb32u3(op)				\
	build_insn(buf, insn##op, b, a, c+d-33, c);	\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msbdu3(op)				\
	build_insn(buf, insn##op, b, a, d-1, c);	\
UASM_EXPORT_SYMBOL(uasm_i##op);

	build_insn(buf, insn##op, a, b);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

	build_insn(buf, insn##op, a, b);		\
UASM_EXPORT_SYMBOL(uasm_i##op);

	build_insn(buf, insn##op, a);			\
UASM_EXPORT_SYMBOL(uasm_i##op);

	build_insn(buf, insn##op);			\
UASM_EXPORT_SYMBOL(uasm_i##op);
I_u2u1msb32u3(_dinsm);
#ifdef CONFIG_CPU_CAVIUM_OCTEON
#include <asm/octeon/octeon.h>
void __uasminit uasm_i_pref(u32 **buf, unsigned int a, signed int b,
			    unsigned int c)
{
	if (OCTEON_IS_MODEL(OCTEON_CN63XX_PASS1_X) && a <= 24 && a != 5)
		/*
		 * As per erratum Core-14449, replace prefetches 0-4,
		 * 6-24 with 'pref 28'.
		 */
		build_insn(buf, insn_pref, c, 28, b);
	else
		build_insn(buf, insn_pref, c, a, b);
}
UASM_EXPORT_SYMBOL(uasm_i_pref);
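/*
 * Usage note (illustrative, not from the original source): on an affected
 * CN63XX pass 1 core, uasm_i_pref(buf, 0, 0, reg) therefore emits
 * "pref 28, 0(reg)" instead of "pref 0, 0(reg)"; hint 5 and hints above 24
 * are emitted unchanged.
 */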
void __uasminit uasm_build_label(struct uasm_label **lab, u32 *addr, int lid)
{
	(*lab)->addr = addr;
	(*lab)->lab = lid;
	(*lab)++;
}
UASM_EXPORT_SYMBOL(uasm_build_label);
int __uasminit uasm_in_compat_space_p(long addr)
{
	/* Is this address in 32bit compat space? */
#ifdef CONFIG_64BIT
	return (((addr) & 0xffffffff00000000L) == 0xffffffff00000000L);
#else
	return 1;
#endif
}
UASM_EXPORT_SYMBOL(uasm_in_compat_space_p);

static int __uasminit uasm_rel_highest(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x800080008000L) >> 48) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}

static int __uasminit uasm_rel_higher(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x80008000L) >> 32) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}
int __uasminit uasm_rel_hi(long val)
{
	return ((((val + 0x8000L) >> 16) & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_hi);

int __uasminit uasm_rel_lo(long val)
{
	return ((val & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_lo);
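/*
 * Worked example (not part of the original file): for addr = 0x1234abcd,
 * uasm_rel_hi() returns 0x1235 (the carry from the low half is folded in)
 * and uasm_rel_lo() returns -0x5433; "lui reg, 0x1235" followed by
 * "addiu reg, reg, -0x5433" yields 0x12350000 - 0x5433 = 0x1234abcd.
 */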
void __uasminit UASM_i_LA_mostly(u32 **buf, unsigned int rs, long addr)
{
	if (!uasm_in_compat_space_p(addr)) {
		uasm_i_lui(buf, rs, uasm_rel_highest(addr));
		if (uasm_rel_higher(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_higher(addr));
		if (uasm_rel_hi(addr)) {
			uasm_i_dsll(buf, rs, rs, 16);
			uasm_i_daddiu(buf, rs, rs, uasm_rel_hi(addr));
			uasm_i_dsll(buf, rs, rs, 16);
		} else
			uasm_i_dsll32(buf, rs, rs, 0);
	} else
		uasm_i_lui(buf, rs, uasm_rel_hi(addr));
}
UASM_EXPORT_SYMBOL(UASM_i_LA_mostly);

void __uasminit UASM_i_LA(u32 **buf, unsigned int rs, long addr)
{
	UASM_i_LA_mostly(buf, rs, addr);
	if (uasm_rel_lo(addr)) {
		if (!uasm_in_compat_space_p(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_lo(addr));
		else
			uasm_i_addiu(buf, rs, rs, uasm_rel_lo(addr));
	}
}
UASM_EXPORT_SYMBOL(UASM_i_LA);
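/*
 * Worked example (illustrative, not from the original source): for the
 * CKSEG0 address 0xffffffff80001000, which uasm_in_compat_space_p()
 * accepts, UASM_i_LA() emits just "lui rs, 0x8000" (sign-extended to
 * 0xffffffff80000000 on MIPS64) followed by "addiu rs, rs, 0x1000".
 * Addresses outside compat space instead get the longer lui/daddiu/dsll
 * sequence built by UASM_i_LA_mostly().
 */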
/* Handle relocations. */
void __uasminit
uasm_r_mips_pc16(struct uasm_reloc **rel, u32 *addr, int lid)
{
	(*rel)->addr = addr;
	(*rel)->type = R_MIPS_PC16;
	(*rel)->lab = lid;
	(*rel)++;
}
UASM_EXPORT_SYMBOL(uasm_r_mips_pc16);
static inline void __uasminit
__resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	long laddr = (long)lab->addr;
	long raddr = (long)rel->addr;

	switch (rel->type) {
	case R_MIPS_PC16:
		*rel->addr |= build_bimm(laddr - (raddr + 4));
		break;

	default:
		panic("Unsupported Micro-assembler relocation %d",
		      rel->type);
	}
}
void __uasminit
uasm_resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	struct uasm_label *l;

	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		for (l = lab; l->lab != UASM_LABEL_INVALID; l++)
			if (rel->lab == l->lab)
				__resolve_relocs(rel, l);
}
UASM_EXPORT_SYMBOL(uasm_resolve_relocs);
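/*
 * Typical flow (illustrative sketch, not from the original file): a caller
 * first emits a labeled branch, e.g. uasm_il_beqz(&p, &r, reg, label_out),
 * which records an R_MIPS_PC16 relocation and leaves the offset zero; the
 * caller then emits the delay-slot instruction itself.  Later it marks the
 * target with uasm_build_label(&l, p, label_out) and finally calls
 * uasm_resolve_relocs(relocs, labels) to patch the recorded branches.
 */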
void __uasminit
uasm_move_relocs(struct uasm_reloc *rel, u32 *first, u32 *end, long off)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		if (rel->addr >= first && rel->addr < end)
			rel->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_relocs);

void __uasminit
uasm_move_labels(struct uasm_label *lab, u32 *first, u32 *end, long off)
{
	for (; lab->lab != UASM_LABEL_INVALID; lab++)
		if (lab->addr >= first && lab->addr < end)
			lab->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_labels);

void __uasminit
uasm_copy_handler(struct uasm_reloc *rel, struct uasm_label *lab, u32 *first,
		  u32 *end, u32 *target)
{
	long off = (long)(target - first);

	memcpy(target, first, (end - first) * sizeof(u32));

	uasm_move_relocs(rel, first, end, off);
	uasm_move_labels(lab, first, end, off);
}
UASM_EXPORT_SYMBOL(uasm_copy_handler);

int __uasminit uasm_insn_has_bdelay(struct uasm_reloc *rel, u32 *addr)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++) {
		if (rel->addr == addr
		    && (rel->type == R_MIPS_PC16
			|| rel->type == R_MIPS_26))
			return 1;
	}

	return 0;
}
UASM_EXPORT_SYMBOL(uasm_insn_has_bdelay);
/* Convenience functions for labeled branches. */
void __uasminit
uasm_il_bltz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bltz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bltz);

void __uasminit
uasm_il_b(u32 **p, struct uasm_reloc **r, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_b(p, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_b);

void __uasminit
uasm_il_beqz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqz);

void __uasminit
uasm_il_beqzl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqzl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqzl);

void __uasminit
uasm_il_bne(u32 **p, struct uasm_reloc **r, unsigned int reg1,
	    unsigned int reg2, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bne(p, reg1, reg2, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bne);

void __uasminit
uasm_il_bnez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bnez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bnez);

void __uasminit
uasm_il_bgezl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgezl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgezl);

void __uasminit
uasm_il_bgez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgez);

void __uasminit
uasm_il_bbit0(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit0(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit0);

void __uasminit
uasm_il_bbit1(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit1(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit1);