@@ -15,6 +15,7 @@
  * tcg-target-con-str.h; the constraint combination is inclusive or.
  */
 C_O0_I1(r)
+C_O0_I2(rZ, rZ)
 C_O1_I1(r, r)
 C_O1_I2(r, r, rC)
 C_O1_I2(r, r, ri)
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -386,6 +386,44 @@ static void tcg_out_clzctz(TCGContext *s, LoongArchInsn opc,
     tcg_out_opc_or(s, a0, TCG_REG_TMP0, a0);
 }
 
+/*
+ * Branch helpers
+ */
+
+static const struct {
+    LoongArchInsn op;
+    bool swap;
+} tcg_brcond_to_loongarch[] = {
+    [TCG_COND_EQ] = { OPC_BEQ, false },
+    [TCG_COND_NE] = { OPC_BNE, false },
+    [TCG_COND_LT] = { OPC_BGT, true },
+    [TCG_COND_GE] = { OPC_BLE, true },
+    [TCG_COND_LE] = { OPC_BLE, false },
+    [TCG_COND_GT] = { OPC_BGT, false },
+    [TCG_COND_LTU] = { OPC_BGTU, true },
+    [TCG_COND_GEU] = { OPC_BLEU, true },
+    [TCG_COND_LEU] = { OPC_BLEU, false },
+    [TCG_COND_GTU] = { OPC_BGTU, false }
+};
+
+static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
+                           TCGReg arg2, TCGLabel *l)
+{
+    LoongArchInsn op = tcg_brcond_to_loongarch[cond].op;
+
+    tcg_debug_assert(op != 0);
+
+    if (tcg_brcond_to_loongarch[cond].swap) {
+        TCGReg t = arg1;
+        arg1 = arg2;
+        arg2 = t;
+    }
+
+    /* all conditional branch insns belong to DJSk16-format */
+    tcg_out_reloc(s, s->code_ptr, R_LOONGARCH_BR_SK16, l, 0);
+    tcg_out32(s, encode_djsk16_insn(op, arg1, arg2, 0));
+}
+
 /*
  * Entry-points
  */
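
The swap flag above exists because the opcode set used by this table only names the greater-than/less-or-equal branch forms (OPC_BGT, OPC_BLE, OPC_BGTU, OPC_BLEU), so the less-than/greater-or-equal conditions are lowered by exchanging the two register operands. A minimal standalone sketch of that identity, with hypothetical names rather than QEMU's, is:

    #include <assert.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* The four conditions the table lowers with swap = true. */
    typedef enum { COND_LT, COND_GE, COND_LTU, COND_GEU } Cond;

    /* cond(a, b) evaluated directly. */
    static bool eval(Cond c, long a, long b)
    {
        switch (c) {
        case COND_LT:  return a < b;
        case COND_GE:  return a >= b;
        case COND_LTU: return (unsigned long)a < (unsigned long)b;
        default:       return (unsigned long)a >= (unsigned long)b;
        }
    }

    /* The mirrored condition with the operands exchanged, i.e. what the
     * emitted bgt/ble/bgtu/bleu instruction would test after the swap. */
    static bool eval_swapped(Cond c, long a, long b)
    {
        switch (c) {
        case COND_LT:  return b > a;                                 /* OPC_BGT  */
        case COND_GE:  return b <= a;                                /* OPC_BLE  */
        case COND_LTU: return (unsigned long)b > (unsigned long)a;   /* OPC_BGTU */
        default:       return (unsigned long)b <= (unsigned long)a;  /* OPC_BLEU */
        }
    }

    int main(void)
    {
        const long samples[] = { -3, -1, 0, 1, 2, 100 };

        for (int c = COND_LT; c <= COND_GEU; c++) {
            for (int i = 0; i < 6; i++) {
                for (int j = 0; j < 6; j++) {
                    assert(eval(c, samples[i], samples[j]) ==
                           eval_swapped(c, samples[i], samples[j]));
                }
            }
        }
        puts("operand swap preserves every lowered condition");
        return 0;
    }

The same identity is why EQ/NE/LE/GT/LEU/GTU need no swap: they already match one of the available opcodes directly.
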
@@ -408,6 +446,17 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_opc_jirl(s, TCG_REG_ZERO, a0, 0);
         break;
 
+    case INDEX_op_br:
+        tcg_out_reloc(s, s->code_ptr, R_LOONGARCH_BR_SD10K16, arg_label(a0),
+                      0);
+        tcg_out_opc_b(s, 0);
+        break;
+
+    case INDEX_op_brcond_i32:
+    case INDEX_op_brcond_i64:
+        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
+        break;
+
     case INDEX_op_ext8s_i32:
     case INDEX_op_ext8s_i64:
         tcg_out_ext8s(s, a0, a1);
@@ -731,6 +780,10 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
     case INDEX_op_goto_ptr:
         return C_O0_I1(r);
 
+    case INDEX_op_brcond_i32:
+    case INDEX_op_brcond_i64:
+        return C_O0_I2(rZ, rZ);
+
     case INDEX_op_ext8s_i32:
     case INDEX_op_ext8s_i64:
     case INDEX_op_ext8u_i32:
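
Both new cases emit the branch with a zero offset and record a relocation (R_LOONGARCH_BR_SD10K16 for the unconditional b, R_LOONGARCH_BR_SK16 for the DJSk16-format conditional branches); the real displacement is patched in once the label is bound. A standalone sketch of the SK16 side of that idea follows, assuming the 2RI16 layout with the signed 16-bit instruction-count offset in bits [25:10]; the helper name and checks are illustrative, not QEMU's relocation code.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Patch a label's offset into an already-emitted DJSk16-format branch.
     * Assumes the 16-bit signed offset, counted in 4-byte instructions,
     * lives in bits [25:10]; returns 0 if the target is unreachable. */
    static int patch_br_sk16(uint32_t *insn, const uint32_t *target)
    {
        ptrdiff_t bytes = (const char *)target - (const char *)insn;

        if (bytes & 3) {            /* branch targets are instruction-aligned */
            return 0;
        }

        ptrdiff_t offs = bytes >> 2;

        if (offs < -0x8000 || offs > 0x7fff) {   /* must fit in 16 signed bits */
            return 0;
        }

        *insn = (*insn & ~(0xffffu << 10)) | (((uint32_t)offs & 0xffff) << 10);
        return 1;
    }

    int main(void)
    {
        uint32_t buf[8] = { 0 };     /* pretend code buffer                 */
        uint32_t *branch = &buf[1];  /* branch emitted with offset field 0  */
        uint32_t *label  = &buf[5];  /* label bound four instructions later */

        assert(patch_br_sk16(branch, label));
        printf("offset field now holds %d\n", (int16_t)((*branch >> 10) & 0xffff));
        return 0;
    }

Emitting a zero offset up front keeps code generation single-pass: forward branches can be produced before their target label exists and fixed up later.
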