@@ -16,3 +16,5 @@
*/
C_O0_I1(r)
C_O1_I1(r, r)
+C_O1_I2(r, r, rC)
+C_O1_I2(r, r, rU)
@@ -374,6 +374,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
{
TCGArg a0 = args[0];
TCGArg a1 = args[1];
+ TCGArg a2 = args[2];
+    int c2 = const_args[2];

switch (opc) {
case INDEX_op_mb:
@@ -419,6 +421,79 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_opc_srai_d(s, a0, a1, 32);
        break;

+ case INDEX_op_not_i32:
+ case INDEX_op_not_i64:
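+        /* there is no "not" insn; nor with the zero register gives ~a1 */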
+ tcg_out_opc_nor(s, a0, a1, TCG_REG_ZERO);
+ break;
+
+ case INDEX_op_nor_i32:
+ case INDEX_op_nor_i64:
+ if (c2) {
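+            /* no nor-immediate insn: or in the constant, then invert */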
+ tcg_out_opc_ori(s, a0, a1, a2);
+ tcg_out_opc_nor(s, a0, a0, TCG_REG_ZERO);
+ } else {
+ tcg_out_opc_nor(s, a0, a1, a2);
+ }
+ break;
+
+ case INDEX_op_andc_i32:
+ case INDEX_op_andc_i64:
+ if (c2) {
+ /* guaranteed to fit due to constraint */
+ tcg_out_opc_andi(s, a0, a1, ~a2);
+ } else {
+ tcg_out_opc_andn(s, a0, a1, a2);
+ }
+ break;
+
+ case INDEX_op_orc_i32:
+ case INDEX_op_orc_i64:
+ if (c2) {
+ /* guaranteed to fit due to constraint */
+ tcg_out_opc_ori(s, a0, a1, ~a2);
+ } else {
+ tcg_out_opc_orn(s, a0, a1, a2);
+ }
+ break;
+
+ case INDEX_op_eqv_i32:
+ case INDEX_op_eqv_i64:
+ if (c2) {
+ /* guaranteed to fit due to constraint */
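+            /* ~(a1 ^ a2) == a1 ^ ~a2 */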
+ tcg_out_opc_xori(s, a0, a1, ~a2);
+ } else {
+            /* a0 = ~(a1 ^ a2); xor first so that a0 may alias a1 or a2 */
+            tcg_out_opc_xor(s, a0, a1, a2);
+            tcg_out_opc_nor(s, a0, a0, TCG_REG_ZERO);
+ }
+ break;
+
+ case INDEX_op_and_i32:
+ case INDEX_op_and_i64:
+ if (c2) {
+ tcg_out_opc_andi(s, a0, a1, a2);
+ } else {
+ tcg_out_opc_and(s, a0, a1, a2);
+ }
+ break;
+
+ case INDEX_op_or_i32:
+ case INDEX_op_or_i64:
+ if (c2) {
+ tcg_out_opc_ori(s, a0, a1, a2);
+ } else {
+ tcg_out_opc_or(s, a0, a1, a2);
+ }
+ break;
+
+ case INDEX_op_xor_i32:
+ case INDEX_op_xor_i64:
+ if (c2) {
+ tcg_out_opc_xori(s, a0, a1, a2);
+ } else {
+ tcg_out_opc_xor(s, a0, a1, a2);
+ }
+ break;
+
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_mov_i64:
default:
@@ -446,8 +521,34 @@ static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
case INDEX_op_extrl_i64_i32:
case INDEX_op_extrh_i64_i32:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_not_i32:
+ case INDEX_op_not_i64:
        return C_O1_I1(r, r);

+ case INDEX_op_andc_i32:
+ case INDEX_op_andc_i64:
+ case INDEX_op_eqv_i32:
+ case INDEX_op_eqv_i64:
+ case INDEX_op_orc_i32:
+ case INDEX_op_orc_i64:
+ /*
+         * LoongArch insns for these ops don't have reg-imm forms, but we
+         * can express them using andi/ori/xori if ~constant satisfies
+ * TCG_CT_CONST_U12.
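+         * E.g. "andc a0, a1, ~0xff0" can be emitted as "andi a0, a1, 0xff0".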
+ */
+ return C_O1_I2(r, r, rC);
+
+ case INDEX_op_and_i32:
+ case INDEX_op_and_i64:
+ case INDEX_op_nor_i32:
+ case INDEX_op_nor_i64:
+ case INDEX_op_or_i32:
+ case INDEX_op_or_i64:
+ case INDEX_op_xor_i32:
+ case INDEX_op_xor_i64:
+ /* LoongArch reg-imm bitops have their imms ZERO-extended */
+ return C_O1_I2(r, r, rU);
+
default:
g_assert_not_reached();
}
Signed-off-by: WANG Xuerui <git@xen0n.name>
---
 tcg/loongarch64/tcg-target-con-set.h |   2 +
 tcg/loongarch64/tcg-target.c.inc     | 101 +++++++++++++++++++++++++++
 2 files changed, 103 insertions(+)