From patchwork Tue Apr 12 07:25:50 2022
X-Patchwork-Submitter: Tong Tiangen
X-Patchwork-Id: 12810178
From: Tong Tiangen <tongtiangen@huawei.com>
To: Mark Rutland, James Morse, Andrew Morton, Thomas Gleixner,
    Ingo Molnar, Borislav Petkov, Robin Murphy, Dave Hansen,
    Catalin Marinas, Will Deacon, Alexander Viro, H. Peter Anvin
Cc: Kefeng Wang, Xie XiuQi, Tong Tiangen
Subject: [RFC PATCH -next V3 4/6] arm64: add copy_{to, from}_user to machine check safe
Date: Tue, 12 Apr 2022 07:25:50 +0000
Message-ID: <20220412072552.2526871-5-tongtiangen@huawei.com>
In-Reply-To: <20220412072552.2526871-1-tongtiangen@huawei.com>
References: <20220412072552.2526871-1-tongtiangen@huawei.com>

Make copy_{to, from}_user() machine check safe. If a copy fails due to a
hardware memory error, only the relevant process is affected, so killing
the user process and isolating the user page with the hardware memory
error is a more reasonable choice than a kernel panic.

Add a new extable type, EX_TYPE_UACCESS_MC, which can be used for uaccess
code that can recover from hardware memory errors.

Signed-off-by: Tong Tiangen
Signed-off-by: Robin Murphy
---
 arch/arm64/include/asm/asm-extable.h | 11 +++++++++++
 arch/arm64/include/asm/asm-uaccess.h | 16 ++++++++++++++++
 arch/arm64/lib/copy_from_user.S      | 15 ++++++++++-----
 arch/arm64/lib/copy_to_user.S        | 25 +++++++++++++++++--------
 4 files changed, 54 insertions(+), 13 deletions(-)

diff --git a/arch/arm64/include/asm/asm-extable.h b/arch/arm64/include/asm/asm-extable.h
index c39f2437e08e..8af4e7cc9578 100644
--- a/arch/arm64/include/asm/asm-extable.h
+++ b/arch/arm64/include/asm/asm-extable.h
@@ -8,6 +8,9 @@
 #define EX_TYPE_UACCESS_ERR_ZERO	3
 #define EX_TYPE_LOAD_UNALIGNED_ZEROPAD	4
 
+/* _MC indicates that it can be fixed up from machine check errors */
+#define EX_TYPE_UACCESS_MC		5
+
 #ifdef __ASSEMBLY__
 
 #define __ASM_EXTABLE_RAW(insn, fixup, type, data)	\
@@ -27,6 +30,14 @@
 	__ASM_EXTABLE_RAW(\insn, \fixup, EX_TYPE_FIXUP, 0)
 	.endm
 
+/*
+ * Create an exception table entry for `insn`, which will branch to `fixup`
+ * when an unhandled fault (including a SEA fault) is taken.
+ */
+	.macro		_asm_extable_uaccess_mc, insn, fixup
+	__ASM_EXTABLE_RAW(\insn, \fixup, EX_TYPE_UACCESS_MC, 0)
+	.endm
+
 /*
  * Create an exception table entry for `insn` if `fixup` is provided. Otherwise
  * do nothing.
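For context, here is a minimal sketch of how the fixup side might consume
the new extable type, modelled on the existing fixup_exception() dispatch
in arch/arm64/mm/extable.c. The function name fixup_exception_mc() and the
dispatch shown are illustrative assumptions only, not part of this diff:

	/*
	 * Illustrative sketch only (not part of this patch): dispatch on the
	 * new extable type when a hardware memory error is taken during a
	 * uaccess. Helper names follow arch/arm64/mm/extable.c.
	 */
	bool fixup_exception_mc(struct pt_regs *regs)
	{
		const struct exception_table_entry *ex;

		ex = search_exception_tables(instruction_pointer(regs));
		if (!ex)
			return false;

		switch (ex->type) {
		case EX_TYPE_UACCESS_MC:
			/* Branch to the fixup label recorded by the macros above */
			regs->pc = get_ex_fixup(ex);
			return true;
		}

		return false;
	}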
diff --git a/arch/arm64/include/asm/asm-uaccess.h b/arch/arm64/include/asm/asm-uaccess.h
index 0557af834e03..bb17f0829042 100644
--- a/arch/arm64/include/asm/asm-uaccess.h
+++ b/arch/arm64/include/asm/asm-uaccess.h
@@ -92,4 +92,20 @@ alternative_else_nop_endif
 		_asm_extable	8888b,\l;
 	.endm
+
+	.macro user_ldp_mc l, reg1, reg2, addr, post_inc
+8888:		ldtr	\reg1, [\addr];
+8889:		ldtr	\reg2, [\addr, #8];
+		add	\addr, \addr, \post_inc;
+
+		_asm_extable_uaccess_mc	8888b, \l;
+		_asm_extable_uaccess_mc	8889b, \l;
+	.endm
+
+	.macro user_ldst_mc l, inst, reg, addr, post_inc
+8888:		\inst	\reg, [\addr];
+		add	\addr, \addr, \post_inc;
+
+		_asm_extable_uaccess_mc	8888b, \l;
+	.endm
 #endif
diff --git a/arch/arm64/lib/copy_from_user.S b/arch/arm64/lib/copy_from_user.S
index 34e317907524..e32c0747a5f1 100644
--- a/arch/arm64/lib/copy_from_user.S
+++ b/arch/arm64/lib/copy_from_user.S
@@ -21,7 +21,7 @@
  */
 
 	.macro ldrb1 reg, ptr, val
-	user_ldst 9998f, ldtrb, \reg, \ptr, \val
+	user_ldst_mc 9998f, ldtrb, \reg, \ptr, \val
 	.endm
 
 	.macro strb1 reg, ptr, val
@@ -29,7 +29,7 @@
 	.endm
 
 	.macro ldrh1 reg, ptr, val
-	user_ldst 9997f, ldtrh, \reg, \ptr, \val
+	user_ldst_mc 9997f, ldtrh, \reg, \ptr, \val
 	.endm
 
 	.macro strh1 reg, ptr, val
@@ -37,7 +37,7 @@
 	.endm
 
 	.macro ldr1 reg, ptr, val
-	user_ldst 9997f, ldtr, \reg, \ptr, \val
+	user_ldst_mc 9997f, ldtr, \reg, \ptr, \val
 	.endm
 
 	.macro str1 reg, ptr, val
@@ -45,7 +45,7 @@
 	.endm
 
 	.macro ldp1 reg1, reg2, ptr, val
-	user_ldp 9997f, \reg1, \reg2, \ptr, \val
+	user_ldp_mc 9997f, \reg1, \reg2, \ptr, \val
 	.endm
 
 	.macro stp1 reg1, reg2, ptr, val
@@ -54,6 +54,7 @@
 end	.req	x5
 srcin	.req	x15
+esr	.req	x16
 
 SYM_FUNC_START(__arch_copy_from_user)
 	add	end, x0, x2
 	mov	srcin, x1
@@ -62,7 +63,11 @@ SYM_FUNC_START(__arch_copy_from_user)
 	ret
 
 	// Exception fixups
-9997:	cmp	dst, dstin
+9997:	mrs	esr, esr_el1			// Check exception first
+	and	esr, esr, #ESR_ELx_FSC
+	cmp	esr, #ESR_ELx_FSC_EXTABT
+	b.eq	9998f
+	cmp	dst, dstin
 	b.ne	9998f
 	// Before being absolutely sure we couldn't copy anything, try harder
 USER(9998f, ldtrb tmp1w, [srcin])
diff --git a/arch/arm64/lib/copy_to_user.S b/arch/arm64/lib/copy_to_user.S
index 802231772608..afb53e45a21f 100644
--- a/arch/arm64/lib/copy_to_user.S
+++ b/arch/arm64/lib/copy_to_user.S
@@ -20,31 +20,35 @@
  * x0 - bytes not copied
  */
 	.macro ldrb1 reg, ptr, val
-	ldrb	\reg, [\ptr], \val
+1000:	ldrb	\reg, [\ptr], \val
+	_asm_extable_uaccess_mc 1000b, 9998f;
 	.endm
 
 	.macro strb1 reg, ptr, val
-	user_ldst 9998f, sttrb, \reg, \ptr, \val
+	user_ldst_mc 9998f, sttrb, \reg, \ptr, \val
 	.endm
 
 	.macro ldrh1 reg, ptr, val
-	ldrh	\reg, [\ptr], \val
+1001:	ldrh	\reg, [\ptr], \val
+	_asm_extable_uaccess_mc 1001b, 9998f;
 	.endm
 
 	.macro strh1 reg, ptr, val
-	user_ldst 9997f, sttrh, \reg, \ptr, \val
+	user_ldst_mc 9997f, sttrh, \reg, \ptr, \val
 	.endm
 
 	.macro ldr1 reg, ptr, val
-	ldr	\reg, [\ptr], \val
+1002:	ldr	\reg, [\ptr], \val
+	_asm_extable_uaccess_mc 1002b, 9998f;
 	.endm
 
 	.macro str1 reg, ptr, val
-	user_ldst 9997f, sttr, \reg, \ptr, \val
+	user_ldst_mc 9997f, sttr, \reg, \ptr, \val
 	.endm
 
 	.macro ldp1 reg1, reg2, ptr, val
-	ldp	\reg1, \reg2, [\ptr], \val
+1003:	ldp	\reg1, \reg2, [\ptr], \val
+	_asm_extable_uaccess_mc 1003b, 9998f;
 	.endm
 
 	.macro stp1 reg1, reg2, ptr, val
@@ -53,6 +57,7 @@
 end	.req	x5
 srcin	.req	x15
+esr	.req	x16
 
 SYM_FUNC_START(__arch_copy_to_user)
 	add	end, x0, x2
 	mov	srcin, x1
@@ -61,7 +66,11 @@ SYM_FUNC_START(__arch_copy_to_user)
 	ret
 
 	// Exception fixups
-9997:	cmp	dst, dstin
+9997:	mrs	esr, esr_el1			// Check exception first
+	and	esr, esr, #ESR_ELx_FSC
+	cmp	esr, #ESR_ELx_FSC_EXTABT
+	b.eq	9998f
+	cmp	dst, dstin
 	b.ne	9998f
 	// Before being absolutely sure we couldn't copy anything, try harder
 	ldrb	tmp1w, [srcin]
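The fixup paths above classify the fault before attempting the single-byte
"try harder" retry. A rough C equivalent of that check, assuming the
ESR_ELx_FSC and ESR_ELx_FSC_EXTABT definitions from asm/esr.h (the helper
name is made up for illustration):

	#include <asm/esr.h>

	/*
	 * Rough C equivalent of the assembly fixup check: a synchronous
	 * external abort in the fault status code indicates a hardware
	 * memory error, so skip the byte-by-byte retry and bail out.
	 */
	static bool fault_is_external_abort(unsigned long esr)
	{
		return (esr & ESR_ELx_FSC) == ESR_ELx_FSC_EXTABT;
	}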