@@ -118,6 +118,7 @@ config ARM64
select HAVE_ALIGNED_STRUCT_PAGE if SLUB
select HAVE_ARCH_AUDITSYSCALL
select HAVE_ARCH_BITREVERSE
+ select HAVE_ARCH_COMPILER_H
select HAVE_ARCH_HUGE_VMAP
select HAVE_ARCH_JUMP_LABEL
select HAVE_ARCH_JUMP_LABEL_RELATIVE
new file mode 100644
@@ -0,0 +1,15 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef __ASM_ARM_COMPILER_H
+#define __ASM_ARM_COMPILER_H
+
+#ifndef __ASSEMBLY__
+
+#if defined(CONFIG_ARM64_PTR_AUTH)
+#define __builtin_return_address(val) \
+ (void *)((unsigned long)__builtin_return_address(val) | \
+ (GENMASK(63, 56) | GENMASK(54, VA_BITS)))
+#endif
+
+#endif
+
+#endif /* __ASM_ARM_COMPILER_H */
This patch redefines __builtin_return_address to mask PAC bits when Pointer Authentication is enabled. As __builtin_return_address is mostly used to refer to the caller function's symbol address, masking the runtime-generated PAC bits makes the value meaningful to the user. This change fixes utilities like cat /proc/vmallocinfo to show the correct logs. Signed-off-by: Amit Daniel Kachhap <amit.kachhap@arm.com> --- Changes since RFC v2: - New patch arch/arm64/Kconfig | 1 + arch/arm64/include/asm/compiler.h | 15 +++++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 arch/arm64/include/asm/compiler.h