@@ -106,6 +106,11 @@ $(obj)/vmlinux: $(vmlinux-objs-y) $(efi-obj-y) FORCE
$(call if_changed,ld)
OBJCOPYFLAGS_vmlinux.bin := -R .comment -S
+
+ifdef CONFIG_FG_KASLR
+RELOCS_ARGS += --fg-kaslr
+endif
+
$(obj)/vmlinux.bin: vmlinux FORCE
$(call if_changed,objcopy)
@@ -113,7 +118,7 @@ targets += $(patsubst $(obj)/%,%,$(vmlinux-objs-y)) vmlinux.bin.all vmlinux.relo
CMD_RELOCS = arch/x86/tools/relocs
quiet_cmd_relocs = RELOCS $@
- cmd_relocs = $(CMD_RELOCS) $< > $@;$(CMD_RELOCS) --abs-relocs $<
+ cmd_relocs = $(CMD_RELOCS) $(RELOCS_ARGS) $< > $@;$(CMD_RELOCS) $(RELOCS_ARGS) --abs-relocs $<
$(obj)/vmlinux.relocs: vmlinux FORCE
$(call if_changed,relocs)
@@ -42,6 +42,8 @@ struct section {
};
static struct section *secs;
+static int fgkaslr_mode;
+
static const char * const sym_regex_kernel[S_NSYMTYPES] = {
/*
* Following symbols have been audited. There values are constant and do
@@ -818,6 +820,24 @@ static int is_percpu_sym(ElfW(Sym) *sym, const char *symname)
strncmp(symname, "init_per_cpu_", 13);
}
+static int is_function_section(struct section *sec)
+{
+ if (!fgkaslr_mode)
+ return 0;
+
+ return !strncmp(sec_name(sec->shdr.sh_info), ".text.", 6);
+}
+
+static int is_randomized_sym(ElfW(Sym) *sym)
+{
+ if (!fgkaslr_mode)
+ return 0;
+
+ if (sym->st_shndx > shnum)
+ return 0;
+
+ return !strncmp(sec_name(sym_index(sym)), ".text.", 6);
+}
static int do_reloc64(struct section *sec, Elf_Rel *rel, ElfW(Sym) *sym,
const char *symname)
@@ -843,12 +863,15 @@ static int do_reloc64(struct section *sec, Elf_Rel *rel, ElfW(Sym) *sym,
case R_X86_64_PC32:
case R_X86_64_PLT32:
/*
- * PC relative relocations don't need to be adjusted unless
- * referencing a percpu symbol.
+ * We need to keep PC-relative relocations for sections that
+ * might be randomized, and for the percpu section.
+ * We also need to keep relocations for any offset that might
+ * reference an address in a section that has been randomized.
*
* NB: R_X86_64_PLT32 can be treated as R_X86_64_PC32.
*/
- if (is_percpu_sym(sym, symname))
+ if (is_function_section(sec) || is_randomized_sym(sym) ||
+ is_percpu_sym(sym, symname))
add_reloc(&relocs32neg, offset);
break;
@@ -1163,8 +1186,9 @@ static void print_reloc_info(void)
void process(FILE *fp, int use_real_mode, int as_text,
int show_absolute_syms, int show_absolute_relocs,
- int show_reloc_info)
+ int show_reloc_info, int fgkaslr)
{
+ fgkaslr_mode = fgkaslr;
regex_init(use_real_mode);
read_ehdr(fp);
read_shdrs(fp);
@@ -31,8 +31,8 @@ enum symtype {
void process_32(FILE *fp, int use_real_mode, int as_text,
int show_absolute_syms, int show_absolute_relocs,
- int show_reloc_info);
+ int show_reloc_info, int fgkaslr);
void process_64(FILE *fp, int use_real_mode, int as_text,
int show_absolute_syms, int show_absolute_relocs,
- int show_reloc_info);
+ int show_reloc_info, int fgkaslr);
#endif /* RELOCS_H */
@@ -12,14 +12,14 @@ void die(char *fmt, ...)
static void usage(void)
{
- die("relocs [--abs-syms|--abs-relocs|--reloc-info|--text|--realmode]" \
- " vmlinux\n");
+ die("relocs [--abs-syms|--abs-relocs|--reloc-info|--text|--realmode|"
+ "--fg-kaslr] vmlinux\n");
}
int main(int argc, char **argv)
{
int show_absolute_syms, show_absolute_relocs, show_reloc_info;
- int as_text, use_real_mode;
+ int as_text, use_real_mode, fgkaslr_opt;
const char *fname;
FILE *fp;
int i;
@@ -30,6 +30,7 @@ int main(int argc, char **argv)
show_reloc_info = 0;
as_text = 0;
use_real_mode = 0;
+ fgkaslr_opt = 0;
fname = NULL;
for (i = 1; i < argc; i++) {
char *arg = argv[i];
@@ -54,6 +55,10 @@ int main(int argc, char **argv)
use_real_mode = 1;
continue;
}
+ if (strcmp(arg, "--fg-kaslr") == 0) {
+ fgkaslr_opt = 1;
+ continue;
+ }
}
else if (!fname) {
fname = arg;
@@ -75,11 +80,11 @@ int main(int argc, char **argv)
if (e_ident[EI_CLASS] == ELFCLASS64)
process_64(fp, use_real_mode, as_text,
show_absolute_syms, show_absolute_relocs,
- show_reloc_info);
+ show_reloc_info, fgkaslr_opt);
else
process_32(fp, use_real_mode, as_text,
show_absolute_syms, show_absolute_relocs,
- show_reloc_info);
+ show_reloc_info, fgkaslr_opt);
fclose(fp);
return 0;
}
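
For background on the do_reloc64() comment above (this note and sketch are not part of the patch): a minimal, hypothetical C illustration of why PC-relative references into randomizable .text.* sections must keep their relocations. If FG-KASLR slides only the target's section at boot, the rel32 displacement baked into the caller becomes stale and has to be re-resolved from the emitted relocation; the addresses and slide value below are invented purely for illustration.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* A call/jmp rel32 stores target - (address of the next instruction). */
static int32_t pc32_disp(uint64_t place, uint64_t target)
{
	return (int32_t)(target - (place + 4));
}

int main(void)
{
	uint64_t place  = 0xffffffff81000100ULL; /* location of the rel32 field   */
	uint64_t target = 0xffffffff81200000ULL; /* callee in some .text.<func>   */
	uint64_t slide  = 0x4000;                /* hypothetical FG-KASLR shuffle */

	/* Displacement the linker emitted vs. the one needed after the slide. */
	printf("disp at link time:  %" PRId32 "\n", pc32_disp(place, target));
	printf("disp after shuffle: %" PRId32 "\n", pc32_disp(place, target + slide));
	return 0;
}

This is why do_reloc64() now records these offsets in relocs32neg, alongside the existing percpu case, rather than discarding them.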